/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
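/* For illustration: CEIL rounds the quotient up instead of down, so
   CEIL (10, 4) == 3 while 10 / 4 == 2.  convert_move uses it below to
   count the words needed to hold a widened value.  */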
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
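/* For example, with STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8,
   STACK_BYTES is 8; emit_library_call below uses it to round the
   argument-block size up to a multiple of the stack alignment.  */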
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
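/* The ratio is consulted in emit_block_move below: a constant-size block
   copy is expanded inline with move_by_pieces only when
   move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO.  */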
/* This array records the insn_code of insns to perform block moves.  */
static enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }

      movstr_optab[(int) mode] = CODE_FOR_nothing;
    }

  end_sequence ();

#ifdef HAVE_movstrqi
  if (HAVE_movstrqi)
    movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
#endif
#ifdef HAVE_movstrhi
  if (HAVE_movstrhi)
    movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
#endif
#ifdef HAVE_movstrsi
  if (HAVE_movstrsi)
    movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
#endif
#ifdef HAVE_movstrdi
  if (HAVE_movstrdi)
    movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
#endif
#ifdef HAVE_movstrti
  if (HAVE_movstrti)
    movstr_optab[(int) TImode] = CODE_FOR_movstrti;
#endif
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
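/* A minimal usage sketch (hypothetical caller, not code from this file):

     rtx q = enqueue_insn (var, gen_move_insn (var, incremented));
     op = protect_from_queue (q, 0);
     ...
     emit_queue ();

   If the increment is still pending when protect_from_queue runs, OP is
   just VAR itself; if the queue has already been flushed, OP is instead
   a temporary holding VAR's pre-increment value.  */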
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
	{
	  XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
	  XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
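/* For instance, converting a QImode register holding the bit pattern 0xff
   to SImode produces 255 when UNSIGNEDP is nonzero (zero-extension) but
   -1 when it is zero (sign-extension).  */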
void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;
	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;
	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;
	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;
	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;
	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;
	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, from));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
	   && ! MEM_VOLATILE_P (from)
	   && direct_load[(int) to_mode]
	   && ! mode_dependent_address_p (XEXP (from, 0)))
	  || GET_CODE (from) == REG
	  || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
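/* Usage sketch: widening an operand before use, as emit_block_move does
   when preparing the size operand for a block-move insn below:

     op2 = convert_to_mode (mode, size, 1);

   The result may be SIZE itself when it already has the requested mode.  */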
rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */
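/* Worked example (illustrative values): with MOVE_MAX == 4 and
   ALIGN >= 4, moving L == 15 bytes counts 3 SImode moves, then 1 HImode,
   then 1 QImode, for a total of 5 insns.  */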
static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
	       ? gen_rtx (MEM, mode, data->from_addr)
	       : change_address (data->from, mode,
				 plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */
void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than
		 HOST_BITS_PER_WIDE_INT here because if SIZE is less than
		 the mode mask, as it is returned by the macro, it will
		 definitely be less than the actual mode mask.  */
	      && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
/* Mark the instructions since PREV as a libcall block.
   Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
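/* Note the differing calling conventions used just below: memset takes
   (address, value, length) while bzero takes (address, length).  */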
void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
			     (class == MODE_COMPLEX_INT
			      ? MODE_INT : MODE_FLOAT),
			     0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && submode != BLKmode
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_highpart (submode, x)),
		  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_lowpart (submode, x)),
		  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }
  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();
      register int i;

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);
  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr,
							   args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }

	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				       xinner, size, GEN_INT (align)));
	      goto ret;
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				       xinner, size, GEN_INT (align)));
	      goto ret;
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				       xinner, size, GEN_INT (align)));
	      goto ret;
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				       xinner, size, GEN_INT (align)));
	      goto ret;
	    }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     size, Pmode);
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     size, Pmode);
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr, INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.

   NO_QUEUE must be true for const calls, because if it isn't, then
   any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
   and will be lost if the libcall sequence is optimized away.

   NO_QUEUE must be false for non-const calls, because if it isn't, the
   call insn will have its CONST_CALL_P bit set, and it will be incorrectly
   optimized.  For instance, the instruction scheduler may incorrectly
   move memory references across the non-const call.  */
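/* A typical use, from emit_block_move above; arguments alternate between
   an rtx value and the machine mode to pass it in:

     emit_library_call (memcpy_libfunc, 0,
			VOIDmode, 3, XEXP (x, 0), Pmode,
			XEXP (y, 0), Pmode,
			convert_to_mode (Pmode, size, 1), Pmode);  */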
void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
	       struct args_size offset; struct args_size size; };
  register struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue = 0;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);

  args_size.constant = 0;
  args_size.var = 0;

  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
	 must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
	  || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
	abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
	 Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
	val = convert_to_mode (DFmode, val, 0), mode = DFmode;
#endif

      /* There's no need to call protect_from_queue, because
	 either emit_move_insn or emit_push_insn will do that.  */

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
	  && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
	val = force_operand (val, NULL_RTX);

      argvec[count].value = val;
      argvec[count].mode = mode;

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
	abort ();
#endif

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
	abort ();
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
	= FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, NULL_TREE,
			   argvec[count].reg && argvec[count].partial == 0,
			   NULL_TREE, &args_size, &argvec[count].offset,
			   &argvec[count].size);

      if (argvec[count].size.var)
	abort ();

#ifndef REG_PARM_STACK_SPACE
      if (argvec[count].partial)
	argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
#endif

      if (argvec[count].reg == 0 || argvec[count].partial != 0
#ifdef REG_PARM_STACK_SPACE
	  || 1
#endif
	  )
	args_size.constant += argvec[count].size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
      /* If this arg is actually passed on the stack, it might be
	 clobbering something we already put there (this library call might
	 be inside the evaluation of an argument to a function whose call
	 requires the stack).  This will only occur when the library call
	 has sufficient args to run out of argument registers.  Abort in
	 this case; if this ever occurs, code must be added to save and
	 restore the arg slot.  */

      if (argvec[count].reg == 0 || argvec[count].partial != 0)
	abort ();
#endif

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
    }
  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

#ifdef STACK_BOUNDARY
  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
			 / STACK_BYTES) * STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
  args_size.constant = MAX (args_size.constant,
			    REG_PARM_STACK_SPACE ((tree) 0));
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;
  args_size.constant = 0;
#endif

#ifndef PUSH_ROUNDING
  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
#endif
#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
  inc = -1;
#else
  argnum = 0;
  inc = 1;
#endif

  /* Push the args that need to be pushed.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (! (reg != 0 && partial == 0))
	emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
			argblock, GEN_INT (argvec[count].offset.constant));
    }
#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
#else
  argnum = 0;
#endif

  /* Now load any reg parms into their regs.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (reg != 0 && partial == 0)
	emit_move_insn (reg, val);
    }
  /* For version 1.37, try deleting this entirely.  */
  if (! no_queue)
    emit_queue ();

  /* Any regs containing parms remain in use through the call.  */
  start_sequence ();
  for (count = 0; count < nargs; count++)
    if (argvec[count].reg != 0)
      emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));

  use_insns = get_insns ();
  end_sequence ();

  fun = prepare_call_address (fun, NULL_TREE, &use_insns);

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */

  emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
	       FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
	       outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
	       old_inhibit_defer_pop + 1, use_insns, no_queue);
  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */
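/* Usage sketch (hypothetical source): for a C assignment such as
   `s.f = n + 1', TO is the COMPONENT_REF for `s.f' and FROM is the
   PLUS_EXPR; the special case below expands S only once and stores the
   value with store_field instead of re-expanding the reference.  */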
rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, NULL_RTX, VOIDmode, 0);

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
					    force_reg (Pmode, offset_rtx)));
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    MEM_VOLATILE_P (to_rtx) = 1;
#if 0 /* This was turned off because, when a field is volatile
	 in an object which is not volatile, the object may be in a register,
	 and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();

      return result;
    }
2254 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2255 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2258 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2260 /* In case we are returning the contents of an object which overlaps
2261 the place the value is being stored, use a safe function when copying
2262 a value through a pointer into a structure value return block. */
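/* Roughly, this covers a function such as

       struct big f (struct big *p) { return *p; }

   where *P may overlap the structure-value return block, so a plain
   block move would not be safe.  */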
2263 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2264 && current_function_returns_struct
2265 && !current_function_returns_pcc_struct)
2267 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2268 rtx size = expr_size (from);
2270 #ifdef TARGET_MEM_FUNCTIONS
2271 emit_library_call (memcpy_libfunc, 0,
2272 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2273 XEXP (from_rtx, 0), Pmode,
2276 emit_library_call (bcopy_libfunc, 0,
2277 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2278 XEXP (to_rtx, 0), Pmode,
2282 preserve_temp_slots (to_rtx);
2287 /* Compute FROM and store the value in the rtx we got. */
2289 result = store_expr (from, to_rtx, want_value);
2290 preserve_temp_slots (result);
2295 /* Generate code for computing expression EXP,
2296 and storing the value into TARGET.
2297 Returns TARGET or an equivalent value.
2298 TARGET may contain a QUEUED rtx.
2300 If SUGGEST_REG is nonzero, copy the value through a register
2301 and return that register, if that is possible.
2303 If the value stored is a constant, we return the constant. */
2306 store_expr (exp, target, suggest_reg)
2308 register rtx target;
2312 int dont_return_target = 0;
2314 if (TREE_CODE (exp) == COMPOUND_EXPR)
2316 /* Perform first part of compound expression, then assign from second
2318 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2320 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2322 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2324 /* For conditional expression, get safe form of the target. Then
2325 test the condition, doing the appropriate assignment on either
2326 side. This avoids the creation of unnecessary temporaries.
2327 For non-BLKmode, it is more efficient not to do this. */
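/* For instance, for a BLKmode assignment such as

       s = cond ? s1 : s2;

   we jump on COND and run store_expr directly into S on each arm,
   rather than building the chosen value in a temporary and copying it.  */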
2329 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2332 target = protect_from_queue (target, 1);
2335 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2336 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2338 emit_jump_insn (gen_jump (lab2));
2341 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2347 else if (suggest_reg && GET_CODE (target) == MEM
2348 && GET_MODE (target) != BLKmode)
2349 /* If target is in memory and caller wants value in a register instead,
2350 arrange that. Pass TARGET as target for expand_expr so that,
2351 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2352 We know expand_expr will not use the target in that case. */
2354 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2355 GET_MODE (target), 0);
2356 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2357 temp = copy_to_reg (temp);
2358 dont_return_target = 1;
2360 else if (queued_subexp_p (target))
2361 /* If target contains a postincrement, it is not safe
2362 to use as the returned value. It would access the wrong
2363 place by the time the queued increment gets output.
2364 So copy the value through a temporary and use that temp as the result.  */
2367 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2369 /* Expand EXP into a new pseudo. */
2370 temp = gen_reg_rtx (GET_MODE (target));
2371 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2374 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2375 dont_return_target = 1;
2377 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2378 /* If this is a scalar in a register that is stored in a wider mode
2379 than the declared mode, compute the result into its declared mode
2380 and then convert to the wider mode. Our value is the computed expression. */
2383 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2384 convert_move (SUBREG_REG (target), temp,
2385 SUBREG_PROMOTED_UNSIGNED_P (target));
2390 temp = expand_expr (exp, target, GET_MODE (target), 0);
2391 /* DO return TARGET if it's a specified hardware register.
2392 expand_return relies on this. */
2393 if (!(target && GET_CODE (target) == REG
2394 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2395 && CONSTANT_P (temp))
2396 dont_return_target = 1;
2399 /* If value was not generated in the target, store it there.
2400 Convert the value to TARGET's type first if necessary. */
2402 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2404 target = protect_from_queue (target, 1);
2405 if (GET_MODE (temp) != GET_MODE (target)
2406 && GET_MODE (temp) != VOIDmode)
2408 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2409 if (dont_return_target)
2411 /* In this case, we will return TEMP,
2412 so make sure it has the proper mode.
2413 But don't forget to store the value into TARGET. */
2414 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2415 emit_move_insn (target, temp);
2418 convert_move (target, temp, unsignedp);
2421 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2423 /* Handle copying a string constant into an array.
2424 The string constant may be shorter than the array.
2425 So copy just the string's actual length, and clear the rest. */
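/* E.g., for

       char buf[10] = "hi";

   the string's three bytes (counting the terminating null) are
   block-moved into BUF and the remaining seven bytes are cleared.  */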
2428 /* Get the size of the data type of the string,
2429 which is actually the size of the target. */
2430 size = expr_size (exp);
2431 if (GET_CODE (size) == CONST_INT
2432 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2433 emit_block_move (target, temp, size,
2434 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2437 /* Compute the size of the data to copy from the string. */
2439 = fold (build (MIN_EXPR, sizetype,
2440 size_binop (CEIL_DIV_EXPR,
2441 TYPE_SIZE (TREE_TYPE (exp)),
2442 size_int (BITS_PER_UNIT)),
2444 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2445 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2449 /* Copy that much. */
2450 emit_block_move (target, temp, copy_size_rtx,
2451 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2453 /* Figure out how much is left in TARGET
2454 that we have to clear. */
2455 if (GET_CODE (copy_size_rtx) == CONST_INT)
2457 temp = plus_constant (XEXP (target, 0),
2458 TREE_STRING_LENGTH (exp));
2459 size = plus_constant (size,
2460 - TREE_STRING_LENGTH (exp));
2464 enum machine_mode size_mode = Pmode;
2466 temp = force_reg (Pmode, XEXP (target, 0));
2467 temp = expand_binop (size_mode, add_optab, temp,
2468 copy_size_rtx, NULL_RTX, 0,
2471 size = expand_binop (size_mode, sub_optab, size,
2472 copy_size_rtx, NULL_RTX, 0,
2475 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2476 GET_MODE (size), 0, 0);
2477 label = gen_label_rtx ();
2478 emit_jump_insn (gen_blt (label));
2481 if (size != const0_rtx)
2483 #ifdef TARGET_MEM_FUNCTIONS
2484 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2485 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2487 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2488 temp, Pmode, size, Pmode);
2495 else if (GET_MODE (temp) == BLKmode)
2496 emit_block_move (target, temp, expr_size (exp),
2497 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2499 emit_move_insn (target, temp);
2501 if (dont_return_target)
2506 /* Store the value of constructor EXP into the rtx TARGET.
2507 TARGET is either a REG or a MEM. */
2510 store_constructor (exp, target)
2514 tree type = TREE_TYPE (exp);
2516 /* We know our target cannot conflict, since safe_from_p has been called. */
2518 /* Don't try copying piece by piece into a hard register
2519 since that is vulnerable to being clobbered by EXP.
2520 Instead, construct in a pseudo register and then copy it all. */
2521 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2523 rtx temp = gen_reg_rtx (GET_MODE (target));
2524 store_constructor (exp, temp);
2525 emit_move_insn (target, temp);
2530 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2534 /* Inform later passes that the whole union value is dead. */
2535 if (TREE_CODE (type) == UNION_TYPE)
2536 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2538 /* If we are building a static constructor into a register,
2539 set the initial value as zero so we can fold the value into
2541 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2542 emit_move_insn (target, const0_rtx);
2544 /* If the constructor has fewer fields than the structure,
2545 clear the whole structure first. */
2546 else if (list_length (CONSTRUCTOR_ELTS (exp))
2547 != list_length (TYPE_FIELDS (type)))
2548 clear_storage (target, int_size_in_bytes (type));
2550 /* Inform later passes that the old value is dead. */
2551 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2553 /* Store each element of the constructor into
2554 the corresponding field of TARGET. */
2556 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2558 register tree field = TREE_PURPOSE (elt);
2559 register enum machine_mode mode;
2564 /* Just ignore missing fields.
2565 We cleared the whole structure, above,
2566 if any fields are missing. */
2570 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2571 unsignedp = TREE_UNSIGNED (field);
2572 mode = DECL_MODE (field);
2573 if (DECL_BIT_FIELD (field))
2576 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2577 /* ??? This case remains to be written. */
2580 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2582 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2583 /* The alignment of TARGET is
2584 at least what its type requires. */
2586 TYPE_ALIGN (type) / BITS_PER_UNIT,
2587 int_size_in_bytes (type));
2590 else if (TREE_CODE (type) == ARRAY_TYPE)
2594 tree domain = TYPE_DOMAIN (type);
2595 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2596 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2597 tree elttype = TREE_TYPE (type);
2599 /* If the constructor has fewer fields than the structure,
2600 clear the whole structure first. Similarly if this is a
2601 static constructor of a non-BLKmode object.
2603 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2604 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2605 clear_storage (target, maxelt - minelt + 1);
2607 /* Inform later passes that the old value is dead. */
2608 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2610 /* Store each element of the constructor into
2611 the corresponding element of TARGET, determined
2612 by counting the elements. */
2613 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2615 elt = TREE_CHAIN (elt), i++)
2617 register enum machine_mode mode;
2622 mode = TYPE_MODE (elttype);
2623 bitsize = GET_MODE_BITSIZE (mode);
2624 unsignedp = TREE_UNSIGNED (elttype);
2626 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2628 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2629 /* The alignment of TARGET is
2630 at least what its type requires. */
2632 TYPE_ALIGN (type) / BITS_PER_UNIT,
2633 int_size_in_bytes (type));
2641 /* Store the value of EXP (an expression tree)
2642 into a subfield of TARGET which has mode MODE and occupies
2643 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2644 If MODE is VOIDmode, it means that we are storing into a bit-field.
2646 If VALUE_MODE is VOIDmode, return nothing in particular.
2647 UNSIGNEDP is not used in this case.
2649 Otherwise, return an rtx for the value stored. This rtx
2650 has mode VALUE_MODE if that is convenient to do.
2651 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2653 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2654 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
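/* A typical use (illustrative only): for

       struct { unsigned f : 5; } x;
       x.f = v;

   we are called with BITSIZE 5, BITPOS 0 and MODE VOIDmode, and the
   store is done by store_bit_field below.  */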
2657 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2658 unsignedp, align, total_size)
2660 int bitsize, bitpos;
2661 enum machine_mode mode;
2663 enum machine_mode value_mode;
2668 HOST_WIDE_INT width_mask = 0;
2670 if (bitsize < HOST_BITS_PER_WIDE_INT)
2671 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2673 /* If we are storing into an unaligned field of an aligned union that is
2674 in a register, we may have the mode of TARGET being an integer mode but
2675 MODE == BLKmode. In that case, get an aligned object whose size and
2676 alignment are the same as TARGET and store TARGET into it (we can avoid
2677 the store if the field being stored is the entire width of TARGET). Then
2678 call ourselves recursively to store the field into a BLKmode version of
2679 that object. Finally, load from the object into TARGET. This is not
2680 very efficient in general, but should only be slightly more expensive
2681 than the otherwise-required unaligned accesses. Perhaps this can be
2682 cleaned up later. */
2685 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2687 rtx object = assign_stack_temp (GET_MODE (target),
2688 GET_MODE_SIZE (GET_MODE (target)), 0);
2689 rtx blk_object = copy_rtx (object);
2691 PUT_MODE (blk_object, BLKmode);
2693 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2694 emit_move_insn (object, target);
2696 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2699 emit_move_insn (target, object);
2704 /* If the structure is in a register or if the component
2705 is a bit field, we cannot use addressing to access it.
2706 Use bit-field techniques or SUBREG to store in it. */
2708 if (mode == VOIDmode
2709 || (mode != BLKmode && ! direct_store[(int) mode])
2710 || GET_CODE (target) == REG
2711 || GET_CODE (target) == SUBREG)
2713 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2714 /* Store the value in the bitfield. */
2715 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2716 if (value_mode != VOIDmode)
2718 /* The caller wants an rtx for the value. */
2719 /* If possible, avoid refetching from the bitfield itself. */
2721 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2724 enum machine_mode tmode;
2727 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2728 tmode = GET_MODE (temp);
2729 if (tmode == VOIDmode)
2731 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2732 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2733 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2735 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2736 NULL_RTX, value_mode, 0, align,
2743 rtx addr = XEXP (target, 0);
2746 /* If a value is wanted, it must be the lhs;
2747 so make the address stable for multiple use. */
2749 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2750 && ! CONSTANT_ADDRESS_P (addr)
2751 /* A frame-pointer reference is already stable. */
2752 && ! (GET_CODE (addr) == PLUS
2753 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2754 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2755 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2756 addr = copy_to_reg (addr);
2758 /* Now build a reference to just the desired component. */
2760 to_rtx = change_address (target, mode,
2761 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2762 MEM_IN_STRUCT_P (to_rtx) = 1;
2764 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2768 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2769 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2770 ARRAY_REFs at constant positions and find the ultimate containing object, which we return.
2773 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2774 bit position, and *PUNSIGNEDP to the signedness of the field.
2775 If the position of the field is variable, we store a tree
2776 giving the variable offset (in units) in *POFFSET.
2777 This offset is in addition to the bit position.
2778 If the position is not variable, we store 0 in *POFFSET.
2780 If any of the extraction expressions is volatile,
2781 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2783 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2784 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant with *PMODE.
2787 If the field describes a variable-sized object, *PMODE is set to
2788 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2789 this case, but the address of the object can be found. */
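/* For example, given

       struct { int i; struct { char c[8]; } in; } v;

   a reference to v.in.c[3] returns the VAR_DECL for `v', with
   *PBITSIZE the size in bits of one element of `c' and *PBITPOS the
   accumulated constant bit offset of c[3] within `v'.  */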
2792 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2797 enum machine_mode *pmode;
2802 enum machine_mode mode = VOIDmode;
2805 if (TREE_CODE (exp) == COMPONENT_REF)
2807 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2808 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2809 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2810 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2812 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2814 size_tree = TREE_OPERAND (exp, 1);
2815 *punsignedp = TREE_UNSIGNED (exp);
2819 mode = TYPE_MODE (TREE_TYPE (exp));
2820 *pbitsize = GET_MODE_BITSIZE (mode);
2821 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2826 if (TREE_CODE (size_tree) != INTEGER_CST)
2827 mode = BLKmode, *pbitsize = -1;
2829 *pbitsize = TREE_INT_CST_LOW (size_tree);
2832 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2833 and find the ultimate containing object. */
2839 if (TREE_CODE (exp) == INDIRECT_REF && flag_volatile)
2842 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2844 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2845 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2846 : TREE_OPERAND (exp, 2));
2848 if (TREE_CODE (pos) == PLUS_EXPR)
2851 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2853 constant = TREE_OPERAND (pos, 0);
2854 var = TREE_OPERAND (pos, 1);
2856 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2858 constant = TREE_OPERAND (pos, 1);
2859 var = TREE_OPERAND (pos, 0);
2863 *pbitpos += TREE_INT_CST_LOW (constant);
2865 offset = size_binop (PLUS_EXPR, offset,
2866 size_binop (FLOOR_DIV_EXPR, var,
2867 size_int (BITS_PER_UNIT)));
2869 offset = size_binop (FLOOR_DIV_EXPR, var,
2870 size_int (BITS_PER_UNIT));
2872 else if (TREE_CODE (pos) == INTEGER_CST)
2873 *pbitpos += TREE_INT_CST_LOW (pos);
2876 /* Assume here that the offset is a multiple of a unit.
2877 If not, there should be an explicitly added constant. */
2879 offset = size_binop (PLUS_EXPR, offset,
2880 size_binop (FLOOR_DIV_EXPR, pos,
2881 size_int (BITS_PER_UNIT)));
2883 offset = size_binop (FLOOR_DIV_EXPR, pos,
2884 size_int (BITS_PER_UNIT));
2888 else if (TREE_CODE (exp) == ARRAY_REF
2889 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2890 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2892 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2893 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2895 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2896 && ! ((TREE_CODE (exp) == NOP_EXPR
2897 || TREE_CODE (exp) == CONVERT_EXPR)
2898 && (TYPE_MODE (TREE_TYPE (exp))
2899 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2902 /* If any reference in the chain is volatile, the effect is volatile. */
2903 if (TREE_THIS_VOLATILE (exp))
2905 exp = TREE_OPERAND (exp, 0);
2908 /* If this was a bit-field, see if there is a mode that allows direct
2909 access in case EXP is in memory. */
2910 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2912 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2913 if (mode == BLKmode)
2920 /* We aren't finished fixing the callers to really handle nonzero offset. */
2928 /* Given an rtx VALUE that may contain additions and multiplications,
2929 return an equivalent value that just refers to a register or memory.
2930 This is done by generating instructions to perform the arithmetic
2931 and returning a pseudo-register containing the value.
2933 The returned value may be a REG, SUBREG, MEM or constant. */
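/* E.g., applied to (plus:SI (reg:SI 60) (const_int 4)) this emits an
   add insn and returns a pseudo register holding the sum (the
   register numbers here are illustrative only).  */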
2936 force_operand (value, target)
2939 register optab binoptab = 0;
2940 /* Use a temporary to force order of execution of calls to `force_operand'. */
2944 /* Use subtarget as the target for operand 0 of a binary operation. */
2945 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2947 if (GET_CODE (value) == PLUS)
2948 binoptab = add_optab;
2949 else if (GET_CODE (value) == MINUS)
2950 binoptab = sub_optab;
2951 else if (GET_CODE (value) == MULT)
2953 op2 = XEXP (value, 1);
2954 if (!CONSTANT_P (op2)
2955 && !(GET_CODE (op2) == REG && op2 != subtarget))
2957 tmp = force_operand (XEXP (value, 0), subtarget);
2958 return expand_mult (GET_MODE (value), tmp,
2959 force_operand (op2, NULL_RTX),
2965 op2 = XEXP (value, 1);
2966 if (!CONSTANT_P (op2)
2967 && !(GET_CODE (op2) == REG && op2 != subtarget))
2969 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2971 binoptab = add_optab;
2972 op2 = negate_rtx (GET_MODE (value), op2);
2975 /* Check for an addition with OP2 a constant integer and our first
2976 operand a PLUS of a virtual register and something else. In that
2977 case, we want to emit the sum of the virtual register and the
2978 constant first and then add the other value. This allows virtual
2979 register instantiation to simply modify the constant rather than
2980 creating another one around this addition. */
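/* That is, for a value such as

       (plus (plus (reg virtual-stack-vars) (reg 65)) (const_int 8))

   we first form virtual-stack-vars + 8, which instantiation can turn
   into a single frame offset, and only then add (reg 65).  */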
2981 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2982 && GET_CODE (XEXP (value, 0)) == PLUS
2983 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2984 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2985 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2987 rtx temp = expand_binop (GET_MODE (value), binoptab,
2988 XEXP (XEXP (value, 0), 0), op2,
2989 subtarget, 0, OPTAB_LIB_WIDEN);
2990 return expand_binop (GET_MODE (value), binoptab, temp,
2991 force_operand (XEXP (XEXP (value, 0), 1), 0),
2992 target, 0, OPTAB_LIB_WIDEN);
2995 tmp = force_operand (XEXP (value, 0), subtarget);
2996 return expand_binop (GET_MODE (value), binoptab, tmp,
2997 force_operand (op2, NULL_RTX),
2998 target, 0, OPTAB_LIB_WIDEN);
2999 /* We give UNSIGNEDP = 0 to expand_binop
3000 because the only operations we are expanding here are signed ones. */
3005 /* Subroutine of expand_expr:
3006 save the non-copied parts (LIST) of an expr (LHS), and return a list
3007 which can restore these values to their previous values,
3008 should something modify their storage. */
3011 save_noncopied_parts (lhs, list)
3018 for (tail = list; tail; tail = TREE_CHAIN (tail))
3019 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3020 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3023 tree part = TREE_VALUE (tail);
3024 tree part_type = TREE_TYPE (part);
3025 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3026 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3027 int_size_in_bytes (part_type), 0);
3028 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3029 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3030 parts = tree_cons (to_be_saved,
3031 build (RTL_EXPR, part_type, NULL_TREE,
3034 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3039 /* Subroutine of expand_expr:
3040 record the non-copied parts (LIST) of an expr (LHS), and return a list
3041 which specifies the initial values of these parts. */
3044 init_noncopied_parts (lhs, list)
3051 for (tail = list; tail; tail = TREE_CHAIN (tail))
3052 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3053 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3056 tree part = TREE_VALUE (tail);
3057 tree part_type = TREE_TYPE (part);
3058 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3059 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3064 /* Subroutine of expand_expr: return nonzero iff there is no way that
3065 EXP can reference X, which is being modified. */
3068 safe_from_p (x, exp)
3078 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3079 find the underlying pseudo. */
3080 if (GET_CODE (x) == SUBREG)
3083 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3087 /* If X is a location in the outgoing argument area, it is always safe. */
3088 if (GET_CODE (x) == MEM
3089 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3090 || (GET_CODE (XEXP (x, 0)) == PLUS
3091 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3094 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3097 exp_rtl = DECL_RTL (exp);
3104 if (TREE_CODE (exp) == TREE_LIST)
3105 return ((TREE_VALUE (exp) == 0
3106 || safe_from_p (x, TREE_VALUE (exp)))
3107 && (TREE_CHAIN (exp) == 0
3108 || safe_from_p (x, TREE_CHAIN (exp))));
3113 return safe_from_p (x, TREE_OPERAND (exp, 0));
3117 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3118 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3122 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3123 the expression. If it is set, we conflict iff we are that rtx or
3124 both are in memory. Otherwise, we check all operands of the
3125 expression recursively. */
3127 switch (TREE_CODE (exp))
3130 return staticp (TREE_OPERAND (exp, 0));
3133 if (GET_CODE (x) == MEM)
3138 exp_rtl = CALL_EXPR_RTL (exp);
3141 /* Assume that the call will clobber all hard registers and all of memory. */
3143 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3144 || GET_CODE (x) == MEM)
3151 exp_rtl = RTL_EXPR_RTL (exp);
3153 /* We don't know what this can modify. */
3158 case WITH_CLEANUP_EXPR:
3159 exp_rtl = RTL_EXPR_RTL (exp);
3163 exp_rtl = SAVE_EXPR_RTL (exp);
3167 /* The only operand we look at is operand 1. The rest aren't
3168 part of the expression. */
3169 return safe_from_p (x, TREE_OPERAND (exp, 1));
3171 case METHOD_CALL_EXPR:
3172 /* This takes an rtx argument, but shouldn't appear here. */
3176 /* If we have an rtx, we do not need to scan our operands. */
3180 nops = tree_code_length[(int) TREE_CODE (exp)];
3181 for (i = 0; i < nops; i++)
3182 if (TREE_OPERAND (exp, i) != 0
3183 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3187 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
3191 if (GET_CODE (exp_rtl) == SUBREG)
3193 exp_rtl = SUBREG_REG (exp_rtl);
3194 if (GET_CODE (exp_rtl) == REG
3195 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3199 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3200 are memory and EXP is not readonly. */
3201 return ! (rtx_equal_p (x, exp_rtl)
3202 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3203 && ! TREE_READONLY (exp)));
3206 /* If we reach here, it is safe. */
3210 /* Subroutine of expand_expr: return nonzero iff EXP is an
3211 expression whose type is statically determinable. */
3217 if (TREE_CODE (exp) == PARM_DECL
3218 || TREE_CODE (exp) == VAR_DECL
3219 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3220 || TREE_CODE (exp) == COMPONENT_REF
3221 || TREE_CODE (exp) == ARRAY_REF)
3226 /* expand_expr: generate code for computing expression EXP.
3227 An rtx for the computed value is returned. The value is never null.
3228 In the case of a void EXP, const0_rtx is returned.
3230 The value may be stored in TARGET if TARGET is nonzero.
3231 TARGET is just a suggestion; callers must assume that
3232 the rtx returned may not be the same as TARGET.
3234 If TARGET is CONST0_RTX, it means that the value will be ignored.
3236 If TMODE is not VOIDmode, it suggests generating the
3237 result in mode TMODE. But this is done only when convenient.
3238 Otherwise, TMODE is ignored and the value generated in its natural mode.
3239 TMODE is just a suggestion; callers must assume that
3240 the rtx returned may not have mode TMODE.
3242 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3243 with a constant address even if that address is not normally legitimate.
3244 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3246 If MODIFIER is EXPAND_SUM then when EXP is an addition
3247 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3248 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3249 products as above, or REG or MEM, or constant.
3250 Ordinarily in such cases we would output mul or add instructions
3251 and then return a pseudo reg containing the sum.
3253 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3254 it also marks a label as absolutely required (it can't be dead).
3255 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3256 This is used for outputting expressions used in initializers. */
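/* For example, expanding the address arithmetic for a[i] (with
   4-byte elements, say) under EXPAND_SUM may return

       (plus:SI (mult:SI (reg:SI 66) (const_int 4)) (reg:SI 65))

   which the caller can fold into a memory address, instead of a
   pseudo register that already holds the sum.  */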
3259 expand_expr (exp, target, tmode, modifier)
3262 enum machine_mode tmode;
3263 enum expand_modifier modifier;
3265 register rtx op0, op1, temp;
3266 tree type = TREE_TYPE (exp);
3267 int unsignedp = TREE_UNSIGNED (type);
3268 register enum machine_mode mode = TYPE_MODE (type);
3269 register enum tree_code code = TREE_CODE (exp);
3271 /* Use subtarget as the target for operand 0 of a binary operation. */
3272 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3273 rtx original_target = target;
3274 int ignore = target == const0_rtx;
3277 /* Don't use hard regs as subtargets, because the combiner
3278 can only handle pseudo regs. */
3279 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3281 /* Avoid subtargets inside loops,
3282 since they hide some invariant expressions. */
3283 if (preserve_subexpressions_p ())
3286 if (ignore) target = 0, original_target = 0;
3288 /* If we will do cse, generate all results into pseudo registers
3289 since 1) that allows cse to find more things
3290 and 2) otherwise cse could produce an insn the machine cannot support. */
3293 if (! cse_not_expected && mode != BLKmode && target
3294 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3297 /* Ensure we reference a volatile object even if value is ignored. */
3298 if (ignore && TREE_THIS_VOLATILE (exp)
3299 && mode != VOIDmode && mode != BLKmode)
3301 target = gen_reg_rtx (mode);
3302 temp = expand_expr (exp, target, VOIDmode, modifier);
3304 emit_move_insn (target, temp);
3312 tree function = decl_function_context (exp);
3313 /* Handle using a label in a containing function. */
3314 if (function != current_function_decl && function != 0)
3316 struct function *p = find_function_data (function);
3317 /* Allocate in the memory associated with the function
3318 that the label is in. */
3319 push_obstacks (p->function_obstack,
3320 p->function_maybepermanent_obstack);
3322 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3323 label_rtx (exp), p->forced_labels);
3326 else if (modifier == EXPAND_INITIALIZER)
3327 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3328 label_rtx (exp), forced_labels);
3329 temp = gen_rtx (MEM, FUNCTION_MODE,
3330 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3331 if (function != current_function_decl && function != 0)
3332 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3337 if (DECL_RTL (exp) == 0)
3339 error_with_decl (exp, "prior parameter's size depends on `%s'");
3340 return CONST0_RTX (mode);
3346 if (DECL_RTL (exp) == 0)
3348 /* Ensure variable marked as used
3349 even if it doesn't go through a parser. */
3350 TREE_USED (exp) = 1;
3351 /* Handle variables inherited from containing functions. */
3352 context = decl_function_context (exp);
3354 /* We treat inline_function_decl as an alias for the current function
3355 because that is the inline function whose vars, types, etc.
3356 are being merged into the current function.
3357 See expand_inline_function. */
3358 if (context != 0 && context != current_function_decl
3359 && context != inline_function_decl
3360 /* If var is static, we don't need a static chain to access it. */
3361 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3362 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3366 /* Mark as non-local and addressable. */
3367 DECL_NONLOCAL (exp) = 1;
3368 mark_addressable (exp);
3369 if (GET_CODE (DECL_RTL (exp)) != MEM)
3371 addr = XEXP (DECL_RTL (exp), 0);
3372 if (GET_CODE (addr) == MEM)
3373 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3375 addr = fix_lexical_addr (addr, exp);
3376 return change_address (DECL_RTL (exp), mode, addr);
3379 /* This is the case of an array whose size is to be determined
3380 from its initializer, while the initializer is still being parsed.
3382 if (GET_CODE (DECL_RTL (exp)) == MEM
3383 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3384 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3385 XEXP (DECL_RTL (exp), 0));
3386 if (GET_CODE (DECL_RTL (exp)) == MEM
3387 && modifier != EXPAND_CONST_ADDRESS
3388 && modifier != EXPAND_SUM
3389 && modifier != EXPAND_INITIALIZER)
3391 /* DECL_RTL probably contains a constant address.
3392 On RISC machines where a constant address isn't valid,
3393 make some insns to get that address into a register. */
3394 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3396 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3397 return change_address (DECL_RTL (exp), VOIDmode,
3398 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3401 /* If the mode of DECL_RTL does not match that of the decl, it
3402 must be a promoted value. We return a SUBREG of the wanted mode,
3403 but mark it so that we know that it was already extended. */
3405 if (GET_CODE (DECL_RTL (exp)) == REG
3406 && GET_MODE (DECL_RTL (exp)) != mode)
3408 enum machine_mode decl_mode = DECL_MODE (exp);
3410 /* Get the signedness used for this variable. Ensure we get the
3411 same mode we got when the variable was declared. */
3413 PROMOTE_MODE (decl_mode, unsignedp, type);
3415 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3418 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3419 SUBREG_PROMOTED_VAR_P (temp) = 1;
3420 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3424 return DECL_RTL (exp);
3427 return immed_double_const (TREE_INT_CST_LOW (exp),
3428 TREE_INT_CST_HIGH (exp),
3432 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3435 /* If optimized, generate immediate CONST_DOUBLE
3436 which will be turned into memory by reload if necessary.
3438 We used to force a register so that loop.c could see it. But
3439 this does not allow gen_* patterns to perform optimizations with
3440 the constants. It also produces two insns in cases like "x = 1.0;".
3441 On most machines, floating-point constants are not permitted in
3442 many insns, so we'd end up copying it to a register in any case.
3444 Now, we do the copying in expand_binop, if appropriate. */
3445 return immed_real_const (exp);
3449 if (! TREE_CST_RTL (exp))
3450 output_constant_def (exp);
3452 /* TREE_CST_RTL probably contains a constant address.
3453 On RISC machines where a constant address isn't valid,
3454 make some insns to get that address into a register. */
3455 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3456 && modifier != EXPAND_CONST_ADDRESS
3457 && modifier != EXPAND_INITIALIZER
3458 && modifier != EXPAND_SUM
3459 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3460 return change_address (TREE_CST_RTL (exp), VOIDmode,
3461 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3462 return TREE_CST_RTL (exp);
3465 context = decl_function_context (exp);
3466 /* We treat inline_function_decl as an alias for the current function
3467 because that is the inline function whose vars, types, etc.
3468 are being merged into the current function.
3469 See expand_inline_function. */
3470 if (context == current_function_decl || context == inline_function_decl)
3473 /* If this is non-local, handle it. */
3476 temp = SAVE_EXPR_RTL (exp);
3477 if (temp && GET_CODE (temp) == REG)
3479 put_var_into_stack (exp);
3480 temp = SAVE_EXPR_RTL (exp);
3482 if (temp == 0 || GET_CODE (temp) != MEM)
3484 return change_address (temp, mode,
3485 fix_lexical_addr (XEXP (temp, 0), exp));
3487 if (SAVE_EXPR_RTL (exp) == 0)
3489 if (mode == BLKmode)
3491 = assign_stack_temp (mode,
3492 int_size_in_bytes (TREE_TYPE (exp)), 0);
3495 enum machine_mode var_mode = mode;
3497 if (TREE_CODE (type) == INTEGER_TYPE
3498 || TREE_CODE (type) == ENUMERAL_TYPE
3499 || TREE_CODE (type) == BOOLEAN_TYPE
3500 || TREE_CODE (type) == CHAR_TYPE
3501 || TREE_CODE (type) == REAL_TYPE
3502 || TREE_CODE (type) == POINTER_TYPE
3503 || TREE_CODE (type) == OFFSET_TYPE)
3505 PROMOTE_MODE (var_mode, unsignedp, type);
3508 temp = gen_reg_rtx (var_mode);
3511 SAVE_EXPR_RTL (exp) = temp;
3512 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3513 if (!optimize && GET_CODE (temp) == REG)
3514 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3518 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3519 must be a promoted value. We return a SUBREG of the wanted mode,
3520 but mark it so that we know that it was already extended. Note
3521 that `unsignedp' was modified above in this case. */
3523 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3524 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3526 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3527 SUBREG_PROMOTED_VAR_P (temp) = 1;
3528 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3532 return SAVE_EXPR_RTL (exp);
3535 /* Exit the current loop if the body-expression is true. */
3537 rtx label = gen_label_rtx ();
3538 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3539 expand_exit_loop (NULL_PTR);
3545 expand_start_loop (1);
3546 expand_expr_stmt (TREE_OPERAND (exp, 0));
3553 tree vars = TREE_OPERAND (exp, 0);
3554 int vars_need_expansion = 0;
3556 /* Need to open a binding contour here because
3557 if there are any cleanups they must be contained here.
3558 expand_start_bindings (0);
3560 /* Mark the corresponding BLOCK for output in its proper place. */
3561 if (TREE_OPERAND (exp, 2) != 0
3562 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3563 insert_block (TREE_OPERAND (exp, 2));
3565 /* If VARS have not yet been expanded, expand them now. */
3568 if (DECL_RTL (vars) == 0)
3570 vars_need_expansion = 1;
3573 expand_decl_init (vars);
3574 vars = TREE_CHAIN (vars);
3577 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3579 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3585 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3587 emit_insns (RTL_EXPR_SEQUENCE (exp));
3588 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3589 return RTL_EXPR_RTL (exp);
3592 /* All elts simple constants => refer to a constant in memory. But
3593 if this is a non-BLKmode mode, let it store a field at a time
3594 since that should make a CONST_INT or CONST_DOUBLE when we fold. */
3596 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3598 rtx constructor = output_constant_def (exp);
3599 if (modifier != EXPAND_CONST_ADDRESS
3600 && modifier != EXPAND_INITIALIZER
3601 && modifier != EXPAND_SUM
3602 && !memory_address_p (GET_MODE (constructor),
3603 XEXP (constructor, 0)))
3604 constructor = change_address (constructor, VOIDmode,
3605 XEXP (constructor, 0));
3612 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3613 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3618 if (target == 0 || ! safe_from_p (target, exp))
3620 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3621 target = gen_reg_rtx (mode);
3624 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3626 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3627 target = safe_target;
3630 store_constructor (exp, target);
3636 tree exp1 = TREE_OPERAND (exp, 0);
3639 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3640 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3641 This code has the same general effect as simply doing
3642 expand_expr on the save expr, except that the expression PTR
3643 is computed for use as a memory address. This means different
3644 code, suitable for indexing, may be generated. */
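/* E.g., for *p += 1 the front end wraps P in a SAVE_EXPR; expanding
   that operand here with EXPAND_SUM and passing it through
   memory_address yields an address form usable for both the read and
   the write of *P.  */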
3645 if (TREE_CODE (exp1) == SAVE_EXPR
3646 && SAVE_EXPR_RTL (exp1) == 0
3647 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3648 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3649 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3651 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3652 VOIDmode, EXPAND_SUM);
3653 op0 = memory_address (mode, temp);
3654 op0 = copy_all_regs (op0);
3655 SAVE_EXPR_RTL (exp1) = op0;
3659 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3660 op0 = memory_address (mode, op0);
3663 temp = gen_rtx (MEM, mode, op0);
3664 /* If address was computed by addition,
3665 mark this as an element of an aggregate. */
3666 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3667 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3668 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3669 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3670 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3671 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3672 || (TREE_CODE (exp1) == ADDR_EXPR
3673 && (exp2 = TREE_OPERAND (exp1, 0))
3674 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3675 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3676 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3677 MEM_IN_STRUCT_P (temp) = 1;
3678 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3679 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3680 a location is accessed through a pointer to const does not mean
3681 that the value there can never change. */
3682 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3688 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3689 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3691 /* Nonconstant array index or nonconstant element size.
3692 Generate the tree for *(&array+index) and expand that,
3693 except do it in a language-independent way
3694 and don't complain about non-lvalue arrays.
3695 `mark_addressable' should already have been called
3696 for any array for which this case will be reached. */
3698 /* Don't forget the const or volatile flag from the array element. */
3699 tree variant_type = build_type_variant (type,
3700 TREE_READONLY (exp),
3701 TREE_THIS_VOLATILE (exp));
3702 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3703 TREE_OPERAND (exp, 0));
3704 tree index = TREE_OPERAND (exp, 1);
3707 /* Convert the integer argument to a type the same size as a pointer
3708 so the multiply won't overflow spuriously. */
3709 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3710 index = convert (type_for_size (POINTER_SIZE, 0), index);
3712 /* Don't think the address has side effects
3713 just because the array does.
3714 (In some cases the address might have side effects,
3715 and we fail to record that fact here. However, it should not
3716 matter, since expand_expr should not care.) */
3717 TREE_SIDE_EFFECTS (array_adr) = 0;
3719 elt = build1 (INDIRECT_REF, type,
3720 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3722 fold (build (MULT_EXPR,
3723 TYPE_POINTER_TO (variant_type),
3724 index, size_in_bytes (type))))));
3726 /* Volatility, etc., of new expression is same as old expression. */
3727 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3728 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3729 TREE_READONLY (elt) = TREE_READONLY (exp);
3731 return expand_expr (elt, target, tmode, modifier);
3734 /* Fold an expression like: "foo"[2].
3735 This is not done in fold so it won't happen inside &. */
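/* Thus "foo"[2] expands directly to the character constant 'o', and
   "foo"[3] to the terminating null, with no memory reference.  */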
3738 tree arg0 = TREE_OPERAND (exp, 0);
3739 tree arg1 = TREE_OPERAND (exp, 1);
3741 if (TREE_CODE (arg0) == STRING_CST
3742 && TREE_CODE (arg1) == INTEGER_CST
3743 && !TREE_INT_CST_HIGH (arg1)
3744 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3746 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3748 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3749 TREE_TYPE (exp) = integer_type_node;
3750 return expand_expr (exp, target, tmode, modifier);
3752 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3754 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3755 TREE_TYPE (exp) = integer_type_node;
3756 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3761 /* If this is a constant index into a constant array,
3762 just get the value from the array. Handle both the cases when
3763 we have an explicit constructor and when our operand is a variable
3764 that was declared const. */
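/* For example, given

       static const int tab[] = { 10, 20, 30 };

   a use of tab[1] expands straight to the constant 20 and never
   touches memory.  */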
3766 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3767 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3769 tree index = fold (TREE_OPERAND (exp, 1));
3770 if (TREE_CODE (index) == INTEGER_CST
3771 && TREE_INT_CST_HIGH (index) == 0)
3773 int i = TREE_INT_CST_LOW (index);
3774 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3777 elem = TREE_CHAIN (elem);
3779 return expand_expr (fold (TREE_VALUE (elem)), target,
3784 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3785 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3786 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3787 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3788 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3790 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3793 tree index = fold (TREE_OPERAND (exp, 1));
3794 if (TREE_CODE (index) == INTEGER_CST
3795 && TREE_INT_CST_HIGH (index) == 0)
3797 int i = TREE_INT_CST_LOW (index);
3798 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3800 if (TREE_CODE (init) == CONSTRUCTOR)
3802 tree elem = CONSTRUCTOR_ELTS (init);
3805 elem = TREE_CHAIN (elem);
3807 return expand_expr (fold (TREE_VALUE (elem)), target,
3810 else if (TREE_CODE (init) == STRING_CST
3811 && i < TREE_STRING_LENGTH (init))
3813 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3814 return convert_to_mode (mode, temp, 0);
3818 /* Treat array-ref with constant index as a component-ref. */
3822 /* If the operand is a CONSTRUCTOR, we can just extract the
3823 appropriate field if it is present. */
3824 if (code != ARRAY_REF
3825 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3829 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3830 elt = TREE_CHAIN (elt))
3831 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3832 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3836 enum machine_mode mode1;
3841 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3842 &mode1, &unsignedp, &volatilep);
3844 /* In some cases, we will be offsetting OP0's address by a constant.
3845 So get it as a sum, if possible. If we will be using it
3846 directly in an insn, we validate it. */
3847 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3849 /* If this is a constant, put it into a register if it is a
3850 legitimate constant and memory if it isn't. */
3851 if (CONSTANT_P (op0))
3853 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3854 if (LEGITIMATE_CONSTANT_P (op0))
3855 op0 = force_reg (mode, op0);
3857 op0 = validize_mem (force_const_mem (mode, op0));
3862 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3864 if (GET_CODE (op0) != MEM)
3866 op0 = change_address (op0, VOIDmode,
3867 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3868 force_reg (Pmode, offset_rtx)));
3871 /* Don't forget about volatility even if this is a bitfield. */
3872 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3874 op0 = copy_rtx (op0);
3875 MEM_VOLATILE_P (op0) = 1;
3878 if (mode1 == VOIDmode
3879 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3880 && modifier != EXPAND_CONST_ADDRESS
3881 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3882 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3884 /* In cases where an aligned union has an unaligned object
3885 as a field, we might be extracting a BLKmode value from
3886 an integer-mode (e.g., SImode) object. Handle this case
3887 by doing the extract into an object as wide as the field
3888 (which we know to be the width of a basic mode), then
3889 storing into memory, and changing the mode to BLKmode. */
3890 enum machine_mode ext_mode = mode;
3892 if (ext_mode == BLKmode)
3893 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3895 if (ext_mode == BLKmode)
3898 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3899 unsignedp, target, ext_mode, ext_mode,
3900 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3901 int_size_in_bytes (TREE_TYPE (tem)));
3902 if (mode == BLKmode)
3904 rtx new = assign_stack_temp (ext_mode,
3905 bitsize / BITS_PER_UNIT, 0);
3907 emit_move_insn (new, op0);
3908 op0 = copy_rtx (new);
3909 PUT_MODE (op0, BLKmode);
3915 /* Get a reference to just this component. */
3916 if (modifier == EXPAND_CONST_ADDRESS
3917 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3918 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3919 (bitpos / BITS_PER_UNIT)));
3921 op0 = change_address (op0, mode1,
3922 plus_constant (XEXP (op0, 0),
3923 (bitpos / BITS_PER_UNIT)));
3924 MEM_IN_STRUCT_P (op0) = 1;
3925 MEM_VOLATILE_P (op0) |= volatilep;
3926 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3929 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3930 convert_move (target, op0, unsignedp);
3936 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3937 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3938 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3939 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3940 MEM_IN_STRUCT_P (temp) = 1;
3941 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3942 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3943 a location is accessed through a pointer to const does not mean
3944 that the value there can never change. */
3945 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3950 /* Intended for a reference to a buffer of a file-object in Pascal.
3951 But it's not certain that a special tree code will really be
3952 necessary for these. INDIRECT_REF might work for them. */
3956 /* IN_EXPR: Inlined Pascal set IN expression.
3959 rlo = set_low - (set_low%bits_per_word);
3960 the_word = set [ (index - rlo)/bits_per_word ];
3961 bit_index = index % bits_per_word;
3962 bitmask = 1 << bit_index;
3963 return !!(the_word & bitmask); */
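/* A worked instance of the scheme above, taking bits_per_word == 8
   and set_low == 10: for index 35, rlo = 10 - 2 = 8, the_word is
   set[(35 - 8)/8] = set[3], bit_index = 35 % 8 = 3, and we test
   the_word & (1 << 3).  */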
3965 preexpand_calls (exp);
3967 tree set = TREE_OPERAND (exp, 0);
3968 tree index = TREE_OPERAND (exp, 1);
3969 tree set_type = TREE_TYPE (set);
3971 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3972 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3978 rtx diff, quo, rem, addr, bit, result;
3979 rtx setval, setaddr;
3980 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
3983 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
3985 /* If domain is empty, answer is no. */
3986 if (tree_int_cst_lt (set_high_bound, set_low_bound))
3989 index_val = expand_expr (index, 0, VOIDmode, 0);
3990 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
3991 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
3992 setval = expand_expr (set, 0, VOIDmode, 0);
3993 setaddr = XEXP (setval, 0);
3995 /* Compare index against bounds, if they are constant. */
3996 if (GET_CODE (index_val) == CONST_INT
3997 && GET_CODE (lo_r) == CONST_INT)
3999 if (INTVAL (index_val) < INTVAL (lo_r))
4003 if (GET_CODE (index_val) == CONST_INT
4004 && GET_CODE (hi_r) == CONST_INT)
4006 if (INTVAL (hi_r) < INTVAL (index_val))
4010 /* If we get here, we have to generate the code for both cases
4011 (in range and out of range). */
4013 op0 = gen_label_rtx ();
4014 op1 = gen_label_rtx ();
4016 if (! (GET_CODE (index_val) == CONST_INT
4017 && GET_CODE (lo_r) == CONST_INT))
4019 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
4020 emit_jump_insn (gen_blt (op1));
4023 if (! (GET_CODE (index_val) == CONST_INT
4024 && GET_CODE (hi_r) == CONST_INT))
4026 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
4027 emit_jump_insn (gen_bgt (op1));
4030 /* Calculate the element number of bit zero in the first word of the set, i.e. LO_R rounded down to a multiple of BITS_PER_UNIT. */
4032 if (GET_CODE (lo_r) == CONST_INT)
4033 rlow = gen_rtx (CONST_INT, VOIDmode,
4034 INTVAL (lo_r) & ~ (BITS_PER_UNIT - 1));
4036 rlow = expand_binop (index_mode, and_optab,
4037 lo_r, gen_rtx (CONST_INT, VOIDmode,
4038 ~ (BITS_PER_UNIT - 1)),
4039 0, 0, OPTAB_LIB_WIDEN);
4041 diff = expand_binop (index_mode, sub_optab,
4042 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4044 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4045 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4047 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4048 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4050 addr = memory_address (byte_mode,
4051 expand_binop (index_mode, add_optab,
4053 /* Extract the bit we want to examine */
4054 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4055 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4056 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4057 1, OPTAB_LIB_WIDEN);
4058 emit_move_insn (target, result);
4060 /* Output the code to handle the out-of-range case. */
4063 emit_move_insn (target, const0_rtx);
4068 case WITH_CLEANUP_EXPR:
4069 if (RTL_EXPR_RTL (exp) == 0)
4072 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4074 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4075 /* That's it for this cleanup. */
4076 TREE_OPERAND (exp, 2) = 0;
4078 return RTL_EXPR_RTL (exp);
4081 /* Check for a built-in function. */
4082 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4083 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4084 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4085 return expand_builtin (exp, target, subtarget, tmode, ignore);
4086 /* If this call was expanded already by preexpand_calls,
4087 just return the result we got. */
4088 if (CALL_EXPR_RTL (exp) != 0)
4089 return CALL_EXPR_RTL (exp);
4090 return expand_call (exp, target, ignore);
4092 case NON_LVALUE_EXPR:
4095 case REFERENCE_EXPR:
4096 if (TREE_CODE (type) == VOID_TYPE || ignore)
4098 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4101 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4102 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4103 if (TREE_CODE (type) == UNION_TYPE)
4105 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4108 if (mode == BLKmode)
4110 if (TYPE_SIZE (type) == 0
4111 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4113 target = assign_stack_temp (BLKmode,
4114 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4115 + BITS_PER_UNIT - 1)
4116 / BITS_PER_UNIT, 0);
4119 target = gen_reg_rtx (mode);
4121 if (GET_CODE (target) == MEM)
4122 /* Store data into beginning of memory target. */
4123 store_expr (TREE_OPERAND (exp, 0),
4124 change_address (target, TYPE_MODE (valtype), 0), 0);
4126 else if (GET_CODE (target) == REG)
4127 /* Store this field into a union of the proper type. */
4128 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4129 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4131 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4135 /* Return the entire union. */
4138 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4139 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
4141 if (modifier == EXPAND_INITIALIZER)
4142 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4143 if (flag_force_mem && GET_CODE (op0) == MEM)
4144 op0 = copy_to_reg (op0);
4147 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4149 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4153 /* We come here from MINUS_EXPR when the second operand is a constant. */
4155 this_optab = add_optab;
4157 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4158 something else, make sure we add the register to the constant and
4159 then to the other thing. This case can occur during strength
4160 reduction and doing it this way will produce better code if the
4161 frame pointer or argument pointer is eliminated.
4163 fold-const.c will ensure that the constant is always in the inner
4164 PLUS_EXPR, so the only case we need to do anything about is if
4165 sp, ap, or fp is our second argument, in which case we must swap
4166 the innermost first argument and our second argument. */
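/* So (X + 1) + FP is rearranged here into (FP + 1) + X, leaving
   frame-pointer elimination free to adjust just the inner constant.  */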
4168 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4169 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4170 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4171 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4172 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4173 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4175 tree t = TREE_OPERAND (exp, 1);
4177 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4178 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4181 /* If the result is to be Pmode and we are adding an integer to
4182 something, we might be forming a constant. So try to use
4183 plus_constant. If it produces a sum and we can't accept it,
4184 use force_operand. This allows P = &ARR[const] to generate
4185 efficient code on machines where a SYMBOL_REF is not a valid index.
4188 If this is an EXPAND_SUM call, always return the sum. */
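/* E.g., for P = &ARR[5] (assuming 4-byte elements) plus_constant can form

       (const (plus (symbol_ref ("ARR")) (const_int 20)))

   directly; force_operand is needed only if such a sum is not
   acceptable where the value is wanted.  */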
4189 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4190 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4191 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
|| mode == Pmode))
{
4194 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
EXPAND_SUM);
4196 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4197 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4198 op1 = force_operand (op1, target);
return op1;
}
4202 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4203 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4204 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
|| mode == Pmode))
{
4207 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
EXPAND_SUM);
4209 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4210 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4211 op0 = force_operand (op0, target);
return op0;
}
4215 /* No sense saving up arithmetic to be done
4216 if it's all in the wrong mode to form part of an address.
4217 And force_operand won't know whether to sign-extend or zero-extend. */
4219 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4220 || mode != Pmode) goto binop;
4222 preexpand_calls (exp);
4223 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4226 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4227 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4229 /* Make sure any term that's a sum with a constant comes last. */
4230 if (GET_CODE (op0) == PLUS
4231 && CONSTANT_P (XEXP (op0, 1)))
temp = op0, op0 = op1, op1 = temp;
4237 /* If adding to a sum including a constant,
4238 associate it to put the constant outside. */
4239 if (GET_CODE (op1) == PLUS
4240 && CONSTANT_P (XEXP (op1, 1)))
4242 rtx constant_term = const0_rtx;
4244 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
if (temp != 0)
op0 = temp;
4247 /* Ensure that MULT comes first if there is one. */
4248 else if (GET_CODE (op0) == MULT)
4249 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
else
4251 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4253 /* Let's also eliminate constants from op0 if possible. */
4254 op0 = eliminate_constant_term (op0, &constant_term);
4256 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4257 their sum should be a constant. Form it into OP1, since the
4258 result we want will then be OP0 + OP1. */
4260 temp = simplify_binary_operation (PLUS, mode, constant_term,
XEXP (op1, 1));
if (temp != 0)
op1 = temp;
else
4265 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
}
4268 /* Put a constant term last and put a multiplication first. */
4269 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4270 temp = op1, op1 = op0, op0 = temp;
4272 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4273 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
case MINUS_EXPR:
4276 /* Handle difference of two symbolic constants,
4277 for the sake of an initializer. */
4278 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4279 && really_constant_p (TREE_OPERAND (exp, 0))
4280 && really_constant_p (TREE_OPERAND (exp, 1)))
4282 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4283 VOIDmode, modifier);
4284 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4285 VOIDmode, modifier);
4286 return gen_rtx (MINUS, mode, op0, op1);
4288 /* Convert A - const to A + (-const). */
4289 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4291 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4292 fold (build1 (NEGATE_EXPR, type,
4293 TREE_OPERAND (exp, 1))));
goto plus_expr;
}
4296 this_optab = sub_optab;
goto binop;

case MULT_EXPR:
4300 preexpand_calls (exp);
4301 /* If first operand is constant, swap them.
4302 Thus the following special case checks need only
4303 check the second operand. */
4304 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4306 register tree t1 = TREE_OPERAND (exp, 0);
4307 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4308 TREE_OPERAND (exp, 1) = t1;
4311 /* Attempt to return something suitable for generating an
4312 indexed address, for machines that support that. */
4314 if (modifier == EXPAND_SUM && mode == Pmode
4315 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4316 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4318 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4320 /* Apply distributive law if OP0 is x+c. */
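/* For example, (x + 4) * 3 becomes x*3 + 12, leaving the constant
   outermost where an indexed address can absorb it. */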
4321 if (GET_CODE (op0) == PLUS
4322 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4323 return gen_rtx (PLUS, mode,
4324 gen_rtx (MULT, mode, XEXP (op0, 0),
4325 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4326 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4327 * INTVAL (XEXP (op0, 1))));
4329 if (GET_CODE (op0) != REG)
4330 op0 = force_operand (op0, NULL_RTX);
4331 if (GET_CODE (op0) != REG)
4332 op0 = copy_to_mode_reg (mode, op0);
4334 return gen_rtx (MULT, mode, op0,
4335 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4338 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
subtarget = 0;
4341 /* Check for multiplying things that have been extended
4342 from a narrower type. If this machine supports multiplying
4343 in that narrower type with a result in the desired type,
4344 do it that way, and avoid the explicit type-conversion. */
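/* For instance, multiplying two values just extended from short can
   use a HImode-to-SImode widening multiply (a pattern such as
   mulhisi3) when the target provides one, instead of extending both
   operands to full width first. */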
4345 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4346 && TREE_CODE (type) == INTEGER_TYPE
4347 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4348 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4349 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4350 && int_fits_type_p (TREE_OPERAND (exp, 1),
4351 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4352 /* Don't use a widening multiply if a shift will do. */
4353 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4354 > HOST_BITS_PER_WIDE_INT)
4355 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4357 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4358 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
==
4360 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4361 /* If both operands are extended, they must either both
4362 be zero-extended or both be sign-extended. */
4363 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
==
4365 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4367 enum machine_mode innermode
4368 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4369 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4370 ? umul_widen_optab : smul_widen_optab);
4371 if (mode == GET_MODE_WIDER_MODE (innermode)
4372 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4374 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4375 NULL_RTX, VOIDmode, 0);
4376 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4377 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
VOIDmode, 0);
else
4380 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4381 NULL_RTX, VOIDmode, 0);
4385 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4386 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4387 return expand_mult (mode, op0, op1, target, unsignedp);
4389 case TRUNC_DIV_EXPR:
4390 case FLOOR_DIV_EXPR:
case CEIL_DIV_EXPR:
4392 case ROUND_DIV_EXPR:
4393 case EXACT_DIV_EXPR:
4394 preexpand_calls (exp);
4395 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
subtarget = 0;
4397 /* Possible optimization: compute the dividend with EXPAND_SUM
4398 then if the divisor is constant, we can optimize the case
4399 where some terms of the dividend have coefficients divisible by it. */
4400 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4401 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4402 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
case RDIV_EXPR:
4405 this_optab = flodiv_optab;
goto binop;
4408 case TRUNC_MOD_EXPR:
4409 case FLOOR_MOD_EXPR:
case CEIL_MOD_EXPR:
4411 case ROUND_MOD_EXPR:
4412 preexpand_calls (exp);
4413 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
subtarget = 0;
4415 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4416 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4417 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4419 case FIX_ROUND_EXPR:
4420 case FIX_FLOOR_EXPR:
case FIX_CEIL_EXPR:
4422 abort (); /* Not used for C. */
4424 case FIX_TRUNC_EXPR:
4425 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
if (target == 0)
4427 target = gen_reg_rtx (mode);
4428 expand_fix (target, op0, unsignedp);
return target;

case FLOAT_EXPR:
4432 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
if (target == 0)
4434 target = gen_reg_rtx (mode);
4435 /* expand_float can't figure out what to do if FROM has VOIDmode.
4436 So give it the correct mode. With -O, cse will optimize this. */
4437 if (GET_MODE (op0) == VOIDmode)
4438 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4440 expand_float (target, op0,
4441 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
return target;
case NEGATE_EXPR:
4445 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4446 temp = expand_unop (mode, neg_optab, op0, target, 0);
if (temp == 0)
abort ();
return temp;

case ABS_EXPR:
4452 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4454 /* Handle complex values specially. */
4456 enum machine_mode opmode
4457 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4459 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4460 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4461 return expand_complex_abs (opmode, op0, target, unsignedp);
4464 /* Unsigned abs is simply the operand. Testing here means we don't
4465 risk generating incorrect code below. */
4466 if (TREE_UNSIGNED (type))
return op0;
4469 /* First try to do it with a special abs instruction. */
4470 temp = expand_unop (mode, abs_optab, op0, target, 0);
if (temp != 0)
return temp;
4474 /* If this machine has expensive jumps, we can do integer absolute
4475 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4476 where W is the width of MODE. */
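/* For example, with x = -5 the arithmetic shift gives -1 (all ones),
   (-5 ^ -1) = 4, and 4 - (-1) = 5; for nonnegative x the shift gives
   0 and the expression reduces to x itself, so no branch is needed. */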
4478 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4480 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4481 size_int (GET_MODE_BITSIZE (mode) - 1),
4484 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
OPTAB_LIB_WIDEN);
4487 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
OPTAB_LIB_WIDEN);
if (temp != 0)
return temp;
4494 /* If that does not win, use conditional jump and negate. */
4495 target = original_target;
4496 temp = gen_label_rtx ();
4497 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4498 || (GET_CODE (target) == REG
4499 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4500 target = gen_reg_rtx (mode);
4501 emit_move_insn (target, op0);
4502 emit_cmp_insn (target,
4503 expand_expr (convert (type, integer_zero_node),
4504 NULL_RTX, VOIDmode, 0),
4505 GE, NULL_RTX, mode, 0, 0);
4507 emit_jump_insn (gen_bge (temp));
4508 op0 = expand_unop (mode, neg_optab, target, target, 0);
if (op0 != target)
4510 emit_move_insn (target, op0);
emit_label (temp);
return target;

case MAX_EXPR:
case MIN_EXPR:
4517 target = original_target;
4518 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4519 || (GET_CODE (target) == REG
4520 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4521 target = gen_reg_rtx (mode);
4522 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4523 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4525 /* First try to do it with a special MIN or MAX instruction.
4526 If that does not win, use a conditional jump to select the proper value. */
4528 this_optab = (TREE_UNSIGNED (type)
4529 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4530 : (code == MIN_EXPR ? smin_optab : smax_optab));
4532 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
OPTAB_WIDEN);
if (temp != 0)
return temp;
4538 emit_move_insn (target, op0);
4539 op0 = gen_label_rtx ();
4540 if (code == MAX_EXPR)
4541 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4542 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4543 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4545 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4546 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4547 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4548 if (temp == const0_rtx)
4549 emit_move_insn (target, op1);
4550 else if (temp != const_true_rtx)
{
4552 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4553 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
else
abort ();
4556 emit_move_insn (target, op1);
}
emit_label (op0);
return target;
4561 /* ??? Can optimize when the operand of this is a bitwise operation,
4562 by using a different bitwise operation. */
case BIT_NOT_EXPR:
4564 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4565 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
if (temp == 0)
abort ();
return temp;

case FFS_EXPR:
4571 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4572 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4577 /* ??? Can optimize bitwise operations with one arg constant.
4578 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4579 and (a bitwise1 b) bitwise2 b (etc)
4580 but that is probably not worth while. */
4582 /* BIT_AND_EXPR is for bitwise anding.
4583 TRUTH_AND_EXPR is for anding two boolean values
4584 when we want in all cases to compute both of them.
4585 In general it is fastest to do TRUTH_AND_EXPR by
4586 computing both operands as actual zero-or-1 values
4587 and then bitwise anding. In cases where there cannot
4588 be any side effects, better code would be made by
4589 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4590 but the question is how to recognize those cases. */
case BIT_AND_EXPR:
4592 case TRUTH_AND_EXPR:
4594 this_optab = and_optab;
goto binop;
case TRUTH_OR_EXPR:
4597 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
case BIT_IOR_EXPR:
4600 this_optab = ior_optab;
goto binop;
case BIT_XOR_EXPR:
4604 this_optab = xor_optab;
goto binop;
case LSHIFT_EXPR:
case RSHIFT_EXPR:
case LROTATE_EXPR:
case RROTATE_EXPR:
4611 preexpand_calls (exp);
4612 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
subtarget = 0;
4614 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4615 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
unsignedp);
4618 /* Could determine the answer when only additive constants differ.
4619 Also, the addition of one can be handled by changing the condition. */
case EQ_EXPR:
case NE_EXPR:
case LT_EXPR:
case LE_EXPR:
case GT_EXPR:
case GE_EXPR:
4626 preexpand_calls (exp);
4627 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
if (temp != 0)
return temp;
4630 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4631 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4633 && GET_CODE (original_target) == REG
4634 && (GET_MODE (original_target)
4635 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4637 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4638 if (temp != original_target)
4639 temp = copy_to_reg (temp);
4640 op1 = gen_label_rtx ();
4641 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4642 GET_MODE (temp), unsignedp, 0);
4643 emit_jump_insn (gen_beq (op1));
4644 emit_move_insn (temp, const1_rtx);
emit_label (op1);
return temp;
4648 /* If no set-flag instruction, must generate a conditional
4649 store into a temporary variable. Drop through
4650 and handle this like && and ||. */
4652 case TRUTH_ANDIF_EXPR:
4653 case TRUTH_ORIF_EXPR:
4654 if (target == 0 || ! safe_from_p (target, exp)
4655 /* Make sure we don't have a hard reg (such as function's return
4656 value) live across basic blocks, if not optimizing. */
4657 || (!optimize && GET_CODE (target) == REG
4658 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4659 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4660 emit_clr_insn (target);
4661 op1 = gen_label_rtx ();
4662 jumpifnot (exp, op1);
4663 emit_0_to_1_insn (target);
emit_label (op1);
return target;
4667 case TRUTH_NOT_EXPR:
4668 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4669 /* The parser is careful to generate TRUTH_NOT_EXPR
4670 only with operands that are always zero or one. */
4671 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4672 target, 1, OPTAB_LIB_WIDEN);
if (temp == 0)
abort ();
return temp;

case COMPOUND_EXPR:
4678 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4680 return expand_expr (TREE_OPERAND (exp, 1),
4681 (ignore ? const0_rtx : target),
VOIDmode, 0);

case COND_EXPR:
4686 /* Note that COND_EXPRs whose type is a structure or union
4687 are required to be constructed to contain assignments of
4688 a temporary variable, so that we can evaluate them here
4689 for side effect only. If type is void, we must do likewise. */
4691 /* If an arm of the branch requires a cleanup,
4692 only that cleanup is performed. */
4695 tree binary_op = 0, unary_op = 0;
4696 tree old_cleanups = cleanups_this_call;
4697 cleanups_this_call = 0;
4699 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4700 convert it to our mode, if necessary. */
4701 if (integer_onep (TREE_OPERAND (exp, 1))
4702 && integer_zerop (TREE_OPERAND (exp, 2))
4703 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4705 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4706 if (GET_MODE (op0) == mode)
4709 target = gen_reg_rtx (mode);
4710 convert_move (target, op0, unsignedp);
4714 /* If we are not to produce a result, we have no target. Otherwise,
4715 if a target was specified use it; it will not be used as an
4716 intermediate target unless it is safe. If no target, use a temporary. */
4719 if (mode == VOIDmode || ignore)
temp = 0;
4721 else if (original_target
4722 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4723 temp = original_target;
4724 else if (mode == BLKmode)
4726 if (TYPE_SIZE (type) == 0
4727 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
abort ();
4729 temp = assign_stack_temp (BLKmode,
4730 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4731 + BITS_PER_UNIT - 1)
4732 / BITS_PER_UNIT, 0);
else
4735 temp = gen_reg_rtx (mode);
4737 /* Check for X ? A + B : A. If we have this, we can copy
4738 A to the output and conditionally add B. Similarly for unary
4739 operations. Don't do this if X has side-effects because
4740 those side effects might affect A or B and the "?" operation is
4741 a sequence point in ANSI. (We test for side effects later.) */
4743 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4744 && operand_equal_p (TREE_OPERAND (exp, 2),
4745 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4746 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4747 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4748 && operand_equal_p (TREE_OPERAND (exp, 1),
4749 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4750 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4751 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4752 && operand_equal_p (TREE_OPERAND (exp, 2),
4753 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4754 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4755 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4756 && operand_equal_p (TREE_OPERAND (exp, 1),
4757 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4758 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4760 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4761 operation, do this as A + (X != 0). Similarly for other simple
4762 binary operators. */
4763 if (singleton && binary_op
4764 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4765 && (TREE_CODE (binary_op) == PLUS_EXPR
4766 || TREE_CODE (binary_op) == MINUS_EXPR
4767 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4768 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4769 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4770 && integer_onep (TREE_OPERAND (binary_op, 1))
4771 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4774 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4775 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4776 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4777 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
: and_optab);
4780 /* If we had X ? A : A + 1, do this as A + (X == 0).
4782 We have to invert the truth value here and then put it
4783 back later if do_store_flag fails. We cannot simply copy
4784 TREE_OPERAND (exp, 0) to another variable and modify that
4785 because invert_truthvalue can modify the tree pointed to by its argument. */
4787 if (singleton == TREE_OPERAND (exp, 1))
4788 TREE_OPERAND (exp, 0)
4789 = invert_truthvalue (TREE_OPERAND (exp, 0));
4791 result = do_store_flag (TREE_OPERAND (exp, 0),
4792 (safe_from_p (temp, singleton)
? temp : NULL_RTX),
4794 mode, BRANCH_COST <= 1);
if (result)
{
4798 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4799 return expand_binop (mode, boptab, op1, result, temp,
4800 unsignedp, OPTAB_LIB_WIDEN);
}
4802 else if (singleton == TREE_OPERAND (exp, 1))
4803 TREE_OPERAND (exp, 0)
4804 = invert_truthvalue (TREE_OPERAND (exp, 0));
4808 op0 = gen_label_rtx ();
4810 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4814 /* If the target conflicts with the other operand of the
4815 binary op, we can't use it. Also, we can't use the target
4816 if it is a hard register, because evaluating the condition
4817 might clobber it. */
if ((binary_op
4819 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4820 || (GET_CODE (temp) == REG
4821 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4822 temp = gen_reg_rtx (mode);
4823 store_expr (singleton, temp, 0);
else
4826 expand_expr (singleton,
4827 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4828 if (cleanups_this_call)
4830 sorry ("aggregate value in COND_EXPR");
4831 cleanups_this_call = 0;
4833 if (singleton == TREE_OPERAND (exp, 1))
4834 jumpif (TREE_OPERAND (exp, 0), op0);
else
4836 jumpifnot (TREE_OPERAND (exp, 0), op0);
4838 if (binary_op && temp == 0)
4839 /* Just touch the other operand. */
4840 expand_expr (TREE_OPERAND (binary_op, 1),
4841 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
else if (binary_op)
4843 store_expr (build (TREE_CODE (binary_op), type,
4844 make_tree (type, temp),
4845 TREE_OPERAND (binary_op, 1)),
temp, 0);
else
4848 store_expr (build1 (TREE_CODE (unary_op), type,
4849 make_tree (type, temp)),
temp, 0);
4854 /* This is now done in jump.c and is better done there because it
4855 produces shorter register lifetimes. */
4857 /* Check for both possibilities either constants or variables
4858 in registers (but not the same as the target!). If so, can
4859 save branches by assigning one, branching, and assigning the other. */
4861 else if (temp && GET_MODE (temp) != BLKmode
4862 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4863 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4864 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4865 && DECL_RTL (TREE_OPERAND (exp, 1))
4866 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4867 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4868 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4869 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4870 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4871 && DECL_RTL (TREE_OPERAND (exp, 2))
4872 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4873 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4875 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4876 temp = gen_reg_rtx (mode);
4877 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4878 jumpifnot (TREE_OPERAND (exp, 0), op0);
4879 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4883 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4884 comparison operator. If we have one of these cases, set the
4885 output to A, branch on A (cse will merge these two references),
4886 then set the output to FOO. */
else if (temp
4888 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4889 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4890 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4891 TREE_OPERAND (exp, 1), 0)
4892 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4893 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4895 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4896 temp = gen_reg_rtx (mode);
4897 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4898 jumpif (TREE_OPERAND (exp, 0), op0);
4899 store_expr (TREE_OPERAND (exp, 2), temp, 0);
else if (temp
4903 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4904 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4905 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4906 TREE_OPERAND (exp, 2), 0)
4907 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4908 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4910 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4911 temp = gen_reg_rtx (mode);
4912 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4913 jumpifnot (TREE_OPERAND (exp, 0), op0);
4914 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4919 op1 = gen_label_rtx ();
4920 jumpifnot (TREE_OPERAND (exp, 0), op0);
if (temp != 0)
4922 store_expr (TREE_OPERAND (exp, 1), temp, 0);
else
4924 expand_expr (TREE_OPERAND (exp, 1),
4925 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4926 if (cleanups_this_call)
4928 sorry ("aggregate value in COND_EXPR");
4929 cleanups_this_call = 0;
4933 emit_jump_insn (gen_jump (op1));
if (temp != 0)
4937 store_expr (TREE_OPERAND (exp, 2), temp, 0);
else
4939 expand_expr (TREE_OPERAND (exp, 2),
4940 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4943 if (cleanups_this_call)
4945 sorry ("aggregate value in COND_EXPR");
4946 cleanups_this_call = 0;
4952 cleanups_this_call = old_cleanups;
return temp;
}

case TARGET_EXPR:
4958 /* Something needs to be initialized, but we didn't know
4959 where that thing was when building the tree. For example,
4960 it could be the return value of a function, or a parameter
4961 to a function which is laid down on the stack, or a temporary
4962 variable which must be passed by reference.
4964 We guarantee that the expression will either be constructed
4965 or copied into our original target. */
4967 tree slot = TREE_OPERAND (exp, 0);
4970 if (TREE_CODE (slot) != VAR_DECL)
abort ();
4975 if (DECL_RTL (slot) != 0)
4977 target = DECL_RTL (slot);
4978 /* We have already expanded the slot, so don't do it again. */
4980 if (TREE_OPERAND (exp, 1) == NULL_TREE)
return target;
4985 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4986 /* All temp slots at this level must not conflict. */
4987 preserve_temp_slots (target);
4988 DECL_RTL (slot) = target;
4992 /* I bet this needs to be done, and I bet that it needs to
4993 be above, inside the else clause. The reason is
4994 simple, how else is it going to get cleaned up? (mrs)
4996 The reason it probably did not work before, and was
4997 commented out, is that this was re-expanding already
4998 expanded target_exprs (target == 0 and DECL_RTL (slot)
4999 != 0), also cleaning them up many times as well. :-( */
5001 /* Since SLOT is not known to the called function
5002 to belong to its stack frame, we must build an explicit
5003 cleanup. This case occurs when we must build up a reference
5004 to pass the reference as an argument. In this case,
5005 it is very likely that such a reference need not be built here. */
5008 if (TREE_OPERAND (exp, 2) == 0)
5009 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5010 if (TREE_OPERAND (exp, 2))
5011 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5012 cleanups_this_call);
5017 /* This case does occur, when expanding a parameter which
5018 needs to be constructed on the stack. The target
5019 is the actual stack address that we want to initialize.
5020 The function we call will perform the cleanup in this case. */
5022 DECL_RTL (slot) = target;
5025 exp1 = TREE_OPERAND (exp, 1);
5026 /* Mark it as expanded. */
5027 TREE_OPERAND (exp, 1) = NULL_TREE;
5029 return expand_expr (exp1, target, tmode, modifier);
}

case INIT_EXPR:
{
5034 tree lhs = TREE_OPERAND (exp, 0);
5035 tree rhs = TREE_OPERAND (exp, 1);
5036 tree noncopied_parts = 0;
5037 tree lhs_type = TREE_TYPE (lhs);
5039 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5040 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5041 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5042 TYPE_NONCOPIED_PARTS (lhs_type));
5043 while (noncopied_parts != 0)
5045 expand_assignment (TREE_VALUE (noncopied_parts),
5046 TREE_PURPOSE (noncopied_parts), 0, 0);
5047 noncopied_parts = TREE_CHAIN (noncopied_parts);
}
return temp;
}

case MODIFY_EXPR:
{
5054 /* If lhs is complex, expand calls in rhs before computing it.
5055 That's so we don't compute a pointer and save it over a call.
5056 If lhs is simple, compute it first so we can give it as a
5057 target if the rhs is just a call. This avoids an extra temp and copy
5058 and that prevents a partial-subsumption which makes bad code.
5059 Actually we could treat component_ref's of vars like vars. */
5061 tree lhs = TREE_OPERAND (exp, 0);
5062 tree rhs = TREE_OPERAND (exp, 1);
5063 tree noncopied_parts = 0;
5064 tree lhs_type = TREE_TYPE (lhs);
5068 if (TREE_CODE (lhs) != VAR_DECL
5069 && TREE_CODE (lhs) != RESULT_DECL
5070 && TREE_CODE (lhs) != PARM_DECL)
5071 preexpand_calls (exp);
5073 /* Check for |= or &= of a bitfield of size one into another bitfield
5074 of size 1. In this case, (unless we need the result of the
5075 assignment) we can do this more efficiently with a
5076 test followed by an assignment, if necessary.
5078 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5079 things change so we do, this code should be enhanced to support it. */
if (ignore
5082 && TREE_CODE (lhs) == COMPONENT_REF
5083 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5084 || TREE_CODE (rhs) == BIT_AND_EXPR)
5085 && TREE_OPERAND (rhs, 0) == lhs
5086 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5087 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5088 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5090 rtx label = gen_label_rtx ();
5092 do_jump (TREE_OPERAND (rhs, 1),
5093 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5094 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5095 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5096 (TREE_CODE (rhs) == BIT_IOR_EXPR
? integer_one_node
5098 : integer_zero_node)),
5100 do_pending_stack_adjust ();
5105 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5106 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5107 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5108 TYPE_NONCOPIED_PARTS (lhs_type));
5110 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5111 while (noncopied_parts != 0)
5113 expand_assignment (TREE_PURPOSE (noncopied_parts),
5114 TREE_VALUE (noncopied_parts), 0, 0);
5115 noncopied_parts = TREE_CHAIN (noncopied_parts);
}
return temp;
}
5120 case PREINCREMENT_EXPR:
5121 case PREDECREMENT_EXPR:
5122 return expand_increment (exp, 0);
5124 case POSTINCREMENT_EXPR:
5125 case POSTDECREMENT_EXPR:
5126 /* Faster to treat as pre-increment if result is not used. */
5127 return expand_increment (exp, ! ignore);
case ADDR_EXPR:
5130 /* Are we taking the address of a nested function? */
5131 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5132 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5134 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5135 op0 = force_operand (op0, target);
5139 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5140 (modifier == EXPAND_INITIALIZER
5141 ? modifier : EXPAND_CONST_ADDRESS));
5142 if (GET_CODE (op0) != MEM)
abort ();
5145 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5146 return XEXP (op0, 0);
5147 op0 = force_operand (XEXP (op0, 0), target);
5149 if (flag_force_addr && GET_CODE (op0) != REG)
5150 return force_reg (Pmode, op0);
return op0;
5153 case ENTRY_VALUE_EXPR:
5156 /* COMPLEX type for Extended Pascal & Fortran */
case COMPLEX_EXPR:
{
5159 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5163 /* Get the rtx code of the operands. */
5164 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5165 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5168 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5170 prev = get_last_insn ();
5172 /* Tell flow that the whole of the destination is being set. */
5173 if (GET_CODE (target) == REG)
5174 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5176 /* Move the real (op0) and imaginary (op1) parts to their location. */
5177 emit_move_insn (gen_realpart (mode, target), op0);
5178 emit_move_insn (gen_imagpart (mode, target), op1);
5180 /* Complex construction should appear as a single unit. */
case REALPART_EXPR:
5187 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5188 return gen_realpart (mode, op0);
case IMAGPART_EXPR:
5191 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5192 return gen_imagpart (mode, op0);
case CONJ_EXPR:
{
5196 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5200 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5203 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5205 prev = get_last_insn ();
5207 /* Tell flow that the whole of the destination is being set. */
5208 if (GET_CODE (target) == REG)
5209 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5211 /* Store the realpart and the negated imagpart to target. */
5212 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5214 imag_t = gen_imagpart (mode, target);
5215 temp = expand_unop (mode, neg_optab,
5216 gen_imagpart (mode, op0), imag_t, 0);
5218 emit_move_insn (imag_t, temp);
5220 /* Conjugate should appear as a single unit */
default:
5230 return (*lang_expand_expr) (exp, target, tmode, modifier);
}
5233 /* Here to do an ordinary binary operator, generating an instruction
5234 from the optab already placed in `this_optab'. */
binop:
5236 preexpand_calls (exp);
5237 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
subtarget = 0;
5239 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5240 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5242 temp = expand_binop (mode, this_optab, op0, op1, target,
5243 unsignedp, OPTAB_LIB_WIDEN);
if (temp == 0)
abort ();
return temp;
}
5249 /* Return the alignment in bits of EXP, a pointer valued expression.
5250 But don't return more than MAX_ALIGN no matter what.
5251 The alignment returned is, by default, the alignment of the thing that
5252 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5254 Otherwise, look at the expression to see if we can do better, i.e., if the
5255 expression is actually pointing at an object whose alignment is tighter. */
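/* For example, (char *) &i, where i is an int, yields the int's
   alignment (typically 32 bits) rather than the byte alignment
   suggested by the char * type. */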
static unsigned
5258 get_pointer_alignment (exp, max_align)
tree exp;
unsigned max_align;
{
5262 unsigned align, inner;
5264 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
return 0;
5267 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5268 align = MIN (align, max_align);
while (1)
5272 switch (TREE_CODE (exp))
{
case NOP_EXPR:
case CONVERT_EXPR:
5276 case NON_LVALUE_EXPR:
5277 exp = TREE_OPERAND (exp, 0);
5278 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
return align;
5280 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5281 inner = MIN (inner, max_align);
5282 align = MAX (align, inner);
break;

case PLUS_EXPR:
5286 /* If sum of pointer + int, restrict our maximum alignment to that
5287 imposed by the integer. If not, we can't do any better than ALIGN. */
5289 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
return align;
5292 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5297 exp = TREE_OPERAND (exp, 0);
break;

case ADDR_EXPR:
5301 /* See what we are pointing at and look at its alignment. */
5302 exp = TREE_OPERAND (exp, 0);
5303 if (TREE_CODE (exp) == FUNCTION_DECL)
5304 align = MAX (align, FUNCTION_BOUNDARY);
5305 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5306 align = MAX (align, DECL_ALIGN (exp));
5307 #ifdef CONSTANT_ALIGNMENT
5308 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5309 align = CONSTANT_ALIGNMENT (exp, align);
#endif
5311 return MIN (align, max_align);

default:
return align;
}
}
5319 /* Return the tree node and offset if a given argument corresponds to
5320 a string constant. */
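/* Two forms are recognized: a plain "abc" (an ADDR_EXPR around a
   STRING_CST, with offset 0) and "abc" + i (a PLUS_EXPR with the
   address on either side). */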
static tree
5323 string_constant (arg, ptr_offset)
tree arg;
tree *ptr_offset;
{
5329 if (TREE_CODE (arg) == ADDR_EXPR
5330 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
{
5332 *ptr_offset = integer_zero_node;
5333 return TREE_OPERAND (arg, 0);
}
5335 else if (TREE_CODE (arg) == PLUS_EXPR)
{
5337 tree arg0 = TREE_OPERAND (arg, 0);
5338 tree arg1 = TREE_OPERAND (arg, 1);
5343 if (TREE_CODE (arg0) == ADDR_EXPR
5344 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
{
*ptr_offset = arg1;
5347 return TREE_OPERAND (arg0, 0);
}
5349 else if (TREE_CODE (arg1) == ADDR_EXPR
5350 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
{
*ptr_offset = arg0;
5353 return TREE_OPERAND (arg1, 0);
}
}

return 0;
}
5360 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5361 way, because it could contain a zero byte in the middle.
5362 TREE_STRING_LENGTH is the size of the character array, not the string.
5364 Unfortunately, string_constant can't access the values of const char
5365 arrays with initializers, so neither can we do so here. */
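/* For example, strlen ("abc") folds to the constant 3 at compile
   time; with a variable offset, as in strlen ("abc" + i), a symbolic
   length can still be computed because the literal contains no
   embedded nul. */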
static tree
c_strlen (src)
tree src;
{
register tree offset_node;
register int offset, max;
register int i;
register char *ptr;

5375 src = string_constant (src, &offset_node);
if (src == 0)
return 0;
5378 max = TREE_STRING_LENGTH (src);
5379 ptr = TREE_STRING_POINTER (src);
5380 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5382 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5383 compute the offset to the following null if we don't know where to
5384 start searching for it. */
5386 for (i = 0; i < max; i++)
if (ptr[i] == 0)
return 0;
5389 /* We don't know the starting offset, but we do know that the string
5390 has no internal zero bytes. We can assume that the offset falls
5391 within the bounds of the string; otherwise, the programmer deserves
5392 what he gets. Subtract the offset from the length of the string,
and return that. */
5394 /* This would perhaps not be valid if we were dealing with named
5395 arrays in addition to literal string constants. */
5396 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5399 /* We have a known offset into the string. Start searching there for
5400 a null character. */
5401 if (offset_node == 0)
offset = 0;
else
{
5405 /* Did we get a long long offset? If so, punt. */
5406 if (TREE_INT_CST_HIGH (offset_node) != 0)
return 0;
5408 offset = TREE_INT_CST_LOW (offset_node);
}

5410 /* If the offset is known to be out of bounds, warn, and call strlen at
runtime. */
5412 if (offset < 0 || offset > max)
{
5414 warning ("offset outside bounds of constant string");
return 0;
}
5417 /* Use strlen to search for the first zero byte. Since any strings
5418 constructed with build_string will have nulls appended, we win even
5419 if we get handed something like (char[4])"abcd".
5421 Since OFFSET is our starting index into the string, no further
5422 calculation is needed. */
5423 return size_int (strlen (ptr + offset));
}
5426 /* Expand an expression EXP that calls a built-in function,
5427 with result going to TARGET if that's convenient
5428 (and in mode MODE if that's convenient).
5429 SUBTARGET may be used as the target for computing one of EXP's operands.
5430 IGNORE is nonzero if the value is to be ignored. */
static rtx
5433 expand_builtin (exp, target, subtarget, mode, ignore)
tree exp;
rtx target, subtarget;
5437 enum machine_mode mode;
int ignore;
{
5440 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5441 tree arglist = TREE_OPERAND (exp, 1);
5444 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5445 optab builtin_optab;
5447 switch (DECL_FUNCTION_CODE (fndecl))
5452 /* build_function_call changes these into ABS_EXPR. */
5457 case BUILT_IN_FSQRT:
5458 /* If not optimizing, call the library function. */
if (! optimize)
break;

if (arglist == 0
5463 /* Arg could be wrong type if user redeclared this fcn wrong. */
5464 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5465 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5467 /* Stabilize and compute the argument. */
5468 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5469 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5471 exp = copy_node (exp);
5472 arglist = copy_node (arglist);
5473 TREE_OPERAND (exp, 1) = arglist;
5474 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5476 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5478 /* Make a suitable register to place result in. */
5479 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
emit_queue ();
start_sequence ();
5484 switch (DECL_FUNCTION_CODE (fndecl))
{
case BUILT_IN_SIN:
5487 builtin_optab = sin_optab; break;
case BUILT_IN_COS:
5489 builtin_optab = cos_optab; break;
5490 case BUILT_IN_FSQRT:
5491 builtin_optab = sqrt_optab; break;
default:
abort ();
}
5496 /* Compute into TARGET.
5497 Set TARGET to wherever the result comes back. */
5498 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5499 builtin_optab, op0, target, 0);
5501 /* If we were unable to expand via the builtin, stop the
5502 sequence (without outputting the insns) and break, causing
5503 a call to the library function. */
5510 /* Check the results by default. But if flag_fast_math is turned on,
5511 then assume sqrt will always be called with valid arguments. */
5513 if (! flag_fast_math)
5515 /* Don't define the builtin FP instructions
5516 if your machine is not IEEE. */
5517 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5520 lab1 = gen_label_rtx ();
5522 /* Test the result; if it is NaN, set errno=EDOM because
5523 the argument was not in the domain. */
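/* A NaN is the only value that compares unequal to itself, so the
   branch below is taken exactly when the result is not NaN. */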
5524 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5525 emit_jump_insn (gen_beq (lab1));
5529 #ifdef GEN_ERRNO_RTX
5530 rtx errno_rtx = GEN_ERRNO_RTX;
#else
rtx errno_rtx
5533 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
#endif

5536 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5539 /* We can't set errno=EDOM directly; let the library call do it.
5540 Pop the arguments right away in case the call gets deleted. */
5542 expand_call (exp, target, 0);
5549 /* Output the entire sequence. */
5550 insns = get_insns ();
end_sequence ();
emit_insns (insns);

return target;
5556 case BUILT_IN_SAVEREGS:
5557 /* Don't do __builtin_saveregs more than once in a function.
5558 Save the result of the first call and reuse it. */
5559 if (saveregs_value != 0)
5560 return saveregs_value;
5562 /* When this function is called, it means that registers must be
5563 saved on entry to this function. So we migrate the
5564 call to the first insn of this function. */
5567 rtx valreg, saved_valreg;
5569 /* Now really call the function. `expand_call' does not call
5570 expand_builtin, so there is no danger of infinite recursion here. */
5573 #ifdef EXPAND_BUILTIN_SAVEREGS
5574 /* Do whatever the machine needs done in this case. */
5575 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
5577 /* The register where the function returns its value
5578 is likely to have something else in it, such as an argument.
5579 So preserve that register around the call. */
5580 if (value_mode != VOIDmode)
5582 valreg = hard_libcall_value (value_mode);
5583 saved_valreg = gen_reg_rtx (value_mode);
5584 emit_move_insn (saved_valreg, valreg);
5587 /* Generate the call, putting the value in a pseudo. */
5588 temp = expand_call (exp, target, ignore);
5590 if (value_mode != VOIDmode)
5591 emit_move_insn (valreg, saved_valreg);
#endif

5597 saveregs_value = temp;
5599 /* This won't work inside a SEQUENCE--it really has to be
5600 at the start of the function. */
5601 if (in_sequence_p ())
5603 /* Better to do this than to crash. */
5604 error ("`va_start' used within `({...})'");
5608 /* Put the sequence after the NOTE that starts the function. */
5609 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5613 /* __builtin_args_info (N) returns word N of the arg space info
5614 for the current function. The number and meanings of words
5615 is controlled by the definition of CUMULATIVE_ARGS. */
5616 case BUILT_IN_ARGS_INFO:
5618 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5620 int *word_ptr = (int *) &current_function_args_info;
5621 tree type, elts, result;
5623 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5624 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5625 __FILE__, __LINE__);
5629 tree arg = TREE_VALUE (arglist);
5630 if (TREE_CODE (arg) != INTEGER_CST)
5631 error ("argument of __builtin_args_info must be constant");
5634 int wordnum = TREE_INT_CST_LOW (arg);
5636 if (wordnum < 0 || wordnum >= nwords)
5637 error ("argument of __builtin_args_info out of range");
5639 return GEN_INT (word_ptr[wordnum]);
5643 error ("missing argument in __builtin_args_info");
5648 for (i = 0; i < nwords; i++)
5649 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
5651 type = build_array_type (integer_type_node,
5652 build_index_type (build_int_2 (nwords, 0)));
5653 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5654 TREE_CONSTANT (result) = 1;
5655 TREE_STATIC (result) = 1;
5656 result = build (INDIRECT_REF, build_pointer_type (type), result);
5657 TREE_CONSTANT (result) = 1;
5658 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5662 /* Return the address of the first anonymous stack arg. */
5663 case BUILT_IN_NEXT_ARG:
5665 tree fntype = TREE_TYPE (current_function_decl);
5666 if (!(TYPE_ARG_TYPES (fntype) != 0
5667 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5668 != void_type_node)))
5670 error ("`va_start' used in function with fixed args");
5675 return expand_binop (Pmode, add_optab,
5676 current_function_internal_arg_pointer,
5677 current_function_arg_offset_rtx,
5678 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5680 case BUILT_IN_CLASSIFY_TYPE:
5683 tree type = TREE_TYPE (TREE_VALUE (arglist));
5684 enum tree_code code = TREE_CODE (type);
5685 if (code == VOID_TYPE)
5686 return GEN_INT (void_type_class);
5687 if (code == INTEGER_TYPE)
5688 return GEN_INT (integer_type_class);
5689 if (code == CHAR_TYPE)
5690 return GEN_INT (char_type_class);
5691 if (code == ENUMERAL_TYPE)
5692 return GEN_INT (enumeral_type_class);
5693 if (code == BOOLEAN_TYPE)
5694 return GEN_INT (boolean_type_class);
5695 if (code == POINTER_TYPE)
5696 return GEN_INT (pointer_type_class);
5697 if (code == REFERENCE_TYPE)
5698 return GEN_INT (reference_type_class);
5699 if (code == OFFSET_TYPE)
5700 return GEN_INT (offset_type_class);
5701 if (code == REAL_TYPE)
5702 return GEN_INT (real_type_class);
5703 if (code == COMPLEX_TYPE)
5704 return GEN_INT (complex_type_class);
5705 if (code == FUNCTION_TYPE)
5706 return GEN_INT (function_type_class);
5707 if (code == METHOD_TYPE)
5708 return GEN_INT (method_type_class);
5709 if (code == RECORD_TYPE)
5710 return GEN_INT (record_type_class);
5711 if (code == UNION_TYPE)
5712 return GEN_INT (union_type_class);
5713 if (code == ARRAY_TYPE)
5714 return GEN_INT (array_type_class);
5715 if (code == STRING_TYPE)
5716 return GEN_INT (string_type_class);
5717 if (code == SET_TYPE)
5718 return GEN_INT (set_type_class);
5719 if (code == FILE_TYPE)
5720 return GEN_INT (file_type_class);
5721 if (code == LANG_TYPE)
5722 return GEN_INT (lang_type_class);
5724 return GEN_INT (no_type_class);
5726 case BUILT_IN_CONSTANT_P:
5730 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5731 ? const1_rtx : const0_rtx);
5733 case BUILT_IN_FRAME_ADDRESS:
5734 /* The argument must be a nonnegative integer constant.
5735 It counts the number of frames to scan up the stack.
5736 The value is the address of that frame. */
5737 case BUILT_IN_RETURN_ADDRESS:
5738 /* The argument must be a nonnegative integer constant.
5739 It counts the number of frames to scan up the stack.
5740 The value is the return address saved in that frame. */
5742 /* Warning about missing arg was already issued. */
5744 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5746 error ("invalid arg to __builtin_return_address");
5749 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5751 error ("invalid arg to __builtin_return_address");
5756 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5757 rtx tem = frame_pointer_rtx;
5760 /* Scan back COUNT frames to the specified frame. */
5761 for (i = 0; i < count; i++)
5763 /* Assume the dynamic chain pointer is in the word that
5764 the frame address points to, unless otherwise specified. */
5765 #ifdef DYNAMIC_CHAIN_ADDRESS
5766 tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
5768 tem = memory_address (Pmode, tem);
5769 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5772 /* For __builtin_frame_address, return what we've got. */
5773 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
return tem;
5776 /* For __builtin_return_address,
5777 Get the return address from that frame. */
5778 #ifdef RETURN_ADDR_RTX
5779 return RETURN_ADDR_RTX (count, tem);
#else
5781 tem = memory_address (Pmode,
5782 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5783 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
#endif
5787 case BUILT_IN_ALLOCA:
if (arglist == 0
5789 /* Arg could be non-integer if user redeclared this fcn wrong. */
5790 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
return const0_rtx;
5792 current_function_calls_alloca = 1;
5793 /* Compute the argument. */
5794 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5796 /* Allocate the desired space. */
5797 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5799 /* Record the new stack level for nonlocal gotos. */
5800 if (nonlocal_goto_handler_slot != 0)
5801 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
return target;

case BUILT_IN_FFS:
5805 /* If not optimizing, call the library function. */
if (! optimize)
break;

if (arglist == 0
5810 /* Arg could be non-integer if user redeclared this fcn wrong. */
5811 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
break;
5814 /* Compute the argument. */
5815 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5816 /* Compute ffs, into TARGET if possible.
5817 Set TARGET to wherever the result comes back. */
5818 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5819 ffs_optab, op0, target, 1);
if (target == 0)
abort ();
return target;
5824 case BUILT_IN_STRLEN:
5825 /* If not optimizing, call the library function. */
if (! optimize)
break;

if (arglist == 0
5830 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5831 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
break;
5835 tree src = TREE_VALUE (arglist);
5836 tree len = c_strlen (src);
5839 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5841 rtx result, src_rtx, char_rtx;
5842 enum machine_mode insn_mode = value_mode, char_mode;
5843 enum insn_code icode;
5845 /* If the length is known, just return it. */
if (len != 0)
5847 return expand_expr (len, target, mode, 0);
5849 /* If SRC is not a pointer type, don't do this operation inline. */
if (align == 0)
break;
5853 /* Call a function if we can't compute strlen in the right mode. */
5855 while (insn_mode != VOIDmode)
{
5857 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5858 if (icode != CODE_FOR_nothing)
break;
5861 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
}
5863 if (insn_mode == VOIDmode)
break;
5866 /* Make a place to write the result of the instruction. */
result = target;
if (! (result != 0
5869 && GET_CODE (result) == REG
5870 && GET_MODE (result) == insn_mode
5871 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5872 result = gen_reg_rtx (insn_mode);
5874 /* Make sure the operands are acceptable to the predicates. */
5876 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5877 result = gen_reg_rtx (insn_mode);
5879 src_rtx = memory_address (BLKmode,
5880 expand_expr (src, NULL_RTX, Pmode,
EXPAND_NORMAL));
5882 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5883 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5885 char_rtx = const0_rtx;
5886 char_mode = insn_operand_mode[(int)icode][2];
5887 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5888 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5890 emit_insn (GEN_FCN (icode) (result,
5891 gen_rtx (MEM, BLKmode, src_rtx),
5892 char_rtx, GEN_INT (align)));
5894 /* Return the value in the proper mode for this function. */
5895 if (GET_MODE (result) == value_mode)
return result;
5897 else if (target != 0)
{
5899 convert_move (target, result, 0);
return target;
}
else
5903 return convert_to_mode (value_mode, result, 0);
5906 case BUILT_IN_STRCPY:
5907 /* If not optimizing, call the library function. */
if (! optimize)
break;

if (arglist == 0
5912 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5913 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5914 || TREE_CHAIN (arglist) == 0
5915 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
break;
5919 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
if (len == 0)
break;

5924 len = size_binop (PLUS_EXPR, len, integer_one_node);
5926 chainon (arglist, build_tree_list (NULL_TREE, len));
5930 case BUILT_IN_MEMCPY:
5931 /* If not optimizing, call the library function. */
if (! optimize)
break;

if (arglist == 0
5936 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5937 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5938 || TREE_CHAIN (arglist) == 0
5939 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5940 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5941 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
break;
5945 tree dest = TREE_VALUE (arglist);
5946 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5947 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5950 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5952 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5955 /* If either SRC or DEST is not a pointer type, don't do
5956 this operation in-line. */
5957 if (src_align == 0 || dest_align == 0)
{
5959 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5960 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
break;
}
5964 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5966 /* Copy word part most expediently. */
5967 emit_block_move (gen_rtx (MEM, BLKmode,
5968 memory_address (BLKmode, dest_rtx)),
5969 gen_rtx (MEM, BLKmode,
5970 memory_address (BLKmode,
5971 expand_expr (src, NULL_RTX,
Pmode, EXPAND_NORMAL))),
5974 expand_expr (len, NULL_RTX, VOIDmode, 0),
5975 MIN (src_align, dest_align));
return dest_rtx;
5979 /* These comparison functions need an instruction that returns an actual
5980 index. An ordinary compare that just sets the condition codes
5982 #ifdef HAVE_cmpstrsi
5983 case BUILT_IN_STRCMP:
5984 /* If not optimizing, call the library function. */
if (! optimize)
break;

if (arglist == 0
5989 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5990 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5991 || TREE_CHAIN (arglist) == 0
5992 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
break;
5994 else if (!HAVE_cmpstrsi)
break;
5997 tree arg1 = TREE_VALUE (arglist);
5998 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
tree len, len2;

6002 len = c_strlen (arg1);
if (len)
6004 len = size_binop (PLUS_EXPR, integer_one_node, len);
6005 len2 = c_strlen (arg2);
if (len2)
6007 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6009 /* If we don't have a constant length for the first, use the length
6010 of the second, if we know it. We don't require a constant for
6011 this case; some cost analysis could be done if both are available
6012 but neither is constant. For now, assume they're equally cheap.
6014 If both strings have constant lengths, use the smaller. This
6015 could arise if optimization results in strcpy being called with
6016 two fixed strings, or if the code was machine-generated. We should
6017 add some code to the `memcmp' handler below to deal with such
6018 situations, someday. */
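/* E.g. for strcmp (s, "abc") only the second length is known, so the
   constant 4 (three characters plus the terminating nul) is used. */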
6019 if (!len || TREE_CODE (len) != INTEGER_CST)
6026 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6028 if (tree_int_cst_lt (len2, len))
len = len2;
6032 chainon (arglist, build_tree_list (NULL_TREE, len));
6036 case BUILT_IN_MEMCMP:
6037 /* If not optimizing, call the library function. */
if (! optimize)
break;

if (arglist == 0
6042 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6043 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6044 || TREE_CHAIN (arglist) == 0
6045 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6046 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6047 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
break;
6049 else if (!HAVE_cmpstrsi)
break;
6052 tree arg1 = TREE_VALUE (arglist);
6053 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6054 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6058 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6060 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6061 enum machine_mode insn_mode
6062 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6064 /* If we don't have POINTER_TYPE, call the function. */
6065 if (arg1_align == 0 || arg2_align == 0)
{
6067 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6068 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
break;
}
6072 /* Make a place to write the result of the instruction. */
result = target;
if (! (result != 0
6075 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6076 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6077 result = gen_reg_rtx (insn_mode);
6079 emit_insn (gen_cmpstrsi (result,
6080 gen_rtx (MEM, BLKmode,
6081 expand_expr (arg1, NULL_RTX, Pmode,
EXPAND_NORMAL)),
6083 gen_rtx (MEM, BLKmode,
6084 expand_expr (arg2, NULL_RTX, Pmode,
EXPAND_NORMAL)),
6086 expand_expr (len, NULL_RTX, VOIDmode, 0),
6087 GEN_INT (MIN (arg1_align, arg2_align))));
6089 /* Return the value in the proper mode for this function. */
6090 mode = TYPE_MODE (TREE_TYPE (exp));
6091 if (GET_MODE (result) == mode)
return result;
6093 else if (target != 0)
{
6095 convert_move (target, result, 0);
return target;
}
else
6099 return convert_to_mode (mode, result, 0);
}
#else
6102 case BUILT_IN_STRCMP:
6103 case BUILT_IN_MEMCMP:
break;
#endif
6107 default: /* just do library call, if unknown builtin */
6108 error ("built-in function %s not currently supported",
6109 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
break;
}
6112 /* The switch statement above can drop through to cause the function
6113 to be called normally. */
6115 return expand_call (exp, target, ignore);
}
6118 /* Expand code for a post- or pre- increment or decrement
6119 and return the RTX for the result.
6120 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
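/* For example, expanding i++ for its value uses POST == 1 and returns
   the old value of i; ++i, or an i++ whose result is ignored, uses
   POST == 0 and returns the updated value. */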
static rtx
6123 expand_increment (exp, post)
register tree exp;
int post;
{
6127 register rtx op0, op1;
6128 register rtx temp, value;
6129 register tree incremented = TREE_OPERAND (exp, 0);
6130 optab this_optab = add_optab;
6132 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6133 int op0_is_copy = 0;
6135 /* Stabilize any component ref that might need to be
6136 evaluated more than once below. */
6137 if (TREE_CODE (incremented) == BIT_FIELD_REF
6138 || (TREE_CODE (incremented) == COMPONENT_REF
6139 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6140 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6141 incremented = stabilize_reference (incremented);
6143 /* Compute the operands as RTX.
6144 Note whether OP0 is the actual lvalue or a copy of it:
6145 I believe it is a copy iff it is a register or subreg
6146 and insns were generated in computing it. */
6148 temp = get_last_insn ();
6149 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
6159 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6160 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6162 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6163 && temp != get_last_insn ());
6164 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6166 /* Decide whether incrementing or decrementing. */
6167 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6168 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6169 this_optab = sub_optab;
6171 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6172 then we cannot just increment OP0. We must
6173 therefore contrive to increment the original value.
6174 Then we can return OP0 since it is a copy of the old value. */
6177 /* This is the easiest way to increment the value wherever it is.
6178 Problems with multiple evaluation of INCREMENTED
6179 are prevented because either (1) it is a component_ref,
6180 in which case it was stabilized above, or (2) it is an array_ref
6181 with constant index in an array in a register, which is
6182 safe to reevaluate. */
6183 tree newexp = build ((this_optab == add_optab
6184 ? PLUS_EXPR : MINUS_EXPR),
6187 TREE_OPERAND (exp, 1));
6188 temp = expand_assignment (incremented, newexp, ! post, 0);
6189 return post ? op0 : temp;
6192 /* Convert decrement by a constant into a negative increment. */
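/* E.g. (illustrative) `x -= 4' is rewritten here as `x += -4', so only
   add_optab need have a matching insn in the code below.  */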
6193 if (this_optab == sub_optab
6194 && GET_CODE (op1) == CONST_INT)
6196 op1 = GEN_INT (- INTVAL (op1));
6197 this_optab = add_optab;
6202 /* We have a true reference to the value in OP0.
6203 If there is an insn to add or subtract in this mode, queue it. */
#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
  op0 = stabilize (op0);
#endif
6209 icode = (int) this_optab->handlers[(int) mode].insn_code;
6210 if (icode != (int) CODE_FOR_nothing
6211 /* Make sure that OP0 is valid for operands 0 and 1
6212 of the insn we want to queue. */
6213 && (*insn_operand_predicate[icode][0]) (op0, mode)
6214 && (*insn_operand_predicate[icode][1]) (op0, mode))
6216 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6217 op1 = force_reg (mode, op1);
6219 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6223 /* Preincrement, or we can't increment with one simple insn. */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);
6234 /* Increment however we can. */
6235 op1 = expand_binop (mode, this_optab, value, op1, op0,
6236 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
6244 /* Expand all function calls contained within EXP, innermost ones first.
6245 But don't look within expressions that have sequence points.
6246 For each CALL_EXPR, record the rtx for its value
6247 in the CALL_EXPR_RTL field. */
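/* Illustrative example (hypothetical source fragment): in

	z = f (g (x), y);

   the inner call `g (x)' is expanded first and its value rtx recorded in
   CALL_EXPR_RTL, so that expanding the outer call is never interrupted by
   a nested call expansion while its own arguments are being pushed.  */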
6250 preexpand_calls (exp)
6253 register int nops, i;
6254 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6256 if (! do_preexpand_calls)
6259 /* Only expressions and references can contain calls. */
6261 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6264 switch (TREE_CODE (exp))
6267 /* Do nothing if already expanded. */
6268 if (CALL_EXPR_RTL (exp) != 0)
6271 /* Do nothing to built-in functions. */
6272 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6273 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6274 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6275 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6280 case TRUTH_ANDIF_EXPR:
6281 case TRUTH_ORIF_EXPR:
6282 /* If we find one of these, then we can be sure
6283 the adjust will be done for it (since it makes jumps).
6284 Do it now, so that if this is inside an argument
6285 of a function, we don't get the stack adjustment
6286 after some other args have already been pushed. */
6287 do_pending_stack_adjust ();
6292 case WITH_CLEANUP_EXPR:
6296 if (SAVE_EXPR_RTL (exp) != 0)
6300 nops = tree_code_length[(int) TREE_CODE (exp)];
6301 for (i = 0; i < nops; i++)
6302 if (TREE_OPERAND (exp, i) != 0)
6304 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
	if (type == 'e' || type == '<' || type == '1' || type == '2'
	    || type == 'r')
6307 preexpand_calls (TREE_OPERAND (exp, i));
6311 /* At the start of a function, record that we have no previously-pushed
6312 arguments waiting to be popped. */
6315 init_pending_stack_adjust ()
6317 pending_stack_adjust = 0;
6320 /* When exiting from function, if safe, clear out any pending stack adjust
6321 so the adjustment won't get done. */
6324 clear_pending_stack_adjust ()
6326 #ifdef EXIT_IGNORE_STACK
6327 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6328 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6329 && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}
6334 /* Pop any previously-pushed arguments that have not been popped yet. */
6337 do_pending_stack_adjust ()
6339 if (inhibit_defer_pop == 0)
6341 if (pending_stack_adjust != 0)
6342 adjust_stack (GEN_INT (pending_stack_adjust));
6343 pending_stack_adjust = 0;
6347 /* Expand all cleanups up to OLD_CLEANUPS.
6348 Needed here, and also for language-dependent calls. */
6351 expand_cleanups_to (old_cleanups)
6354 while (cleanups_this_call != old_cleanups)
6356 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6357 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6361 /* Expand conditional expressions. */
6363 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6364 LABEL is an rtx of code CODE_LABEL, in this function and all the
6368 jumpifnot (exp, label)
6372 do_jump (exp, label, NULL_RTX);
6375 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6382 do_jump (exp, NULL_RTX, label);
6385 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6386 the result is zero, or IF_TRUE_LABEL if the result is one.
6387 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6388 meaning fall through in that case.
6390 do_jump always does any pending stack adjust except when it does not
6391 actually perform a jump. An example where there is no jump
6392 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6394 This function is responsible for optimizing cases such as
6395 &&, || and comparison operators in EXP. */
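/* For instance (illustrative), `if (a && b) stmt;' comes out roughly as

	<jump to L if a == 0>
	<jump to L if b == 0>
	stmt;
     L:

   so no boolean value for `a && b' is ever materialized.  */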
6398 do_jump (exp, if_false_label, if_true_label)
6400 rtx if_false_label, if_true_label;
6402 register enum tree_code code = TREE_CODE (exp);
6403 /* Some cases need to create a label to jump to
6404 in order to properly fall through.
6405 These cases set DROP_THROUGH_LABEL nonzero. */
6406 rtx drop_through_label = 0;
6420 temp = integer_zerop (exp) ? if_false_label : if_true_label;
#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
6435 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6436 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
	goto normal;

    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
6445 case NON_LVALUE_EXPR:
6446 case REFERENCE_EXPR:
6451 /* These cannot change zero->non-zero or vice versa. */
6452 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
6469 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6470 TREE_OPERAND (exp, 0),
6471 TREE_OPERAND (exp, 1)),
6476 /* If we are AND'ing with a small constant, do this comparison in the
6477 smallest type that fits. If the machine doesn't have comparisons
6478 that small, it will be converted back to the wider comparison.
6479 This helps if we are testing the sign bit of a narrower object.
6480 combine can't do this for us because it can't know whether a
6481 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
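      /* Example (illustrative): for `if (x & 0x80)' with 32-bit `x',
	 I is 7 and TYPE is an 8-bit type, so when a QImode compare insn
	 exists the test is done as a QImode comparison, which reads the
	 sign bit of the low byte directly.  */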
6483 if (! SLOW_BYTE_ACCESS
6484 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6485 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6486 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6487 && (type = type_for_size (i + 1, 1)) != 0
6488 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6489 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6490 != CODE_FOR_nothing))
6492 do_jump (convert (type, exp), if_false_label, if_true_label);
6497 case TRUTH_NOT_EXPR:
6498 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6501 case TRUTH_ANDIF_EXPR:
6502 if (if_false_label == 0)
6503 if_false_label = drop_through_label = gen_label_rtx ();
6504 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6505 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6508 case TRUTH_ORIF_EXPR:
6509 if (if_true_label == 0)
6510 if_true_label = drop_through_label = gen_label_rtx ();
6511 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6512 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6516 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6519 do_pending_stack_adjust ();
6520 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6527 int bitsize, bitpos, unsignedp;
6528 enum machine_mode mode;
6533 /* Get description of this reference. We don't actually care
6534 about the underlying object here. */
6535 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6536 &mode, &unsignedp, &volatilep);
6538 type = type_for_size (bitsize, unsignedp);
6539 if (! SLOW_BYTE_ACCESS
6540 && type != 0 && bitsize >= 0
6541 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6542 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6543 != CODE_FOR_nothing))
6545 do_jump (convert (type, exp), if_false_label, if_true_label);
6552 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6553 if (integer_onep (TREE_OPERAND (exp, 1))
6554 && integer_zerop (TREE_OPERAND (exp, 2)))
6555 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6557 else if (integer_zerop (TREE_OPERAND (exp, 1))
6558 && integer_onep (TREE_OPERAND (exp, 2)))
6559 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6563 register rtx label1 = gen_label_rtx ();
6564 drop_through_label = gen_label_rtx ();
6565 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6566 /* Now the THEN-expression. */
6567 do_jump (TREE_OPERAND (exp, 1),
6568 if_false_label ? if_false_label : drop_through_label,
6569 if_true_label ? if_true_label : drop_through_label);
6570 /* In case the do_jump just above never jumps. */
6571 do_pending_stack_adjust ();
6572 emit_label (label1);
6573 /* Now the ELSE-expression. */
6574 do_jump (TREE_OPERAND (exp, 2),
6575 if_false_label ? if_false_label : drop_through_label,
6576 if_true_label ? if_true_label : drop_through_label);
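	/* The code emitted for `e ? t : f' thus has the shape (illustrative):

		<jump to LABEL1 if e is false>
		<jumps on t, to the real labels or DROP_THROUGH_LABEL>
	     LABEL1:
		<jumps on f, to the real labels or DROP_THROUGH_LABEL>
	     DROP_THROUGH_LABEL:
	*/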
6581 if (integer_zerop (TREE_OPERAND (exp, 1)))
6582 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6583 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6586 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6587 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6589 comparison = compare (exp, EQ, EQ);
6593 if (integer_zerop (TREE_OPERAND (exp, 1)))
6594 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6595 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6598 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6599 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6601 comparison = compare (exp, NE, NE);
6605 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6607 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6608 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6610 comparison = compare (exp, LT, LTU);
6614 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6616 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6617 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6619 comparison = compare (exp, LE, LEU);
6623 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6625 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6626 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6628 comparison = compare (exp, GT, GTU);
6632 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6634 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6635 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6637 comparison = compare (exp, GE, GEU);
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 paths.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
6652 do_pending_stack_adjust ();
6653 if (GET_CODE (temp) == CONST_INT)
6654 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6655 else if (GET_CODE (temp) == LABEL_REF)
6656 comparison = const_true_rtx;
6657 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6658 && !can_compare_p (GET_MODE (temp)))
6659 /* Note swapping the labels gives us not-equal. */
6660 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6661 else if (GET_MODE (temp) != VOIDmode)
6662 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6663 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6664 GET_MODE (temp), NULL_RTX, 0);
  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();
6672 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6673 straight into a conditional jump instruction as the jump condition.
6674 Otherwise, all the work has been done already. */
  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);
6691 if (drop_through_label)
6693 /* If do_jump produces code that might be jumped around,
6694 do any stack adjusts from that code, before the place
6695 where control merges in. */
6696 do_pending_stack_adjust ();
6697 emit_label (drop_through_label);
6701 /* Given a comparison expression EXP for values too wide to be compared
6702 with one insn, test the comparison and jump to the appropriate label.
6703 The code of EXP is ignored; we always test GT if SWAP is 0,
6704 and LT if SWAP is 1. */
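/* Illustrative sketch: for two signed DImode values A and B on a 32-bit
   host this emits, high-order word first,

	if (A.hi > B.hi) goto if_true_label;
	if (A.hi != B.hi) goto if_false_label;
	if ((unsigned) A.lo > (unsigned) B.lo) goto if_true_label;
	goto if_false_label;

   where only the high-order comparison is signed.  */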
6707 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6710 rtx if_false_label, if_true_label;
6712 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6713 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6714 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6715 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6716 rtx drop_through_label = 0;
6717 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6720 if (! if_true_label || ! if_false_label)
6721 drop_through_label = gen_label_rtx ();
6722 if (! if_true_label)
6723 if_true_label = drop_through_label;
6724 if (! if_false_label)
6725 if_false_label = drop_through_label;
6727 /* Compare a word at a time, high order first. */
6728 for (i = 0; i < nwords; i++)
6731 rtx op0_word, op1_word;
6733 if (WORDS_BIG_ENDIAN)
6735 op0_word = operand_subword_force (op0, i, mode);
6736 op1_word = operand_subword_force (op1, i, mode);
6740 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6741 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6744 /* All but high-order word must be compared as unsigned. */
6745 comp = compare_from_rtx (op0_word, op1_word,
6746 (unsignedp || i > 0) ? GTU : GT,
6747 unsignedp, word_mode, NULL_RTX, 0);
6748 if (comp == const_true_rtx)
6749 emit_jump (if_true_label);
6750 else if (comp != const0_rtx)
6751 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6753 /* Consider lower words only if these are equal. */
6754 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6756 if (comp == const_true_rtx)
6757 emit_jump (if_false_label);
6758 else if (comp != const0_rtx)
6759 do_jump_for_compare (comp, NULL_RTX, if_false_label);
6763 emit_jump (if_false_label);
6764 if (drop_through_label)
6765 emit_label (drop_through_label);
6768 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6769 with one insn, test the comparison and jump to the appropriate label. */
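/* E.g. (illustrative) a DImode equality test on a 32-bit host jumps to
   IF_FALSE_LABEL as soon as either pair of corresponding words differs,
   and reaches the jump to IF_TRUE_LABEL only when every pair matched.  */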
6772 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6774 rtx if_false_label, if_true_label;
6776 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6777 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6778 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6779 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6781 rtx drop_through_label = 0;
6783 if (! if_false_label)
6784 drop_through_label = if_false_label = gen_label_rtx ();
6786 for (i = 0; i < nwords; i++)
6788 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6789 operand_subword_force (op1, i, mode),
6790 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6791 word_mode, NULL_RTX, 0);
6792 if (comp == const_true_rtx)
6793 emit_jump (if_false_label);
6794 else if (comp != const0_rtx)
6795 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6799 emit_jump (if_true_label);
6800 if (drop_through_label)
6801 emit_label (drop_through_label);
6804 /* Jump according to whether OP0 is 0.
6805 We assume that OP0 has an integer mode that is too wide
6806 for the available compare insns. */
6809 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6811 rtx if_false_label, if_true_label;
6813 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6815 rtx drop_through_label = 0;
6817 if (! if_false_label)
6818 drop_through_label = if_false_label = gen_label_rtx ();
6820 for (i = 0; i < nwords; i++)
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
6825 if (comp == const_true_rtx)
6826 emit_jump (if_false_label);
6827 else if (comp != const0_rtx)
6828 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6832 emit_jump (if_true_label);
6833 if (drop_through_label)
6834 emit_label (drop_through_label);
6837 /* Given a comparison expression in rtl form, output conditional branches to
6838 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6841 do_jump_for_compare (comparison, if_false_label, if_true_label)
6842 rtx comparison, if_false_label, if_true_label;
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
6854 else if (if_false_label)
    {
      rtx insn;
      rtx prev = PREV_INSN (get_last_insn ());
      rtx branch = 0;
6860 /* Output the branch with the opposite condition. Then try to invert
6861 what is generated. If more than one insn is a branch, or if the
6862 branch is not the last insn written, abort. If we can't invert
	 the branch, make a true label, redirect this jump to that,
6864 emit a jump to the false label and define the true label. */
6866 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();
6871 /* Here we get the insn before what was just emitted.
6872 On some machines, emitting the branch can discard
6873 the previous compare insn and emit a replacement. */
      if (prev == 0)
	/* If there's only one preceding insn...  */
	insn = get_insns ();
      else
	insn = NEXT_INSN (prev);
6880 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
6901 /* Generate code for a comparison expression EXP
6902 (including code to compute the values to be compared)
6903 and set (CC0) according to the result.
6904 SIGNED_CODE should be the rtx operation for this comparison for
6905 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6907 We force a stack adjustment unless there are currently
6908 things pushed on the stack that aren't yet used. */
6911 compare (exp, signed_code, unsigned_code)
6913 enum rtx_code signed_code, unsigned_code;
6916 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6918 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6919 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6920 register enum machine_mode mode = TYPE_MODE (type);
6921 int unsignedp = TREE_UNSIGNED (type);
6922 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6930 /* Like compare but expects the values to compare as two rtx's.
6931 The decision as to signed or unsigned comparison must be made by the caller.
   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.
6936 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6937 size of MODE should be used. */
6940 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6941 register rtx op0, op1;
6944 enum machine_mode mode;
6948 /* If one operand is constant, make it the second one. */
  if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }
  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }
6964 do_pending_stack_adjust ();
6966 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6967 return simplify_relational_operation (code, mode, op0, op1);
6970 /* There's no need to do this now that combine.c can eliminate lots of
6971 sign extensions. This can be less efficient in certain cases on other
6974 /* If this is a signed equality comparison, we can do it as an
6975 unsigned comparison since zero-extension is cheaper than sign
6976 extension and comparisons with zero are done as unsigned. This is
6977 the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
6979 sign-extension is. If we are comparing against a constant, we must
6980 convert it to what it would look like unsigned. */
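  /* E.g. (illustrative) comparing a QImode value against -1 for equality
     becomes a comparison against 0xff, since that is what -1 looks like
     zero-extended.  */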
6981 if ((code == EQ || code == NE) && ! unsignedp
6982 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6984 if (GET_CODE (op1) == CONST_INT
6985 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6986 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6991 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6993 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6996 /* Generate code to calculate EXP using a store-flag instruction
6997 and return an rtx for the result. EXP is either a comparison
6998 or a TRUTH_NOT_EXPR whose operand is a comparison.
7000 If TARGET is nonzero, store the result there if convenient.
   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.
7005 Return zero if there is no suitable set-flag instruction
7006 available on this machine.
7008 Once expand_expr has been called on the arguments of the comparison,
7009 we are committed to doing the store flag, since it is not safe to
7010 re-evaluate the expression. We emit the store-flag insn by calling
7011 emit_store_flag, but only expand the arguments if we have a reason
7012 to believe that emit_store_flag will be successful. If we think that
7013 it will, but it isn't, we have to simulate the store-flag with a
7014 set/jump/set sequence. */
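/* The simulated sequence has the shape (illustrative, non-inverted case):

	target = 1;
	<conditional jump to L if the comparison holds>
	target = 0;
     L:
*/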
7017 do_store_flag (exp, target, mode, only_cheap)
7020 enum machine_mode mode;
7024 tree arg0, arg1, type;
7026 enum machine_mode operand_mode;
7030 enum insn_code icode;
7031 rtx subtarget = target;
7032 rtx result, label, pattern, jump_pat;
7034 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7035 result at the end. We can't simply invert the test since it would
7036 have already been inverted if it were valid. This case occurs for
7037 some floating-point comparisons. */
7039 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7040 invert = 1, exp = TREE_OPERAND (exp, 0);
7042 arg0 = TREE_OPERAND (exp, 0);
7043 arg1 = TREE_OPERAND (exp, 1);
7044 type = TREE_TYPE (arg0);
7045 operand_mode = TYPE_MODE (type);
7046 unsignedp = TREE_UNSIGNED (type);
7048 /* We won't bother with BLKmode store-flag operations because it would mean
7049 passing a lot of information to emit_store_flag. */
7050 if (operand_mode == BLKmode)
7056 /* Get the rtx comparison code to use. We know that EXP is a comparison
7057 operation of some type. Some comparisons against 1 and -1 can be
7058 converted to comparisons with zero. Do so here so that the tests
7059 below will be aware that we have a comparison with zero. These
7060 tests will not catch constants in the first operand, but constants
7061 are rarely passed as the first operand. */
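  /* E.g. (illustrative) for signed X, `X < 1' becomes `X <= 0' and
     `X > -1' becomes `X >= 0', so the zero-comparison special cases
     below can apply.  */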
7063 switch (TREE_CODE (exp))
7072 if (integer_onep (arg1))
7073 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7075 code = unsignedp ? LTU : LT;
7078 if (integer_all_onesp (arg1))
7079 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7081 code = unsignedp ? LEU : LE;
7084 if (integer_all_onesp (arg1))
7085 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7087 code = unsignedp ? GTU : GT;
7090 if (integer_onep (arg1))
7091 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7093 code = unsignedp ? GEU : GE;
7099 /* Put a constant second. */
7100 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7102 tem = arg0; arg0 = arg1; arg1 = tem;
7103 code = swap_condition (code);
7106 /* If this is an equality or inequality test of a single bit, we can
7107 do this by shifting the bit being tested to the low-order bit and
7108 masking the result with the constant 1. If the condition was EQ,
7109 we xor it with 1. This does not require an scc insn and is faster
7110 than an scc insn even if we have it. */
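  /* E.g. (illustrative) `(x & 8) != 0' becomes `(x >> 3) & 1', and
     `(x & 8) == 0' becomes `((x >> 3) & 1) ^ 1'.  */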
7112 if ((code == NE || code == EQ)
7113 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7114 && integer_pow2p (TREE_OPERAND (arg0, 1))
7115 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7117 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7118 NULL_RTX, VOIDmode, 0)));
7120 if (subtarget == 0 || GET_CODE (subtarget) != REG
7121 || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
	subtarget = 0;
7125 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7128 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7129 size_int (bitnum), target, 1);
7131 if (GET_MODE (op0) != mode)
7132 op0 = convert_to_mode (mode, op0, 1);
7134 if (bitnum != TYPE_PRECISION (type) - 1)
7135 op0 = expand_and (op0, const1_rtx, target);
7137 if ((code == EQ && ! invert) || (code == NE && invert))
7138 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7144 /* Now see if we are likely to be able to do this. Return if not. */
  if (! can_compare_p (operand_mode))
    return 0;
7147 icode = setcc_gen_code[(int) code];
7148 if (icode == CODE_FOR_nothing
7149 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7151 /* We can only do this if it is one of the special cases that
7152 can be handled without an scc insn. */
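      /* E.g. (illustrative) `X < 0' for signed X is just the sign bit
	 of X, which a shift can extract, so no scc insn is needed.  */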
7153 if ((code == LT && integer_zerop (arg1))
7154 || (! only_cheap && code == GE && integer_zerop (arg1)))
7156 else if (BRANCH_COST >= 0
7157 && ! only_cheap && (code == NE || code == EQ)
7158 && TREE_CODE (type) != REAL_TYPE
7159 && ((abs_optab->handlers[(int) operand_mode].insn_code
7160 != CODE_FOR_nothing)
7161 || (ffs_optab->handlers[(int) operand_mode].insn_code
7162 != CODE_FOR_nothing)))
7168 preexpand_calls (exp);
7169 if (subtarget == 0 || GET_CODE (subtarget) != REG
7170 || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;
7174 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7175 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7178 target = gen_reg_rtx (mode);
7180 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7181 because, if the emit_store_flag does anything it will succeed and
7182 OP0 and OP1 will not be used subsequently. */
7184 result = emit_store_flag (target, code,
7185 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7186 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7187 operand_mode, unsignedp, 1);
  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
7197 /* If this failed, we have to do this with set/compare/jump/set code. */
7198 if (target == 0 || GET_CODE (target) != REG
7199 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7200 target = gen_reg_rtx (GET_MODE (target));
7202 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7203 result = compare_from_rtx (op0, op1, code, unsignedp,
7204 operand_mode, NULL_RTX, 0);
7205 if (GET_CODE (result) == CONST_INT)
7206 return (((result == const0_rtx && ! invert)
7207 || (result != const0_rtx && invert))
7208 ? const0_rtx : const1_rtx);
  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
7221 /* Generate a tablejump instruction (used for switch statements). */
7223 #ifdef HAVE_tablejump
7225 /* INDEX is the value being switched on, with the lowest value
7226 in the table already subtracted.
7227 MODE is its expected mode (needed if INDEX is constant).
7228 RANGE is the length of the jump table.
7229 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7231 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7232 index value is out of range. */
7235 do_tablejump (index, mode, range, table_label, default_label)
7236 rtx index, range, table_label, default_label;
7237 enum machine_mode mode;
7239 register rtx temp, vector;
7241 /* Do an unsigned comparison (in the proper mode) between the index
7242 expression and the value which represents the length of the range.
7243 Since we just finished subtracting the lower bound of the range
7244 from the index expression, this comparison allows us to simultaneously
7245 check that the original index expression value is both greater than
7246 or equal to the minimum value of the range and less than or equal to
7247 the maximum value of the range. */
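  /* E.g. (illustrative) for `switch (i)' with case labels 3 through 10,
     INDEX is `i - 3' and RANGE is 7; the single unsigned test
     `(unsigned) (i - 3) > 7' rejects both i < 3 and i > 10.  */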
7249 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
7250 emit_jump_insn (gen_bltu (default_label));
7252 /* If index is in range, it must fit in Pmode.
7253 Convert to Pmode so we can index with it. */
7255 index = convert_to_mode (Pmode, index, 1);
7257 /* If flag_force_addr were to affect this address
7258 it could interfere with the tricky assumptions made
7259 about addresses that contain label-refs,
7260 which may be valid only very near the tablejump itself. */
7261 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7262 GET_MODE_SIZE, because this indicates how large insns are. The other
7263 uses should all be Pmode, because they are addresses. This code
7264 could fail if addresses and insns are not the same size. */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
	      gen_rtx (MULT, Pmode, index,
		       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
	      gen_rtx (LABEL_REF, Pmode, table_label)));
7271 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7272 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7273 RTX_UNCHANGING_P (vector) = 1;
7274 convert_move (temp, vector, 0);
7276 emit_jump_insn (gen_tablejump (temp, table_label));
7278 #ifndef CASE_VECTOR_PC_RELATIVE
7279 /* If we are generating PIC code or if the table is PC-relative, the
7280 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7286 #endif /* HAVE_tablejump */