1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option) any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
/* CEIL(x,y): divide x by y, rounding the quotient up (assumes x, y > 0).  */
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
/* NOTE(review): the matching #endif for this conditional (and the non-downward
   branch, if any) is missing from this listing.  */
40 #ifdef STACK_GROWS_DOWNWARD
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
/* Pick a default stack-push addressing mode if the target did not define one:
   pre-decrement when the stack grows down, pre-increment otherwise.
   NOTE(review): the #else/#endif lines are missing from this listing.  */
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
50 #define STACK_PUSH_CODE PRE_INC
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* NOTE(review): the variable declaration that the following comment documents
   is missing from this listing (presumably the cse-expected flag).  */
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
   returned.  (Comment closed here; original closing line elided.)  */
87 static rtx saveregs_value;
/* Forward declarations of file-local helpers (old-style, no prototypes;
   this predates widespread ANSI C).  All are defined later in expr.c.  */
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
/* do_pending_stack_adjust is public; the jump helpers below are local.  */
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
/* Both tables are filled in by init_expr_once below, indexed by (int) mode.  */
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
112 /* MOVE_RATIO is the number of move instructions that is better than
   a block move.  (Comment closed here; original closing line elided.)  */
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
119 /* A value of around 6 would minimize code size; infinity would minimize
   execution time.  (Comment closed here; original closing line elided.)
   NOTE(review): the #else/#endif lines for this #if are missing from this
   listing.  */
121 #define MOVE_RATIO 15
125 /* This array records the insn_code of insns to perform block moves. */
126 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
128 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
/* NOTE(review): the matching #endif is missing from this listing.  */
130 #ifndef SLOW_UNALIGNED_ACCESS
131 #define SLOW_UNALIGNED_ACCESS 0
134 /* This is run once per compilation to set up which modes can be used
135 directly in memory and to initialize the block move optab. */
/* NOTE(review): the function header (init_expr_once), local declarations for
   insn/pat/reg/regno/num_clobbers, and several braces are missing from this
   listing; code below is preserved verbatim.  */
141 enum machine_mode mode;
142 /* Try indexing by frame ptr and try by stack ptr.
143 It is known that on the Convex the stack ptr isn't a valid index.
144 With luck, one or the other is valid on any machine. */
145 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
146 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
/* A dummy (set 0 0) insn whose SET_SRC/SET_DEST are patched per mode below,
   then fed to recog () to ask whether the target has such a move insn.  */
149 insn = emit_insn (gen_rtx (SET, 0, 0));
150 pat = PATTERN (insn);
152 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
153 mode = (enum machine_mode) ((int) mode + 1))
159 direct_load[(int) mode] = direct_store[(int) mode] = 0;
160 PUT_MODE (mem, mode);
161 PUT_MODE (mem1, mode);
163 /* See if there is some register that can be used in this mode and
164 directly loaded or stored from memory. */
166 if (mode != VOIDmode && mode != BLKmode)
167 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
168 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
171 if (! HARD_REGNO_MODE_OK (regno, mode))
174 reg = gen_rtx (REG, mode, regno);
/* Probe load from stack-ptr-based MEM ... */
177 SET_DEST (pat) = reg;
178 if (recog (pat, insn, &num_clobbers) >= 0)
179 direct_load[(int) mode] = 1;
/* ... and from frame-ptr-based MEM, then stores in both directions.  */
181 SET_SRC (pat) = mem1;
182 SET_DEST (pat) = reg;
183 if (recog (pat, insn, &num_clobbers) >= 0)
184 direct_load[(int) mode] = 1;
187 SET_DEST (pat) = mem;
188 if (recog (pat, insn, &num_clobbers) >= 0)
189 direct_store[(int) mode] = 1;
192 SET_DEST (pat) = mem1;
193 if (recog (pat, insn, &num_clobbers) >= 0)
194 direct_store[(int) mode] = 1;
/* Default every mode to "no block-move insn", then record the ones the
   target's insn-codes.h actually provides (each guarded by a HAVE_ macro
   whose #ifdef/#endif lines are elided in this listing).  */
197 movstr_optab[(int) mode] = CODE_FOR_nothing;
204 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
208 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
212 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
216 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
220 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
224 /* This is run at the start of compiling a function. */
/* NOTE(review): the function header (init_expr) and its other statements are
   missing from this listing.  Resets the per-function expression state.  */
231 pending_stack_adjust = 0;
232 inhibit_defer_pop = 0;
233 cleanups_this_call = 0;
238 /* Save all variables describing the current status into the structure *P.
239 This is used before starting a nested function. */
/* NOTE(review): the function header (save_expr_status) and parameter
   declaration are missing from this listing.  */
245 /* Instead of saving the postincrement queue, empty it. */
248 p->pending_stack_adjust = pending_stack_adjust;
249 p->inhibit_defer_pop = inhibit_defer_pop;
250 p->cleanups_this_call = cleanups_this_call;
251 p->saveregs_value = saveregs_value;
252 p->forced_labels = forced_labels;
/* Re-initialize the globals for the nested function about to be compiled.  */
254 pending_stack_adjust = 0;
255 inhibit_defer_pop = 0;
256 cleanups_this_call = 0;
261 /* Restore all variables describing the current status from the structure *P.
262 This is used after a nested function. */
/* Inverse of save_expr_status; copies every saved field back into the
   file-scope globals.  (Parameter declaration and braces elided in listing.)  */
265 restore_expr_status (p)
268 pending_stack_adjust = p->pending_stack_adjust;
269 inhibit_defer_pop = p->inhibit_defer_pop;
270 cleanups_this_call = p->cleanups_this_call;
271 saveregs_value = p->saveregs_value;
272 forced_labels = p->forced_labels;
275 /* Manage the queue of increment instructions to be output
276 for POSTINCREMENT_EXPR expressions, etc. */
/* Head of the singly-linked chain of QUEUED rtxs; newest entry first.  */
278 static rtx pending_chain;
280 /* Queue up to increment (or change) VAR later. BODY says how:
281 BODY should be the same thing you would pass to emit_insn
282 to increment right away. It will go to emit_insn later on.
284 The value is a QUEUED expression to be used in place of VAR
285 where you want to guarantee the pre-incrementation value of VAR. */
/* NOTE(review): parameter declarations and braces elided in this listing.
   Pushes a new QUEUED node (var, insn=0, copy=0, body, next) on the chain.  */
288 enqueue_insn (var, body)
291 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
292 var, NULL_RTX, NULL_RTX, body, pending_chain);
293 return pending_chain;
296 /* Use protect_from_queue to convert a QUEUED expression
297 into something that you can put immediately into an instruction.
298 If the queued incrementation has not happened yet,
299 protect_from_queue returns the variable itself.
300 If the incrementation has happened, protect_from_queue returns a temp
301 that contains a copy of the old value of the variable.
303 Any time an rtx which might possibly be a QUEUED is to be put
304 into an instruction, it must be passed through protect_from_queue first.
305 QUEUED expressions are not meaningful in instructions.
307 Do not pass a value through protect_from_queue and then hold
308 on to it for a while before putting it in an instruction!
309 If the queue is flushed in between, incorrect code will result. */
/* NOTE(review): parameter declarations, braces, and several statements of
   this function are missing from this listing.  MODIFY nonzero means the
   caller intends to write X.  */
312 protect_from_queue (x, modify)
316 register RTX_CODE code = GET_CODE (x);
318 #if 0 /* A QUEUED can hang around after the queue is forced out. */
319 /* Shortcut for most common case. */
320 if (pending_chain == 0)
326 /* A special hack for read access to (MEM (QUEUED ...))
327 to facilitate use of autoincrement.
328 Make a copy of the contents of the memory location
329 rather than a copy of the address, but not
330 if the value is of mode BLKmode. */
331 if (code == MEM && GET_MODE (x) != BLKmode
332 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
334 register rtx y = XEXP (x, 0);
335 XEXP (x, 0) = QUEUED_VAR (y);
338 register rtx temp = gen_reg_rtx (GET_MODE (x));
339 emit_insn_before (gen_move_insn (temp, x),
345 /* Otherwise, recursively protect the subexpressions of all
346 the kinds of rtx's that can contain a QUEUED. */
348 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
349 else if (code == PLUS || code == MULT)
351 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
352 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
/* From here on X is known to be a QUEUED rtx itself.  */
356 /* If the increment has not happened, use the variable itself. */
357 if (QUEUED_INSN (x) == 0)
358 return QUEUED_VAR (x);
359 /* If the increment has happened and a pre-increment copy exists,
   use that copy.  (Comment closed here; original closing line elided.)  */
361 if (QUEUED_COPY (x) != 0)
362 return QUEUED_COPY (x);
363 /* The increment has happened but we haven't set up a pre-increment copy.
364 Set one up now, and use it. */
365 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
366 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
368 return QUEUED_COPY (x);
371 /* Return nonzero if X contains a QUEUED expression:
372 if it contains anything that will be altered by a queued increment.
373 We handle only combinations of MEM, PLUS, MINUS and MULT operators
374 since memory addresses generally contain only those. */
/* NOTE(review): the function header (queued_subexp_p), the switch on CODE,
   and braces are missing from this listing.  */
380 register enum rtx_code code = GET_CODE (x);
/* Unary case (MEM): recurse on the address.  */
386 return queued_subexp_p (XEXP (x, 0));
/* Binary cases (PLUS/MINUS/MULT): QUEUED in either operand counts.  */
390 return queued_subexp_p (XEXP (x, 0))
391 || queued_subexp_p (XEXP (x, 1));
396 /* Perform all the pending incrementations. */
/* NOTE(review): the function header (emit_queue) and braces are missing from
   this listing.  Drains pending_chain, emitting each queued BODY and
   recording the emitted insn in the QUEUED node.  */
402 while (p = pending_chain)
404 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
405 pending_chain = QUEUED_NEXT (p);
416 /* Copy data from FROM to TO, where the machine modes are not the same.
417 Both modes may be integer, or both may be floating.
418 UNSIGNEDP should be nonzero if FROM is an unsigned type.
419 This causes zero-extension instead of sign-extension. */
/* NOTE(review): many lines of this function (parameter declarations, braces,
   the float-conversion switch, #else/#endif lines, and several statements)
   are missing from this listing; the surviving lines are preserved verbatim.  */
422 convert_move (to, from, unsignedp)
423 register rtx to, from;
426 enum machine_mode to_mode = GET_MODE (to);
427 enum machine_mode from_mode = GET_MODE (from);
428 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
429 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
433 /* rtx code for making an equivalent value. */
434 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
436 to = protect_from_queue (to, 1);
437 from = protect_from_queue (from, 0);
/* Mixing float and integer here is an internal error (abort elided).  */
439 if (to_real != from_real)
442 /* If FROM is a SUBREG that indicates that we have already done at least
443 the required extension, strip it. We don't handle such SUBREGs as
   destinations.  (Comment closed here; original closing line elided.)  */
446 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
447 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
448 >= GET_MODE_SIZE (to_mode))
449 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
450 from = gen_lowpart (to_mode, from), from_mode = to_mode;
452 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
/* Trivial case: same mode (or a constant with no mode) -- plain move.  */
455 if (to_mode == from_mode
456 || (from_mode == VOIDmode && CONSTANT_P (from)))
458 emit_move_insn (to, from);
/* Float<->float conversions: try each named extend/truncate insn the
   target provides; fall through to a library call if none matches.  */
464 #ifdef HAVE_extendsfdf2
465 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
467 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
471 #ifdef HAVE_extendsfxf2
472 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
474 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
478 #ifdef HAVE_extendsftf2
479 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
481 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
485 #ifdef HAVE_extenddfxf2
486 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
488 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
492 #ifdef HAVE_extenddftf2
493 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
495 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
499 #ifdef HAVE_truncdfsf2
500 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
502 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
506 #ifdef HAVE_truncxfsf2
507 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
509 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
513 #ifdef HAVE_trunctfsf2
514 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
516 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
520 #ifdef HAVE_truncxfdf2
521 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
523 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
527 #ifdef HAVE_trunctfdf2
528 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
530 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* No target insn: pick the soft-float library routine for this mode pair.
   NOTE(review): the enclosing switch/if structure is elided in this listing.  */
542 libcall = extendsfdf2_libfunc;
546 libcall = extendsfxf2_libfunc;
550 libcall = extendsftf2_libfunc;
559 libcall = truncdfsf2_libfunc;
563 libcall = extenddfxf2_libfunc;
567 libcall = extenddftf2_libfunc;
576 libcall = truncxfsf2_libfunc;
580 libcall = truncxfdf2_libfunc;
589 libcall = trunctfsf2_libfunc;
593 libcall = trunctfdf2_libfunc;
599 if (libcall == (rtx) 0)
600 /* This conversion is not implemented yet. */
603 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
604 emit_move_insn (to, hard_libcall_value (to_mode));
608 /* Now both modes are integers. */
610 /* Handle expanding beyond a word. */
611 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
612 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
619 enum machine_mode lowpart_mode;
620 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
622 /* Try converting directly if the insn is supported. */
623 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
626 /* If FROM is a SUBREG, put it into a register. Do this
627 so that we always generate the same set of insns for
628 better cse'ing; if an intermediate assignment occurred,
629 we won't be doing the operation directly on the SUBREG. */
630 if (optimize > 0 && GET_CODE (from) == SUBREG)
631 from = force_reg (from_mode, from);
632 emit_unop_insn (code, to, from, equiv_code);
635 /* Next, try converting via full word. */
636 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
637 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
638 != CODE_FOR_nothing))
640 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
641 emit_unop_insn (code, to,
642 gen_lowpart (word_mode, to), equiv_code);
646 /* No special multiword conversion insn; do it by hand. */
649 /* Get a copy of FROM widened to a word, if necessary. */
650 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
651 lowpart_mode = word_mode;
653 lowpart_mode = from_mode;
655 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
657 lowpart = gen_lowpart (lowpart_mode, to);
658 emit_move_insn (lowpart, lowfrom);
660 /* Compute the value to put in each remaining word. */
/* Unsigned: fill with zero.  Signed: replicate the sign, either via a
   target slt insn or an arithmetic right shift of the low part.  */
662 fill_value = const0_rtx;
667 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
668 && STORE_FLAG_VALUE == -1)
670 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
672 fill_value = gen_reg_rtx (word_mode);
673 emit_insn (gen_slt (fill_value));
679 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
680 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
682 fill_value = convert_to_mode (word_mode, fill_value, 1);
686 /* Fill the remaining words. */
687 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
689 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
690 rtx subword = operand_subword (to, index, 1, to_mode);
695 if (fill_value != subword)
696 emit_move_insn (subword, fill_value);
699 insns = get_insns ();
702 emit_no_conflict_block (insns, to, from, NULL_RTX,
703 gen_rtx (equiv_code, to_mode, from));
/* Truncating a multiword value down to one word: just use the low word.  */
707 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
709 convert_move (to, gen_lowpart (word_mode, from), 0);
713 /* Handle pointer conversion */ /* SPEE 900220 */
714 if (to_mode == PSImode)
716 if (from_mode != SImode)
717 from = convert_to_mode (SImode, from, unsignedp);
719 #ifdef HAVE_truncsipsi
722 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
725 #endif /* HAVE_truncsipsi */
729 if (from_mode == PSImode)
731 if (to_mode != SImode)
733 from = convert_to_mode (SImode, from, unsignedp);
738 #ifdef HAVE_extendpsisi
739 if (HAVE_extendpsisi)
741 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
744 #endif /* HAVE_extendpsisi */
749 /* Now follow all the conversions between integers
750 no more than a word long. */
752 /* For truncation, usually we can just refer to FROM in a narrower mode. */
753 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
754 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
755 GET_MODE_BITSIZE (from_mode))
756 && ((GET_CODE (from) == MEM
757 && ! MEM_VOLATILE_P (from)
758 && direct_load[(int) to_mode]
759 && ! mode_dependent_address_p (XEXP (from, 0)))
760 || GET_CODE (from) == REG
761 || GET_CODE (from) == SUBREG))
763 emit_move_insn (to, gen_lowpart (to_mode, from));
767 /* For truncation, usually we can just refer to FROM in a narrower mode. */
/* (The branch below actually handles EXTENSION within a word.)  */
768 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
770 /* Convert directly if that works. */
771 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
774 /* If FROM is a SUBREG, put it into a register. Do this
775 so that we always generate the same set of insns for
776 better cse'ing; if an intermediate assignment occurred,
777 we won't be doing the operation directly on the SUBREG. */
778 if (optimize > 0 && GET_CODE (from) == SUBREG)
779 from = force_reg (from_mode, from);
780 emit_unop_insn (code, to, from, equiv_code);
785 enum machine_mode intermediate;
787 /* Search for a mode to convert via. */
788 for (intermediate = from_mode; intermediate != VOIDmode;
789 intermediate = GET_MODE_WIDER_MODE (intermediate))
790 if ((can_extend_p (to_mode, intermediate, unsignedp)
792 && (can_extend_p (intermediate, from_mode, unsignedp)
793 != CODE_FOR_nothing))
795 convert_move (to, convert_to_mode (intermediate, from,
796 unsignedp), unsignedp);
800 /* No suitable intermediate mode. */
805 /* Support special truncate insns for certain modes. */
/* Each pair below: use the named truncate insn if the target has it,
   otherwise force FROM into a register and retry (the generic truncation
   path then applies).  #else/#endif lines are elided in this listing.  */
807 if (from_mode == DImode && to_mode == SImode)
809 #ifdef HAVE_truncdisi2
812 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
816 convert_move (to, force_reg (from_mode, from), unsignedp);
820 if (from_mode == DImode && to_mode == HImode)
822 #ifdef HAVE_truncdihi2
825 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
829 convert_move (to, force_reg (from_mode, from), unsignedp);
833 if (from_mode == DImode && to_mode == QImode)
835 #ifdef HAVE_truncdiqi2
838 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
842 convert_move (to, force_reg (from_mode, from), unsignedp);
846 if (from_mode == SImode && to_mode == HImode)
848 #ifdef HAVE_truncsihi2
851 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
855 convert_move (to, force_reg (from_mode, from), unsignedp);
859 if (from_mode == SImode && to_mode == QImode)
861 #ifdef HAVE_truncsiqi2
864 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
868 convert_move (to, force_reg (from_mode, from), unsignedp);
872 if (from_mode == HImode && to_mode == QImode)
874 #ifdef HAVE_trunchiqi2
877 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
881 convert_move (to, force_reg (from_mode, from), unsignedp);
885 /* Handle truncation of volatile memrefs, and so on;
886 the things that couldn't be truncated directly,
887 and for which there was no special instruction. */
888 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
890 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
891 emit_move_insn (to, temp);
895 /* Mode combination is not recognized. */
899 /* Return an rtx for a value that would result
900 from converting X to mode MODE.
901 Both X and MODE may be floating, or both integer.
902 UNSIGNEDP is nonzero if X is an unsigned value.
903 This can be done by referring to a part of X in place
904 or by copying to a new temporary with conversion.
906 This function *must not* call protect_from_queue
907 except when putting X into an insn (in which case convert_move does it). */
/* NOTE(review): the remaining parameter declarations, the declaration of
   TEMP, braces, and the final return are missing from this listing.  */
910 convert_to_mode (mode, x, unsignedp)
911 enum machine_mode mode;
917 /* If FROM is a SUBREG that indicates that we have already done at least
918 the required extension, strip it. */
920 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
921 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
922 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
923 x = gen_lowpart (mode, x);
/* Already in the requested mode: nothing to do (return elided in listing).  */
925 if (mode == GET_MODE (x))
928 /* There is one case that we must handle specially: If we are converting
929 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
930 we are to interpret the constant as unsigned, gen_lowpart will do
931 the wrong thing if the constant appears negative. What we want to do is
932 make the high-order word of the constant zero, not all ones. */
934 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
935 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
936 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
937 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
939 /* We can do this with a gen_lowpart if both desired and current modes
940 are integer, and this is either a constant integer, a register, or a
941 non-volatile MEM. Except for the constant case, we must be narrowing
   the operand.  (Comment closed here; original closing line elided.)  */
944 if (GET_CODE (x) == CONST_INT
945 || (GET_MODE_CLASS (mode) == MODE_INT
946 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
947 && (GET_CODE (x) == CONST_DOUBLE
948 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
949 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
950 && direct_load[(int) mode]
951 || GET_CODE (x) == REG)))))
952 return gen_lowpart (mode, x);
/* Otherwise copy into a fresh pseudo with a real conversion.  */
954 temp = gen_reg_rtx (mode);
955 convert_move (temp, x, unsignedp);
959 /* Generate several move instructions to copy LEN bytes
960 from block FROM to block TO. (These are MEM rtx's with BLKmode).
961 The caller must pass FROM and TO
962 through protect_from_queue before calling.
963 ALIGN (in bytes) is maximum alignment we can assume. */
/* State shared between move_by_pieces and move_by_pieces_1.
   NOTE(review): most struct members (to, from, to_addr, from_addr, autinc_to,
   autinc_from, explicit_inc_to, len, offset, reverse) and the braces are
   missing from this listing.  */
965 struct move_by_pieces
974 int explicit_inc_from;
980 static void move_by_pieces_1 ();
981 static int move_by_pieces_ninsns ();
/* NOTE(review): parameter declarations, braces, and an abort for the
   misaligned case are missing from this listing.  */
984 move_by_pieces (to, from, len, align)
988 struct move_by_pieces data;
989 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
990 int max_size = MOVE_MAX + 1;
993 data.to_addr = to_addr;
994 data.from_addr = from_addr;
/* data.autinc_to: destination address already uses auto-inc/dec.  */
998 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
999 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
/* data.autinc_from: likewise for the source address.  */
1001 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1002 || GET_CODE (from_addr) == POST_INC
1003 || GET_CODE (from_addr) == POST_DEC);
1005 data.explicit_inc_from = 0;
1006 data.explicit_inc_to = 0;
/* data.reverse: copy from the high end downward (decrementing addresses).  */
1008 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1009 if (data.reverse) data.offset = len;
1012 /* If copying requires more than two move insns,
1013 copy addresses to registers (to make displacements shorter)
1014 and use post-increment if available. */
1015 if (!(data.autinc_from && data.autinc_to)
1016 && move_by_pieces_ninsns (len, align) > 2)
1018 #ifdef HAVE_PRE_DECREMENT
1019 if (data.reverse && ! data.autinc_from)
1021 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1022 data.autinc_from = 1;
1023 data.explicit_inc_from = -1;
1026 #ifdef HAVE_POST_INCREMENT
1027 if (! data.autinc_from)
1029 data.from_addr = copy_addr_to_reg (from_addr);
1030 data.autinc_from = 1;
1031 data.explicit_inc_from = 1;
1034 if (!data.autinc_from && CONSTANT_P (from_addr))
1035 data.from_addr = copy_addr_to_reg (from_addr);
1036 #ifdef HAVE_PRE_DECREMENT
1037 if (data.reverse && ! data.autinc_to)
1039 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1041 data.explicit_inc_to = -1;
1044 #ifdef HAVE_POST_INCREMENT
1045 if (! data.reverse && ! data.autinc_to)
1047 data.to_addr = copy_addr_to_reg (to_addr);
1049 data.explicit_inc_to = 1;
1052 if (!data.autinc_to && CONSTANT_P (to_addr))
1053 data.to_addr = copy_addr_to_reg (to_addr);
/* Allow unaligned wide moves only when the target tolerates them.  */
1056 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1057 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1060 /* First move what we can in the largest integer mode, then go to
1061 successively smaller modes. */
1063 while (max_size > 1)
1065 enum machine_mode mode = VOIDmode, tmode;
1066 enum insn_code icode;
/* Find the widest integer mode strictly narrower than max_size.  */
1068 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1069 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1070 if (GET_MODE_SIZE (tmode) < max_size)
1073 if (mode == VOIDmode)
1076 icode = mov_optab->handlers[(int) mode].insn_code;
1077 if (icode != CODE_FOR_nothing
1078 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1079 GET_MODE_SIZE (mode)))
1080 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1082 max_size = GET_MODE_SIZE (mode);
1085 /* The code above should have handled everything. */
1090 /* Return number of insns required to move L bytes by pieces.
1091 ALIGN (in bytes) is maximum alignment we can assume. */
/* NOTE(review): the return type line, parameter declarations, braces, and
   the final `return n_insns;` are missing from this listing.  Mirrors the
   mode-selection loop in move_by_pieces, but only counts insns.  */
1094 move_by_pieces_ninsns (l, align)
1098 register int n_insns = 0;
1099 int max_size = MOVE_MAX + 1;
1101 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1102 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1105 while (max_size > 1)
1107 enum machine_mode mode = VOIDmode, tmode;
1108 enum insn_code icode;
1110 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1111 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1112 if (GET_MODE_SIZE (tmode) < max_size)
1115 if (mode == VOIDmode)
1118 icode = mov_optab->handlers[(int) mode].insn_code;
1119 if (icode != CODE_FOR_nothing
1120 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1121 GET_MODE_SIZE (mode)))
/* One insn per full piece of this mode; remainder handled by narrower modes.  */
1122 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1124 max_size = GET_MODE_SIZE (mode);
1130 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1131 with move instructions for mode MODE. GENFUN is the gen_... function
1132 to make a move insn for that mode. DATA has all the other info. */
/* NOTE(review): the GENFUN parameter declaration, braces, and the statements
   assigning from1 / updating data->len are missing from this listing.  */
1135 move_by_pieces_1 (genfun, mode, data)
1137 enum machine_mode mode;
1138 struct move_by_pieces *data;
1140 register int size = GET_MODE_SIZE (mode);
1141 register rtx to1, from1;
1143 while (data->len >= size)
1145 if (data->reverse) data->offset -= size;
/* Build the destination (and, on the elided lines, source) operand: a bare
   auto-inc MEM when the address register auto-increments, else an offset
   from the base address.  */
1147 to1 = (data->autinc_to
1148 ? gen_rtx (MEM, mode, data->to_addr)
1149 : change_address (data->to, mode,
1150 plus_constant (data->to_addr, data->offset)));
1153 ? gen_rtx (MEM, mode, data->from_addr)
1154 : change_address (data->from, mode,
1155 plus_constant (data->from_addr, data->offset)));
/* Explicit pre-decrement of the address registers when requested.  */
1157 #ifdef HAVE_PRE_DECREMENT
1158 if (data->explicit_inc_to < 0)
1159 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1160 if (data->explicit_inc_from < 0)
1161 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1164 emit_insn ((*genfun) (to1, from1));
/* Explicit post-increment after the move when requested.  */
1165 #ifdef HAVE_POST_INCREMENT
1166 if (data->explicit_inc_to > 0)
1167 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1168 if (data->explicit_inc_from > 0)
1169 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1172 if (! data->reverse) data->offset += size;
1178 /* Emit code to move a block Y to a block X.
1179 This may be done with string-move instructions,
1180 with multiple scalar move instructions, or with a library call.
1182 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1184 SIZE is an rtx that says how long they are.
1185 ALIGN is the maximum alignment we can assume they have,
1186 measured in bytes. */
/* NOTE(review): parameter declarations, braces, abort calls for the
   mode/MEM sanity checks, and parts of the movstr emission are missing
   from this listing.  */
1189 emit_block_move (x, y, size, align)
1194 if (GET_MODE (x) != BLKmode)
1197 if (GET_MODE (y) != BLKmode)
1200 x = protect_from_queue (x, 1);
1201 y = protect_from_queue (y, 0);
1202 size = protect_from_queue (size, 0);
1204 if (GET_CODE (x) != MEM)
1206 if (GET_CODE (y) != MEM)
/* Small constant-size copies: cheaper as a few scalar moves.  */
1211 if (GET_CODE (size) == CONST_INT
1212 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1213 move_by_pieces (x, y, INTVAL (size), align);
1216 /* Try the most limited insn first, because there's no point
1217 including more than one in the machine description unless
1218 the more limited one has some advantage. */
1220 rtx opalign = GEN_INT (align);
1221 enum machine_mode mode;
1223 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1224 mode = GET_MODE_WIDER_MODE (mode))
1226 enum insn_code code = movstr_optab[(int) mode];
1228 if (code != CODE_FOR_nothing
1229 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1230 here because if SIZE is less than the mode mask, as it is
1231 returned by the macro, it will definitely be less than the
1232 actual mode mask. */
1233 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1234 && (insn_operand_predicate[(int) code][0] == 0
1235 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1236 && (insn_operand_predicate[(int) code][1] == 0
1237 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1238 && (insn_operand_predicate[(int) code][3] == 0
1239 || (*insn_operand_predicate[(int) code][3]) (opalign,
1243 rtx last = get_last_insn ();
/* Operand 2 is the length; coerce it to the movstr insn's mode.  */
1246 op2 = convert_to_mode (mode, size, 1);
1247 if (insn_operand_predicate[(int) code][2] != 0
1248 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1249 op2 = copy_to_mode_reg (mode, op2);
1251 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* The pattern failed to generate: discard and try a wider mode.  */
1258 delete_insns_since (last);
/* Fallback library call: memcpy where the target supplies the ANSI memory
   functions, otherwise BSD bcopy (note the swapped operand order).  */
1262 #ifdef TARGET_MEM_FUNCTIONS
1263 emit_library_call (memcpy_libfunc, 0,
1264 VOIDmode, 3, XEXP (x, 0), Pmode,
1266 convert_to_mode (Pmode, size, 1), Pmode);
1268 emit_library_call (bcopy_libfunc, 0,
1269 VOIDmode, 3, XEXP (y, 0), Pmode,
1271 convert_to_mode (Pmode, size, 1), Pmode);
1276 /* Copy all or part of a value X into registers starting at REGNO.
1277 The number of registers to be filled is NREGS. */
/* NOTE(review): the remaining parameter declarations, locals (i, pat, last),
   braces, and the #endif/emit for the load_multiple path are missing from
   this listing.  */
1280 move_block_to_reg (regno, x, nregs, mode)
1284 enum machine_mode mode;
/* Constants the target cannot use directly go through the constant pool.  */
1289 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1290 x = validize_mem (force_const_mem (mode, x));
1292 /* See if the machine can do this with a load multiple insn. */
1293 #ifdef HAVE_load_multiple
1294 last = get_last_insn ()
1295 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1303 delete_insns_since (last);
/* Fallback: one word-mode move per register.  */
1306 for (i = 0; i < nregs; i++)
1307 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1308 operand_subword_force (x, i, mode));
1311 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1312 The number of registers to be filled is NREGS. */
/* NOTE(review): parameter declarations, locals, braces, and the emit for the
   store_multiple path are missing from this listing.  */
1315 move_block_from_reg (regno, x, nregs)
1323 /* See if the machine can do this with a store multiple insn. */
1324 #ifdef HAVE_store_multiple
1325 last = get_last_insn ();
1326 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1334 delete_insns_since (last);
/* Fallback: store each register into the corresponding subword of X.  */
1337 for (i = 0; i < nregs; i++)
1339 rtx tem = operand_subword (x, i, 1, BLKmode);
1344 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1348 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
/* Emits a (use (reg ...)) for each register so later passes keep them live.
   NOTE(review): parameter declarations and braces elided in this listing.  */
1351 use_regs (regno, nregs)
1357 for (i = 0; i < nregs; i++)
1358 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1361 /* Mark the instructions since PREV as a libcall block.
1362 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
/* Body of group_insns (PREV): bracket the insns emitted since PREV with
   REG_LIBCALL / REG_RETVAL notes so later passes treat the whole sequence
   as one indivisible libcall block.
   NOTE(review): the function header line is elided from this view.  */
1371   /* Find the instructions to mark */
/* If PREV is nonzero the block starts just after it; otherwise the block
   is the entire insn chain emitted so far in this sequence.  */
1373     insn_first = NEXT_INSN (prev);
1375     insn_first = get_insns ();
1377   insn_last = get_last_insn ();
/* Cross-link the endpoints: the last insn points back at the first via
   REG_RETVAL, and the first points forward at the last via REG_LIBCALL.  */
1379   REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1380 				   REG_NOTES (insn_last));
1382   REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1383 				    REG_NOTES (insn_first));
1386 /* Write zeros through the storage of OBJECT.
1387 If OBJECT has BLKmode, SIZE is its length in bytes. */
/* Write zeros through the storage of OBJECT.
   A BLKmode OBJECT (SIZE bytes long) is cleared with a library call --
   memset where TARGET_MEM_FUNCTIONS is defined, otherwise BSD bzero;
   any other mode is cleared with a single store of const0_rtx.
   NOTE(review): elided listing -- parameter declarations and the #else
   separating the two library-call variants are missing from this view.  */
1390 clear_storage (object, size)
1394   if (GET_MODE (object) == BLKmode)
1396 #ifdef TARGET_MEM_FUNCTIONS
/* memset (addr, 0, size) -- note the argument order differs from bzero.  */
1397     emit_library_call (memset_libfunc, 0,
1399 		       XEXP (object, 0), Pmode, const0_rtx, Pmode,
1400 		       GEN_INT (size), Pmode);
/* bzero (addr, size) on targets without the mem* functions.  */
1402     emit_library_call (bzero_libfunc, 0,
1404 		       XEXP (object, 0), Pmode,
1405 		       GEN_INT (size), Pmode);
/* Non-BLKmode: a plain move of zero does the whole job.  */
1409     emit_move_insn (object, const0_rtx);
1412 /* Generate code to copy Y into X.
1413 Both Y and X must have the same mode, except that
1414 Y can be a constant with VOIDmode.
1415 This mode cannot be BLKmode; use emit_block_move for that.
1417 Return the last instruction emitted. */
/* Generate code to copy Y into X.  Both must have the same mode, except
   that Y may be a VOIDmode constant; BLKmode is not handled here (use
   emit_block_move).  Returns the last insn emitted.
   Strategy, in order: a target mov pattern for MODE; a complex-mode move
   split into real and imaginary parts; a word-by-word move for wide modes.
   NOTE(review): elided listing -- several locals, aborts, #else lines and
   closing braces are missing from this view.  */
1420 emit_move_insn (x, y)
1423   enum machine_mode mode = GET_MODE (x);
1424   enum machine_mode submode;
1425   enum mode_class class = GET_MODE_CLASS (mode);
/* Perform any pending autoincrements on X and Y before copying.  */
1428   x = protect_from_queue (x, 1);
1429   y = protect_from_queue (y, 0);
1431   if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
/* Constants the target cannot handle directly are spilled to memory.  */
1434   if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1435     y = force_const_mem (mode, y);
1437   /* If X or Y are memory references, verify that their addresses are valid
1439   if (GET_CODE (x) == MEM
1440       && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1441 	   && ! push_operand (x, GET_MODE (x)))
1443 	       && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1444     x = change_address (x, VOIDmode, XEXP (x, 0));
1446   if (GET_CODE (y) == MEM
1447       && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1449 	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1450     y = change_address (y, VOIDmode, XEXP (y, 0));
1452   if (mode == BLKmode)
/* SUBMODE is the scalar component mode of a complex mode: same unit size,
   integer or float class to match the complex class.  */
1455   if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1456     submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1457 			     (class == MODE_COMPLEX_INT
1458 			      ? MODE_INT : MODE_FLOAT),
/* Preferred case: the target has a mov pattern for this mode.  */
1461   if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1463     emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1465   /* Expand complex moves by moving real part and imag part, if possible.  */
1466   else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1467 	   && submode != BLKmode
1468 	   && (mov_optab->handlers[(int) submode].insn_code
1469 	       != CODE_FOR_nothing))
1471       /* Don't split destination if it is a stack push.  */
1472       int stack = push_operand (x, GET_MODE (x));
1473       rtx prev = get_last_insn ();
1475       /* Tell flow that the whole of the destination is being set.  */
1476       if (GET_CODE (x) == REG)
1477 	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1479       /* If this is a stack, push the highpart first, so it
1480 	 will be in the argument order.
1482 	 In that case, change_address is used only to convert
1483 	 the mode, not to change the address.  */
1484       emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1485 		 ((stack ? change_address (x, submode, (rtx) 0)
1486 		   : gen_highpart (submode, x)),
1487 		  gen_highpart (submode, y)));
1488       emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1489 		 ((stack ? change_address (x, submode, (rtx) 0)
1490 		   : gen_lowpart (submode, x)),
1491 		  gen_lowpart (submode, y)));
1495       return get_last_insn ();
1498   /* This will handle any multi-word mode that lacks a move_insn pattern.
1499      However, you will get better code if you define such patterns,
1500      even if they must turn into multiple assembler instructions.  */
1501   else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1504       rtx prev_insn = get_last_insn ();
/* Word count rounds the mode size up to whole words.  */
1507 	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1510 	  rtx xpart = operand_subword (x, i, 1, mode);
1511 	  rtx ypart = operand_subword (y, i, 1, mode);
1513 	  /* If we can't get a part of Y, put Y into memory if it is a
1514 	     constant.  Otherwise, force it into a register.  If we still
1515 	     can't get a part of Y, abort.  */
1516 	  if (ypart == 0 && CONSTANT_P (y))
1518 	      y = force_const_mem (mode, y);
1519 	      ypart = operand_subword (y, i, 1, mode);
1521 	  else if (ypart == 0)
1522 	    ypart = operand_subword_force (y, i, mode);
1524 	  if (xpart == 0 || ypart == 0)
1527 	  last_insn = emit_move_insn (xpart, ypart);
1529       /* Mark these insns as a libcall block.  */
1530       group_insns (prev_insn);
1538 /* Pushing data onto the stack. */
1540 /* Push a block of length SIZE (perhaps variable)
1541 and return an rtx to address the beginning of the block.
1542 Note that it is not possible for the value returned to be a QUEUED.
1543 The value may be virtual_outgoing_args_rtx.
1545 EXTRA is the number of bytes of padding to push in addition to SIZE.
1546 BELOW nonzero means this padding comes at low addresses;
1547 otherwise, the padding comes at high addresses. */
/* Push a block of length SIZE (perhaps variable) plus EXTRA bytes of
   padding, and return an address rtx for the beginning of the block.
   BELOW nonzero puts the padding at low addresses.  The returned value
   may be virtual_outgoing_args_rtx and is never a QUEUED.
   NOTE(review): elided listing -- the else-arms between the adjustment
   cases and the #else of STACK_GROWS_DOWNWARD are missing from view.  */
1550 push_block (size, extra, below)
/* First grow the stack by SIZE + EXTRA, using the cheapest form that
   the operands allow.  */
1555   if (CONSTANT_P (size))
1556     anti_adjust_stack (plus_constant (size, extra));
1557   else if (GET_CODE (size) == REG && extra == 0)
1558     anti_adjust_stack (size);
/* General case: compute SIZE + EXTRA into a register first.  */
1561       rtx temp = copy_to_mode_reg (Pmode, size);
1563 	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1564 			     temp, 0, OPTAB_LIB_WIDEN);
1565       anti_adjust_stack (temp);
/* Now form the address of the block just allocated.  When the stack
   grows downward the new space starts at virtual_outgoing_args_rtx
   (offset past the padding if it lies below); otherwise the space lies
   at negative offsets from that point.  */
1568 #ifdef STACK_GROWS_DOWNWARD
1569   temp = virtual_outgoing_args_rtx;
1570   if (extra != 0 && below)
1571     temp = plus_constant (temp, extra);
1573   if (GET_CODE (size) == CONST_INT)
1574     temp = plus_constant (virtual_outgoing_args_rtx,
1575 			  - INTVAL (size) - (below ? 0 : extra));
1576   else if (extra != 0 && !below)
1577     temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1578 		    negate_rtx (Pmode, plus_constant (size, extra)));
1580     temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1581 		    negate_rtx (Pmode, size));
1584   return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1590 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1593 /* Generate code to push X onto the stack, assuming it has mode MODE and
1595 MODE is redundant except when X is a CONST_INT (since they don't
1597 SIZE is an rtx for the size of data to be copied (in bytes),
1598 needed only if X is BLKmode.
1600 ALIGN (in bytes) is maximum alignment we can assume.
1602 If PARTIAL is nonzero, then copy that many of the first words
1603 of X into registers starting with REG, and push the rest of X.
1604 The amount of space pushed is decreased by PARTIAL words,
1605 rounded *down* to a multiple of PARM_BOUNDARY.
1606 REG must be a hard register in this case.
1608 EXTRA is the amount in bytes of extra space to leave next to this arg.
1609 This is ignored if an argument block has already been allocated.
1611 On a machine that lacks real push insns, ARGS_ADDR is the address of
1612 the bottom of the argument block for this call. We use indexing off there
1613 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
1614 argument block has not been preallocated.
1616 ARGS_SO_FAR is the size of args previously pushed for this call. */
/* Generate code to push X (mode MODE, tree TYPE) onto the stack.
   SIZE is the byte count (needed only for BLKmode); ALIGN is the maximum
   alignment in bytes we may assume.  PARTIAL words go into registers
   starting at REG; EXTRA bytes of padding are left next to the arg;
   ARGS_ADDR / ARGS_SO_FAR locate the slot when an argument block has
   been preallocated.  Three cases: BLKmode block, scalar partly in
   registers, and plain scalar.
   NOTE(review): elided listing -- most parameter declarations, locals,
   #else lines, aborts and closing braces are missing from this view.  */
1619 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1620 		args_addr, args_so_far)
1622      enum machine_mode mode;
/* Which way the stack grows, as an enum direction for padding logic.  */
1633   enum direction stack_direction
1634 #ifdef STACK_GROWS_DOWNWARD
1640   /* Decide where to pad the argument: `downward' for below,
1641      `upward' for above, or `none' for don't pad it.
1642      Default is below for small data on big-endian machines; else above.  */
1643   enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1645   /* Invert direction if stack is post-update.  */
1646   if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1647     if (where_pad != none)
1648       where_pad = (where_pad == downward ? upward : downward);
1650   xinner = x = protect_from_queue (x, 0);
1652   if (mode == BLKmode)
1654       /* Copy a block into the stack, entirely or partially.  */
1657       int used = partial * UNITS_PER_WORD;
1658       int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1666       /* USED is now the # of bytes we need not copy to the stack
1667 	 because registers will take care of them.  */
/* Skip the register-covered prefix of the source block.  */
1670 	xinner = change_address (xinner, BLKmode,
1671 				 plus_constant (XEXP (xinner, 0), used));
1673       /* If the partial register-part of the arg counts in its stack size,
1674 	 skip the part of stack space corresponding to the registers.
1675 	 Otherwise, start copying to the beginning of the stack space,
1676 	 by setting SKIP to 0.  */
1677 #ifndef REG_PARM_STACK_SPACE
1683 #ifdef PUSH_ROUNDING
1684       /* Do it with several push insns if that doesn't take lots of insns
1685 	 and if there is no difficulty with push insns that skip bytes
1686 	 on the stack for alignment purposes.  */
1688 	  && GET_CODE (size) == CONST_INT
1690 	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1692       /* Here we avoid the case of a structure whose weak alignment
1693 	 forces many pushes of a small amount of data,
1694 	 and such small pushes do rounding that causes trouble.  */
1695 	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1696 	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1697 	      || PUSH_ROUNDING (align) == align)
1698 	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1700 	  /* Push padding now if padding above and stack grows down,
1701 	     or if padding below and stack grows up.
1702 	     But if space already allocated, this has already been done.  */
1703 	  if (extra && args_addr == 0
1704 	      && where_pad != none && where_pad != stack_direction)
1705 	    anti_adjust_stack (GEN_INT (extra));
1707 	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1708 			  INTVAL (size) - used, align);
1711 #endif /* PUSH_ROUNDING */
1713       /* Otherwise make space on the stack and copy the data
1714 	 to the address of that space.  */
1716 	  /* Deduct words put into registers from the size we must copy.  */
1719 	      if (GET_CODE (size) == CONST_INT)
1720 		size = GEN_INT (INTVAL (size) - used);
1722 		size = expand_binop (GET_MODE (size), sub_optab, size,
1723 				     GEN_INT (used), NULL_RTX, 0,
1727 	  /* Get the address of the stack space.
1728 	     In this case, we do not deal with EXTRA separately.
1729 	     A single stack adjust will do.  */
1732 	    temp = push_block (size, extra, where_pad == downward);
1735 	  else if (GET_CODE (args_so_far) == CONST_INT)
1736 	    temp = memory_address (BLKmode,
1737 				   plus_constant (args_addr,
1738 						  skip + INTVAL (args_so_far)));
1740 	    temp = memory_address (BLKmode,
1741 				   plus_constant (gen_rtx (PLUS, Pmode,
1742 							   args_addr, args_so_far),
1745 	  /* TEMP is the address of the block.  Copy the data there.  */
1746 	  if (GET_CODE (size) == CONST_INT
1747 	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1750 	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1751 			      INTVAL (size), align);
1754 	  /* Try the most limited insn first, because there's no point
1755 	     including more than one in the machine description unless
1756 	     the more limited one has some advantage.  */
1757 #ifdef HAVE_movstrqi
1759 	      && GET_CODE (size) == CONST_INT
1760 	      && ((unsigned) INTVAL (size)
1761 		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1763 	      emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1764 				       xinner, size, GEN_INT (align)));
1768 #ifdef HAVE_movstrhi
1770 	      && GET_CODE (size) == CONST_INT
1771 	      && ((unsigned) INTVAL (size)
1772 		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1774 	      emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1775 				       xinner, size, GEN_INT (align)));
1779 #ifdef HAVE_movstrsi
1782 	      emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1783 				       xinner, size, GEN_INT (align)));
1787 #ifdef HAVE_movstrdi
1790 	      emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1791 				       xinner, size, GEN_INT (align)));
1796 #ifndef ACCUMULATE_OUTGOING_ARGS
1797 	  /* If the source is referenced relative to the stack pointer,
1798 	     copy it to another register to stabilize it.  We do not need
1799 	     to do this if we know that we won't be changing sp.  */
1801 	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1802 	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1803 	    temp = copy_to_reg (temp);
1806 	  /* Make inhibit_defer_pop nonzero around the library call
1807 	     to force it to pop the bcopy-arguments right away.  */
1809 #ifdef TARGET_MEM_FUNCTIONS
1810 	  emit_library_call (memcpy_libfunc, 0,
1811 			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1814 	  emit_library_call (bcopy_libfunc, 0,
1815 			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1821   else if (partial > 0)
1823       /* Scalar partly in registers.  */
1825       int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1828       /* # words of start of argument
1829 	 that we must make space for but need not store.  */
/* NOTE(review): the BLKmode path above computes its offset with
   PARM_BOUNDARY / BITS_PER_UNIT, while this line divides by
   BITS_PER_WORD -- looks inconsistent; verify intent against history.  */
1830       int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1831       int args_offset = INTVAL (args_so_far);
1834       /* Push padding now if padding above and stack grows down,
1835 	 or if padding below and stack grows up.
1836 	 But if space already allocated, this has already been done.  */
1837       if (extra && args_addr == 0
1838 	  && where_pad != none && where_pad != stack_direction)
1839 	anti_adjust_stack (GEN_INT (extra));
1841       /* If we make space by pushing it, we might as well push
1842 	 the real data.  Otherwise, we can leave OFFSET nonzero
1843 	 and leave the space uninitialized.  */
1847       /* Now NOT_STACK gets the number of words that we don't need to
1848 	 allocate on the stack.  */
1849       not_stack = partial - offset;
1851       /* If the partial register-part of the arg counts in its stack size,
1852 	 skip the part of stack space corresponding to the registers.
1853 	 Otherwise, start copying to the beginning of the stack space,
1854 	 by setting SKIP to 0.  */
1855 #ifndef REG_PARM_STACK_SPACE
1861       if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1862 	x = validize_mem (force_const_mem (mode, x));
1864       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1865 	 SUBREGs of such registers are not allowed.  */
1866       if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1867 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1868 	x = copy_to_reg (x);
1870       /* Loop over all the words allocated on the stack for this arg.  */
1871       /* We can do it by words, because any scalar bigger than a word
1872 	 has a size a multiple of a word.  */
1873 #ifndef PUSH_ARGS_REVERSED
1874       for (i = not_stack; i < size; i++)
1876       for (i = size - 1; i >= not_stack; i--)
1878 	if (i >= not_stack + offset)
/* Recurse: each stack word of the scalar is pushed as its own
   word_mode argument at the proper offset.  */
1879 	  emit_push_insn (operand_subword_force (x, i, mode),
1880 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
1882 			  GEN_INT (args_offset + ((i - not_stack + skip)
1883 						  * UNITS_PER_WORD)));
1889       /* Push padding now if padding above and stack grows down,
1890 	 or if padding below and stack grows up.
1891 	 But if space already allocated, this has already been done.  */
1892       if (extra && args_addr == 0
1893 	  && where_pad != none && where_pad != stack_direction)
1894 	anti_adjust_stack (GEN_INT (extra));
1896 #ifdef PUSH_ROUNDING
1898 	addr = gen_push_operand ();
1901 	  if (GET_CODE (args_so_far) == CONST_INT)
1903 	      = memory_address (mode,
1904 				plus_constant (args_addr, INTVAL (args_so_far)));
1906 	    addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
1909       emit_move_insn (gen_rtx (MEM, mode, addr), x);
1913   /* If part should go in registers, copy that part
1914      into the appropriate registers.  Do this now, at the end,
1915      since mem-to-mem copies above may do function calls.  */
1917     move_block_to_reg (REGNO (reg), x, partial, mode);
/* Padding that falls on the stack-growth side is allocated last.  */
1919   if (extra && args_addr == 0 && where_pad == stack_direction)
1920     anti_adjust_stack (GEN_INT (extra));
1923 /* Output a library call to function FUN (a SYMBOL_REF rtx)
1924 (emitting the queue unless NO_QUEUE is nonzero),
1925 for a value of mode OUTMODE,
1926 with NARGS different arguments, passed as alternating rtx values
1927 and machine_modes to convert them to.
1928 The rtx values should have been passed through protect_from_queue already.
1930 NO_QUEUE will be true if and only if the library call is a `const' call
1931 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1932 to the variable is_const in expand_call.
1934 NO_QUEUE must be true for const calls, because if it isn't, then
1935 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
1936 and will be lost if the libcall sequence is optimized away.
1938 NO_QUEUE must be false for non-const calls, because if it isn't, the
1939 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
1940 optimized. For instance, the instruction scheduler may incorrectly
1941 move memory references across the non-const call. */
/* Output a library call to FUN (a SYMBOL_REF rtx), varargs-style:
   (fun, no_queue, outmode, nargs, then NARGS pairs of rtx value and
   machine_mode).  Computes each argument's register/stack placement via
   the target's calling-convention macros, pushes or loads them, and
   emits the call via emit_call_1.
   NOTE(review): elided listing -- va_start/va_end, several locals,
   #else lines, aborts and braces are missing from this view.  */
1944 emit_library_call (va_alist)
1948   struct args_size args_size;
1949   register int argnum;
1950   enum machine_mode outmode;
1957   CUMULATIVE_ARGS args_so_far;
/* Per-argument record: the value, its mode, its register (if any), how
   many words go in registers, and its stack offset/size.  */
1958   struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1959 	       struct args_size offset; struct args_size size; };
1961   int old_inhibit_defer_pop = inhibit_defer_pop;
/* Pull the fixed leading arguments out of the va_list.  */
1966   orgfun = fun = va_arg (p, rtx);
1967   no_queue = va_arg (p, int);
1968   outmode = va_arg (p, enum machine_mode);
1969   nargs = va_arg (p, int);
1971   /* Copy all the libcall-arguments out of the varargs data
1972      and into a vector ARGVEC.
1974      Compute how to pass each argument.  We only support a very small subset
1975      of the full argument passing conventions to limit complexity here since
1976      library functions shouldn't have many args.  */
1978   argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1980   INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1982   args_size.constant = 0;
1985   for (count = 0; count < nargs; count++)
1987       rtx val = va_arg (p, rtx);
1988       enum machine_mode mode = va_arg (p, enum machine_mode);
1990       /* We cannot convert the arg value to the mode the library wants here;
1991 	 must do it earlier where we know the signedness of the arg.  */
1993 	  || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1996       /* On some machines, there's no way to pass a float to a library fcn.
1997 	 Pass it as a double instead.  */
1998 #ifdef LIBGCC_NEEDS_DOUBLE
1999       if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2000 	val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2003       /* There's no need to call protect_from_queue, because
2004 	 either emit_move_insn or emit_push_insn will do that.  */
2006       /* Make sure it is a reasonable operand for a move or push insn.  */
2007       if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2008 	  && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2009 	val = force_operand (val, NULL_RTX);
2011       argvec[count].value = val;
2012       argvec[count].mode = mode;
2014 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2015       if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2019       argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2020       if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2022 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2023       argvec[count].partial
2024 	= FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2026       argvec[count].partial = 0;
2029       locate_and_pad_parm (mode, NULL_TREE,
2030 			   argvec[count].reg && argvec[count].partial == 0,
2031 			   NULL_TREE, &args_size, &argvec[count].offset,
2032 			   &argvec[count].size);
2034       if (argvec[count].size.var)
2037 #ifndef REG_PARM_STACK_SPACE
2038       if (argvec[count].partial)
2039 	argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2042       if (argvec[count].reg == 0 || argvec[count].partial != 0
2043 #ifdef REG_PARM_STACK_SPACE
2047 	args_size.constant += argvec[count].size.constant;
2049 #ifdef ACCUMULATE_OUTGOING_ARGS
2050       /* If this arg is actually passed on the stack, it might be
2051 	 clobbering something we already put there (this library call might
2052 	 be inside the evaluation of an argument to a function whose call
2053 	 requires the stack).  This will only occur when the library call
2054 	 has sufficient args to run out of argument registers.  Abort in
2055 	 this case; if this ever occurs, code must be added to save and
2056 	 restore the arg slot.  */
2058       if (argvec[count].reg == 0 || argvec[count].partial != 0)
2062       FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2066   /* If this machine requires an external definition for library
2067      functions, write one out.  */
2068   assemble_external_libcall (fun);
/* Round the total stack size up to the stack boundary.  */
2070 #ifdef STACK_BOUNDARY
2071   args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2072 			 / STACK_BYTES) * STACK_BYTES);
2075 #ifdef REG_PARM_STACK_SPACE
2076   args_size.constant = MAX (args_size.constant,
2077 			    REG_PARM_STACK_SPACE ((tree) 0));
/* With accumulated outgoing args, record the high-water mark instead of
   adjusting the stack for this call.  */
2080 #ifdef ACCUMULATE_OUTGOING_ARGS
2081   if (args_size.constant > current_function_outgoing_args_size)
2082     current_function_outgoing_args_size = args_size.constant;
2083   args_size.constant = 0;
2086 #ifndef PUSH_ROUNDING
2087   argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2090 #ifdef PUSH_ARGS_REVERSED
2098   /* Push the args that need to be pushed.  */
2100   for (count = 0; count < nargs; count++, argnum += inc)
2102       register enum machine_mode mode = argvec[argnum].mode;
2103       register rtx val = argvec[argnum].value;
2104       rtx reg = argvec[argnum].reg;
2105       int partial = argvec[argnum].partial;
2107       if (! (reg != 0 && partial == 0))
/* NOTE(review): the offset is indexed with COUNT while every other
   field above uses ARGNUM; when PUSH_ARGS_REVERSED makes them differ
   this pairs values with the wrong offsets -- verify against history.  */
2108 	emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2109 			argblock, GEN_INT (argvec[count].offset.constant));
2113 #ifdef PUSH_ARGS_REVERSED
2119   /* Now load any reg parms into their regs.  */
2121   for (count = 0; count < nargs; count++, argnum += inc)
2123       register enum machine_mode mode = argvec[argnum].mode;
2124       register rtx val = argvec[argnum].value;
2125       rtx reg = argvec[argnum].reg;
2126       int partial = argvec[argnum].partial;
2128       if (reg != 0 && partial == 0)
2129 	emit_move_insn (reg, val);
2133   /* For version 1.37, try deleting this entirely.  */
2137   /* Any regs containing parms remain in use through the call.  */
2139   for (count = 0; count < nargs; count++)
2140     if (argvec[count].reg != 0)
2141       emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2143   use_insns = get_insns ();
2146   fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2148   /* Don't allow popping to be deferred, since then
2149      cse'ing of library calls could delete a call and leave the pop.  */
2152   /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2153      will set inhibit_defer_pop to that value.  */
2155   emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2156 	       FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2157 	       outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2158 	       old_inhibit_defer_pop + 1, use_insns, no_queue);
2160   /* Now restore inhibit_defer_pop to its actual original value.  */
2164 /* Expand an assignment that stores the value of FROM into TO.
2165 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2166 (This may contain a QUEUED rtx.)
2167 Otherwise, the returned value is not meaningful.
2169 SUGGEST_REG is no longer actually used.
2170 It used to mean, copy the value through a register
2171 and return that register, if that is possible.
2172 But now we do this if WANT_VALUE.
2174 If the value stored is a constant, we return the constant. */
/* Expand an assignment that stores FROM into TO.  If WANT_VALUE is
   nonzero, return an rtx for the value of TO (possibly a QUEUED);
   otherwise the return value is not meaningful.  SUGGEST_REG is unused.
   Component/bit-field/constant-index-array stores go through
   store_field; everything else goes through store_expr.
   NOTE(review): elided listing -- several locals, braces and the #endif
   of the disabled volatile check are missing from this view.  */
2177 expand_assignment (to, from, want_value, suggest_reg)
2182   register rtx to_rtx = 0;
2185   /* Don't crash if the lhs of the assignment was erroneous.  */
2187   if (TREE_CODE (to) == ERROR_MARK)
2188     return expand_expr (from, NULL_RTX, VOIDmode, 0);
2190   /* Assignment of a structure component needs special treatment
2191      if the structure component's rtx is not simply a MEM.
2192      Assignment of an array element at a constant index
2193      has the same problem.  */
2195   if (TREE_CODE (to) == COMPONENT_REF
2196       || TREE_CODE (to) == BIT_FIELD_REF
2197       || (TREE_CODE (to) == ARRAY_REF
2198 	  && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2199 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2201       enum machine_mode mode1;
/* Decompose the reference into the containing object TEM plus a
   bit position/size, mode, signedness and volatility.  */
2207       tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2208 				      &mode1, &unsignedp, &volatilep);
2210       /* If we are going to use store_bit_field and extract_bit_field,
2211 	 make sure to_rtx will be safe for multiple use.  */
2213       if (mode1 == VOIDmode && want_value)
2214 	tem = stabilize_reference (tem);
2216       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
/* A variable offset is added to the address at run time.  */
2219 	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2221 	  if (GET_CODE (to_rtx) != MEM)
2223 	  to_rtx = change_address (to_rtx, VOIDmode,
2224 				   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2225 					    force_reg (Pmode, offset_rtx)));
2229 	  if (GET_CODE (to_rtx) == MEM)
2230 	    MEM_VOLATILE_P (to_rtx) = 1;
2231 #if 0  /* This was turned off because, when a field is volatile
2232 	  in an object which is not volatile, the object may be in a register,
2233 	  and then we would abort over here.  */
2239       result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2241 			    /* Spurious cast makes HPUX compiler happy.  */
2242 			    ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2245 			    /* Required alignment of containing datum.  */
2246 			    TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2247 			    int_size_in_bytes (TREE_TYPE (tem)));
2248       preserve_temp_slots (result);
2254   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
2255      Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
2258     to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2260   /* In case we are returning the contents of an object which overlaps
2261      the place the value is being stored, use a safe function when copying
2262      a value through a pointer into a structure value return block.  */
2263   if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2264       && current_function_returns_struct
2265       && !current_function_returns_pcc_struct)
2267       rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2268       rtx size = expr_size (from);
2270 #ifdef TARGET_MEM_FUNCTIONS
2271       emit_library_call (memcpy_libfunc, 0,
2272 			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2273 			 XEXP (from_rtx, 0), Pmode,
2276       emit_library_call (bcopy_libfunc, 0,
2277 			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2278 			 XEXP (to_rtx, 0), Pmode,
2282       preserve_temp_slots (to_rtx);
2287   /* Compute FROM and store the value in the rtx we got.  */
2289   result = store_expr (from, to_rtx, want_value);
2290   preserve_temp_slots (result);
2295 /* Generate code for computing expression EXP,
2296 and storing the value into TARGET.
2297 Returns TARGET or an equivalent value.
2298 TARGET may contain a QUEUED rtx.
2300 If SUGGEST_REG is nonzero, copy the value through a register
2301 and return that register, if that is possible.
2303 If the value stored is a constant, we return the constant. */
/* Generate code for computing expression EXP and storing the value into
   TARGET.  Returns TARGET or an equivalent value (possibly a QUEUED).
   If SUGGEST_REG is nonzero, copy the value through a register and
   return that register when possible; if the value stored is a
   constant, the constant is returned.
   NOTE(review): elided listing -- several locals, labels, braces and
   #else lines are missing from this view.  */
2306 store_expr (exp, target, suggest_reg)
2308      register rtx target;
2312   int dont_return_target = 0;
2314   if (TREE_CODE (exp) == COMPOUND_EXPR)
2316       /* Perform first part of compound expression, then assign from second
2318       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2320       return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2322   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2324       /* For conditional expression, get safe form of the target.  Then
2325 	 test the condition, doing the appropriate assignment on either
2326 	 side.  This avoids the creation of unnecessary temporaries.
2327 	 For non-BLKmode, it is more efficient not to do this.  */
2329       rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2332       target = protect_from_queue (target, 1);
2335       jumpifnot (TREE_OPERAND (exp, 0), lab1);
2336       store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2338       emit_jump_insn (gen_jump (lab2));
2341       store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2347   else if (suggest_reg && GET_CODE (target) == MEM
2348 	   && GET_MODE (target) != BLKmode)
2349     /* If target is in memory and caller wants value in a register instead,
2350        arrange that.  Pass TARGET as target for expand_expr so that,
2351        if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2352        We know expand_expr will not use the target in that case.  */
2354       temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2355 			  GET_MODE (target), 0);
2356       if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2357 	temp = copy_to_reg (temp);
2358       dont_return_target = 1;
2360   else if (queued_subexp_p (target))
2361     /* If target contains a postincrement, it is not safe
2362        to use as the returned value.  It would access the wrong
2363        place by the time the queued increment gets output.
2364        So copy the value through a temporary and use that temp
2367       if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2369 	  /* Expand EXP into a new pseudo.  */
2370 	  temp = gen_reg_rtx (GET_MODE (target));
2371 	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
2374 	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2375       dont_return_target = 1;
2377   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2378     /* If this is a scalar in a register that is stored in a wider mode
2379        than the declared mode, compute the result into its declared mode
2380        and then convert to the wider mode.  Our value is the computed
2383       temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2384       convert_move (SUBREG_REG (target), temp,
2385 		    SUBREG_PROMOTED_UNSIGNED_P (target));
/* Ordinary case: let expand_expr try to use TARGET directly.  */
2390       temp = expand_expr (exp, target, GET_MODE (target), 0);
2391       /* DO return TARGET if it's a specified hardware register.
2392 	 expand_return relies on this.  */
2393       if (!(target && GET_CODE (target) == REG
2394 	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
2395 	  && CONSTANT_P (temp))
2396 	dont_return_target = 1;
2399   /* If value was not generated in the target, store it there.
2400      Convert the value to TARGET's type first if necessary.  */
2402   if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2404       target = protect_from_queue (target, 1);
2405       if (GET_MODE (temp) != GET_MODE (target)
2406 	  && GET_MODE (temp) != VOIDmode)
2408 	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2409 	  if (dont_return_target)
2411 	      /* In this case, we will return TEMP,
2412 		 so make sure it has the proper mode.
2413 		 But don't forget to store the value into TARGET.  */
2414 	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2415 	      emit_move_insn (target, temp);
2418 	    convert_move (target, temp, unsignedp);
2421       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2423 	  /* Handle copying a string constant into an array.
2424 	     The string constant may be shorter than the array.
2425 	     So copy just the string's actual length, and clear the rest.  */
2428 	  /* Get the size of the data type of the string,
2429 	     which is actually the size of the target.  */
2430 	  size = expr_size (exp);
2431 	  if (GET_CODE (size) == CONST_INT
2432 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
2433 	    emit_block_move (target, temp, size,
2434 			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2437 	      /* Compute the size of the data to copy from the string.  */
/* copy_size = min (sizeof target, string length).  */
2439 		= fold (build (MIN_EXPR, sizetype,
2440 			       size_binop (CEIL_DIV_EXPR,
2441 					   TYPE_SIZE (TREE_TYPE (exp)),
2442 					   size_int (BITS_PER_UNIT)),
2444 			       build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2445 	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2449 	      /* Copy that much.  */
2450 	      emit_block_move (target, temp, copy_size_rtx,
2451 			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2453 	      /* Figure out how much is left in TARGET
2454 		 that we have to clear.  */
2455 	      if (GET_CODE (copy_size_rtx) == CONST_INT)
2457 		  temp = plus_constant (XEXP (target, 0),
2458 					TREE_STRING_LENGTH (exp));
2459 		  size = plus_constant (size,
2460 					- TREE_STRING_LENGTH (exp));
/* Variable copy size: compute the残 tail address and size at run time
   and skip the clearing when the remainder is negative.  */
2464 		  enum machine_mode size_mode = Pmode;
2466 		  temp = force_reg (Pmode, XEXP (target, 0));
2467 		  temp = expand_binop (size_mode, add_optab, temp,
2468 				       copy_size_rtx, NULL_RTX, 0,
2471 		  size = expand_binop (size_mode, sub_optab, size,
2472 				       copy_size_rtx, NULL_RTX, 0,
2475 		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2476 				 GET_MODE (size), 0, 0);
2477 		  label = gen_label_rtx ();
2478 		  emit_jump_insn (gen_blt (label));
2481 	      if (size != const0_rtx)
2483 #ifdef TARGET_MEM_FUNCTIONS
2484 		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2485 				     temp, Pmode, const0_rtx, Pmode, size, Pmode);
2487 		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2488 				     temp, Pmode, size, Pmode);
2495   else if (GET_MODE (temp) == BLKmode)
2496     emit_block_move (target, temp, expr_size (exp),
2497 		     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2499     emit_move_insn (target, temp);
2501   if (dont_return_target)
2506 /* Store the value of constructor EXP into the rtx TARGET.
2507 TARGET is either a REG or a MEM. */
/* NOTE(review): this chunk is an elided view of the original file.  The
   leading numbers are the original source line numbers; lines missing
   between them (declarations of `elt', `i', `bitsize', `bitpos',
   `unsignedp', and several braces) are not visible here -- do not treat
   the gaps as deleted code.  */
2510 store_constructor (exp, target)
2514 tree type = TREE_TYPE (exp);
2516 /* We know our target cannot conflict, since safe_from_p has been called. */
2518 /* Don't try copying piece by piece into a hard register
2519 since that is vulnerable to being clobbered by EXP.
2520 Instead, construct in a pseudo register and then copy it all. */
2521 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2523 rtx temp = gen_reg_rtx (GET_MODE (target));
/* Build the whole constructor in the scratch pseudo, then move it to
   the hard register in a single insn.  */
2524 store_constructor (exp, temp);
2525 emit_move_insn (target, temp);
/* Aggregate case: records and unions are filled in field by field.  */
2530 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2534 /* Inform later passes that the whole union value is dead. */
2535 if (TREE_CODE (type) == UNION_TYPE)
2536 emit_insn (gen_rtx (CLOBBER, VOIDmode, target))
2538 /* If we are building a static constructor into a register,
2539 set the initial value as zero so we can fold the value into
2541 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2542 emit_move_insn (target, const0_rtx);
2544 /* If the constructor has fewer fields than the structure,
2545 clear the whole structure first. */
2546 else if (list_length (CONSTRUCTOR_ELTS (exp))
2547 != list_length (TYPE_FIELDS (type)))
2548 clear_storage (target, int_size_in_bytes (type));
2550 /* Inform later passes that the old value is dead. */
2551 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2553 /* Store each element of the constructor into
2554 the corresponding field of TARGET. */
2556 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2558 register tree field = TREE_PURPOSE (elt);
2559 register enum machine_mode mode;
2564 /* Just ignore missing fields.
2565 We cleared the whole structure, above,
2566 if any fields are missing. */
2570 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2571 unsignedp = TREE_UNSIGNED (field);
2572 mode = DECL_MODE (field);
2573 if (DECL_BIT_FIELD (field))
2576 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2577 /* ??? This case remains to be written. */
2580 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2582 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2583 /* The alignment of TARGET is
2584 at least what its type requires. */
2586 TYPE_ALIGN (type) / BITS_PER_UNIT,
2587 int_size_in_bytes (type));
/* Array case: elements are stored by position, counting from the
   domain's minimum index.  */
2590 else if (TREE_CODE (type) == ARRAY_TYPE)
2594 tree domain = TYPE_DOMAIN (type);
2595 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2596 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2597 tree elttype = TREE_TYPE (type);
2599 /* If the constructor has fewer fields than the structure,
2600 clear the whole structure first. Similarly if this is a
2601 static constructor of a non-BLKmode object. */
2603 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2604 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2605 clear_storage (target, maxelt - minelt + 1);
2607 /* Inform later passes that the old value is dead. */
2608 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2610 /* Store each element of the constructor into
2611 the corresponding element of TARGET, determined
2612 by counting the elements. */
2613 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2615 elt = TREE_CHAIN (elt), i++)
2617 register enum machine_mode mode;
2622 mode = TYPE_MODE (elttype);
2623 bitsize = GET_MODE_BITSIZE (mode);
2624 unsignedp = TREE_UNSIGNED (elttype);
/* Element I lives TYPE_SIZE (elttype) * I bits into the array.  */
2626 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2628 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2629 /* The alignment of TARGET is
2630 at least what its type requires. */
2632 TYPE_ALIGN (type) / BITS_PER_UNIT,
2633 int_size_in_bytes (type));
2641 /* Store the value of EXP (an expression tree)
2642 into a subfield of TARGET which has mode MODE and occupies
2643 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2644 If MODE is VOIDmode, it means that we are storing into a bit-field.
2646 If VALUE_MODE is VOIDmode, return nothing in particular.
2647 UNSIGNEDP is not used in this case.
2649 Otherwise, return an rtx for the value stored. This rtx
2650 has mode VALUE_MODE if that is convenient to do.
2651 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2653 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2654 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
/* NOTE(review): elided view -- the leading numbers are original line
   numbers and some lines between them are missing (parameter decls for
   `target', `exp', `unsignedp', `align', `total_size', and some braces
   and conditions).  Do not treat the gaps as deleted code.  */
2657 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2658 unsignedp, align, total_size)
2660 int bitsize, bitpos;
2661 enum machine_mode mode;
2663 enum machine_mode value_mode;
/* Mask covering the low BITSIZE bits, used to truncate a returned
   value; left 0 when BITSIZE fills a whole host word.  */
2668 HOST_WIDE_INT width_mask = 0;
2670 if (bitsize < HOST_BITS_PER_WIDE_INT)
2671 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2673 /* If we are storing into an unaligned field of an aligned union that is
2674 in a register, we may have the mode of TARGET being an integer mode but
2675 MODE == BLKmode. In that case, get an aligned object whose size and
2676 alignment are the same as TARGET and store TARGET into it (we can avoid
2677 the store if the field being stored is the entire width of TARGET). Then
2678 call ourselves recursively to store the field into a BLKmode version of
2679 that object. Finally, load from the object into TARGET. This is not
2680 very efficient in general, but should only be slightly more expensive
2681 than the otherwise-required unaligned accesses. Perhaps this can be
2682 cleaned up later. */
2685 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2687 rtx object = assign_stack_temp (GET_MODE (target),
2688 GET_MODE_SIZE (GET_MODE (target)), 0);
2689 rtx blk_object = copy_rtx (object);
2691 PUT_MODE (blk_object, BLKmode);
/* Preloading OBJECT is unnecessary if the field overwrites all of it.  */
2693 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2694 emit_move_insn (object, target);
2696 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2699 emit_move_insn (target, object);
2704 /* If the structure is in a register or if the component
2705 is a bit field, we cannot use addressing to access it.
2706 Use bit-field techniques or SUBREG to store in it. */
2708 if (mode == VOIDmode
2709 || (mode != BLKmode && ! direct_store[(int) mode])
2710 || GET_CODE (target) == REG
2711 || GET_CODE (target) == SUBREG)
2713 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2714 /* Store the value in the bitfield. */
2715 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2716 if (value_mode != VOIDmode)
2718 /* The caller wants an rtx for the value. */
2719 /* If possible, avoid refetching from the bitfield itself. */
2721 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2724 enum machine_mode tmode;
/* Unsigned: just mask TEMP down to the field width.  */
2727 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2728 tmode = GET_MODE (temp);
2729 if (tmode == VOIDmode)
/* Signed: shift left then arithmetic-shift right to sign-extend
   the low BITSIZE bits of TEMP.  */
2731 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2732 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2733 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
/* Otherwise re-read the value out of the bit-field just stored.  */
2735 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2736 NULL_RTX, value_mode, 0, align,
/* Addressable memory case: build a MEM for the component and use the
   ordinary store_expr path.  */
2743 rtx addr = XEXP (target, 0);
2746 /* If a value is wanted, it must be the lhs;
2747 so make the address stable for multiple use. */
2749 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2750 && ! CONSTANT_ADDRESS_P (addr)
2751 /* A frame-pointer reference is already stable. */
2752 && ! (GET_CODE (addr) == PLUS
2753 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2754 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2755 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2756 addr = copy_to_reg (addr);
2758 /* Now build a reference to just the desired component. */
2760 to_rtx = change_address (target, mode,
2761 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2762 MEM_IN_STRUCT_P (to_rtx) = 1;
2764 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2768 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2769 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2770 ARRAY_REFs at constant positions and find the ultimate containing object,
2773 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2774 bit position, and *PUNSIGNEDP to the signedness of the field.
2775 If the position of the field is variable, we store a tree
2776 giving the variable offset (in units) in *POFFSET.
2777 This offset is in addition to the bit position.
2778 If the position is not variable, we store 0 in *POFFSET.
2780 If any of the extraction expressions is volatile,
2781 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2783 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2784 is a mode that can be used to access the field. In that case, *PBITSIZE
2787 If the field describes a variable-sized object, *PMODE is set to
2788 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2789 this case, but the address of the object can be found. */
/* NOTE(review): elided view -- lines between the embedded original
   line numbers are missing (most parameter decls, local decls such as
   `size_tree', `offset', `constant', `var', and the loop header of
   the main while loop).  Do not treat the gaps as deleted code.  */
2792 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2797 enum machine_mode *pmode;
2802 enum machine_mode mode = VOIDmode;
/* First determine the size of the referenced field, from the field
   decl (COMPONENT_REF), the explicit size operand (BIT_FIELD_REF), or
   the expression's type (other references).  */
2805 if (TREE_CODE (exp) == COMPONENT_REF)
2807 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2808 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2809 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2810 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2812 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2814 size_tree = TREE_OPERAND (exp, 1);
2815 *punsignedp = TREE_UNSIGNED (exp);
2819 mode = TYPE_MODE (TREE_TYPE (exp));
2820 *pbitsize = GET_MODE_BITSIZE (mode);
2821 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
/* A non-constant size means a variable-sized field: report -1.  */
2826 if (TREE_CODE (size_tree) != INTEGER_CST)
2827 mode = BLKmode, *pbitsize = -1;
2829 *pbitsize = TREE_INT_CST_LOW (size_tree);
2832 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2833 and find the ultimate containing object. */
2839 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2841 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2842 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2843 : TREE_OPERAND (exp, 2));
/* A PLUS_EXPR position splits into a constant part (added to
   *PBITPOS) and a variable part (accumulated into OFFSET, in units).  */
2845 if (TREE_CODE (pos) == PLUS_EXPR)
2848 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2850 constant = TREE_OPERAND (pos, 0);
2851 var = TREE_OPERAND (pos, 1);
2853 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2855 constant = TREE_OPERAND (pos, 1);
2856 var = TREE_OPERAND (pos, 0);
2860 *pbitpos += TREE_INT_CST_LOW (constant);
2862 offset = size_binop (PLUS_EXPR, offset,
2863 size_binop (FLOOR_DIV_EXPR, var,
2864 size_int (BITS_PER_UNIT)));
2866 offset = size_binop (FLOOR_DIV_EXPR, var,
2867 size_int (BITS_PER_UNIT));
2869 else if (TREE_CODE (pos) == INTEGER_CST)
2870 *pbitpos += TREE_INT_CST_LOW (pos);
2873 /* Assume here that the offset is a multiple of a unit.
2874 If not, there should be an explicitly added constant. */
2876 offset = size_binop (PLUS_EXPR, offset,
2877 size_binop (FLOOR_DIV_EXPR, pos,
2878 size_int (BITS_PER_UNIT)));
2880 offset = size_binop (FLOOR_DIV_EXPR, pos,
2881 size_int (BITS_PER_UNIT));
/* Constant-index array reference: index * element size, in bits.  */
2885 else if (TREE_CODE (exp) == ARRAY_REF
2886 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2887 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2889 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2890 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
/* Stop descending unless EXP is a NON_LVALUE_EXPR or a mode-preserving
   NOP/CONVERT, which can be looked through safely.  */
2892 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2893 && ! ((TREE_CODE (exp) == NOP_EXPR
2894 || TREE_CODE (exp) == CONVERT_EXPR)
2895 && (TYPE_MODE (TREE_TYPE (exp))
2896 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2899 /* If any reference in the chain is volatile, the effect is volatile. */
2900 if (TREE_THIS_VOLATILE (exp))
2902 exp = TREE_OPERAND (exp, 0);
2905 /* If this was a bit-field, see if there is a mode that allows direct
2906 access in case EXP is in memory. */
2907 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2909 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2910 if (mode == BLKmode)
2917 /* We aren't finished fixing the callers to really handle nonzero offset. */
2925 /* Given an rtx VALUE that may contain additions and multiplications,
2926 return an equivalent value that just refers to a register or memory.
2927 This is done by generating instructions to perform the arithmetic
2928 and returning a pseudo-register containing the value.
2930 The returned value may be a REG, SUBREG, MEM or constant. */
/* NOTE(review): elided view -- lines between the embedded original
   line numbers are missing (parameter decls, locals `tmp' and `op2',
   several braces, and fall-through returns).  Do not treat the gaps
   as deleted code.  */
2933 force_operand (value, target)
2936 register optab binoptab = 0;
2937 /* Use a temporary to force order of execution of calls to
2941 /* Use subtarget as the target for operand 0 of a binary operation. */
2942 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
/* Classify the top-level operation; only PLUS, MINUS and MULT are
   expanded here, anything else falls through below.  */
2944 if (GET_CODE (value) == PLUS)
2945 binoptab = add_optab;
2946 else if (GET_CODE (value) == MINUS)
2947 binoptab = sub_optab;
2948 else if (GET_CODE (value) == MULT)
2950 op2 = XEXP (value, 1);
2951 if (!CONSTANT_P (op2)
2952 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Force operand 0 first so SUBTARGET is consumed before OP2.  */
2954 tmp = force_operand (XEXP (value, 0), subtarget);
2955 return expand_mult (GET_MODE (value), tmp,
2956 force_operand (op2, NULL_RTX),
2962 op2 = XEXP (value, 1);
2963 if (!CONSTANT_P (op2)
2964 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Subtracting a constant is cheaper as adding its negation.  */
2966 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2968 binoptab = add_optab;
2969 op2 = negate_rtx (GET_MODE (value), op2);
2972 /* Check for an addition with OP2 a constant integer and our first
2973 operand a PLUS of a virtual register and something else. In that
2974 case, we want to emit the sum of the virtual register and the
2975 constant first and then add the other value. This allows virtual
2976 register instantiation to simply modify the constant rather than
2977 creating another one around this addition. */
2978 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2979 && GET_CODE (XEXP (value, 0)) == PLUS
2980 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2981 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2982 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2984 rtx temp = expand_binop (GET_MODE (value), binoptab,
2985 XEXP (XEXP (value, 0), 0), op2,
2986 subtarget, 0, OPTAB_LIB_WIDEN);
2987 return expand_binop (GET_MODE (value), binoptab, temp,
2988 force_operand (XEXP (XEXP (value, 0), 1), 0),
2989 target, 0, OPTAB_LIB_WIDEN);
2992 tmp = force_operand (XEXP (value, 0), subtarget);
2993 return expand_binop (GET_MODE (value), binoptab, tmp,
2994 force_operand (op2, NULL_RTX),
2995 target, 0, OPTAB_LIB_WIDEN);
2996 /* We give UNSIGNEDP = 0 to expand_binop
2997 because the only operations we are expanding here are signed ones. */
3002 /* Subroutine of expand_expr:
3003 save the non-copied parts (LIST) of an expr (LHS), and return a list
3004 which can restore these values to their previous values,
3005 should something modify their storage. */
/* NOTE(review): elided view -- parameter decls, the `parts'/`tail'
   locals, the final `return parts;' and some closing braces fall in
   the gaps between the embedded original line numbers.  */
3008 save_noncopied_parts (lhs, list)
3015 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* Nested TREE_LISTs are flattened recursively into PARTS.  */
3016 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3017 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3020 tree part = TREE_VALUE (tail);
3021 tree part_type = TREE_TYPE (part);
3022 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
/* Each part is saved into a fresh stack temporary.  */
3023 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3024 int_size_in_bytes (part_type), 0);
3025 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3026 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3027 parts = tree_cons (to_be_saved,
3028 build (RTL_EXPR, part_type, NULL_TREE,
3031 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3036 /* Subroutine of expand_expr:
3037 record the non-copied parts (LIST) of an expr (LHS), and return a list
3038 which specifies the initial values of these parts. */
/* NOTE(review): elided view -- parameter decls, the `parts'/`tail'
   locals, and the final `return parts;' fall in the gaps between the
   embedded original line numbers.  */
3041 init_noncopied_parts (lhs, list)
3048 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* Nested TREE_LISTs are flattened recursively into PARTS.  */
3049 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3050 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3053 tree part = TREE_VALUE (tail);
3054 tree part_type = TREE_TYPE (part);
/* Pair each part's initial value (TREE_PURPOSE) with a COMPONENT_REF
   naming the field of LHS it initializes.  */
3055 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3056 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3061 /* Subroutine of expand_expr: return nonzero iff there is no way that
3062 EXP can reference X, which is being modified. */
/* NOTE(review): elided view -- parameter decls, the `exp_rtl' local,
   several case labels, early `return 0;'/`return 1;' statements and
   braces fall in the gaps between the embedded original line numbers.  */
3065 safe_from_p (x, exp)
3075 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3076 find the underlying pseudo. */
3077 if (GET_CODE (x) == SUBREG)
3080 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3084 /* If X is a location in the outgoing argument area, it is always safe. */
3085 if (GET_CODE (x) == MEM
3086 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3087 || (GET_CODE (XEXP (x, 0)) == PLUS
3088 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
/* Dispatch on the general class of tree code first.  */
3091 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3094 exp_rtl = DECL_RTL (exp);
/* A TREE_LIST is safe only if its value and its chain both are.  */
3101 if (TREE_CODE (exp) == TREE_LIST)
3102 return ((TREE_VALUE (exp) == 0
3103 || safe_from_p (x, TREE_VALUE (exp)))
3104 && (TREE_CHAIN (exp) == 0
3105 || safe_from_p (x, TREE_CHAIN (exp))));
3110 return safe_from_p (x, TREE_OPERAND (exp, 0));
3114 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3115 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3119 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3120 the expression. If it is set, we conflict iff we are that rtx or
3121 both are in memory. Otherwise, we check all operands of the
3122 expression recursively. */
3124 switch (TREE_CODE (exp))
3127 return staticp (TREE_OPERAND (exp, 0));
3130 if (GET_CODE (x) == MEM)
3135 exp_rtl = CALL_EXPR_RTL (exp);
3138 /* Assume that the call will clobber all hard registers and
3140 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3141 || GET_CODE (x) == MEM)
3148 exp_rtl = RTL_EXPR_RTL (exp);
3150 /* We don't know what this can modify. */
3155 case WITH_CLEANUP_EXPR:
3156 exp_rtl = RTL_EXPR_RTL (exp);
3160 exp_rtl = SAVE_EXPR_RTL (exp);
3164 /* The only operand we look at is operand 1. The rest aren't
3165 part of the expression. */
3166 return safe_from_p (x, TREE_OPERAND (exp, 1));
3168 case METHOD_CALL_EXPR:
3169 /* This takes a rtx argument, but shouldn't appear here. */
3173 /* If we have an rtx, we do not need to scan our operands. */
/* Otherwise recurse over every operand of EXP.  */
3177 nops = tree_code_length[(int) TREE_CODE (exp)];
3178 for (i = 0; i < nops; i++)
3179 if (TREE_OPERAND (exp, i) != 0
3180 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3184 /* If we have an rtl, find any enclosed object. Then see if we conflict
3188 if (GET_CODE (exp_rtl) == SUBREG)
3190 exp_rtl = SUBREG_REG (exp_rtl);
3191 if (GET_CODE (exp_rtl) == REG
3192 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3196 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3197 are memory and EXP is not readonly. */
3198 return ! (rtx_equal_p (x, exp_rtl)
3199 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3200 && ! TREE_READONLY (exp)));
3203 /* If we reach here, it is safe. */
3207 /* Subroutine of expand_expr: return nonzero iff EXP is an
3208 expression whose type is statically determinable. */
/* NOTE(review): the function header itself (presumably `fixed_type_p')
   and the returns fall in the gap of this elided view; only the
   predicate's condition is visible below.  */
3214 if (TREE_CODE (exp) == PARM_DECL
3215 || TREE_CODE (exp) == VAR_DECL
3216 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3217 || TREE_CODE (exp) == COMPONENT_REF
3218 || TREE_CODE (exp) == ARRAY_REF)
3223 /* expand_expr: generate code for computing expression EXP.
3224 An rtx for the computed value is returned. The value is never null.
3225 In the case of a void EXP, const0_rtx is returned.
3227 The value may be stored in TARGET if TARGET is nonzero.
3228 TARGET is just a suggestion; callers must assume that
3229 the rtx returned may not be the same as TARGET.
3231 If TARGET is CONST0_RTX, it means that the value will be ignored.
3233 If TMODE is not VOIDmode, it suggests generating the
3234 result in mode TMODE. But this is done only when convenient.
3235 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3236 TMODE is just a suggestion; callers must assume that
3237 the rtx returned may not have mode TMODE.
3239 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3240 with a constant address even if that address is not normally legitimate.
3241 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3243 If MODIFIER is EXPAND_SUM then when EXP is an addition
3244 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3245 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3246 products as above, or REG or MEM, or constant.
3247 Ordinarily in such cases we would output mul or add instructions
3248 and then return a pseudo reg containing the sum.
3250 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3251 it also marks a label as absolutely required (it can't be dead).
3252 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3253 This is used for outputting expressions used in initializers. */
3256 expand_expr (exp, target, tmode, modifier)
3259 enum machine_mode tmode;
3260 enum expand_modifier modifier;
3262 register rtx op0, op1, temp;
3263 tree type = TREE_TYPE (exp);
3264 int unsignedp = TREE_UNSIGNED (type);
3265 register enum machine_mode mode = TYPE_MODE (type);
3266 register enum tree_code code = TREE_CODE (exp);
3268 /* Use subtarget as the target for operand 0 of a binary operation. */
3269 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3270 rtx original_target = target;
3271 int ignore = target == const0_rtx;
3274 /* Don't use hard regs as subtargets, because the combiner
3275 can only handle pseudo regs. */
3276 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3278 /* Avoid subtargets inside loops,
3279 since they hide some invariant expressions. */
3280 if (preserve_subexpressions_p ())
3283 if (ignore) target = 0, original_target = 0;
3285 /* If will do cse, generate all results into pseudo registers
3286 since 1) that allows cse to find more things
3287 and 2) otherwise cse could produce an insn the machine
3290 if (! cse_not_expected && mode != BLKmode && target
3291 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3294 /* Ensure we reference a volatile object even if value is ignored. */
3295 if (ignore && TREE_THIS_VOLATILE (exp)
3296 && mode != VOIDmode && mode != BLKmode)
3298 target = gen_reg_rtx (mode);
3299 temp = expand_expr (exp, target, VOIDmode, modifier);
3301 emit_move_insn (target, temp);
3309 tree function = decl_function_context (exp);
3310 /* Handle using a label in a containing function. */
3311 if (function != current_function_decl && function != 0)
3313 struct function *p = find_function_data (function);
3314 /* Allocate in the memory associated with the function
3315 that the label is in. */
3316 push_obstacks (p->function_obstack,
3317 p->function_maybepermanent_obstack);
3319 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3320 label_rtx (exp), p->forced_labels);
3323 else if (modifier == EXPAND_INITIALIZER)
3324 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3325 label_rtx (exp), forced_labels);
3326 temp = gen_rtx (MEM, FUNCTION_MODE,
3327 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3328 if (function != current_function_decl && function != 0)
3329 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3334 if (DECL_RTL (exp) == 0)
3336 error_with_decl (exp, "prior parameter's size depends on `%s'");
3337 return CONST0_RTX (mode);
3343 if (DECL_RTL (exp) == 0)
3345 /* Ensure variable marked as used
3346 even if it doesn't go through a parser. */
3347 TREE_USED (exp) = 1;
3348 /* Handle variables inherited from containing functions. */
3349 context = decl_function_context (exp);
3351 /* We treat inline_function_decl as an alias for the current function
3352 because that is the inline function whose vars, types, etc.
3353 are being merged into the current function.
3354 See expand_inline_function. */
3355 if (context != 0 && context != current_function_decl
3356 && context != inline_function_decl
3357 /* If var is static, we don't need a static chain to access it. */
3358 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3359 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3363 /* Mark as non-local and addressable. */
3364 DECL_NONLOCAL (exp) = 1;
3365 mark_addressable (exp);
3366 if (GET_CODE (DECL_RTL (exp)) != MEM)
3368 addr = XEXP (DECL_RTL (exp), 0);
3369 if (GET_CODE (addr) == MEM)
3370 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3372 addr = fix_lexical_addr (addr, exp);
3373 return change_address (DECL_RTL (exp), mode, addr);
3376 /* This is the case of an array whose size is to be determined
3377 from its initializer, while the initializer is still being parsed.
3379 if (GET_CODE (DECL_RTL (exp)) == MEM
3380 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3381 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3382 XEXP (DECL_RTL (exp), 0));
3383 if (GET_CODE (DECL_RTL (exp)) == MEM
3384 && modifier != EXPAND_CONST_ADDRESS
3385 && modifier != EXPAND_SUM
3386 && modifier != EXPAND_INITIALIZER)
3388 /* DECL_RTL probably contains a constant address.
3389 On RISC machines where a constant address isn't valid,
3390 make some insns to get that address into a register. */
3391 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3393 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3394 return change_address (DECL_RTL (exp), VOIDmode,
3395 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3398 /* If the mode of DECL_RTL does not match that of the decl, it
3399 must be a promoted value. We return a SUBREG of the wanted mode,
3400 but mark it so that we know that it was already extended. */
3402 if (GET_CODE (DECL_RTL (exp)) == REG
3403 && GET_MODE (DECL_RTL (exp)) != mode)
3405 enum machine_mode decl_mode = DECL_MODE (exp);
3407 /* Get the signedness used for this variable. Ensure we get the
3408 same mode we got when the variable was declared. */
3410 PROMOTE_MODE (decl_mode, unsignedp, type);
3412 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3415 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3416 SUBREG_PROMOTED_VAR_P (temp) = 1;
3417 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3421 return DECL_RTL (exp);
3424 return immed_double_const (TREE_INT_CST_LOW (exp),
3425 TREE_INT_CST_HIGH (exp),
3429 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3432 /* If optimized, generate immediate CONST_DOUBLE
3433 which will be turned into memory by reload if necessary.
3435 We used to force a register so that loop.c could see it. But
3436 this does not allow gen_* patterns to perform optimizations with
3437 the constants. It also produces two insns in cases like "x = 1.0;".
3438 On most machines, floating-point constants are not permitted in
3439 many insns, so we'd end up copying it to a register in any case.
3441 Now, we do the copying in expand_binop, if appropriate. */
3442 return immed_real_const (exp);
3446 if (! TREE_CST_RTL (exp))
3447 output_constant_def (exp);
3449 /* TREE_CST_RTL probably contains a constant address.
3450 On RISC machines where a constant address isn't valid,
3451 make some insns to get that address into a register. */
3452 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3453 && modifier != EXPAND_CONST_ADDRESS
3454 && modifier != EXPAND_INITIALIZER
3455 && modifier != EXPAND_SUM
3456 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3457 return change_address (TREE_CST_RTL (exp), VOIDmode,
3458 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3459 return TREE_CST_RTL (exp);
3462 context = decl_function_context (exp);
3463 /* We treat inline_function_decl as an alias for the current function
3464 because that is the inline function whose vars, types, etc.
3465 are being merged into the current function.
3466 See expand_inline_function. */
3467 if (context == current_function_decl || context == inline_function_decl)
3470 /* If this is non-local, handle it. */
3473 temp = SAVE_EXPR_RTL (exp);
3474 if (temp && GET_CODE (temp) == REG)
3476 put_var_into_stack (exp);
3477 temp = SAVE_EXPR_RTL (exp);
3479 if (temp == 0 || GET_CODE (temp) != MEM)
3481 return change_address (temp, mode,
3482 fix_lexical_addr (XEXP (temp, 0), exp));
3484 if (SAVE_EXPR_RTL (exp) == 0)
3486 if (mode == BLKmode)
3488 = assign_stack_temp (mode,
3489 int_size_in_bytes (TREE_TYPE (exp)), 0);
3492 enum machine_mode var_mode = mode;
3494 if (TREE_CODE (type) == INTEGER_TYPE
3495 || TREE_CODE (type) == ENUMERAL_TYPE
3496 || TREE_CODE (type) == BOOLEAN_TYPE
3497 || TREE_CODE (type) == CHAR_TYPE
3498 || TREE_CODE (type) == REAL_TYPE
3499 || TREE_CODE (type) == POINTER_TYPE
3500 || TREE_CODE (type) == OFFSET_TYPE)
3502 PROMOTE_MODE (var_mode, unsignedp, type);
3505 temp = gen_reg_rtx (var_mode);
3508 SAVE_EXPR_RTL (exp) = temp;
3509 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3510 if (!optimize && GET_CODE (temp) == REG)
3511 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3515 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3516 must be a promoted value. We return a SUBREG of the wanted mode,
3517 but mark it so that we know that it was already extended. Note
3518 that `unsignedp' was modified above in this case. */
3520 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3521 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3523 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3524 SUBREG_PROMOTED_VAR_P (temp) = 1;
3525 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3529 return SAVE_EXPR_RTL (exp);
3532 /* Exit the current loop if the body-expression is true. */
3534 rtx label = gen_label_rtx ();
3535 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3536 expand_exit_loop (NULL_PTR);
3542 expand_start_loop (1);
3543 expand_expr_stmt (TREE_OPERAND (exp, 0));
3550 tree vars = TREE_OPERAND (exp, 0);
3551 int vars_need_expansion = 0;
3553 /* Need to open a binding contour here because
3554 if there are any cleanups they must be contained here. */
3555 expand_start_bindings (0);
3557 /* Mark the corresponding BLOCK for output in its proper place. */
3558 if (TREE_OPERAND (exp, 2) != 0
3559 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3560 insert_block (TREE_OPERAND (exp, 2));
3562 /* If VARS have not yet been expanded, expand them now. */
3565 if (DECL_RTL (vars) == 0)
3567 vars_need_expansion = 1;
3570 expand_decl_init (vars);
3571 vars = TREE_CHAIN (vars);
3574 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3576 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3582 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3584 emit_insns (RTL_EXPR_SEQUENCE (exp));
3585 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3586 return RTL_EXPR_RTL (exp);
3589 /* All elts simple constants => refer to a constant in memory. But
3590 if this is a non-BLKmode mode, let it store a field at a time
3591 since that should make a CONST_INT or CONST_DOUBLE when we
3593 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3595 rtx constructor = output_constant_def (exp);
3596 if (modifier != EXPAND_CONST_ADDRESS
3597 && modifier != EXPAND_INITIALIZER
3598 && modifier != EXPAND_SUM
3599 && !memory_address_p (GET_MODE (constructor),
3600 XEXP (constructor, 0)))
3601 constructor = change_address (constructor, VOIDmode,
3602 XEXP (constructor, 0));
3609 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3610 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3615 if (target == 0 || ! safe_from_p (target, exp))
3617 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3618 target = gen_reg_rtx (mode);
3621 enum tree_code c = TREE_CODE (type);
3623 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3624 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
3625 MEM_IN_STRUCT_P (target) = 1;
3628 store_constructor (exp, target);
3634 tree exp1 = TREE_OPERAND (exp, 0);
3637 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3638 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3639 This code has the same general effect as simply doing
3640 expand_expr on the save expr, except that the expression PTR
3641 is computed for use as a memory address. This means different
3642 code, suitable for indexing, may be generated. */
3643 if (TREE_CODE (exp1) == SAVE_EXPR
3644 && SAVE_EXPR_RTL (exp1) == 0
3645 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3646 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3647 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3649 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3650 VOIDmode, EXPAND_SUM);
3651 op0 = memory_address (mode, temp);
3652 op0 = copy_all_regs (op0);
3653 SAVE_EXPR_RTL (exp1) = op0;
3657 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3658 op0 = memory_address (mode, op0);
3661 temp = gen_rtx (MEM, mode, op0);
3662 /* If address was computed by addition,
3663 mark this as an element of an aggregate. */
3664 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3665 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3666 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3667 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3668 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3669 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3670 || (TREE_CODE (exp1) == ADDR_EXPR
3671 && (exp2 = TREE_OPERAND (exp1, 0))
3672 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3673 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3674 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3675 MEM_IN_STRUCT_P (temp) = 1;
3676 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3677 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3678 a location is accessed through a pointer to const does not mean
3679 that the value there can never change. */
3680 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3686 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3687 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3689 /* Nonconstant array index or nonconstant element size.
3690 Generate the tree for *(&array+index) and expand that,
3691 except do it in a language-independent way
3692 and don't complain about non-lvalue arrays.
3693 `mark_addressable' should already have been called
3694 for any array for which this case will be reached. */
3696 /* Don't forget the const or volatile flag from the array element. */
3697 tree variant_type = build_type_variant (type,
3698 TREE_READONLY (exp),
3699 TREE_THIS_VOLATILE (exp));
3700 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3701 TREE_OPERAND (exp, 0));
3702 tree index = TREE_OPERAND (exp, 1);
3705 /* Convert the integer argument to a type the same size as a pointer
3706 so the multiply won't overflow spuriously. */
3707 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3708 index = convert (type_for_size (POINTER_SIZE, 0), index);
3710 /* Don't think the address has side effects
3711 just because the array does.
3712 (In some cases the address might have side effects,
3713 and we fail to record that fact here. However, it should not
3714 matter, since expand_expr should not care.) */
3715 TREE_SIDE_EFFECTS (array_adr) = 0;
3717 elt = build1 (INDIRECT_REF, type,
3718 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3720 fold (build (MULT_EXPR,
3721 TYPE_POINTER_TO (variant_type),
3722 index, size_in_bytes (type))))));
3724 /* Volatility, etc., of new expression is same as old expression. */
3725 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3726 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3727 TREE_READONLY (elt) = TREE_READONLY (exp);
3729 return expand_expr (elt, target, tmode, modifier);
3732 /* Fold an expression like: "foo"[2].
3733 This is not done in fold so it won't happen inside &. */
3736 tree arg0 = TREE_OPERAND (exp, 0);
3737 tree arg1 = TREE_OPERAND (exp, 1);
3739 if (TREE_CODE (arg0) == STRING_CST
3740 && TREE_CODE (arg1) == INTEGER_CST
3741 && !TREE_INT_CST_HIGH (arg1)
3742 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3744 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3746 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3747 TREE_TYPE (exp) = integer_type_node;
3748 return expand_expr (exp, target, tmode, modifier);
3750 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3752 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3753 TREE_TYPE (exp) = integer_type_node;
3754 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3759 /* If this is a constant index into a constant array,
3760 just get the value from the array. Handle both the cases when
3761 we have an explicit constructor and when our operand is a variable
3762 that was declared const. */
3764 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3765 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3767 tree index = fold (TREE_OPERAND (exp, 1));
3768 if (TREE_CODE (index) == INTEGER_CST
3769 && TREE_INT_CST_HIGH (index) == 0)
3771 int i = TREE_INT_CST_LOW (index);
3772 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3775 elem = TREE_CHAIN (elem);
3777 return expand_expr (fold (TREE_VALUE (elem)), target,
3782 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3783 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3784 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3785 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3786 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3788 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3791 tree index = fold (TREE_OPERAND (exp, 1));
3792 if (TREE_CODE (index) == INTEGER_CST
3793 && TREE_INT_CST_HIGH (index) == 0)
3795 int i = TREE_INT_CST_LOW (index);
3796 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3798 if (TREE_CODE (init) == CONSTRUCTOR)
3800 tree elem = CONSTRUCTOR_ELTS (init);
3803 elem = TREE_CHAIN (elem);
3805 return expand_expr (fold (TREE_VALUE (elem)), target,
3808 else if (TREE_CODE (init) == STRING_CST
3809 && i < TREE_STRING_LENGTH (init))
3811 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3812 return convert_to_mode (mode, temp, 0);
3816 /* Treat array-ref with constant index as a component-ref. */
3820 /* If the operand is a CONSTRUCTOR, we can just extract the
3821 appropriate field if it is present. */
3822 if (code != ARRAY_REF
3823 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3827 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3828 elt = TREE_CHAIN (elt))
3829 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3830 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3834 enum machine_mode mode1;
3839 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3840 &mode1, &unsignedp, &volatilep);
3842 /* In some cases, we will be offsetting OP0's address by a constant.
3843 So get it as a sum, if possible. If we will be using it
3844 directly in an insn, we validate it. */
3845 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3847 /* If this is a constant, put it into a register if it is a
3848 legitimate constant and memory if it isn't. */
3849 if (CONSTANT_P (op0))
3851 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3852 if (LEGITIMATE_CONSTANT_P (op0))
3853 op0 = force_reg (mode, op0);
3855 op0 = validize_mem (force_const_mem (mode, op0));
3860 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3862 if (GET_CODE (op0) != MEM)
3864 op0 = change_address (op0, VOIDmode,
3865 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3866 force_reg (Pmode, offset_rtx)));
3869 /* Don't forget about volatility even if this is a bitfield. */
3870 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3872 op0 = copy_rtx (op0);
3873 MEM_VOLATILE_P (op0) = 1;
3876 if (mode1 == VOIDmode
3877 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3878 && modifier != EXPAND_CONST_ADDRESS
3879 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3880 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3882 /* In cases where an aligned union has an unaligned object
3883 as a field, we might be extracting a BLKmode value from
3884 an integer-mode (e.g., SImode) object. Handle this case
3885 by doing the extract into an object as wide as the field
3886 (which we know to be the width of a basic mode), then
3887 storing into memory, and changing the mode to BLKmode. */
3888 enum machine_mode ext_mode = mode;
3890 if (ext_mode == BLKmode)
3891 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3893 if (ext_mode == BLKmode)
3896 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3897 unsignedp, target, ext_mode, ext_mode,
3898 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3899 int_size_in_bytes (TREE_TYPE (tem)));
3900 if (mode == BLKmode)
3902 rtx new = assign_stack_temp (ext_mode,
3903 bitsize / BITS_PER_UNIT, 0);
3905 emit_move_insn (new, op0);
3906 op0 = copy_rtx (new);
3907 PUT_MODE (op0, BLKmode);
3913 /* Get a reference to just this component. */
3914 if (modifier == EXPAND_CONST_ADDRESS
3915 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3916 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3917 (bitpos / BITS_PER_UNIT)));
3919 op0 = change_address (op0, mode1,
3920 plus_constant (XEXP (op0, 0),
3921 (bitpos / BITS_PER_UNIT)));
3922 MEM_IN_STRUCT_P (op0) = 1;
3923 MEM_VOLATILE_P (op0) |= volatilep;
3924 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3927 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3928 convert_move (target, op0, unsignedp);
3934 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3935 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3936 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3937 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3938 MEM_IN_STRUCT_P (temp) = 1;
3939 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3940 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3941 a location is accessed through a pointer to const does not mean
3942 that the value there can never change. */
3943 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3948 /* Intended for a reference to a buffer of a file-object in Pascal.
3949 But it's not certain that a special tree code will really be
3950 necessary for these. INDIRECT_REF might work for them. */
3954 /* IN_EXPR: Inlined pascal set IN expression.
3957 rlo = set_low - (set_low%bits_per_word);
3958 the_word = set [ (index - rlo)/bits_per_word ];
3959 bit_index = index % bits_per_word;
3960 bitmask = 1 << bit_index;
3961 return !!(the_word & bitmask); */
3963 preexpand_calls (exp);
3965 tree set = TREE_OPERAND (exp, 0);
3966 tree index = TREE_OPERAND (exp, 1);
3967 tree set_type = TREE_TYPE (set);
3969 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3970 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3976 rtx diff, quo, rem, addr, bit, result;
3977 rtx setval, setaddr;
3978 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
3981 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
3983 /* If domain is empty, answer is no. */
3984 if (tree_int_cst_lt (set_high_bound, set_low_bound))
3987 index_val = expand_expr (index, 0, VOIDmode, 0);
3988 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
3989 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
3990 setval = expand_expr (set, 0, VOIDmode, 0);
3991 setaddr = XEXP (setval, 0);
3993 /* Compare index against bounds, if they are constant. */
3994 if (GET_CODE (index_val) == CONST_INT
3995 && GET_CODE (lo_r) == CONST_INT)
3997 if (INTVAL (index_val) < INTVAL (lo_r))
4001 if (GET_CODE (index_val) == CONST_INT
4002 && GET_CODE (hi_r) == CONST_INT)
4004 if (INTVAL (hi_r) < INTVAL (index_val))
4008 /* If we get here, we have to generate the code for both cases
4009 (in range and out of range). */
4011 op0 = gen_label_rtx ();
4012 op1 = gen_label_rtx ();
4014 if (! (GET_CODE (index_val) == CONST_INT
4015 && GET_CODE (lo_r) == CONST_INT))
4017 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
4018 emit_jump_insn (gen_blt (op1));
4021 if (! (GET_CODE (index_val) == CONST_INT
4022 && GET_CODE (hi_r) == CONST_INT))
4024 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
4025 emit_jump_insn (gen_bgt (op1));
4028 /* Calculate the element number of bit zero in the first word
4030 if (GET_CODE (lo_r) == CONST_INT)
4031 rlow = gen_rtx (CONST_INT, VOIDmode,
4032 INTVAL (lo_r) & ~ (1 << BITS_PER_UNIT));
4034 rlow = expand_binop (index_mode, and_optab,
4035 lo_r, gen_rtx (CONST_INT, VOIDmode,
4036 ~ (1 << BITS_PER_UNIT)),
4037 0, 0, OPTAB_LIB_WIDEN);
4039 diff = expand_binop (index_mode, sub_optab,
4040 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4042 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4043 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4045 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4046 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4048 addr = memory_address (byte_mode,
4049 expand_binop (index_mode, add_optab,
4051 /* Extract the bit we want to examine */
4052 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4053 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4054 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4055 1, OPTAB_LIB_WIDEN);
4056 emit_move_insn (target, result);
4058 /* Output the code to handle the out-of-range case. */
4061 emit_move_insn (target, const0_rtx);
4066 case WITH_CLEANUP_EXPR:
4067 if (RTL_EXPR_RTL (exp) == 0)
4070 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4072 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4073 /* That's it for this cleanup. */
4074 TREE_OPERAND (exp, 2) = 0;
4076 return RTL_EXPR_RTL (exp);
4079 /* Check for a built-in function. */
4080 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4081 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4082 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4083 return expand_builtin (exp, target, subtarget, tmode, ignore);
4084 /* If this call was expanded already by preexpand_calls,
4085 just return the result we got. */
4086 if (CALL_EXPR_RTL (exp) != 0)
4087 return CALL_EXPR_RTL (exp);
4088 return expand_call (exp, target, ignore);
4090 case NON_LVALUE_EXPR:
4093 case REFERENCE_EXPR:
4094 if (TREE_CODE (type) == VOID_TYPE || ignore)
4096 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4099 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4100 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4101 if (TREE_CODE (type) == UNION_TYPE)
4103 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4106 if (mode == BLKmode)
4108 if (TYPE_SIZE (type) == 0
4109 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4111 target = assign_stack_temp (BLKmode,
4112 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4113 + BITS_PER_UNIT - 1)
4114 / BITS_PER_UNIT, 0);
4117 target = gen_reg_rtx (mode);
4119 if (GET_CODE (target) == MEM)
4120 /* Store data into beginning of memory target. */
4121 store_expr (TREE_OPERAND (exp, 0),
4122 change_address (target, TYPE_MODE (valtype), 0), 0);
4124 else if (GET_CODE (target) == REG)
4125 /* Store this field into a union of the proper type. */
4126 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4127 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4129 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4133 /* Return the entire union. */
4136 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4137 if (GET_MODE (op0) == mode)
4139 /* If arg is a constant integer being extended from a narrower mode,
4140 we must really truncate to get the extended bits right. Otherwise
4141 (unsigned long) (unsigned char) ("\377"[0])
4142 would come out as ffffffff. */
4143 if (GET_MODE (op0) == VOIDmode
4144 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4145 < GET_MODE_BITSIZE (mode)))
4147 /* MODE must be narrower than HOST_BITS_PER_INT. */
4148 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4150 if (width < HOST_BITS_PER_WIDE_INT)
4152 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4153 : CONST_DOUBLE_LOW (op0));
4154 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4155 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4156 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4158 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4160 op0 = GEN_INT (val);
4164 op0 = (simplify_unary_operation
4165 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4166 ? ZERO_EXTEND : SIGN_EXTEND),
4168 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4173 if (GET_MODE (op0) == VOIDmode)
4175 if (modifier == EXPAND_INITIALIZER)
4176 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4177 if (flag_force_mem && GET_CODE (op0) == MEM)
4178 op0 = copy_to_reg (op0);
4181 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4183 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4187 /* We come here from MINUS_EXPR when the second operand is a constant. */
4189 this_optab = add_optab;
4191 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4192 something else, make sure we add the register to the constant and
4193 then to the other thing. This case can occur during strength
4194 reduction and doing it this way will produce better code if the
4195 frame pointer or argument pointer is eliminated.
4197 fold-const.c will ensure that the constant is always in the inner
4198 PLUS_EXPR, so the only case we need to do anything about is if
4199 sp, ap, or fp is our second argument, in which case we must swap
4200 the innermost first argument and our second argument. */
4202 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4203 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4204 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4205 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4206 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4207 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4209 tree t = TREE_OPERAND (exp, 1);
4211 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4212 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4215 /* If the result is to be Pmode and we are adding an integer to
4216 something, we might be forming a constant. So try to use
4217 plus_constant. If it produces a sum and we can't accept it,
4218 use force_operand. This allows P = &ARR[const] to generate
4219 efficient code on machines where a SYMBOL_REF is not a valid
4222 If this is an EXPAND_SUM call, always return the sum. */
4223 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4224 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4225 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4228 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4230 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4231 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4232 op1 = force_operand (op1, target);
4236 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4237 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4238 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4241 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4243 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4244 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4245 op0 = force_operand (op0, target);
4249 /* No sense saving up arithmetic to be done
4250 if it's all in the wrong mode to form part of an address.
4251 And force_operand won't know whether to sign-extend or
4253 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4254 || mode != Pmode) goto binop;
4256 preexpand_calls (exp);
4257 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4260 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4261 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4263 /* Make sure any term that's a sum with a constant comes last. */
4264 if (GET_CODE (op0) == PLUS
4265 && CONSTANT_P (XEXP (op0, 1)))
4271 /* If adding to a sum including a constant,
4272 associate it to put the constant outside. */
4273 if (GET_CODE (op1) == PLUS
4274 && CONSTANT_P (XEXP (op1, 1)))
4276 rtx constant_term = const0_rtx;
4278 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4281 /* Ensure that MULT comes first if there is one. */
4282 else if (GET_CODE (op0) == MULT)
4283 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4285 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4287 /* Let's also eliminate constants from op0 if possible. */
4288 op0 = eliminate_constant_term (op0, &constant_term);
4290 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4291 their sum should be a constant. Form it into OP1, since the
4292 result we want will then be OP0 + OP1. */
4294 temp = simplify_binary_operation (PLUS, mode, constant_term,
4299 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4302 /* Put a constant term last and put a multiplication first. */
4303 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4304 temp = op1, op1 = op0, op0 = temp;
4306 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4307 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4310 /* Handle difference of two symbolic constants,
4311 for the sake of an initializer. */
4312 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4313 && really_constant_p (TREE_OPERAND (exp, 0))
4314 && really_constant_p (TREE_OPERAND (exp, 1)))
4316 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4317 VOIDmode, modifier);
4318 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4319 VOIDmode, modifier);
4320 return gen_rtx (MINUS, mode, op0, op1);
4322 /* Convert A - const to A + (-const). */
4323 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4325 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4326 fold (build1 (NEGATE_EXPR, type,
4327 TREE_OPERAND (exp, 1))));
4330 this_optab = sub_optab;
4334 preexpand_calls (exp);
4335 /* If first operand is constant, swap them.
4336 Thus the following special case checks need only
4337 check the second operand. */
4338 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4340 register tree t1 = TREE_OPERAND (exp, 0);
4341 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4342 TREE_OPERAND (exp, 1) = t1;
4345 /* Attempt to return something suitable for generating an
4346 indexed address, for machines that support that. */
4348 if (modifier == EXPAND_SUM && mode == Pmode
4349 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4350 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4352 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4354 /* Apply distributive law if OP0 is x+c. */
4355 if (GET_CODE (op0) == PLUS
4356 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4357 return gen_rtx (PLUS, mode,
4358 gen_rtx (MULT, mode, XEXP (op0, 0),
4359 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4360 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4361 * INTVAL (XEXP (op0, 1))));
4363 if (GET_CODE (op0) != REG)
4364 op0 = force_operand (op0, NULL_RTX);
4365 if (GET_CODE (op0) != REG)
4366 op0 = copy_to_mode_reg (mode, op0);
4368 return gen_rtx (MULT, mode, op0,
4369 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4372 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4375 /* Check for multiplying things that have been extended
4376 from a narrower type. If this machine supports multiplying
4377 in that narrower type with a result in the desired type,
4378 do it that way, and avoid the explicit type-conversion. */
4379 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4380 && TREE_CODE (type) == INTEGER_TYPE
4381 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4382 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4383 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4384 && int_fits_type_p (TREE_OPERAND (exp, 1),
4385 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4386 /* Don't use a widening multiply if a shift will do. */
4387 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4388 > HOST_BITS_PER_WIDE_INT)
4389 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4391 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4392 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4394 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4395 /* If both operands are extended, they must either both
4396 be zero-extended or both be sign-extended. */
4397 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4399 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4401 enum machine_mode innermode
4402 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4403 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4404 ? umul_widen_optab : smul_widen_optab);
4405 if (mode == GET_MODE_WIDER_MODE (innermode)
4406 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4408 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4409 NULL_RTX, VOIDmode, 0);
4410 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4411 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4414 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4415 NULL_RTX, VOIDmode, 0);
4419 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4420 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4421 return expand_mult (mode, op0, op1, target, unsignedp);
4423 case TRUNC_DIV_EXPR:
4424 case FLOOR_DIV_EXPR:
4426 case ROUND_DIV_EXPR:
4427 case EXACT_DIV_EXPR:
4428 preexpand_calls (exp);
4429 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4431 /* Possible optimization: compute the dividend with EXPAND_SUM
4432 then if the divisor is constant can optimize the case
4433 where some terms of the dividend have coeffs divisible by it. */
4434 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4435 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4436 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4439 this_optab = flodiv_optab;
4442 case TRUNC_MOD_EXPR:
4443 case FLOOR_MOD_EXPR:
4445 case ROUND_MOD_EXPR:
4446 preexpand_calls (exp);
4447 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4449 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4450 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4451 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4453 case FIX_ROUND_EXPR:
4454 case FIX_FLOOR_EXPR:
4456 abort (); /* Not used for C. */
4458 case FIX_TRUNC_EXPR:
4459 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4461 target = gen_reg_rtx (mode);
4462 expand_fix (target, op0, unsignedp);
4466 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4468 target = gen_reg_rtx (mode);
4469 /* expand_float can't figure out what to do if FROM has VOIDmode.
4470 So give it the correct mode. With -O, cse will optimize this. */
4471 if (GET_MODE (op0) == VOIDmode)
4472 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4474 expand_float (target, op0,
4475 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4479 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4480 temp = expand_unop (mode, neg_optab, op0, target, 0);
4486 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4488 /* Handle complex values specially. */
4490 enum machine_mode opmode
4491 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4493 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4494 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4495 return expand_complex_abs (opmode, op0, target, unsignedp);
4498 /* Unsigned abs is simply the operand. Testing here means we don't
4499 risk generating incorrect code below. */
4500 if (TREE_UNSIGNED (type))
4503 /* First try to do it with a special abs instruction. */
4504 temp = expand_unop (mode, abs_optab, op0, target, 0);
4508 /* If this machine has expensive jumps, we can do integer absolute
4509 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4510 where W is the width of MODE. */
4512 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4514 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4515 size_int (GET_MODE_BITSIZE (mode) - 1),
4518 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4521 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4528 /* If that does not win, use conditional jump and negate. */
4529 target = original_target;
4530 temp = gen_label_rtx ();
4531 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4532 || (GET_CODE (target) == REG
4533 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4534 target = gen_reg_rtx (mode);
4535 emit_move_insn (target, op0);
4536 emit_cmp_insn (target,
4537 expand_expr (convert (type, integer_zero_node),
4538 NULL_RTX, VOIDmode, 0),
4539 GE, NULL_RTX, mode, 0, 0);
4541 emit_jump_insn (gen_bge (temp));
4542 op0 = expand_unop (mode, neg_optab, target, target, 0);
4544 emit_move_insn (target, op0);
4551 target = original_target;
4552 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4553 || (GET_CODE (target) == REG
4554 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4555 target = gen_reg_rtx (mode);
4556 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4557 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4559 /* First try to do it with a special MIN or MAX instruction.
4560 If that does not win, use a conditional jump to select the proper
4562 this_optab = (TREE_UNSIGNED (type)
4563 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4564 : (code == MIN_EXPR ? smin_optab : smax_optab));
4566 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4572 emit_move_insn (target, op0);
4573 op0 = gen_label_rtx ();
4574 if (code == MAX_EXPR)
4575 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4576 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4577 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4579 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4580 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4581 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4582 if (temp == const0_rtx)
4583 emit_move_insn (target, op1);
4584 else if (temp != const_true_rtx)
4586 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4587 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4590 emit_move_insn (target, op1);
4595 /* ??? Can optimize when the operand of this is a bitwise operation,
4596 by using a different bitwise operation. */
4598 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4599 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4605 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4606 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4611 /* ??? Can optimize bitwise operations with one arg constant.
4612 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4613 and (a bitwise1 b) bitwise2 b (etc)
4614 but that is probably not worth while. */
4616 /* BIT_AND_EXPR is for bitwise anding.
4617 TRUTH_AND_EXPR is for anding two boolean values
4618 when we want in all cases to compute both of them.
4619 In general it is fastest to do TRUTH_AND_EXPR by
4620 computing both operands as actual zero-or-1 values
4621 and then bitwise anding. In cases where there cannot
4622 be any side effects, better code would be made by
4623 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4624 but the question is how to recognize those cases. */
4626 case TRUTH_AND_EXPR:
4628 this_optab = and_optab;
4631 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4634 this_optab = ior_optab;
4637 case TRUTH_XOR_EXPR:
4639 this_optab = xor_optab;
4646 preexpand_calls (exp);
4647 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4649 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4650 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4653 /* Could determine the answer when only additive constants differ.
4654 Also, the addition of one can be handled by changing the condition. */
4661 preexpand_calls (exp);
4662 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4665 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4666 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4668 && GET_CODE (original_target) == REG
4669 && (GET_MODE (original_target)
4670 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4672 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4673 if (temp != original_target)
4674 temp = copy_to_reg (temp);
4675 op1 = gen_label_rtx ();
4676 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4677 GET_MODE (temp), unsignedp, 0);
4678 emit_jump_insn (gen_beq (op1));
4679 emit_move_insn (temp, const1_rtx);
4683 /* If no set-flag instruction, must generate a conditional
4684 store into a temporary variable. Drop through
4685 and handle this like && and ||. */
4687 case TRUTH_ANDIF_EXPR:
4688 case TRUTH_ORIF_EXPR:
4689 if (target == 0 || ! safe_from_p (target, exp)
4690 /* Make sure we don't have a hard reg (such as function's return
4691 value) live across basic blocks, if not optimizing. */
4692 || (!optimize && GET_CODE (target) == REG
4693 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4694 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4695 emit_clr_insn (target);
4696 op1 = gen_label_rtx ();
4697 jumpifnot (exp, op1);
4698 emit_0_to_1_insn (target);
4702 case TRUTH_NOT_EXPR:
4703 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4704 /* The parser is careful to generate TRUTH_NOT_EXPR
4705 only with operands that are always zero or one. */
4706 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4707 target, 1, OPTAB_LIB_WIDEN);
4713 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4715 return expand_expr (TREE_OPERAND (exp, 1),
4716 (ignore ? const0_rtx : target),
4721 /* Note that COND_EXPRs whose type is a structure or union
4722 are required to be constructed to contain assignments of
4723 a temporary variable, so that we can evaluate them here
4724 for side effect only. If type is void, we must do likewise. */
4726 /* If an arm of the branch requires a cleanup,
4727 only that cleanup is performed. */
4730 tree binary_op = 0, unary_op = 0;
4731 tree old_cleanups = cleanups_this_call;
4732 cleanups_this_call = 0;
4734 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4735 convert it to our mode, if necessary. */
4736 if (integer_onep (TREE_OPERAND (exp, 1))
4737 && integer_zerop (TREE_OPERAND (exp, 2))
4738 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4740 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4741 if (GET_MODE (op0) == mode)
4744 target = gen_reg_rtx (mode);
4745 convert_move (target, op0, unsignedp);
4749 /* If we are not to produce a result, we have no target. Otherwise,
4750 if a target was specified use it; it will not be used as an
4751 intermediate target unless it is safe. If no target, use a
4754 if (mode == VOIDmode || ignore)
4756 else if (original_target
4757 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4758 temp = original_target;
4759 else if (mode == BLKmode)
4761 if (TYPE_SIZE (type) == 0
4762 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4764 temp = assign_stack_temp (BLKmode,
4765 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4766 + BITS_PER_UNIT - 1)
4767 / BITS_PER_UNIT, 0);
4770 temp = gen_reg_rtx (mode);
4772 /* Check for X ? A + B : A. If we have this, we can copy
4773 A to the output and conditionally add B. Similarly for unary
4774 operations. Don't do this if X has side-effects because
4775 those side effects might affect A or B and the "?" operation is
4776 a sequence point in ANSI. (We test for side effects later.) */
4778 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4779 && operand_equal_p (TREE_OPERAND (exp, 2),
4780 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4781 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4782 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4783 && operand_equal_p (TREE_OPERAND (exp, 1),
4784 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4785 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4786 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4787 && operand_equal_p (TREE_OPERAND (exp, 2),
4788 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4789 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4790 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4791 && operand_equal_p (TREE_OPERAND (exp, 1),
4792 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4793 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4795 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4796 operation, do this as A + (X != 0). Similarly for other simple
4797 binary operators. */
4798 if (singleton && binary_op
4799 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4800 && (TREE_CODE (binary_op) == PLUS_EXPR
4801 || TREE_CODE (binary_op) == MINUS_EXPR
4802 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4803 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4804 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4805 && integer_onep (TREE_OPERAND (binary_op, 1))
4806 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4809 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4810 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4811 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4812 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4815 /* If we had X ? A : A + 1, do this as A + (X == 0).
4817 We have to invert the truth value here and then put it
4818 back later if do_store_flag fails. We cannot simply copy
4819 TREE_OPERAND (exp, 0) to another variable and modify that
4820 because invert_truthvalue can modify the tree pointed to
4822 if (singleton == TREE_OPERAND (exp, 1))
4823 TREE_OPERAND (exp, 0)
4824 = invert_truthvalue (TREE_OPERAND (exp, 0));
4826 result = do_store_flag (TREE_OPERAND (exp, 0),
4827 (safe_from_p (temp, singleton)
4829 mode, BRANCH_COST <= 1);
4833 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4834 return expand_binop (mode, boptab, op1, result, temp,
4835 unsignedp, OPTAB_LIB_WIDEN);
4837 else if (singleton == TREE_OPERAND (exp, 1))
4838 TREE_OPERAND (exp, 0)
4839 = invert_truthvalue (TREE_OPERAND (exp, 0));
4843 op0 = gen_label_rtx ();
4845 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4849 /* If the target conflicts with the other operand of the
4850 binary op, we can't use it. Also, we can't use the target
4851 if it is a hard register, because evaluating the condition
4852 might clobber it. */
4854 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4855 || (GET_CODE (temp) == REG
4856 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4857 temp = gen_reg_rtx (mode);
4858 store_expr (singleton, temp, 0);
4861 expand_expr (singleton,
4862 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4863 if (cleanups_this_call)
4865 sorry ("aggregate value in COND_EXPR");
4866 cleanups_this_call = 0;
4868 if (singleton == TREE_OPERAND (exp, 1))
4869 jumpif (TREE_OPERAND (exp, 0), op0);
4871 jumpifnot (TREE_OPERAND (exp, 0), op0);
4873 if (binary_op && temp == 0)
4874 /* Just touch the other operand. */
4875 expand_expr (TREE_OPERAND (binary_op, 1),
4876 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4878 store_expr (build (TREE_CODE (binary_op), type,
4879 make_tree (type, temp),
4880 TREE_OPERAND (binary_op, 1)),
4883 store_expr (build1 (TREE_CODE (unary_op), type,
4884 make_tree (type, temp)),
4889 /* This is now done in jump.c and is better done there because it
4890 produces shorter register lifetimes. */
4892 /* Check for both possibilities either constants or variables
4893 in registers (but not the same as the target!). If so, can
4894 save branches by assigning one, branching, and assigning the
4896 else if (temp && GET_MODE (temp) != BLKmode
4897 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4898 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4899 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4900 && DECL_RTL (TREE_OPERAND (exp, 1))
4901 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4902 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4903 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4904 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4905 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4906 && DECL_RTL (TREE_OPERAND (exp, 2))
4907 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4908 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4910 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4911 temp = gen_reg_rtx (mode);
4912 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4913 jumpifnot (TREE_OPERAND (exp, 0), op0);
4914 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4918 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4919 comparison operator. If we have one of these cases, set the
4920 output to A, branch on A (cse will merge these two references),
4921 then set the output to FOO. */
4923 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4924 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4925 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4926 TREE_OPERAND (exp, 1), 0)
4927 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4928 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4930 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4931 temp = gen_reg_rtx (mode);
4932 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4933 jumpif (TREE_OPERAND (exp, 0), op0);
4934 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4938 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4939 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4940 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4941 TREE_OPERAND (exp, 2), 0)
4942 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4943 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4945 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4946 temp = gen_reg_rtx (mode);
4947 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4948 jumpifnot (TREE_OPERAND (exp, 0), op0);
4949 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4954 op1 = gen_label_rtx ();
4955 jumpifnot (TREE_OPERAND (exp, 0), op0);
4957 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4959 expand_expr (TREE_OPERAND (exp, 1),
4960 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4961 if (cleanups_this_call)
4963 sorry ("aggregate value in COND_EXPR");
4964 cleanups_this_call = 0;
4968 emit_jump_insn (gen_jump (op1));
4972 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4974 expand_expr (TREE_OPERAND (exp, 2),
4975 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4978 if (cleanups_this_call)
4980 sorry ("aggregate value in COND_EXPR");
4981 cleanups_this_call = 0;
4987 cleanups_this_call = old_cleanups;
4993 /* Something needs to be initialized, but we didn't know
4994 where that thing was when building the tree. For example,
4995 it could be the return value of a function, or a parameter
4996 to a function which lays down in the stack, or a temporary
4997 variable which must be passed by reference.
4999 We guarantee that the expression will either be constructed
5000 or copied into our original target. */
5002 tree slot = TREE_OPERAND (exp, 0);
5005 if (TREE_CODE (slot) != VAR_DECL)
5010 if (DECL_RTL (slot) != 0)
5012 target = DECL_RTL (slot);
5013 /* We have already expanded the slot, so don't do
5015 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5020 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5021 /* All temp slots at this level must not conflict. */
5022 preserve_temp_slots (target);
5023 DECL_RTL (slot) = target;
5027 /* I bet this needs to be done, and I bet that it needs to
5028 be above, inside the else clause. The reason is
5029 simple, how else is it going to get cleaned up? (mrs)
5031 The reason it probably did not work before, and was
5032 commented out is because this was re-expanding already
5033 expanded target_exprs (target == 0 and DECL_RTL (slot)
5034 != 0) also cleaning them up many times as well. :-( */
5036 /* Since SLOT is not known to the called function
5037 to belong to its stack frame, we must build an explicit
5038 cleanup. This case occurs when we must build up a reference
5039 to pass the reference as an argument. In this case,
5040 it is very likely that such a reference need not be
5043 if (TREE_OPERAND (exp, 2) == 0)
5044 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5045 if (TREE_OPERAND (exp, 2))
5046 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5047 cleanups_this_call);
5052 /* This case does occur, when expanding a parameter which
5053 needs to be constructed on the stack. The target
5054 is the actual stack address that we want to initialize.
5055 The function we call will perform the cleanup in this case. */
5057 DECL_RTL (slot) = target;
5060 exp1 = TREE_OPERAND (exp, 1);
5061 /* Mark it as expanded. */
5062 TREE_OPERAND (exp, 1) = NULL_TREE;
5064 return expand_expr (exp1, target, tmode, modifier);
5069 tree lhs = TREE_OPERAND (exp, 0);
5070 tree rhs = TREE_OPERAND (exp, 1);
5071 tree noncopied_parts = 0;
5072 tree lhs_type = TREE_TYPE (lhs);
5074 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5075 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5076 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5077 TYPE_NONCOPIED_PARTS (lhs_type));
5078 while (noncopied_parts != 0)
5080 expand_assignment (TREE_VALUE (noncopied_parts),
5081 TREE_PURPOSE (noncopied_parts), 0, 0);
5082 noncopied_parts = TREE_CHAIN (noncopied_parts);
5089 /* If lhs is complex, expand calls in rhs before computing it.
5090 That's so we don't compute a pointer and save it over a call.
5091 If lhs is simple, compute it first so we can give it as a
5092 target if the rhs is just a call. This avoids an extra temp and copy
5093 and that prevents a partial-subsumption which makes bad code.
5094 Actually we could treat component_ref's of vars like vars. */
5096 tree lhs = TREE_OPERAND (exp, 0);
5097 tree rhs = TREE_OPERAND (exp, 1);
5098 tree noncopied_parts = 0;
5099 tree lhs_type = TREE_TYPE (lhs);
5103 if (TREE_CODE (lhs) != VAR_DECL
5104 && TREE_CODE (lhs) != RESULT_DECL
5105 && TREE_CODE (lhs) != PARM_DECL)
5106 preexpand_calls (exp);
5108 /* Check for |= or &= of a bitfield of size one into another bitfield
5109 of size 1. In this case, (unless we need the result of the
5110 assignment) we can do this more efficiently with a
5111 test followed by an assignment, if necessary.
5113 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5114 things change so we do, this code should be enhanced to
5117 && TREE_CODE (lhs) == COMPONENT_REF
5118 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5119 || TREE_CODE (rhs) == BIT_AND_EXPR)
5120 && TREE_OPERAND (rhs, 0) == lhs
5121 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5122 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5123 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5125 rtx label = gen_label_rtx ();
5127 do_jump (TREE_OPERAND (rhs, 1),
5128 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5129 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5130 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5131 (TREE_CODE (rhs) == BIT_IOR_EXPR
5133 : integer_zero_node)),
5135 do_pending_stack_adjust ();
5140 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5141 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5142 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5143 TYPE_NONCOPIED_PARTS (lhs_type));
5145 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5146 while (noncopied_parts != 0)
5148 expand_assignment (TREE_PURPOSE (noncopied_parts),
5149 TREE_VALUE (noncopied_parts), 0, 0);
5150 noncopied_parts = TREE_CHAIN (noncopied_parts);
5155 case PREINCREMENT_EXPR:
5156 case PREDECREMENT_EXPR:
5157 return expand_increment (exp, 0);
5159 case POSTINCREMENT_EXPR:
5160 case POSTDECREMENT_EXPR:
5161 /* Faster to treat as pre-increment if result is not used. */
5162 return expand_increment (exp, ! ignore);
5165 /* Are we taking the address of a nested function? */
5166 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5167 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5169 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5170 op0 = force_operand (op0, target);
5174 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5175 (modifier == EXPAND_INITIALIZER
5176 ? modifier : EXPAND_CONST_ADDRESS));
5177 if (GET_CODE (op0) != MEM)
5180 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5181 return XEXP (op0, 0);
5182 op0 = force_operand (XEXP (op0, 0), target);
5184 if (flag_force_addr && GET_CODE (op0) != REG)
5185 return force_reg (Pmode, op0);
5188 case ENTRY_VALUE_EXPR:
5191 /* COMPLEX type for Extended Pascal & Fortran */
5194 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5198 /* Get the rtx code of the operands. */
5199 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5200 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5203 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5205 prev = get_last_insn ();
5207 /* Tell flow that the whole of the destination is being set. */
5208 if (GET_CODE (target) == REG)
5209 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5211 /* Move the real (op0) and imaginary (op1) parts to their location. */
5212 emit_move_insn (gen_realpart (mode, target), op0);
5213 emit_move_insn (gen_imagpart (mode, target), op1);
5215 /* Complex construction should appear as a single unit. */
5222 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5223 return gen_realpart (mode, op0);
5226 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5227 return gen_imagpart (mode, op0);
5231 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5235 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5238 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5240 prev = get_last_insn ();
5242 /* Tell flow that the whole of the destination is being set. */
5243 if (GET_CODE (target) == REG)
5244 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5246 /* Store the realpart and the negated imagpart to target. */
5247 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5249 imag_t = gen_imagpart (mode, target);
5250 temp = expand_unop (mode, neg_optab,
5251 gen_imagpart (mode, op0), imag_t, 0);
5253 emit_move_insn (imag_t, temp);
5255 /* Conjugate should appear as a single unit */
5265 return (*lang_expand_expr) (exp, target, tmode, modifier);
5268 /* Here to do an ordinary binary operator, generating an instruction
5269 from the optab already placed in `this_optab'. */
5271 preexpand_calls (exp);
5272 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5274 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5275 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5277 temp = expand_binop (mode, this_optab, op0, op1, target,
5278 unsignedp, OPTAB_LIB_WIDEN);
5284 /* Return the alignment in bits of EXP, a pointer valued expression.
5285 But don't return more than MAX_ALIGN no matter what.
5286 The alignment returned is, by default, the alignment of the thing that
5287 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5289 Otherwise, look at the expression to see if we can do better, i.e., if the
5290 expression is actually pointing at an object whose alignment is tighter. */
5293 get_pointer_alignment (exp, max_align)
/* Return the alignment in bits of what EXP (a pointer-valued tree
   expression) points to, never exceeding MAX_ALIGN; returns the
   pointed-to type's alignment unless the expression shows something
   tighter (see the header comment just above).
   NOTE(review): this listing is elided -- the return-type line, the
   K&R parameter declarations, several braces and the early-return
   paths are missing; the code lines below are kept byte-identical.  */
5297 unsigned align, inner;
/* A non-pointer argument carries no alignment information; the elided
   line here presumably returns 0 -- TODO confirm against full source.  */
5299 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Start from the declared alignment of the pointed-to type, capped
   at MAX_ALIGN.  */
5302 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5303 align = MIN (align, max_align);
/* Walk down the expression (loop header elided) looking for a
   tighter alignment bound.  */
5307 switch (TREE_CODE (exp))
5311 case NON_LVALUE_EXPR:
/* Strip the wrapper and keep the larger of the alignments implied
   by the outer and inner pointer types.  */
5312 exp = TREE_OPERAND (exp, 0);
5313 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5315 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5316 inner = MIN (inner, max_align);
5317 align = MAX (align, inner);
5321 /* If sum of pointer + int, restrict our maximum alignment to that
5322 imposed by the integer.  If not, we can't do any better than
5324 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* Loop body elided; presumably halves max_align while the constant
   byte offset is not a multiple of it -- TODO confirm.  */
5327 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
/* Continue the walk on the pointer operand of the sum.  */
5332 exp = TREE_OPERAND (exp, 0);
5336 /* See what we are pointing at and look at its alignment.  */
5337 exp = TREE_OPERAND (exp, 0);
5338 if (TREE_CODE (exp) == FUNCTION_DECL)
5339 align = MAX (align, FUNCTION_BOUNDARY);
/* 'd' = declaration nodes: use the decl's own alignment.  */
5340 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5341 align = MAX (align, DECL_ALIGN (exp));
5342 #ifdef CONSTANT_ALIGNMENT
/* 'c' = constant nodes: let the target adjust the alignment.  */
5343 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5344 align = CONSTANT_ALIGNMENT (exp, align);
/* Never report more than the caller's ceiling.  */
5346 return MIN (align, max_align);
5354 /* Return the tree node and offset if a given argument corresponds to
5355 a string constant. */
5358 string_constant (arg, ptr_offset)
/* If ARG corresponds to a string constant, return the STRING_CST node
   and store the byte offset through PTR_OFFSET (see the comment just
   above this function).
   NOTE(review): listing is elided -- return type, parameter
   declarations, the offset stores in the PLUS_EXPR arms, and the
   final failure return are missing; code lines are untouched.  */
/* Direct &"..." -- offset is zero.  */
5364 if (TREE_CODE (arg) == ADDR_EXPR
5365 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5367 *ptr_offset = integer_zero_node;
5368 return TREE_OPERAND (arg, 0);
/* &"..." + offset, with the address in either operand of the sum.  */
5370 else if (TREE_CODE (arg) == PLUS_EXPR)
5372 tree arg0 = TREE_OPERAND (arg, 0);
5373 tree arg1 = TREE_OPERAND (arg, 1);
5378 if (TREE_CODE (arg0) == ADDR_EXPR
5379 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
/* Elided line presumably stores arg1 through *ptr_offset -- TODO
   confirm against full source.  */
5382 return TREE_OPERAND (arg0, 0);
5384 else if (TREE_CODE (arg1) == ADDR_EXPR
5385 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
/* Elided line presumably stores arg0 through *ptr_offset -- TODO
   confirm against full source.  */
5388 return TREE_OPERAND (arg1, 0);
5395 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5396 way, because it could contain a zero byte in the middle.
5397 TREE_STRING_LENGTH is the size of the character array, not the string.
5399 Unfortunately, string_constant can't access the values of const char
5400 arrays with initializers, so neither can we do so here. */
/* NOTE(review): the function header is elided from this listing;
   per the comment above (lines 5395-5400) this is presumably the
   helper that computes the length of a C string constant at compile
   time (likely `c_strlen`) -- TODO confirm name and signature.
   Returns a size tree, or (elided paths) 0 when the length cannot be
   determined.  All code lines below are byte-identical.  */
/* Resolve SRC to a STRING_CST plus a byte offset, if possible.  */
5410 src = string_constant (src, &offset_node);
/* MAX is the size of the character array, which may exceed the
   string length if there are embedded nulls.  */
5413 max = TREE_STRING_LENGTH (src);
5414 ptr = TREE_STRING_POINTER (src);
/* Offset exists but is not a compile-time integer.  */
5415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5418 compute the offset to the following null if we don't know where to
5419 start searching for it.  */
/* Scan the whole array for an embedded null (loop body elided).  */
5421 for (i = 0; i < max; i++)
5424 /* We don't know the starting offset, but we do know that the string
5425 has no internal zero bytes.  We can assume that the offset falls
5426 within the bounds of the string; otherwise, the programmer deserves
5427 what he gets.  Subtract the offset from the length of the string,
5429 /* This would perhaps not be valid if we were dealing with named
5430 arrays in addition to literal string constants.  */
5431 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5434 /* We have a known offset into the string.  Start searching there for
5435 a null character.  */
5436 if (offset_node == 0)
/* No offset node at all: start from the beginning (elided line
   presumably sets offset = 0 -- TODO confirm).  */
5440 /* Did we get a long long offset?  If so, punt.  */
5441 if (TREE_INT_CST_HIGH (offset_node) != 0)
5443 offset = TREE_INT_CST_LOW (offset_node);
5445 /* If the offset is known to be out of bounds, warn, and call strlen at
5447 if (offset < 0 || offset > max)
5449 warning ("offset outside bounds of constant string");
5452 /* Use strlen to search for the first zero byte.  Since any strings
5453 constructed with build_string will have nulls appended, we win even
5454 if we get handed something like (char[4])"abcd".
5456 Since OFFSET is our starting index into the string, no further
5457 calculation is needed.  */
/* Host-side strlen on the literal's bytes yields the length.  */
5458 return size_int (strlen (ptr + offset));
5461 /* Expand an expression EXP that calls a built-in function,
5462 with result going to TARGET if that's convenient
5463 (and in mode MODE if that's convenient).
5464 SUBTARGET may be used as the target for computing one of EXP's operands.
5465 IGNORE is nonzero if the value is to be ignored. */
5468 expand_builtin (exp, target, subtarget, mode, ignore)
5472 enum machine_mode mode;
5475 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5476 tree arglist = TREE_OPERAND (exp, 1);
5479 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5480 optab builtin_optab;
5482 switch (DECL_FUNCTION_CODE (fndecl))
5487 /* build_function_call changes these into ABS_EXPR. */
5492 case BUILT_IN_FSQRT:
5493 /* If not optimizing, call the library function. */
5498 /* Arg could be wrong type if user redeclared this fcn wrong. */
5499 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5500 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5502 /* Stabilize and compute the argument. */
5503 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5504 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5506 exp = copy_node (exp);
5507 arglist = copy_node (arglist);
5508 TREE_OPERAND (exp, 1) = arglist;
5509 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5511 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5513 /* Make a suitable register to place result in. */
5514 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5519 switch (DECL_FUNCTION_CODE (fndecl))
5522 builtin_optab = sin_optab; break;
5524 builtin_optab = cos_optab; break;
5525 case BUILT_IN_FSQRT:
5526 builtin_optab = sqrt_optab; break;
5531 /* Compute into TARGET.
5532 Set TARGET to wherever the result comes back. */
5533 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5534 builtin_optab, op0, target, 0);
5536 /* If we were unable to expand via the builtin, stop the
5537 sequence (without outputting the insns) and break, causing
5538 a call to the library function. */
5545 /* Check the results by default. But if flag_fast_math is turned on,
5546 then assume sqrt will always be called with valid arguments. */
5548 if (! flag_fast_math)
5550 /* Don't define the builtin FP instructions
5551 if your machine is not IEEE. */
5552 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5555 lab1 = gen_label_rtx ();
5557 /* Test the result; if it is NaN, set errno=EDOM because
5558 the argument was not in the domain. */
5559 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5560 emit_jump_insn (gen_beq (lab1));
5564 #ifdef GEN_ERRNO_RTX
5565 rtx errno_rtx = GEN_ERRNO_RTX;
5568 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5571 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5574 /* We can't set errno=EDOM directly; let the library call do it.
5575 Pop the arguments right away in case the call gets deleted. */
5577 expand_call (exp, target, 0);
5584 /* Output the entire sequence. */
5585 insns = get_insns ();
5591 case BUILT_IN_SAVEREGS:
5592 /* Don't do __builtin_saveregs more than once in a function.
5593 Save the result of the first call and reuse it. */
5594 if (saveregs_value != 0)
5595 return saveregs_value;
5597 /* When this function is called, it means that registers must be
5598 saved on entry to this function. So we migrate the
5599 call to the first insn of this function. */
5602 rtx valreg, saved_valreg;
5604 /* Now really call the function. `expand_call' does not call
5605 expand_builtin, so there is no danger of infinite recursion here. */
5608 #ifdef EXPAND_BUILTIN_SAVEREGS
5609 /* Do whatever the machine needs done in this case. */
5610 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5612 /* The register where the function returns its value
5613 is likely to have something else in it, such as an argument.
5614 So preserve that register around the call. */
5615 if (value_mode != VOIDmode)
5617 valreg = hard_libcall_value (value_mode);
5618 saved_valreg = gen_reg_rtx (value_mode);
5619 emit_move_insn (saved_valreg, valreg);
5622 /* Generate the call, putting the value in a pseudo. */
5623 temp = expand_call (exp, target, ignore);
5625 if (value_mode != VOIDmode)
5626 emit_move_insn (valreg, saved_valreg);
5632 saveregs_value = temp;
5634 /* This won't work inside a SEQUENCE--it really has to be
5635 at the start of the function. */
5636 if (in_sequence_p ())
5638 /* Better to do this than to crash. */
5639 error ("`va_start' used within `({...})'");
5643 /* Put the sequence after the NOTE that starts the function. */
5644 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5648 /* __builtin_args_info (N) returns word N of the arg space info
5649 for the current function. The number and meanings of words
5650 is controlled by the definition of CUMULATIVE_ARGS. */
5651 case BUILT_IN_ARGS_INFO:
5653 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5655 int *word_ptr = (int *) &current_function_args_info;
5656 tree type, elts, result;
5658 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5659 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5660 __FILE__, __LINE__);
5664 tree arg = TREE_VALUE (arglist);
5665 if (TREE_CODE (arg) != INTEGER_CST)
5666 error ("argument of __builtin_args_info must be constant");
5669 int wordnum = TREE_INT_CST_LOW (arg);
5671 if (wordnum < 0 || wordnum >= nwords)
5672 error ("argument of __builtin_args_info out of range");
5674 return GEN_INT (word_ptr[wordnum]);
5678 error ("missing argument in __builtin_args_info");
5683 for (i = 0; i < nwords; i++)
5684 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5686 type = build_array_type (integer_type_node,
5687 build_index_type (build_int_2 (nwords, 0)));
5688 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5689 TREE_CONSTANT (result) = 1;
5690 TREE_STATIC (result) = 1;
5691 result = build (INDIRECT_REF, build_pointer_type (type), result);
5692 TREE_CONSTANT (result) = 1;
5693 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5697 /* Return the address of the first anonymous stack arg. */
5698 case BUILT_IN_NEXT_ARG:
5700 tree fntype = TREE_TYPE (current_function_decl);
5701 if (!(TYPE_ARG_TYPES (fntype) != 0
5702 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5703 != void_type_node)))
5705 error ("`va_start' used in function with fixed args");
5710 return expand_binop (Pmode, add_optab,
5711 current_function_internal_arg_pointer,
5712 current_function_arg_offset_rtx,
5713 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5715 case BUILT_IN_CLASSIFY_TYPE:
5718 tree type = TREE_TYPE (TREE_VALUE (arglist));
5719 enum tree_code code = TREE_CODE (type);
5720 if (code == VOID_TYPE)
5721 return GEN_INT (void_type_class);
5722 if (code == INTEGER_TYPE)
5723 return GEN_INT (integer_type_class);
5724 if (code == CHAR_TYPE)
5725 return GEN_INT (char_type_class);
5726 if (code == ENUMERAL_TYPE)
5727 return GEN_INT (enumeral_type_class);
5728 if (code == BOOLEAN_TYPE)
5729 return GEN_INT (boolean_type_class);
5730 if (code == POINTER_TYPE)
5731 return GEN_INT (pointer_type_class);
5732 if (code == REFERENCE_TYPE)
5733 return GEN_INT (reference_type_class);
5734 if (code == OFFSET_TYPE)
5735 return GEN_INT (offset_type_class);
5736 if (code == REAL_TYPE)
5737 return GEN_INT (real_type_class);
5738 if (code == COMPLEX_TYPE)
5739 return GEN_INT (complex_type_class);
5740 if (code == FUNCTION_TYPE)
5741 return GEN_INT (function_type_class);
5742 if (code == METHOD_TYPE)
5743 return GEN_INT (method_type_class);
5744 if (code == RECORD_TYPE)
5745 return GEN_INT (record_type_class);
5746 if (code == UNION_TYPE)
5747 return GEN_INT (union_type_class);
5748 if (code == ARRAY_TYPE)
5749 return GEN_INT (array_type_class);
5750 if (code == STRING_TYPE)
5751 return GEN_INT (string_type_class);
5752 if (code == SET_TYPE)
5753 return GEN_INT (set_type_class);
5754 if (code == FILE_TYPE)
5755 return GEN_INT (file_type_class);
5756 if (code == LANG_TYPE)
5757 return GEN_INT (lang_type_class);
5759 return GEN_INT (no_type_class);
5761 case BUILT_IN_CONSTANT_P:
5765 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5766 ? const1_rtx : const0_rtx);
5768 case BUILT_IN_FRAME_ADDRESS:
5769 /* The argument must be a nonnegative integer constant.
5770 It counts the number of frames to scan up the stack.
5771 The value is the address of that frame. */
5772 case BUILT_IN_RETURN_ADDRESS:
5773 /* The argument must be a nonnegative integer constant.
5774 It counts the number of frames to scan up the stack.
5775 The value is the return address saved in that frame. */
5777 /* Warning about missing arg was already issued. */
5779 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5781 error ("invalid arg to __builtin_return_address");
5784 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5786 error ("invalid arg to __builtin_return_address");
5791 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5792 rtx tem = frame_pointer_rtx;
5795 /* Scan back COUNT frames to the specified frame. */
5796 for (i = 0; i < count; i++)
5798 /* Assume the dynamic chain pointer is in the word that
5799 the frame address points to, unless otherwise specified. */
5800 #ifdef DYNAMIC_CHAIN_ADDRESS
5801 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5803 tem = memory_address (Pmode, tem);
5804 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5807 /* For __builtin_frame_address, return what we've got. */
5808 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5811 /* For __builtin_return_address,
5812 Get the return address from that frame. */
5813 #ifdef RETURN_ADDR_RTX
5814 return RETURN_ADDR_RTX (count, tem);
5816 tem = memory_address (Pmode,
5817 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5818 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5822 case BUILT_IN_ALLOCA:
5824 /* Arg could be non-integer if user redeclared this fcn wrong. */
5825 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5827 current_function_calls_alloca = 1;
5828 /* Compute the argument. */
5829 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5831 /* Allocate the desired space. */
5832 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5834 /* Record the new stack level for nonlocal gotos. */
5835 if (nonlocal_goto_handler_slot != 0)
5836 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5840 /* If not optimizing, call the library function. */
5845 /* Arg could be non-integer if user redeclared this fcn wrong. */
5846 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5849 /* Compute the argument. */
5850 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5851 /* Compute ffs, into TARGET if possible.
5852 Set TARGET to wherever the result comes back. */
5853 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5854 ffs_optab, op0, target, 1);
5859 case BUILT_IN_STRLEN:
5860 /* If not optimizing, call the library function. */
5865 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5866 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5870 tree src = TREE_VALUE (arglist);
5871 tree len = c_strlen (src);
5874 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5876 rtx result, src_rtx, char_rtx;
5877 enum machine_mode insn_mode = value_mode, char_mode;
5878 enum insn_code icode;
5880 /* If the length is known, just return it. */
5882 return expand_expr (len, target, mode, 0);
5884 /* If SRC is not a pointer type, don't do this operation inline. */
5888 /* Call a function if we can't compute strlen in the right mode. */
5890 while (insn_mode != VOIDmode)
5892 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5893 if (icode != CODE_FOR_nothing)
5896 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5898 if (insn_mode == VOIDmode)
5901 /* Make a place to write the result of the instruction. */
5904 && GET_CODE (result) == REG
5905 && GET_MODE (result) == insn_mode
5906 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5907 result = gen_reg_rtx (insn_mode);
5909 /* Make sure the operands are acceptable to the predicates. */
5911 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5912 result = gen_reg_rtx (insn_mode);
5914 src_rtx = memory_address (BLKmode,
5915 expand_expr (src, NULL_RTX, Pmode,
5917 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5918 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5920 char_rtx = const0_rtx;
5921 char_mode = insn_operand_mode[(int)icode][2];
5922 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5923 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5925 emit_insn (GEN_FCN (icode) (result,
5926 gen_rtx (MEM, BLKmode, src_rtx),
5927 char_rtx, GEN_INT (align)));
5929 /* Return the value in the proper mode for this function. */
5930 if (GET_MODE (result) == value_mode)
5932 else if (target != 0)
5934 convert_move (target, result, 0);
5938 return convert_to_mode (value_mode, result, 0);
5941 case BUILT_IN_STRCPY:
5942 /* If not optimizing, call the library function. */
5947 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5948 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5949 || TREE_CHAIN (arglist) == 0
5950 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5954 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5959 len = size_binop (PLUS_EXPR, len, integer_one_node);
5961 chainon (arglist, build_tree_list (NULL_TREE, len));
5965 case BUILT_IN_MEMCPY:
5966 /* If not optimizing, call the library function. */
5971 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5972 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5973 || TREE_CHAIN (arglist) == 0
5974 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5975 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5976 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5980 tree dest = TREE_VALUE (arglist);
5981 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5982 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5985 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5987 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5990 /* If either SRC or DEST is not a pointer type, don't do
5991 this operation in-line. */
5992 if (src_align == 0 || dest_align == 0)
5994 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5995 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5999 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6001 /* Copy word part most expediently. */
6002 emit_block_move (gen_rtx (MEM, BLKmode,
6003 memory_address (BLKmode, dest_rtx)),
6004 gen_rtx (MEM, BLKmode,
6005 memory_address (BLKmode,
6006 expand_expr (src, NULL_RTX,
6009 expand_expr (len, NULL_RTX, VOIDmode, 0),
6010 MIN (src_align, dest_align));
6014 /* These comparison functions need an instruction that returns an actual
6015 index. An ordinary compare that just sets the condition codes
6017 #ifdef HAVE_cmpstrsi
6018 case BUILT_IN_STRCMP:
6019 /* If not optimizing, call the library function. */
6024 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6025 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6026 || TREE_CHAIN (arglist) == 0
6027 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6029 else if (!HAVE_cmpstrsi)
6032 tree arg1 = TREE_VALUE (arglist);
6033 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6037 len = c_strlen (arg1);
6039 len = size_binop (PLUS_EXPR, integer_one_node, len);
6040 len2 = c_strlen (arg2);
6042 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6044 /* If we don't have a constant length for the first, use the length
6045 of the second, if we know it. We don't require a constant for
6046 this case; some cost analysis could be done if both are available
6047 but neither is constant. For now, assume they're equally cheap.
6049 If both strings have constant lengths, use the smaller. This
6050 could arise if optimization results in strcpy being called with
6051 two fixed strings, or if the code was machine-generated. We should
6052 add some code to the `memcmp' handler below to deal with such
6053 situations, someday. */
6054 if (!len || TREE_CODE (len) != INTEGER_CST)
6061 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6063 if (tree_int_cst_lt (len2, len))
6067 chainon (arglist, build_tree_list (NULL_TREE, len));
6071 case BUILT_IN_MEMCMP:
6072 /* If not optimizing, call the library function. */
6077 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6078 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6079 || TREE_CHAIN (arglist) == 0
6080 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6081 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6082 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6084 else if (!HAVE_cmpstrsi)
6087 tree arg1 = TREE_VALUE (arglist);
6088 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6089 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6093 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6095 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6096 enum machine_mode insn_mode
6097 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6099 /* If we don't have POINTER_TYPE, call the function. */
6100 if (arg1_align == 0 || arg2_align == 0)
6102 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6103 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6107 /* Make a place to write the result of the instruction. */
6110 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6111 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6112 result = gen_reg_rtx (insn_mode);
6114 emit_insn (gen_cmpstrsi (result,
6115 gen_rtx (MEM, BLKmode,
6116 expand_expr (arg1, NULL_RTX, Pmode,
6118 gen_rtx (MEM, BLKmode,
6119 expand_expr (arg2, NULL_RTX, Pmode,
6121 expand_expr (len, NULL_RTX, VOIDmode, 0),
6122 GEN_INT (MIN (arg1_align, arg2_align))));
6124 /* Return the value in the proper mode for this function. */
6125 mode = TYPE_MODE (TREE_TYPE (exp));
6126 if (GET_MODE (result) == mode)
6128 else if (target != 0)
6130 convert_move (target, result, 0);
6134 return convert_to_mode (mode, result, 0);
6137 case BUILT_IN_STRCMP:
6138 case BUILT_IN_MEMCMP:
6142 default: /* just do library call, if unknown builtin */
6143 error ("built-in function %s not currently supported",
6144 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6147 /* The switch statement above can drop through to cause the function
6148 to be called normally. */
6150 return expand_call (exp, target, ignore);
6153 /* Expand code for a post- or pre- increment or decrement
6154 and return the RTX for the result.
6155 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6158 expand_increment (exp, post)
/* Expand a pre/post increment/decrement expression EXP and return an rtx
   for the result.  POST is 1 for postincrement/postdecrement, 0 for the
   pre- forms.  NOTE(review): this extract is elided — the internal line
   numbering is discontinuous, so parameter declarations, braces and some
   statements between the visible lines are missing here.  */
6162 register rtx op0, op1;
6163 register rtx temp, value;
6164 register tree incremented = TREE_OPERAND (exp, 0);
/* Assume addition; switched to sub_optab below for the decrement forms.  */
6165 optab this_optab = add_optab;
6167 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6168 int op0_is_copy = 0;
6170 /* Stabilize any component ref that might need to be
6171 evaluated more than once below. */
6172 if (TREE_CODE (incremented) == BIT_FIELD_REF
6173 || (TREE_CODE (incremented) == COMPONENT_REF
6174 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6175 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6176 incremented = stabilize_reference (incremented);
6178 /* Compute the operands as RTX.
6179 Note whether OP0 is the actual lvalue or a copy of it:
6180 I believe it is a copy iff it is a register or subreg
6181 and insns were generated in computing it. */
/* Remember the last insn so we can tell below whether expand_expr
   emitted anything (i.e. whether OP0 is a computed copy).  */
6183 temp = get_last_insn ();
6184 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6186 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6187 in place but instead must do sign- or zero-extension during assignment,
6188 so we copy it into a new register and let the code below use it as
6191 Note that we can safely modify this SUBREG since it is known not to be
6192 shared (it was made by the expand_expr call above). */
6194 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6195 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6197 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6198 && temp != get_last_insn ());
6199 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6201 /* Decide whether incrementing or decrementing. */
6202 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6203 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6204 this_optab = sub_optab;
6206 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6207 then we cannot just increment OP0. We must
6208 therefore contrive to increment the original value.
6209 Then we can return OP0 since it is a copy of the old value. */
/* NOTE(review): the `if (op0_is_copy ...)` guard and opening brace for the
   branch below appear to be among the elided lines.  */
6212 /* This is the easiest way to increment the value wherever it is.
6213 Problems with multiple evaluation of INCREMENTED
6214 are prevented because either (1) it is a component_ref,
6215 in which case it was stabilized above, or (2) it is an array_ref
6216 with constant index in an array in a register, which is
6217 safe to reevaluate. */
6218 tree newexp = build ((this_optab == add_optab
6219 ? PLUS_EXPR : MINUS_EXPR),
6222 TREE_OPERAND (exp, 1));
6223 temp = expand_assignment (incremented, newexp, ! post, 0);
/* Postfix: the pre-increment value (OP0, a copy) is the result;
   prefix: the assignment's value is the result.  */
6224 return post ? op0 : temp;
6227 /* Convert decrement by a constant into a negative increment. */
6228 if (this_optab == sub_optab
6229 && GET_CODE (op1) == CONST_INT)
6231 op1 = GEN_INT (- INTVAL (op1));
6232 this_optab = add_optab;
6237 /* We have a true reference to the value in OP0.
6238 If there is an insn to add or subtract in this mode, queue it. */
/* NOTE(review): the matching #endif for this #if 0 is elided.  */
6240 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6241 op0 = stabilize (op0);
6244 icode = (int) this_optab->handlers[(int) mode].insn_code;
6245 if (icode != (int) CODE_FOR_nothing
6246 /* Make sure that OP0 is valid for operands 0 and 1
6247 of the insn we want to queue. */
6248 && (*insn_operand_predicate[icode][0]) (op0, mode)
6249 && (*insn_operand_predicate[icode][1]) (op0, mode))
6251 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6252 op1 = force_reg (mode, op1);
/* Defer the increment via the queue (postincrement semantics).  */
6254 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6258 /* Preincrement, or we can't increment with one simple insn. */
6260 /* Save a copy of the value before inc or dec, to return it later. */
6261 temp = value = copy_to_reg (op0);
6263 /* Arrange to return the incremented value. */
6264 /* Copy the rtx because expand_binop will protect from the queue,
6265 and the results of that would be invalid for us to return
6266 if our caller does emit_queue before using our result. */
6267 temp = copy_rtx (value = op0);
6269 /* Increment however we can. */
6270 op1 = expand_binop (mode, this_optab, value, op1, op0,
6271 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6272 /* Make sure the value is stored into OP0. */
6274 emit_move_insn (op0, op1);
6279 /* Expand all function calls contained within EXP, innermost ones first.
6280 But don't look within expressions that have sequence points.
6281 For each CALL_EXPR, record the rtx for its value
6282 in the CALL_EXPR_RTL field. */
6285 preexpand_calls (exp)
/* Recursively expand all CALL_EXPRs within EXP (innermost first), storing
   each call's rtx in its CALL_EXPR_RTL field.  Skips built-in functions
   and does not descend into constructs with sequence points (per the
   header comment above).  NOTE(review): this extract is elided — case
   labels, braces and returns between the visible lines are missing.  */
6288 register int nops, i;
6289 int type = TREE_CODE_CLASS (TREE_CODE (exp));
/* Global switch: pre-expansion can be disabled entirely.  */
6291 if (! do_preexpand_calls)
6294 /* Only expressions and references can contain calls. */
6296 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6299 switch (TREE_CODE (exp))
6302 /* Do nothing if already expanded. */
6303 if (CALL_EXPR_RTL (exp) != 0)
6306 /* Do nothing to built-in functions. */
6307 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6308 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6309 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6310 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6315 case TRUTH_ANDIF_EXPR:
6316 case TRUTH_ORIF_EXPR:
6317 /* If we find one of these, then we can be sure
6318 the adjust will be done for it (since it makes jumps).
6319 Do it now, so that if this is inside an argument
6320 of a function, we don't get the stack adjustment
6321 after some other args have already been pushed. */
6322 do_pending_stack_adjust ();
6327 case WITH_CLEANUP_EXPR:
/* Already-expanded SAVE_EXPRs need no further work.  */
6331 if (SAVE_EXPR_RTL (exp) != 0)
/* Default: recurse into every operand that can itself contain a call.  */
6335 nops = tree_code_length[(int) TREE_CODE (exp)];
6336 for (i = 0; i < nops; i++)
6337 if (TREE_OPERAND (exp, i) != 0)
6339 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6340 if (type == 'e' || type == '<' || type == '1' || type == '2'
6342 preexpand_calls (TREE_OPERAND (exp, i));
6346 /* At the start of a function, record that we have no previously-pushed
6347 arguments waiting to be popped. */
6350 init_pending_stack_adjust ()
/* Reset the count of pushed-but-unpopped argument bytes; called at the
   start of a function (see comment above).  NOTE(review): the braces of
   this definition are among the elided lines.  */
6352 pending_stack_adjust = 0;
6355 /* When exiting from function, if safe, clear out any pending stack adjust
6356 so the adjustment won't get done. */
6359 clear_pending_stack_adjust ()
/* On function exit, discard any pending stack adjustment when the target's
   exit code ignores the stack pointer anyway — but not for functions that
   may be inlined, where the adjustment still matters to the inliner.
   NOTE(review): the matching #endif and braces are among the elided lines.  */
6361 #ifdef EXIT_IGNORE_STACK
6362 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6363 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6364 && ! flag_inline_functions)
6365 pending_stack_adjust = 0;
6369 /* Pop any previously-pushed arguments that have not been popped yet. */
6372 do_pending_stack_adjust ()
/* Emit the deferred stack-pointer adjustment (popping pushed args), then
   clear the pending count.  Suppressed while inhibit_defer_pop is set.  */
6374 if (inhibit_defer_pop == 0)
6376 if (pending_stack_adjust != 0)
6377 adjust_stack (GEN_INT (pending_stack_adjust));
6378 pending_stack_adjust = 0;
6382 /* Expand all cleanups up to OLD_CLEANUPS.
6383 Needed here, and also for language-dependent calls. */
6386 expand_cleanups_to (old_cleanups)
/* Expand each cleanup on the cleanups_this_call list until the list head
   reaches OLD_CLEANUPS, popping entries as they are expanded.  */
6389 while (cleanups_this_call != old_cleanups)
6391 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6392 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6396 /* Expand conditional expressions. */
6398 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6399 LABEL is an rtx of code CODE_LABEL, in this function and all the
6403 jumpifnot (exp, label)
/* Jump to LABEL if EXP evaluates to zero: delegate to do_jump with LABEL
   as the false target and no true target.  NOTE(review): the parameter
   declarations and braces are among the elided lines.  */
6407 do_jump (exp, label, NULL_RTX);
6410 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* Body of jumpif (see comment above): jump to LABEL if EXP is nonzero.
   NOTE(review): the `jumpif (exp, label)` header, parameter declarations
   and braces are among the elided lines of this extract.  */
6417 do_jump (exp, NULL_RTX, label);
6420 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6421 the result is zero, or IF_TRUE_LABEL if the result is one.
6422 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6423 meaning fall through in that case.
6425 do_jump always does any pending stack adjust except when it does not
6426 actually perform a jump. An example where there is no jump
6427 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6429 This function is responsible for optimizing cases such as
6430 &&, || and comparison operators in EXP. */
6433 do_jump (exp, if_false_label, if_true_label)
/* Evaluate EXP and jump to IF_FALSE_LABEL if zero or IF_TRUE_LABEL if
   nonzero; either label may be 0, meaning fall through (see comment
   above).  Dispatches on TREE_CODE (exp) to optimize &&, ||, !,
   comparisons, COND_EXPR, etc.  NOTE(review): this extract is elided —
   most case labels, braces and `break`s of the switch are missing, so
   the visible statements are not contiguous.  */
6435 rtx if_false_label, if_true_label;
6437 register enum tree_code code = TREE_CODE (exp);
6438 /* Some cases need to create a label to jump to
6439 in order to properly fall through.
6440 These cases set DROP_THROUGH_LABEL nonzero. */
6441 rtx drop_through_label = 0;
/* Constant operand: the jump target is known at compile time.  */
6455 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6460 /* This is not true with #pragma weak */
6463 /* The address of something can never be zero. */
6465 emit_jump (if_true_label);
/* NOP/convert of a memory reference: fall through to the default compare.  */
6470 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6471 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6472 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6475 /* If we are narrowing the operand, we have to do the compare in the
6477 if ((TYPE_PRECISION (TREE_TYPE (exp))
6478 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6480 case NON_LVALUE_EXPR:
6481 case REFERENCE_EXPR:
6486 /* These cannot change zero->non-zero or vice versa. */
6487 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6491 /* This is never less insns than evaluating the PLUS_EXPR followed by
6492 a test and can be longer if the test is eliminated. */
6494 /* Reduce to minus. */
6495 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6496 TREE_OPERAND (exp, 0),
6497 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6498 TREE_OPERAND (exp, 1))));
6499 /* Process as MINUS. */
6503 /* Non-zero iff operands of minus differ. */
6504 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6505 TREE_OPERAND (exp, 0),
6506 TREE_OPERAND (exp, 1)),
6511 /* If we are AND'ing with a small constant, do this comparison in the
6512 smallest type that fits. If the machine doesn't have comparisons
6513 that small, it will be converted back to the wider comparison.
6514 This helps if we are testing the sign bit of a narrower object.
6515 combine can't do this for us because it can't know whether a
6516 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6518 if (! SLOW_BYTE_ACCESS
6519 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6520 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6521 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6522 && (type = type_for_size (i + 1, 1)) != 0
6523 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6524 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6525 != CODE_FOR_nothing))
6527 do_jump (convert (type, exp), if_false_label, if_true_label);
6532 case TRUTH_NOT_EXPR:
/* Logical NOT: just swap the two target labels.  */
6533 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6536 case TRUTH_ANDIF_EXPR:
/* Short-circuit &&: first operand false jumps straight to the false label.  */
6537 if (if_false_label == 0)
6538 if_false_label = drop_through_label = gen_label_rtx ();
6539 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6540 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6543 case TRUTH_ORIF_EXPR:
/* Short-circuit ||: first operand true jumps straight to the true label.  */
6544 if (if_true_label == 0)
6545 if_true_label = drop_through_label = gen_label_rtx ();
6546 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6547 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* COMPOUND_EXPR: evaluate the first operand for effect, test the second.  */
6551 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6554 do_pending_stack_adjust ();
6555 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* Bit-field reference: try to test in the narrowest adequate mode.  */
6562 int bitsize, bitpos, unsignedp;
6563 enum machine_mode mode;
6568 /* Get description of this reference. We don't actually care
6569 about the underlying object here. */
6570 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6571 &mode, &unsignedp, &volatilep);
6573 type = type_for_size (bitsize, unsignedp);
6574 if (! SLOW_BYTE_ACCESS
6575 && type != 0 && bitsize >= 0
6576 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6577 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6578 != CODE_FOR_nothing))
6580 do_jump (convert (type, exp), if_false_label, if_true_label);
6587 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6588 if (integer_onep (TREE_OPERAND (exp, 1))
6589 && integer_zerop (TREE_OPERAND (exp, 2)))
6590 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6592 else if (integer_zerop (TREE_OPERAND (exp, 1))
6593 && integer_onep (TREE_OPERAND (exp, 2)))
6594 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* General COND_EXPR: branch on the condition, then test each arm.  */
6598 register rtx label1 = gen_label_rtx ();
6599 drop_through_label = gen_label_rtx ();
6600 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6601 /* Now the THEN-expression. */
6602 do_jump (TREE_OPERAND (exp, 1),
6603 if_false_label ? if_false_label : drop_through_label,
6604 if_true_label ? if_true_label : drop_through_label);
6605 /* In case the do_jump just above never jumps. */
6606 do_pending_stack_adjust ();
6607 emit_label (label1);
6608 /* Now the ELSE-expression. */
6609 do_jump (TREE_OPERAND (exp, 2),
6610 if_false_label ? if_false_label : drop_through_label,
6611 if_true_label ? if_true_label : drop_through_label);
/* EQ_EXPR: compare-with-zero becomes a swapped-label truth test; multiword
   integers too wide for one compare go word-by-word.  */
6616 if (integer_zerop (TREE_OPERAND (exp, 1)))
6617 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6618 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6621 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6622 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6624 comparison = compare (exp, EQ, EQ);
/* NE_EXPR: analogous, with labels in the natural order.  */
6628 if (integer_zerop (TREE_OPERAND (exp, 1)))
6629 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6630 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6633 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6634 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6636 comparison = compare (exp, NE, NE);
/* Ordering comparisons: LT/LE/GT/GE, choosing the unsigned rtx code when
   needed, with word-by-word fallback for wide integer modes.  */
6640 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6642 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6643 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6645 comparison = compare (exp, LT, LTU);
6649 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6651 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6652 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6654 comparison = compare (exp, LE, LEU);
6658 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6660 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6661 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6663 comparison = compare (exp, GT, GTU);
6667 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6669 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6670 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6672 comparison = compare (exp, GE, GEU);
/* Default: evaluate the expression and compare its value against zero.  */
6677 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6679 /* This is not needed any more and causes poor code since it causes
6680 comparisons and tests from non-SI objects to have different code
6682 /* Copy to register to avoid generating bad insns by cse
6683 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6684 if (!cse_not_expected && GET_CODE (temp) == MEM)
6685 temp = copy_to_reg (temp);
6687 do_pending_stack_adjust ();
6688 if (GET_CODE (temp) == CONST_INT)
6689 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6690 else if (GET_CODE (temp) == LABEL_REF)
6691 comparison = const_true_rtx;
6692 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6693 && !can_compare_p (GET_MODE (temp)))
6694 /* Note swapping the labels gives us not-equal. */
6695 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6696 else if (GET_MODE (temp) != VOIDmode)
6697 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6698 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6699 GET_MODE (temp), NULL_RTX, 0);
6704 /* Do any postincrements in the expression that was tested. */
6707 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6708 straight into a conditional jump instruction as the jump condition.
6709 Otherwise, all the work has been done already. */
/* A folded-constant comparison needs at most one unconditional jump.  */
6711 if (comparison == const_true_rtx)
6714 emit_jump (if_true_label);
6716 else if (comparison == const0_rtx)
6719 emit_jump (if_false_label);
6721 else if (comparison)
6722 do_jump_for_compare (comparison, if_false_label, if_true_label);
6726 if (drop_through_label)
6728 /* If do_jump produces code that might be jumped around,
6729 do any stack adjusts from that code, before the place
6730 where control merges in. */
6731 do_pending_stack_adjust ();
6732 emit_label (drop_through_label);
6736 /* Given a comparison expression EXP for values too wide to be compared
6737 with one insn, test the comparison and jump to the appropriate label.
6738 The code of EXP is ignored; we always test GT if SWAP is 0,
6739 and LT if SWAP is 1. */
6742 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
/* Multiword greater-than jump (see comment above): tests GT if SWAP is 0,
   LT if SWAP is 1, by comparing one word at a time, most significant word
   first.  NOTE(review): this extract is elided — some declarations and
   braces between the visible lines are missing.  */
6745 rtx if_false_label, if_true_label;
/* SWAP selects which operand is expanded as op0, effectively turning
   the GT test into an LT test.  */
6747 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6748 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6749 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6750 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6751 rtx drop_through_label = 0;
6752 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* A missing label means "fall through"; materialize one so both
   directions always have a target.  */
6755 if (! if_true_label || ! if_false_label)
6756 drop_through_label = gen_label_rtx ();
6757 if (! if_true_label)
6758 if_true_label = drop_through_label;
6759 if (! if_false_label)
6760 if_false_label = drop_through_label;
6762 /* Compare a word at a time, high order first. */
6763 for (i = 0; i < nwords; i++)
6766 rtx op0_word, op1_word;
6768 if (WORDS_BIG_ENDIAN)
6770 op0_word = operand_subword_force (op0, i, mode);
6771 op1_word = operand_subword_force (op1, i, mode);
6775 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6776 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6779 /* All but high-order word must be compared as unsigned. */
6780 comp = compare_from_rtx (op0_word, op1_word,
6781 (unsignedp || i > 0) ? GTU : GT,
6782 unsignedp, word_mode, NULL_RTX, 0);
6783 if (comp == const_true_rtx)
6784 emit_jump (if_true_label);
6785 else if (comp != const0_rtx)
6786 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6788 /* Consider lower words only if these are equal. */
6789 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6791 if (comp == const_true_rtx)
6792 emit_jump (if_false_label);
6793 else if (comp != const0_rtx)
6794 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words compared equal: the relation is false.  */
6798 emit_jump (if_false_label);
6799 if (drop_through_label)
6800 emit_label (drop_through_label);
6803 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6804 with one insn, test the comparison and jump to the appropriate label. */
6807 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
/* Multiword equality jump for an EQ_EXPR too wide for one compare insn
   (see comment above): any unequal word goes to IF_FALSE_LABEL; all equal
   falls through to IF_TRUE_LABEL.  NOTE(review): some declarations and
   braces between the visible lines are elided from this extract.  */
6809 rtx if_false_label, if_true_label;
6811 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6812 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6813 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6814 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6816 rtx drop_through_label = 0;
/* Without a false label, jump to a fresh label to implement fall-through.  */
6818 if (! if_false_label)
6819 drop_through_label = if_false_label = gen_label_rtx ();
6821 for (i = 0; i < nwords; i++)
/* NOTE(review): despite generating EQ here, the visible jump below targets
   if_false_label — the elided lines presumably invert the sense; confirm
   against the full source before relying on this.  */
6823 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6824 operand_subword_force (op1, i, mode),
6825 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6826 word_mode, NULL_RTX, 0);
6827 if (comp == const_true_rtx)
6828 emit_jump (if_false_label);
6829 else if (comp != const0_rtx)
6830 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Every word matched: the operands are equal.  */
6834 emit_jump (if_true_label);
6835 if (drop_through_label)
6836 emit_label (drop_through_label);
6839 /* Jump according to whether OP0 is 0.
6840 We assume that OP0 has an integer mode that is too wide
6841 for the available compare insns. */
6844 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
/* Jump according to whether multiword rtx OP0 is zero (see comment above):
   compare each word against zero; any nonzero word takes the false path.
   NOTE(review): some declarations and braces between the visible lines
   are elided from this extract.  */
6846 rtx if_false_label, if_true_label;
6848 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6850 rtx drop_through_label = 0;
6852 if (! if_false_label)
6853 drop_through_label = if_false_label = gen_label_rtx ();
6855 for (i = 0; i < nwords; i++)
6857 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6859 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
6860 if (comp == const_true_rtx)
6861 emit_jump (if_false_label);
6862 else if (comp != const0_rtx)
6863 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* All words were zero: OP0 == 0, take the "true" path.  */
6867 emit_jump (if_true_label);
6868 if (drop_through_label)
6869 emit_label (drop_through_label);
6872 /* Given a comparison expression in rtl form, output conditional branches to
6873 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6876 do_jump_for_compare (comparison, if_false_label, if_true_label)
/* Emit conditional branch(es) for COMPARISON, an rtx comparison whose
   operands have already been set up (cc0/compare already emitted), to
   IF_TRUE_LABEL and/or IF_FALSE_LABEL.  NOTE(review): this extract is
   elided — the if/else structure and some declarations between the
   visible lines are missing.  */
6877 rtx comparison, if_false_label, if_true_label;
/* bcc_gen_fctn maps an rtx comparison code to the generator for the
   corresponding conditional-branch insn.  */
6881 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6882 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6887 emit_jump (if_false_label);
6889 else if (if_false_label)
6892 rtx prev = PREV_INSN (get_last_insn ());
6895 /* Output the branch with the opposite condition. Then try to invert
6896 what is generated. If more than one insn is a branch, or if the
6897 branch is not the last insn written, abort. If we can't invert
6898 the branch, make a true label, redirect this jump to that,
6899 emit a jump to the false label and define the true label. */
6901 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6902 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6906 /* Here we get the insn before what was just emitted.
6907 On some machines, emitting the branch can discard
6908 the previous compare insn and emit a replacement. */
6910 /* If there's only one preceding insn... */
6911 insn = get_insns ();
6913 insn = NEXT_INSN (prev);
/* Scan forward for the jump insn that was just emitted.  */
6915 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6916 if (GET_CODE (insn) == JUMP_INSN)
6923 if (branch != get_last_insn ())
6926 if (! invert_jump (branch, if_false_label))
/* Couldn't invert in place: detour through a fresh true label.  */
6928 if_true_label = gen_label_rtx ();
6929 redirect_jump (branch, if_true_label);
6930 emit_jump (if_false_label);
6931 emit_label (if_true_label);
6936 /* Generate code for a comparison expression EXP
6937 (including code to compute the values to be compared)
6938 and set (CC0) according to the result.
6939 SIGNED_CODE should be the rtx operation for this comparison for
6940 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6942 We force a stack adjustment unless there are currently
6943 things pushed on the stack that aren't yet used.  */
/* Returns the rtx produced by compare_from_rtx for EXP's operands.  */
6946 compare (exp, signed_code, unsigned_code)
6948 enum rtx_code signed_code, unsigned_code;
/* Expand both operands first; either expansion may emit insns.  */
6951 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6953 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6954 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6955 register enum machine_mode mode = TYPE_MODE (type);
/* Signedness of the operand type selects which rtx code to use.  */
6956 int unsignedp = TREE_UNSIGNED (type);
6957 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
/* A size rtx is passed only in the (elided) BLKmode case -- the
   "? expr_size ... : NULL_RTX" below is the tail of that conditional.  */
6959 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6961 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6962 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6965 /* Like compare but expects the values to compare as two rtx's.
6966 The decision as to signed or unsigned comparison must be made by the caller.
6968 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6971 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6972 size of MODE should be used.  */
/* Returns either a folded CONST_INT result or a (CODE cc0 0) rtx after
   emitting the compare insn that sets CC0.  */
6975 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6976 register rtx op0, op1;
6979 enum machine_mode mode;
6983 /* If one operand is constant, make it the second one.  */
6985 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
/* Swapping operands requires swapping the comparison direction too.  */
6990 code = swap_condition (code);
6995 op0 = force_not_mem (op0);
6996 op1 = force_not_mem (op1);
/* Flush any deferred stack adjustment before emitting the compare.  */
6999 do_pending_stack_adjust ();
/* Two constants: fold the comparison at compile time.  */
7001 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
7002 return simplify_relational_operation (code, mode, op0, op1);
7005 /* There's no need to do this now that combine.c can eliminate lots of
7006 sign extensions. This can be less efficient in certain cases on other
7009 /* If this is a signed equality comparison, we can do it as an
7010 unsigned comparison since zero-extension is cheaper than sign
7011 extension and comparisons with zero are done as unsigned. This is
7012 the case even on machines that can do fast sign extension, since
7013 zero-extension is easier to combine with other operations than
7014 sign-extension is. If we are comparing against a constant, we must
7015 convert it to what it would look like unsigned.  */
7016 if ((code == EQ || code == NE) && ! unsignedp
7017 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
/* Mask the constant to the operand's mode so the unsigned view of
   both sides agrees.  */
7019 if (GET_CODE (op1) == CONST_INT
7020 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7021 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
/* Emit the actual compare; the caller branches on the (CODE cc0 0)
   rtx we hand back.  */
7026 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7028 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7031 /* Generate code to calculate EXP using a store-flag instruction
7032 and return an rtx for the result. EXP is either a comparison
7033 or a TRUTH_NOT_EXPR whose operand is a comparison.
7035 If TARGET is nonzero, store the result there if convenient.
7037 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7040 Return zero if there is no suitable set-flag instruction
7041 available on this machine.
7043 Once expand_expr has been called on the arguments of the comparison,
7044 we are committed to doing the store flag, since it is not safe to
7045 re-evaluate the expression. We emit the store-flag insn by calling
7046 emit_store_flag, but only expand the arguments if we have a reason
7047 to believe that emit_store_flag will be successful. If we think that
7048 it will, but it isn't, we have to simulate the store-flag with a
7049 set/jump/set sequence. */
7052 do_store_flag (exp, target, mode, only_cheap)
7055 enum machine_mode mode;
7059 tree arg0, arg1, type;
7061 enum machine_mode operand_mode;
7065 enum insn_code icode;
7066 rtx subtarget = target;
7067 rtx result, label, pattern, jump_pat;
7069 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7070 result at the end. We can't simply invert the test since it would
7071 have already been inverted if it were valid. This case occurs for
7072 some floating-point comparisons. */
7074 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7075 invert = 1, exp = TREE_OPERAND (exp, 0);
7077 arg0 = TREE_OPERAND (exp, 0);
7078 arg1 = TREE_OPERAND (exp, 1);
7079 type = TREE_TYPE (arg0);
7080 operand_mode = TYPE_MODE (type);
7081 unsignedp = TREE_UNSIGNED (type);
7083 /* We won't bother with BLKmode store-flag operations because it would mean
7084 passing a lot of information to emit_store_flag. */
7085 if (operand_mode == BLKmode)
7091 /* Get the rtx comparison code to use. We know that EXP is a comparison
7092 operation of some type. Some comparisons against 1 and -1 can be
7093 converted to comparisons with zero. Do so here so that the tests
7094 below will be aware that we have a comparison with zero. These
7095 tests will not catch constants in the first operand, but constants
7096 are rarely passed as the first operand. */
/* The case labels (LT_EXPR, LE_EXPR, ...) are elided in this view;
   each arm rewrites "x < 1" style tests into "x <= 0" style ones.  */
7098 switch (TREE_CODE (exp))
7107 if (integer_onep (arg1))
7108 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7110 code = unsignedp ? LTU : LT;
7113 if (integer_all_onesp (arg1))
7114 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7116 code = unsignedp ? LEU : LE;
7119 if (integer_all_onesp (arg1))
7120 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7122 code = unsignedp ? GTU : GT;
7125 if (integer_onep (arg1))
7126 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7128 code = unsignedp ? GEU : GE;
7134 /* Put a constant second. */
7135 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7137 tem = arg0; arg0 = arg1; arg1 = tem;
7138 code = swap_condition (code);
7141 /* If this is an equality or inequality test of a single bit, we can
7142 do this by shifting the bit being tested to the low-order bit and
7143 masking the result with the constant 1. If the condition was EQ,
7144 we xor it with 1. This does not require an scc insn and is faster
7145 than an scc insn even if we have it. */
7147 if ((code == NE || code == EQ)
7148 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7149 && integer_pow2p (TREE_OPERAND (arg0, 1))
7150 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
/* Bit position of the single set bit in the AND mask.  */
7152 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7153 NULL_RTX, VOIDmode, 0)));
/* SUBTARGET is usable only if it is a register of the right mode and
   evaluating arg0's operand cannot clobber it.  */
7155 if (subtarget == 0 || GET_CODE (subtarget) != REG
7156 || GET_MODE (subtarget) != operand_mode
7157 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7160 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0.  */
7163 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7164 size_int (bitnum), target, 1);
7166 if (GET_MODE (op0) != mode)
7167 op0 = convert_to_mode (mode, op0, 1);
/* The sign bit needs no masking: the shift already isolated it.  */
7169 if (bitnum != TYPE_PRECISION (type) - 1)
7170 op0 = expand_and (op0, const1_rtx, target);
/* XOR with 1 flips the result when the overall sense is EQ.  */
7172 if ((code == EQ && ! invert) || (code == NE && invert))
7173 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7179 /* Now see if we are likely to be able to do this. Return if not. */
7180 if (! can_compare_p (operand_mode))
7182 icode = setcc_gen_code[(int) code];
7183 if (icode == CODE_FOR_nothing
7184 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7186 /* We can only do this if it is one of the special cases that
7187 can be handled without an scc insn. */
7188 if ((code == LT && integer_zerop (arg1))
7189 || (! only_cheap && code == GE && integer_zerop (arg1)))
7191 else if (BRANCH_COST >= 0
7192 && ! only_cheap && (code == NE || code == EQ)
7193 && TREE_CODE (type) != REAL_TYPE
7194 && ((abs_optab->handlers[(int) operand_mode].insn_code
7195 != CODE_FOR_nothing)
7196 || (ffs_optab->handlers[(int) operand_mode].insn_code
7197 != CODE_FOR_nothing)))
/* Point of no return: expand the operands (see header comment).  */
7203 preexpand_calls (exp);
7204 if (subtarget == 0 || GET_CODE (subtarget) != REG
7205 || GET_MODE (subtarget) != operand_mode
7206 || ! safe_from_p (subtarget, arg1))
7209 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7210 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7213 target = gen_reg_rtx (mode);
7215 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7216 because, if the emit_store_flag does anything it will succeed and
7217 OP0 and OP1 will not be used subsequently. */
7219 result = emit_store_flag (target, code,
7220 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7221 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7222 operand_mode, unsignedp, 1);
/* An inverted sense is applied by XORing the 0/1 result with 1.  */
7227 result = expand_binop (mode, xor_optab, result, const1_rtx,
7228 result, 0, OPTAB_LIB_WIDEN);
7232 /* If this failed, we have to do this with set/compare/jump/set code. */
/* TARGET must be a register not mentioned in either operand, else the
   initial move below could clobber an input.  */
7233 if (target == 0 || GET_CODE (target) != REG
7234 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7235 target = gen_reg_rtx (GET_MODE (target));
7237 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7238 result = compare_from_rtx (op0, op1, code, unsignedp,
7239 operand_mode, NULL_RTX, 0);
/* The comparison may fold to a constant; return 0/1 directly then.  */
7240 if (GET_CODE (result) == CONST_INT)
7241 return (((result == const0_rtx && ! invert)
7242 || (result != const0_rtx && invert))
7243 ? const0_rtx : const1_rtx);
/* Otherwise branch around a move of the opposite value.  */
7245 label = gen_label_rtx ();
7246 if (bcc_gen_fctn[(int) code] == 0)
7249 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7250 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7256 /* Generate a tablejump instruction (used for switch statements). */
7258 #ifdef HAVE_tablejump
7260 /* INDEX is the value being switched on, with the lowest value
7261 in the table already subtracted.
7262 MODE is its expected mode (needed if INDEX is constant).
7263 RANGE is the length of the jump table.
7264 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7266 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7267 index value is out of range. */
7270 do_tablejump (index, mode, range, table_label, default_label)
7271 rtx index, range, table_label, default_label;
7272 enum machine_mode mode;
7274 register rtx temp, vector;
7276 /* Do an unsigned comparison (in the proper mode) between the index
7277 expression and the value which represents the length of the range.
7278 Since we just finished subtracting the lower bound of the range
7279 from the index expression, this comparison allows us to simultaneously
7280 check that the original index expression value is both greater than
7281 or equal to the minimum value of the range and less than or equal to
7282 the maximum value of the range. */
/* Note the operand order: this tests RANGE < INDEX (unsigned), i.e.
   jumps to DEFAULT_LABEL when INDEX is out of range; a negative
   pre-subtraction index wraps to a huge unsigned value and is caught.  */
7284 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
7285 emit_jump_insn (gen_bltu (default_label));
7287 /* If index is in range, it must fit in Pmode.
7288 Convert to Pmode so we can index with it. */
7290 index = convert_to_mode (Pmode, index, 1);
7292 /* If flag_force_addr were to affect this address
7293 it could interfere with the tricky assumptions made
7294 about addresses that contain label-refs,
7295 which may be valid only very near the tablejump itself. */
7296 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7297 GET_MODE_SIZE, because this indicates how large insns are. The other
7298 uses should all be Pmode, because they are addresses. This code
7299 could fail if addresses and insns are not the same size. */
/* Address of table entry: table_label + index * entry_size.  */
7300 index = memory_address_noforce
7302 gen_rtx (PLUS, Pmode,
7303 gen_rtx (MULT, Pmode, index,
7304 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7305 gen_rtx (LABEL_REF, Pmode, table_label));
7306 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7307 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
/* The dispatch table is read-only; mark the MEM unchanging so later
   passes may treat the load as constant.  */
7308 RTX_UNCHANGING_P (vector) = 1;
7309 convert_move (temp, vector, 0);
7311 emit_jump_insn (gen_tablejump (temp, table_label));
7313 #ifndef CASE_VECTOR_PC_RELATIVE
7314 /* If we are generating PIC code or if the table is PC-relative, the
7315 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7321 #endif /* HAVE_tablejump */