1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
/* NOTE(review): this listing has its original line numbers baked into each
   line and the numbering skips values, so lines (including some matching
   #else/#endif directives) appear to be missing from this extraction.  */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
/* Round X up to the next multiple of Y, expressed in units of Y.  */
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
40 #ifdef STACK_GROWS_DOWNWARD
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
/* Default the addressing mode used to push onto the stack from the
   stack-growth direction, unless the target already defined it.
   NOTE(review): the matching #else/#endif lines are not visible here.  */
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
50 #define STACK_PUSH_CODE PRE_INC
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
/* NOTE(review): the declaration this comment describes (historically
   `int cse_not_expected;`) is not visible in this listing.  */
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
87 static rtx saveregs_value;
/* Forward declarations for this file's static helpers (pre-ANSI style,
   so no parameter lists are given).  */
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
/* do_pending_stack_adjust is external: it is called from other files.  */
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
/* NOTE(review): part of the MOVE_RATIO commentary and the alternate
   #define branch appear to be missing from this listing.  */
112 /* MOVE_RATIO is the number of move instructions that is better than
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
119 /* A value of around 6 would minimize code size; infinity would minimize
121 #define MOVE_RATIO 15
125 /* This array records the insn_code of insns to perform block moves. */
126 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
128 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
130 #ifndef SLOW_UNALIGNED_ACCESS
131 #define SLOW_UNALIGNED_ACCESS 0
134 /* This is run once per compilation to set up which modes can be used
135 directly in memory and to initialize the block move optab. */
/* NOTE(review): the function header (historically `init_expr_once`),
   several local declarations, braces and #ifdef guards are missing from
   this listing; only the body fragments below are visible.  */
141 enum machine_mode mode;
142 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
/* A scratch (set 0 0) insn whose operands are patched per mode below.  */
145 insn = emit_insn (gen_rtx (SET, 0, 0));
146 pat = PATTERN (insn);
148 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
149 mode = (enum machine_mode) ((int) mode + 1))
155 direct_load[(int) mode] = direct_store[(int) mode] = 0;
156 PUT_MODE (mem, mode);
158 /* See if there is some register that can be used in this mode and
159 directly loaded or stored from memory. */
161 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
162 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
165 if (! HARD_REGNO_MODE_OK (regno, mode))
168 reg = gen_rtx (REG, mode, regno);
/* Ask recog whether a reg<-mem load and a mem<-reg store are valid.
   NOTE(review): the SET_SRC assignments that pair with these SET_DEST
   assignments are not visible in this listing.  */
171 SET_DEST (pat) = reg;
172 if (recog (pat, insn, &num_clobbers) >= 0)
173 direct_load[(int) mode] = 1;
176 SET_DEST (pat) = mem;
177 if (recog (pat, insn, &num_clobbers) >= 0)
178 direct_store[(int) mode] = 1;
/* Default every mode to "no block-move insn", then fill in the ones the
   target provides (each assignment is presumably inside a HAVE_movstr*
   #ifdef whose directives are missing here).  */
181 movstr_optab[(int) mode] = CODE_FOR_nothing;
188 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
192 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
196 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
200 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
204 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
208 /* This is run at the start of compiling a function. */
/* Reset the per-function expression-expansion state.
   NOTE(review): the function header (historically `init_expr`) and the
   surrounding braces are missing from this listing.  */
215 pending_stack_adjust = 0;
216 inhibit_defer_pop = 0;
217 cleanups_this_call = 0;
222 /* Save all variables describing the current status into the structure *P.
223 This is used before starting a nested function. */
/* NOTE(review): the function header (historically `save_expr_status (p)`)
   is missing from this listing.  */
229 /* Instead of saving the postincrement queue, empty it. */
232 p->pending_stack_adjust = pending_stack_adjust;
233 p->inhibit_defer_pop = inhibit_defer_pop;
234 p->cleanups_this_call = cleanups_this_call;
235 p->saveregs_value = saveregs_value;
236 p->forced_labels = forced_labels;
/* Re-initialize the globals for the nested function about to be compiled.  */
238 pending_stack_adjust = 0;
239 inhibit_defer_pop = 0;
240 cleanups_this_call = 0;
245 /* Restore all variables describing the current status from the structure *P.
246 This is used after a nested function. */
249 restore_expr_status (p)
/* NOTE(review): the parameter declaration and opening brace are missing
   from this listing.  */
252 pending_stack_adjust = p->pending_stack_adjust;
253 inhibit_defer_pop = p->inhibit_defer_pop;
254 cleanups_this_call = p->cleanups_this_call;
255 saveregs_value = p->saveregs_value;
256 forced_labels = p->forced_labels;
259 /* Manage the queue of increment instructions to be output
260 for POSTINCREMENT_EXPR expressions, etc. */
/* Head of the chain of queued-increment QUEUED rtxs; newest first.  */
262 static rtx pending_chain;
264 /* Queue up to increment (or change) VAR later. BODY says how:
265 BODY should be the same thing you would pass to emit_insn
266 to increment right away. It will go to emit_insn later on.
268 The value is a QUEUED expression to be used in place of VAR
269 where you want to guarantee the pre-incrementation value of VAR. */
272 enqueue_insn (var, body)
/* NOTE(review): parameter declarations and the opening brace are missing
   from this listing.  */
275 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
276 var, NULL_RTX, NULL_RTX, body, pending_chain);
277 return pending_chain;
280 /* Use protect_from_queue to convert a QUEUED expression
281 into something that you can put immediately into an instruction.
282 If the queued incrementation has not happened yet,
283 protect_from_queue returns the variable itself.
284 If the incrementation has happened, protect_from_queue returns a temp
285 that contains a copy of the old value of the variable.
287 Any time an rtx which might possibly be a QUEUED is to be put
288 into an instruction, it must be passed through protect_from_queue first.
289 QUEUED expressions are not meaningful in instructions.
291 Do not pass a value through protect_from_queue and then hold
292 on to it for a while before putting it in an instruction!
293 If the queue is flushed in between, incorrect code will result. */
296 protect_from_queue (x, modify)
/* NOTE(review): parameter declarations, braces and several statements
   (including the #endif matching the #if 0 below) are missing from
   this listing.  */
300 register RTX_CODE code = GET_CODE (x);
302 #if 0 /* A QUEUED can hang around after the queue is forced out. */
303 /* Shortcut for most common case. */
304 if (pending_chain == 0)
310 /* A special hack for read access to (MEM (QUEUED ...))
311 to facilitate use of autoincrement.
312 Make a copy of the contents of the memory location
313 rather than a copy of the address, but not
314 if the value is of mode BLKmode. */
315 if (code == MEM && GET_MODE (x) != BLKmode
316 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
318 register rtx y = XEXP (x, 0);
319 XEXP (x, 0) = QUEUED_VAR (y);
/* If the increment already happened, copy the memory contents into a
   fresh pseudo just before the increment insn.  */
322 register rtx temp = gen_reg_rtx (GET_MODE (x));
323 emit_insn_before (gen_move_insn (temp, x),
329 /* Otherwise, recursively protect the subexpressions of all
330 the kinds of rtx's that can contain a QUEUED. */
332 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
333 else if (code == PLUS || code == MULT)
335 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
336 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
/* From here on X is itself a QUEUED (the dispatch that establishes this
   is among the missing lines).  */
340 /* If the increment has not happened, use the variable itself. */
341 if (QUEUED_INSN (x) == 0)
342 return QUEUED_VAR (x);
343 /* If the increment has happened and a pre-increment copy exists,
345 if (QUEUED_COPY (x) != 0)
346 return QUEUED_COPY (x);
347 /* The increment has happened but we haven't set up a pre-increment copy.
348 Set one up now, and use it. */
349 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
350 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
352 return QUEUED_COPY (x);
355 /* Return nonzero if X contains a QUEUED expression:
356 if it contains anything that will be altered by a queued increment.
357 We handle only combinations of MEM, PLUS, MINUS and MULT operators
358 since memory addresses generally contain only those. */
/* NOTE(review): the `queued_subexp_p` function header, its switch on
   `code`, and the base cases are missing from this listing.  */
364 register enum rtx_code code = GET_CODE (x);
370 return queued_subexp_p (XEXP (x, 0));
374 return queued_subexp_p (XEXP (x, 0))
375 || queued_subexp_p (XEXP (x, 1));
380 /* Perform all the pending incrementations. */
/* NOTE(review): the `emit_queue` function header and braces are missing.
   Each queued body is emitted and recorded so protect_from_queue can tell
   the increment has happened; the chain is consumed front to back.  */
386 while (p = pending_chain)
388 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
389 pending_chain = QUEUED_NEXT (p);
400 /* Copy data from FROM to TO, where the machine modes are not the same.
401 Both modes may be integer, or both may be floating.
402 UNSIGNEDP should be nonzero if FROM is an unsigned type.
403 This causes zero-extension instead of sign-extension. */
/* NOTE(review): throughout this function the listing is missing lines —
   return type, local declarations, braces, `return;` statements, #else and
   #endif directives, and the float-conversion dispatch that selects the
   libcall below.  The visible code is kept byte-identical.  */
406 convert_move (to, from, unsignedp)
407 register rtx to, from;
410 enum machine_mode to_mode = GET_MODE (to);
411 enum machine_mode from_mode = GET_MODE (from);
412 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
413 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
417 /* rtx code for making an equivalent value. */
418 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
420 to = protect_from_queue (to, 1);
421 from = protect_from_queue (from, 0);
/* Mixing float and integer modes is not handled here.  */
423 if (to_real != from_real)
426 if (to_mode == from_mode
427 || (from_mode == VOIDmode && CONSTANT_P (from)))
429 emit_move_insn (to, from);
/* Float-to-float conversions: try each target extend/trunc insn.  */
435 #ifdef HAVE_extendsfdf2
436 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
438 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
442 #ifdef HAVE_extendsfxf2
443 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
445 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
449 #ifdef HAVE_extendsftf2
450 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
452 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
456 #ifdef HAVE_extenddfxf2
457 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
459 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
463 #ifdef HAVE_extenddftf2
464 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
466 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
470 #ifdef HAVE_truncdfsf2
471 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
473 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
477 #ifdef HAVE_truncxfsf2
478 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
480 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
484 #ifdef HAVE_trunctfsf2
485 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
487 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
491 #ifdef HAVE_truncxfdf2
492 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
494 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
498 #ifdef HAVE_trunctfdf2
499 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
501 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* No insn available: fall back to a library call.  The switch on the
   mode pair that selects among these assignments is among the missing
   lines.  */
513 libcall = extendsfdf2_libfunc;
517 libcall = extendsfxf2_libfunc;
521 libcall = extendsftf2_libfunc;
530 libcall = truncdfsf2_libfunc;
534 libcall = extenddfxf2_libfunc;
538 libcall = extenddftf2_libfunc;
547 libcall = truncxfsf2_libfunc;
551 libcall = truncxfdf2_libfunc;
560 libcall = trunctfsf2_libfunc;
564 libcall = trunctfdf2_libfunc;
570 if (libcall == (rtx) 0)
571 /* This conversion is not implemented yet. */
574 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
575 emit_move_insn (to, hard_libcall_value (to_mode));
579 /* Now both modes are integers. */
581 /* Handle expanding beyond a word. */
582 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
583 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
590 enum machine_mode lowpart_mode;
591 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
593 /* Try converting directly if the insn is supported. */
594 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
597 /* If FROM is a SUBREG, put it into a register. Do this
598 so that we always generate the same set of insns for
599 better cse'ing; if an intermediate assignment occurred,
600 we won't be doing the operation directly on the SUBREG. */
601 if (optimize > 0 && GET_CODE (from) == SUBREG)
602 from = force_reg (from_mode, from);
603 emit_unop_insn (code, to, from, equiv_code);
606 /* Next, try converting via full word. */
607 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
608 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
609 != CODE_FOR_nothing))
611 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
612 emit_unop_insn (code, to,
613 gen_lowpart (word_mode, to), equiv_code);
617 /* No special multiword conversion insn; do it by hand. */
620 /* Get a copy of FROM widened to a word, if necessary. */
621 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
622 lowpart_mode = word_mode;
624 lowpart_mode = from_mode;
626 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
628 lowpart = gen_lowpart (lowpart_mode, to);
629 emit_move_insn (lowpart, lowfrom);
631 /* Compute the value to put in each remaining word. */
/* Zero fill for unsigned; for signed, replicate the sign via an slt
   insn when available, otherwise an arithmetic right shift.  */
633 fill_value = const0_rtx;
638 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
639 && STORE_FLAG_VALUE == -1)
641 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
643 fill_value = gen_reg_rtx (word_mode);
644 emit_insn (gen_slt (fill_value));
650 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
651 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
653 fill_value = convert_to_mode (word_mode, fill_value, 1);
657 /* Fill the remaining words. */
658 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
660 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
661 rtx subword = operand_subword (to, index, 1, to_mode);
666 if (fill_value != subword)
667 emit_move_insn (subword, fill_value);
670 insns = get_insns ();
673 emit_no_conflict_block (insns, to, from, NULL_RTX,
674 gen_rtx (equiv_code, to_mode, from));
/* Truncating a multiword value to at most a word: take the low part.  */
678 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
680 convert_move (to, gen_lowpart (word_mode, from), 0);
684 /* Handle pointer conversion */ /* SPEE 900220 */
685 if (to_mode == PSImode)
687 if (from_mode != SImode)
688 from = convert_to_mode (SImode, from, unsignedp);
690 #ifdef HAVE_truncsipsi
693 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
696 #endif /* HAVE_truncsipsi */
700 if (from_mode == PSImode)
702 if (to_mode != SImode)
704 from = convert_to_mode (SImode, from, unsignedp);
709 #ifdef HAVE_extendpsisi
710 if (HAVE_extendpsisi)
712 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
715 #endif /* HAVE_extendpsisi */
720 /* Now follow all the conversions between integers
721 no more than a word long. */
723 /* For truncation, usually we can just refer to FROM in a narrower mode. */
724 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
725 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
726 GET_MODE_BITSIZE (from_mode))
727 && ((GET_CODE (from) == MEM
728 && ! MEM_VOLATILE_P (from)
729 && direct_load[(int) to_mode]
730 && ! mode_dependent_address_p (XEXP (from, 0)))
731 || GET_CODE (from) == REG
732 || GET_CODE (from) == SUBREG))
734 emit_move_insn (to, gen_lowpart (to_mode, from));
738 /* For truncation, usually we can just refer to FROM in a narrower mode. */
739 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
741 /* Convert directly if that works. */
742 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
745 /* If FROM is a SUBREG, put it into a register. Do this
746 so that we always generate the same set of insns for
747 better cse'ing; if an intermediate assignment occurred,
748 we won't be doing the operation directly on the SUBREG. */
749 if (optimize > 0 && GET_CODE (from) == SUBREG)
750 from = force_reg (from_mode, from);
751 emit_unop_insn (code, to, from, equiv_code);
756 enum machine_mode intermediate;
758 /* Search for a mode to convert via. */
759 for (intermediate = from_mode; intermediate != VOIDmode;
760 intermediate = GET_MODE_WIDER_MODE (intermediate))
761 if ((can_extend_p (to_mode, intermediate, unsignedp)
763 && (can_extend_p (intermediate, from_mode, unsignedp)
764 != CODE_FOR_nothing))
766 convert_move (to, convert_to_mode (intermediate, from,
767 unsignedp), unsignedp);
771 /* No suitable intermediate mode. */
776 /* Support special truncate insns for certain modes. */
/* Each pair below tries the target's truncNM2 insn; the fallback path
   forces FROM into a register and retries (presumably under #else, whose
   directives are missing here).  */
778 if (from_mode == DImode && to_mode == SImode)
780 #ifdef HAVE_truncdisi2
783 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
787 convert_move (to, force_reg (from_mode, from), unsignedp);
791 if (from_mode == DImode && to_mode == HImode)
793 #ifdef HAVE_truncdihi2
796 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
800 convert_move (to, force_reg (from_mode, from), unsignedp);
804 if (from_mode == DImode && to_mode == QImode)
806 #ifdef HAVE_truncdiqi2
809 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
813 convert_move (to, force_reg (from_mode, from), unsignedp);
817 if (from_mode == SImode && to_mode == HImode)
819 #ifdef HAVE_truncsihi2
822 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
826 convert_move (to, force_reg (from_mode, from), unsignedp);
830 if (from_mode == SImode && to_mode == QImode)
832 #ifdef HAVE_truncsiqi2
835 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
839 convert_move (to, force_reg (from_mode, from), unsignedp);
843 if (from_mode == HImode && to_mode == QImode)
845 #ifdef HAVE_trunchiqi2
848 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
852 convert_move (to, force_reg (from_mode, from), unsignedp);
856 /* Handle truncation of volatile memrefs, and so on;
857 the things that couldn't be truncated directly,
858 and for which there was no special instruction. */
859 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
861 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
862 emit_move_insn (to, temp);
866 /* Mode combination is not recognized. */
870 /* Return an rtx for a value that would result
871 from converting X to mode MODE.
872 Both X and MODE may be floating, or both integer.
873 UNSIGNEDP is nonzero if X is an unsigned value.
874 This can be done by referring to a part of X in place
875 or by copying to a new temporary with conversion.
877 This function *must not* call protect_from_queue
878 except when putting X into an insn (in which case convert_move does it). */
881 convert_to_mode (mode, x, unsignedp)
882 enum machine_mode mode;
/* NOTE(review): the remaining parameter declarations, the declaration of
   `temp`, braces, the `return x;` for the no-op case and the final
   `return temp;` are missing from this listing.  */
888 if (mode == GET_MODE (x))
891 /* There is one case that we must handle specially: If we are converting
892 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
893 we are to interpret the constant as unsigned, gen_lowpart will do
894 the wrong if the constant appears negative. What we want to do is
895 make the high-order word of the constant zero, not all ones. */
897 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
898 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
899 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
900 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
902 /* We can do this with a gen_lowpart if both desired and current modes
903 are integer, and this is either a constant integer, a register, or a
904 non-volatile MEM. Except for the constant case, we must be narrowing
907 if (GET_CODE (x) == CONST_INT
908 || (GET_MODE_CLASS (mode) == MODE_INT
909 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
910 && (GET_CODE (x) == CONST_DOUBLE
911 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
912 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
913 && direct_load[(int) mode]
914 || GET_CODE (x) == REG)))))
915 return gen_lowpart (mode, x);
/* Otherwise convert into a fresh pseudo-register.  */
917 temp = gen_reg_rtx (mode);
918 convert_move (temp, x, unsignedp);
922 /* Generate several move instructions to copy LEN bytes
923 from block FROM to block TO. (These are MEM rtx's with BLKmode).
924 The caller must pass FROM and TO
925 through protect_from_queue before calling.
926 ALIGN (in bytes) is maximum alignment we can assume. */
/* State shared between move_by_pieces and its subroutine.
   NOTE(review): most of this struct's members (to/from rtxs, addresses,
   len, offset, autinc/reverse flags) are missing from this listing.  */
928 struct move_by_pieces
937 int explicit_inc_from;
943 static void move_by_pieces_1 ();
944 static int move_by_pieces_ninsns ();
/* Copy LEN bytes from FROM to TO by emitting individual move insns,
   choosing the widest usable integer mode for each chunk.
   NOTE(review): parameter declarations, braces, several struct-member
   assignments and #endif directives are missing from this listing.  */
947 move_by_pieces (to, from, len, align)
951 struct move_by_pieces data;
952 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
953 int max_size = MOVE_MAX + 1;
956 data.to_addr = to_addr;
957 data.from_addr = from_addr;
/* Detect addresses that already auto-increment/decrement.  */
961 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
962 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
964 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
965 || GET_CODE (from_addr) == POST_INC
966 || GET_CODE (from_addr) == POST_DEC);
968 data.explicit_inc_from = 0;
969 data.explicit_inc_to = 0;
/* Copy backwards when the destination address decrements.  */
971 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
972 if (data.reverse) data.offset = len;
975 /* If copying requires more than two move insns,
976 copy addresses to registers (to make displacements shorter)
977 and use post-increment if available. */
978 if (!(data.autinc_from && data.autinc_to)
979 && move_by_pieces_ninsns (len, align) > 2)
981 #ifdef HAVE_PRE_DECREMENT
982 if (data.reverse && ! data.autinc_from)
984 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
985 data.autinc_from = 1;
986 data.explicit_inc_from = -1;
989 #ifdef HAVE_POST_INCREMENT
990 if (! data.autinc_from)
992 data.from_addr = copy_addr_to_reg (from_addr);
993 data.autinc_from = 1;
994 data.explicit_inc_from = 1;
997 if (!data.autinc_from && CONSTANT_P (from_addr))
998 data.from_addr = copy_addr_to_reg (from_addr);
999 #ifdef HAVE_PRE_DECREMENT
1000 if (data.reverse && ! data.autinc_to)
1002 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len))
1004 data.explicit_inc_to = -1;
1007 #ifdef HAVE_POST_INCREMENT
1008 if (! data.reverse && ! data.autinc_to)
1010 data.to_addr = copy_addr_to_reg (to_addr);
1012 data.explicit_inc_to = 1;
1015 if (!data.autinc_to && CONSTANT_P (to_addr))
1016 data.to_addr = copy_addr_to_reg (to_addr);
/* If unaligned access is cheap, pretend maximal alignment.  */
1019 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1020 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1023 /* First move what we can in the largest integer mode, then go to
1024 successively smaller modes. */
1026 while (max_size > 1)
1028 enum machine_mode mode = VOIDmode, tmode;
1029 enum insn_code icode;
1031 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1032 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1033 if (GET_MODE_SIZE (tmode) < max_size)
1036 if (mode == VOIDmode)
1039 icode = mov_optab->handlers[(int) mode].insn_code;
1040 if (icode != CODE_FOR_nothing
1041 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1042 GET_MODE_SIZE (mode)))
1043 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1045 max_size = GET_MODE_SIZE (mode);
1048 /* The code above should have handled everything. */
1053 /* Return number of insns required to move L bytes by pieces.
1054 ALIGN (in bytes) is maximum alignment we can assume. */
/* NOTE(review): parameter declarations, braces and the final
   `return n_insns;` are missing from this listing.  The loop structure
   mirrors the main loop of move_by_pieces above.  */
1057 move_by_pieces_ninsns (l, align)
1061 register int n_insns = 0;
1062 int max_size = MOVE_MAX + 1;
1064 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1065 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1068 while (max_size > 1)
1070 enum machine_mode mode = VOIDmode, tmode;
1071 enum insn_code icode;
1073 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1074 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1075 if (GET_MODE_SIZE (tmode) < max_size)
1078 if (mode == VOIDmode)
1081 icode = mov_optab->handlers[(int) mode].insn_code;
1082 if (icode != CODE_FOR_nothing
1083 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1084 GET_MODE_SIZE (mode)))
1085 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1087 max_size = GET_MODE_SIZE (mode);
1093 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1094 with move instructions for mode MODE. GENFUN is the gen_... function
1095 to make a move insn for that mode. DATA has all the other info. */
/* NOTE(review): the genfun parameter declaration, braces, the from1
   conditional's first line, #endif directives and the len decrement are
   missing from this listing.  */
1098 move_by_pieces_1 (genfun, mode, data)
1100 enum machine_mode mode;
1101 struct move_by_pieces *data;
1103 register int size = GET_MODE_SIZE (mode);
1104 register rtx to1, from1;
1106 while (data->len >= size)
1108 if (data->reverse) data->offset -= size;
/* Build the source/destination MEMs: either through the auto-inc
   address register or at a constant offset from the block.  */
1110 to1 = (data->autinc_to
1111 ? gen_rtx (MEM, mode, data->to_addr)
1112 : change_address (data->to, mode,
1113 plus_constant (data->to_addr, data->offset)));
1116 ? gen_rtx (MEM, mode, data->from_addr)
1117 : change_address (data->from, mode,
1118 plus_constant (data->from_addr, data->offset)));
1120 #ifdef HAVE_PRE_DECREMENT
1121 if (data->explicit_inc_to < 0)
1122 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1123 if (data->explicit_inc_from < 0)
1124 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1127 emit_insn ((*genfun) (to1, from1));
1128 #ifdef HAVE_POST_INCREMENT
1129 if (data->explicit_inc_to > 0)
1130 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1131 if (data->explicit_inc_from > 0)
1132 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1135 if (! data->reverse) data->offset += size;
1141 /* Emit code to move a block Y to a block X.
1142 This may be done with string-move instructions,
1143 with multiple scalar move instructions, or with a library call.
1145 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1147 SIZE is an rtx that says how long they are.
1148 ALIGN is the maximum alignment we can assume they have,
1149 measured in bytes. */
/* NOTE(review): parameter declarations, braces, abort() calls after the
   sanity checks, local declarations (pat, op2), the emit of pat, and the
   #else between the memcpy/bcopy calls are missing from this listing.  */
1152 emit_block_move (x, y, size, align)
1157 if (GET_MODE (x) != BLKmode)
1160 if (GET_MODE (y) != BLKmode)
1163 x = protect_from_queue (x, 1);
1164 y = protect_from_queue (y, 0);
1165 size = protect_from_queue (size, 0);
1167 if (GET_CODE (x) != MEM)
1169 if (GET_CODE (y) != MEM)
/* Small constant-size copies: expand inline, move by pieces.  */
1174 if (GET_CODE (size) == CONST_INT
1175 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1176 move_by_pieces (x, y, INTVAL (size), align);
1179 /* Try the most limited insn first, because there's no point
1180 including more than one in the machine description unless
1181 the more limited one has some advantage. */
1183 rtx opalign = GEN_INT (align);
1184 enum machine_mode mode;
1186 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1187 mode = GET_MODE_WIDER_MODE (mode))
1189 enum insn_code code = movstr_optab[(int) mode];
1191 if (code != CODE_FOR_nothing
1192 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1193 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1194 && (insn_operand_predicate[(int) code][0] == 0
1195 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1196 && (insn_operand_predicate[(int) code][1] == 0
1197 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1198 && (insn_operand_predicate[(int) code][3] == 0
1199 || (*insn_operand_predicate[(int) code][3]) (opalign,
1203 rtx last = get_last_insn ();
1206 op2 = convert_to_mode (mode, size, 1);
1207 if (insn_operand_predicate[(int) code][2] != 0
1208 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1209 op2 = copy_to_mode_reg (mode, op2);
1211 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* If the pattern expander refused, discard any insns it emitted.  */
1218 delete_insns_since (last);
/* No movstr insn matched: fall back to a library call.  */
1222 #ifdef TARGET_MEM_FUNCTIONS
1223 emit_library_call (memcpy_libfunc, 0,
1224 VOIDmode, 3, XEXP (x, 0), Pmode,
1226 convert_to_mode (Pmode, size, 1), Pmode);
1228 emit_library_call (bcopy_libfunc, 0,
1229 VOIDmode, 3, XEXP (y, 0), Pmode,
1231 convert_to_mode (Pmode, size, 1), Pmode);
1236 /* Copy all or part of a value X into registers starting at REGNO.
1237 The number of registers to be filled is NREGS. */
/* NOTE(review): most parameter declarations, braces, local declarations
   (i, pat, last), the gen_load_multiple success path and the #endif are
   missing from this listing.  */
1240 move_block_to_reg (regno, x, nregs, mode)
1244 enum machine_mode mode;
/* A constant the target cannot load directly goes through memory.  */
1249 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1250 x = validize_mem (force_const_mem (mode, x));
1252 /* See if the machine can do this with a load multiple insn. */
1253 #ifdef HAVE_load_multiple
1254 last = get_last_insn ();
1255 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1263 delete_insns_since (last);
/* Fallback: move one word into each register.  */
1266 for (i = 0; i < nregs; i++)
1267 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1268 operand_subword_force (x, i, mode));
1271 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1272 The number of registers to be filled is NREGS. */
/* NOTE(review): parameter declarations, braces, local declarations, the
   gen_store_multiple success path, the #endif and the handling when
   operand_subword returns 0 are missing from this listing.  */
1275 move_block_from_reg (regno, x, nregs)
1283 /* See if the machine can do this with a store multiple insn. */
1284 #ifdef HAVE_store_multiple
1285 last = get_last_insn ();
1286 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1294 delete_insns_since (last);
/* Fallback: store each register into the corresponding word of X.  */
1297 for (i = 0; i < nregs; i++)
1299 rtx tem = operand_subword (x, i, 1, BLKmode);
1304 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1308 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
/* NOTE(review): parameter declarations and braces are missing from this
   listing.  Emits a (use (reg ...)) for each register.  */
1311 use_regs (regno, nregs)
1317 for (i = 0; i < nregs; i++)
1318 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1321 /* Write zeros through the storage of OBJECT.
1322 If OBJECT has BLKmode, SIZE is its length in bytes. */
/* NOTE(review): parameter declarations, braces and the #else between the
   memset/bzero branches are missing from this listing.  */
1325 clear_storage (object, size)
1329 if (GET_MODE (object) == BLKmode)
1331 #ifdef TARGET_MEM_FUNCTIONS
1332 emit_library_call (memset_libfunc, 0,
1334 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1335 GEN_INT (size), Pmode);
1337 emit_library_call (bzero_libfunc, 0,
1339 XEXP (object, 0), Pmode,
1340 GEN_INT (size), Pmode);
/* Non-BLKmode object: a single move of zero suffices.  */
1344 emit_move_insn (object, const0_rtx);
1347 /* Generate code to copy Y into X.
1348 Both Y and X must have the same mode, except that
1349 Y can be a constant with VOIDmode.
1350 This mode cannot be BLKmode; use emit_block_move for that.
1352 Return the last instruction emitted. */
/* NOTE(review): parameter declarations, braces, abort() calls, parts of
   the address-validity conditions, the for-loop header over words and the
   final return are missing from this listing.  */
1355 emit_move_insn (x, y)
1358 enum machine_mode mode = GET_MODE (x);
1361 x = protect_from_queue (x, 1);
1362 y = protect_from_queue (y, 0);
/* Modes must agree (Y may be a VOIDmode constant); BLKmode is invalid.  */
1364 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1367 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1368 y = force_const_mem (mode, y);
1370 /* If X or Y are memory references, verify that their addresses are valid
1372 if (GET_CODE (x) == MEM
1373 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1374 && ! push_operand (x, GET_MODE (x)))
1376 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1377 x = change_address (x, VOIDmode, XEXP (x, 0));
1379 if (GET_CODE (y) == MEM
1380 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1382 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1383 y = change_address (y, VOIDmode, XEXP (y, 0));
1385 if (mode == BLKmode)
/* Use the target's move insn for this mode when one exists.  */
1388 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1390 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1392 /* This will handle any multi-word mode that lacks a move_insn pattern.
1393 However, you will get better code if you define such patterns,
1394 even if they must turn into multiple assembler instructions. */
1395 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1400 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1403 rtx xpart = operand_subword (x, i, 1, mode);
1404 rtx ypart = operand_subword (y, i, 1, mode);
1406 /* If we can't get a part of Y, put Y into memory if it is a
1407 constant. Otherwise, force it into a register. If we still
1408 can't get a part of Y, abort. */
1409 if (ypart == 0 && CONSTANT_P (y))
1411 y = force_const_mem (mode, y);
1412 ypart = operand_subword (y, i, 1, mode);
1414 else if (ypart == 0)
1415 ypart = operand_subword_force (y, i, mode);
1417 if (xpart == 0 || ypart == 0)
1420 last_insn = emit_move_insn (xpart, ypart);
1428 /* Pushing data onto the stack. */
1430 /* Push a block of length SIZE (perhaps variable)
1431 and return an rtx to address the beginning of the block.
1432 Note that it is not possible for the value returned to be a QUEUED.
1433 The value may be virtual_outgoing_args_rtx.
1435 EXTRA is the number of bytes of padding to push in addition to SIZE.
1436 BELOW nonzero means this padding comes at low addresses;
1437 otherwise, the padding comes at high addresses. */
1440 push_block (size, extra, below)
/* First adjust the stack pointer down (or up) by SIZE + EXTRA bytes.
   Three cases by the form of SIZE: constant, plain REG with no extra,
   or general expression needing an explicit add.  */
1445 if (CONSTANT_P (size))
1446 anti_adjust_stack (plus_constant (size, extra));
1447 else if (GET_CODE (size) == REG && extra == 0)
1448 anti_adjust_stack (size);
1451 rtx temp = copy_to_mode_reg (Pmode, size);
1453 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1454 temp, 0, OPTAB_LIB_WIDEN);
1455 anti_adjust_stack (temp);
/* Now compute the address of the start of the block just allocated.
   When the stack grows downward the new block begins at the (virtual)
   outgoing-args pointer; otherwise it is SIZE (+ padding) below it.  */
1458 #ifdef STACK_GROWS_DOWNWARD
1459 temp = virtual_outgoing_args_rtx;
1460 if (extra != 0 && below)
1461 temp = plus_constant (temp, extra);
1463 if (GET_CODE (size) == CONST_INT)
1464 temp = plus_constant (virtual_outgoing_args_rtx,
1465 - INTVAL (size) - (below ? 0 : extra));
1466 else if (extra != 0 && !below)
1467 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1468 negate_rtx (Pmode, plus_constant (size, extra)));
1470 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1471 negate_rtx (Pmode, size));
/* Legitimize the address before returning it.  */
1474 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1480 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1483 /* Generate code to push X onto the stack, assuming it has mode MODE and
1485 MODE is redundant except when X is a CONST_INT (since they don't
1487 SIZE is an rtx for the size of data to be copied (in bytes),
1488 needed only if X is BLKmode.
1490 ALIGN (in bytes) is maximum alignment we can assume.
1492 If PARTIAL is nonzero, then copy that many of the first words
1493 of X into registers starting with REG, and push the rest of X.
1494 The amount of space pushed is decreased by PARTIAL words,
1495 rounded *down* to a multiple of PARM_BOUNDARY.
1496 REG must be a hard register in this case.
1498 EXTRA is the amount in bytes of extra space to leave next to this arg.
1499 This is ignored if an argument block has already been allocated.
1501 On a machine that lacks real push insns, ARGS_ADDR is the address of
1502 the bottom of the argument block for this call. We use indexing off there
1503 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1504 argument block has not been preallocated.
1506 ARGS_SO_FAR is the size of args previously pushed for this call. */
1509 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1510 args_addr, args_so_far)
1512 enum machine_mode mode;
1523 enum direction stack_direction
1524 #ifdef STACK_GROWS_DOWNWARD
1530 /* Decide where to pad the argument: `downward' for below,
1531 `upward' for above, or `none' for don't pad it.
1532 Default is below for small data on big-endian machines; else above. */
1533 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1535 /* Invert direction if stack is post-update. */
1536 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1537 if (where_pad != none)
1538 where_pad = (where_pad == downward ? upward : downward);
1540 xinner = x = protect_from_queue (x, 0);
/* Three major cases follow: (1) BLKmode block of memory, (2) scalar
   split between registers and stack, (3) plain scalar push.  */
1542 if (mode == BLKmode)
1544 /* Copy a block into the stack, entirely or partially. */
1547 int used = partial * UNITS_PER_WORD;
1548 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1556 /* USED is now the # of bytes we need not copy to the stack
1557 because registers will take care of them. */
/* Step past the register-covered prefix of the source block.  */
1560 xinner = change_address (xinner, BLKmode,
1561 plus_constant (XEXP (xinner, 0), used));
1563 /* If the partial register-part of the arg counts in its stack size,
1564 skip the part of stack space corresponding to the registers.
1565 Otherwise, start copying to the beginning of the stack space,
1566 by setting SKIP to 0. */
1567 #ifndef REG_PARM_STACK_SPACE
1573 #ifdef PUSH_ROUNDING
1574 /* Do it with several push insns if that doesn't take lots of insns
1575 and if there is no difficulty with push insns that skip bytes
1576 on the stack for alignment purposes. */
1578 && GET_CODE (size) == CONST_INT
1580 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1582 /* Here we avoid the case of a structure whose weak alignment
1583 forces many pushes of a small amount of data,
1584 and such small pushes do rounding that causes trouble. */
1585 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1586 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1587 || PUSH_ROUNDING (align) == align)
1588 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1590 /* Push padding now if padding above and stack grows down,
1591 or if padding below and stack grows up.
1592 But if space already allocated, this has already been done. */
1593 if (extra && args_addr == 0
1594 && where_pad != none && where_pad != stack_direction)
1595 anti_adjust_stack (GEN_INT (extra));
/* The destination MEM uses the auto-modify push address, so each piece
   moved by move_by_pieces is a real push insn.  */
1597 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1598 INTVAL (size) - used, align);
1601 #endif /* PUSH_ROUNDING */
1603 /* Otherwise make space on the stack and copy the data
1604 to the address of that space. */
1606 /* Deduct words put into registers from the size we must copy. */
1609 if (GET_CODE (size) == CONST_INT)
1610 size = GEN_INT (INTVAL (size) - used);
1612 size = expand_binop (GET_MODE (size), sub_optab, size,
1613 GEN_INT (used), NULL_RTX, 0,
1617 /* Get the address of the stack space.
1618 In this case, we do not deal with EXTRA separately.
1619 A single stack adjust will do. */
1622 temp = push_block (size, extra, where_pad == downward);
1625 else if (GET_CODE (args_so_far) == CONST_INT)
1626 temp = memory_address (BLKmode,
1627 plus_constant (args_addr,
1628 skip + INTVAL (args_so_far)));
1630 temp = memory_address (BLKmode,
1631 plus_constant (gen_rtx (PLUS, Pmode,
1632 args_addr, args_so_far),
1635 /* TEMP is the address of the block. Copy the data there. */
1636 if (GET_CODE (size) == CONST_INT
1637 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1640 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1641 INTVAL (size), align);
1644 /* Try the most limited insn first, because there's no point
1645 including more than one in the machine description unless
1646 the more limited one has some advantage. */
1647 #ifdef HAVE_movstrqi
1649 && GET_CODE (size) == CONST_INT
1650 && ((unsigned) INTVAL (size)
1651 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1653 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1654 xinner, size, GEN_INT (align)));
1658 #ifdef HAVE_movstrhi
1660 && GET_CODE (size) == CONST_INT
1661 && ((unsigned) INTVAL (size)
1662 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1664 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1665 xinner, size, GEN_INT (align)));
1669 #ifdef HAVE_movstrsi
1672 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1673 xinner, size, GEN_INT (align)));
1677 #ifdef HAVE_movstrdi
1680 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1681 xinner, size, GEN_INT (align)));
/* No movstr pattern applies: fall back to a bcopy/memcpy library call.  */
1686 #ifndef ACCUMULATE_OUTGOING_ARGS
1687 /* If the source is referenced relative to the stack pointer,
1688 copy it to another register to stabilize it. We do not need
1689 to do this if we know that we won't be changing sp. */
1691 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1692 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1693 temp = copy_to_reg (temp);
1696 /* Make inhibit_defer_pop nonzero around the library call
1697 to force it to pop the bcopy-arguments right away. */
1699 #ifdef TARGET_MEM_FUNCTIONS
1700 emit_library_call (memcpy_libfunc, 0,
1701 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1704 emit_library_call (bcopy_libfunc, 0,
1705 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1711 else if (partial > 0)
1713 /* Scalar partly in registers. */
1715 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1718 /* # words of start of argument
1719 that we must make space for but need not store. */
1720 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1721 int args_offset = INTVAL (args_so_far);
1724 /* Push padding now if padding above and stack grows down,
1725 or if padding below and stack grows up.
1726 But if space already allocated, this has already been done. */
1727 if (extra && args_addr == 0
1728 && where_pad != none && where_pad != stack_direction)
1729 anti_adjust_stack (GEN_INT (extra));
1731 /* If we make space by pushing it, we might as well push
1732 the real data. Otherwise, we can leave OFFSET nonzero
1733 and leave the space uninitialized. */
1737 /* Now NOT_STACK gets the number of words that we don't need to
1738 allocate on the stack. */
1739 not_stack = partial - offset;
1741 /* If the partial register-part of the arg counts in its stack size,
1742 skip the part of stack space corresponding to the registers.
1743 Otherwise, start copying to the beginning of the stack space,
1744 by setting SKIP to 0. */
1745 #ifndef REG_PARM_STACK_SPACE
1751 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1752 x = validize_mem (force_const_mem (mode, x));
1754 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1755 SUBREGs of such registers are not allowed. */
1756 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1757 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1758 x = copy_to_reg (x);
1760 /* Loop over all the words allocated on the stack for this arg. */
1761 /* We can do it by words, because any scalar bigger than a word
1762 has a size a multiple of a word. */
1763 #ifndef PUSH_ARGS_REVERSED
1764 for (i = not_stack; i < size; i++)
1766 for (i = size - 1; i >= not_stack; i--)
1768 if (i >= not_stack + offset)
/* Recursive call pushes one word at its precise stack offset.  */
1769 emit_push_insn (operand_subword_force (x, i, mode),
1770 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
1772 GEN_INT (args_offset + ((i - not_stack + skip)
1773 * UNITS_PER_WORD)));
/* Case 3: plain scalar push.  */
1779 /* Push padding now if padding above and stack grows down,
1780 or if padding below and stack grows up.
1781 But if space already allocated, this has already been done. */
1782 if (extra && args_addr == 0
1783 && where_pad != none && where_pad != stack_direction)
1784 anti_adjust_stack (GEN_INT (extra));
1786 #ifdef PUSH_ROUNDING
1788 addr = gen_push_operand ();
1791 if (GET_CODE (args_so_far) == CONST_INT)
1793 = memory_address (mode,
1794 plus_constant (args_addr, INTVAL (args_so_far)));
1796 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
1799 emit_move_insn (gen_rtx (MEM, mode, addr), x);
1803 /* If part should go in registers, copy that part
1804 into the appropriate registers. Do this now, at the end,
1805 since mem-to-mem copies above may do function calls. */
1807 move_block_to_reg (REGNO (reg), x, partial, mode);
/* Trailing padding adjoins the arg on the stack-growth side.  */
1809 if (extra && args_addr == 0 && where_pad == stack_direction)
1810 anti_adjust_stack (GEN_INT (extra));
1813 /* Output a library call to function FUN (a SYMBOL_REF rtx)
1814 (emitting the queue unless NO_QUEUE is nonzero),
1815 for a value of mode OUTMODE,
1816 with NARGS different arguments, passed as alternating rtx values
1817 and machine_modes to convert them to.
1818 The rtx values should have been passed through protect_from_queue already.
1820 NO_QUEUE will be true if and only if the library call is a `const' call
1821 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1822 to the variable is_const in expand_call.
1824 NO_QUEUE must be true for const calls, because if it isn't, then
1825 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
1826 and will be lost if the libcall sequence is optimized away.
1828 NO_QUEUE must be false for non-const calls, because if it isn't, the
1829 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
1830 optimized. For instance, the instruction scheduler may incorrectly
1831 move memory references across the non-const call. */
1834 emit_library_call (va_alist)
1838 struct args_size args_size;
1839 register int argnum;
1840 enum machine_mode outmode;
1847 CUMULATIVE_ARGS args_so_far;
1848 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1849 struct args_size offset; struct args_size size; };
1851 int old_inhibit_defer_pop = inhibit_defer_pop;
/* Pull the fixed leading arguments out of the varargs list.  */
1856 orgfun = fun = va_arg (p, rtx);
1857 no_queue = va_arg (p, int);
1858 outmode = va_arg (p, enum machine_mode);
1859 nargs = va_arg (p, int);
1861 /* Copy all the libcall-arguments out of the varargs data
1862 and into a vector ARGVEC.
1864 Compute how to pass each argument. We only support a very small subset
1865 of the full argument passing conventions to limit complexity here since
1866 library functions shouldn't have many args. */
1868 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1870 INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1872 args_size.constant = 0;
1875 for (count = 0; count < nargs; count++)
1877 rtx val = va_arg (p, rtx);
1878 enum machine_mode mode = va_arg (p, enum machine_mode);
1880 /* We cannot convert the arg value to the mode the library wants here;
1881 must do it earlier where we know the signedness of the arg. */
1883 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1886 /* On some machines, there's no way to pass a float to a library fcn.
1887 Pass it as a double instead. */
1888 #ifdef LIBGCC_NEEDS_DOUBLE
1889 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
1890 val = convert_to_mode (DFmode, val), mode = DFmode;
1893 /* There's no need to call protect_from_queue, because
1894 either emit_move_insn or emit_push_insn will do that. */
1896 /* Make sure it is a reasonable operand for a move or push insn. */
1897 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1898 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
1899 val = force_operand (val, NULL_RTX);
1901 argvec[count].value = val;
1902 argvec[count].mode = mode;
1904 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1905 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
/* Ask the target where this argument goes: a register, the stack,
   or partly in each (PARTIAL words in registers, rest on stack).  */
1909 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1910 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
1912 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1913 argvec[count].partial
1914 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
1916 argvec[count].partial = 0;
1919 locate_and_pad_parm (mode, NULL_TREE,
1920 argvec[count].reg && argvec[count].partial == 0,
1921 NULL_TREE, &args_size, &argvec[count].offset,
1922 &argvec[count].size);
/* Variable-size library args are not supported.  */
1924 if (argvec[count].size.var)
1927 #ifndef REG_PARM_STACK_SPACE
1928 if (argvec[count].partial)
1929 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
1932 if (argvec[count].reg == 0 || argvec[count].partial != 0
1933 #ifdef REG_PARM_STACK_SPACE
1937 args_size.constant += argvec[count].size.constant;
1939 #ifdef ACCUMULATE_OUTGOING_ARGS
1940 /* If this arg is actually passed on the stack, it might be
1941 clobbering something we already put there (this library call might
1942 be inside the evaluation of an argument to a function whose call
1943 requires the stack). This will only occur when the library call
1944 has sufficient args to run out of argument registers. Abort in
1945 this case; if this ever occurs, code must be added to save and
1946 restore the arg slot. */
1948 if (argvec[count].reg == 0 || argvec[count].partial != 0)
1952 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
1956 /* If this machine requires an external definition for library
1957 functions, write one out. */
1958 assemble_external_libcall (fun);
/* Round the total stack space up to the stack boundary, reserve any
   target-mandated register-parm stack space, and under
   ACCUMULATE_OUTGOING_ARGS record the high-water mark instead of
   adjusting the stack here.  */
1960 #ifdef STACK_BOUNDARY
1961 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1962 / STACK_BYTES) * STACK_BYTES);
1965 #ifdef REG_PARM_STACK_SPACE
1966 args_size.constant = MAX (args_size.constant,
1967 REG_PARM_STACK_SPACE ((tree) 0));
1970 #ifdef ACCUMULATE_OUTGOING_ARGS
1971 if (args_size.constant > current_function_outgoing_args_size)
1972 current_function_outgoing_args_size = args_size.constant;
1973 args_size.constant = 0;
1976 #ifndef PUSH_ROUNDING
1977 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
1980 #ifdef PUSH_ARGS_REVERSED
1988 /* Push the args that need to be pushed. */
1990 for (count = 0; count < nargs; count++, argnum += inc)
1992 register enum machine_mode mode = argvec[argnum].mode;
1993 register rtx val = argvec[argnum].value;
1994 rtx reg = argvec[argnum].reg;
1995 int partial = argvec[argnum].partial;
1997 if (! (reg != 0 && partial == 0))
/* NOTE(review): everything else in this loop indexes ARGVEC by ARGNUM,
   but the offset below uses ARGVEC[COUNT].  With PUSH_ARGS_REVERSED,
   ARGNUM runs opposite to COUNT, so this pushes VAL at the wrong
   offset — looks like a bug (fixed to argnum in later GCC); confirm.  */
1998 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
1999 argblock, GEN_INT (argvec[count].offset.constant));
2003 #ifdef PUSH_ARGS_REVERSED
2009 /* Now load any reg parms into their regs. */
2011 for (count = 0; count < nargs; count++, argnum += inc)
2013 register enum machine_mode mode = argvec[argnum].mode;
2014 register rtx val = argvec[argnum].value;
2015 rtx reg = argvec[argnum].reg;
2016 int partial = argvec[argnum].partial;
2018 if (reg != 0 && partial == 0)
2019 emit_move_insn (reg, val);
2023 /* For version 1.37, try deleting this entirely. */
2027 /* Any regs containing parms remain in use through the call. */
2029 for (count = 0; count < nargs; count++)
2030 if (argvec[count].reg != 0)
2031 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg))
2033 use_insns = get_insns ();
2036 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2038 /* Don't allow popping to be deferred, since then
2039 cse'ing of library calls could delete a call and leave the pop. */
2042 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2043 will set inhibit_defer_pop to that value. */
2045 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2046 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2047 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2048 old_inhibit_defer_pop + 1, use_insns, no_queue);
2050 /* Now restore inhibit_defer_pop to its actual original value. */
2054 /* Expand an assignment that stores the value of FROM into TO.
2055 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2056 (This may contain a QUEUED rtx.)
2057 Otherwise, the returned value is not meaningful.
2059 SUGGEST_REG is no longer actually used.
2060 It used to mean, copy the value through a register
2061 and return that register, if that is possible.
2062 But now we do this if WANT_VALUE.
2064 If the value stored is a constant, we return the constant. */
2067 expand_assignment (to, from, want_value, suggest_reg)
2072 register rtx to_rtx = 0;
2075 /* Don't crash if the lhs of the assignment was erroneous. */
2077 if (TREE_CODE (to) == ERROR_MARK)
2078 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2080 /* Assignment of a structure component needs special treatment
2081 if the structure component's rtx is not simply a MEM.
2082 Assignment of an array element at a constant index
2083 has the same problem. */
2085 if (TREE_CODE (to) == COMPONENT_REF
2086 || TREE_CODE (to) == BIT_FIELD_REF
2087 || (TREE_CODE (to) == ARRAY_REF
2088 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2089 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2091 enum machine_mode mode1;
/* Decompose the reference into the containing object (TEM) plus the
   bit position/size of the field being stored.  */
2097 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2098 &mode1, &unsignedp, &volatilep);
2100 /* If we are going to use store_bit_field and extract_bit_field,
2101 make sure to_rtx will be safe for multiple use. */
2103 if (mode1 == VOIDmode && want_value)
2104 tem = stabilize_reference (tem);
2106 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
/* A variable offset (e.g. variable array index) is added to the
   address at run time.  */
2109 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2111 if (GET_CODE (to_rtx) != MEM)
2113 to_rtx = change_address (to_rtx, VOIDmode,
2114 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2115 force_reg (Pmode, offset_rtx)));
2119 if (GET_CODE (to_rtx) == MEM)
2120 MEM_VOLATILE_P (to_rtx) = 1;
2121 #if 0 /* This was turned off because, when a field is volatile
2122 in an object which is not volatile, the object may be in a register,
2123 and then we would abort over here. */
/* Store the value into the selected bit-field of TO_RTX.  */
2129 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2131 /* Spurious cast makes HPUX compiler happy. */
2132 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2135 /* Required alignment of containing datum. */
2136 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2137 int_size_in_bytes (TREE_TYPE (tem)));
2138 preserve_temp_slots (result);
2144 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2145 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2148 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2150 /* In case we are returning the contents of an object which overlaps
2151 the place the value is being stored, use a safe function when copying
2152 a value through a pointer into a structure value return block. */
2153 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2154 && current_function_returns_struct
2155 && !current_function_returns_pcc_struct)
2157 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2158 rtx size = expr_size (from);
/* memcpy (to, from, size)  vs  bcopy (from, to, size): note the
   swapped operand order between the two branches.  */
2160 #ifdef TARGET_MEM_FUNCTIONS
2161 emit_library_call (memcpy_libfunc, 0,
2162 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2163 XEXP (from_rtx, 0), Pmode,
2166 emit_library_call (bcopy_libfunc, 0,
2167 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2168 XEXP (to_rtx, 0), Pmode,
2172 preserve_temp_slots (to_rtx);
2177 /* Compute FROM and store the value in the rtx we got. */
2179 result = store_expr (from, to_rtx, want_value);
2180 preserve_temp_slots (result);
2185 /* Generate code for computing expression EXP,
2186 and storing the value into TARGET.
2187 Returns TARGET or an equivalent value.
2188 TARGET may contain a QUEUED rtx.
2190 If SUGGEST_REG is nonzero, copy the value through a register
2191 and return that register, if that is possible.
2193 If the value stored is a constant, we return the constant. */
2196 store_expr (exp, target, suggest_reg)
2198 register rtx target;
2202 int dont_return_target = 0;
2204 if (TREE_CODE (exp) == COMPOUND_EXPR)
2206 /* Perform first part of compound expression, then assign from second
2208 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2210 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2212 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2214 /* For conditional expression, get safe form of the target. Then
2215 test the condition, doing the appropriate assignment on either
2216 side. This avoids the creation of unnecessary temporaries.
2217 For non-BLKmode, it is more efficient not to do this. */
2219 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2222 target = protect_from_queue (target, 1);
2225 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2226 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2228 emit_jump_insn (gen_jump (lab2));
2231 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2237 else if (suggest_reg && GET_CODE (target) == MEM
2238 && GET_MODE (target) != BLKmode)
2239 /* If target is in memory and caller wants value in a register instead,
2240 arrange that. Pass TARGET as target for expand_expr so that,
2241 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2242 We know expand_expr will not use the target in that case. */
2244 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2245 GET_MODE (target), 0);
2246 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2247 temp = copy_to_reg (temp);
2248 dont_return_target = 1;
2250 else if (queued_subexp_p (target))
2251 /* If target contains a postincrement, it is not safe
2252 to use as the returned value. It would access the wrong
2253 place by the time the queued increment gets output.
2254 So copy the value through a temporary and use that temp
2257 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2259 /* Expand EXP into a new pseudo. */
2260 temp = gen_reg_rtx (GET_MODE (target));
2261 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2264 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2265 dont_return_target = 1;
/* Default case: expand EXP directly into TARGET when possible.  */
2269 temp = expand_expr (exp, target, GET_MODE (target), 0);
2270 /* DO return TARGET if it's a specified hardware register.
2271 expand_return relies on this. */
2272 if (!(target && GET_CODE (target) == REG
2273 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2274 && CONSTANT_P (temp))
2275 dont_return_target = 1;
2278 /* If value was not generated in the target, store it there.
2279 Convert the value to TARGET's type first if nec. */
2281 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2283 target = protect_from_queue (target, 1);
2284 if (GET_MODE (temp) != GET_MODE (target)
2285 && GET_MODE (temp) != VOIDmode)
2287 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2288 if (dont_return_target)
2290 /* In this case, we will return TEMP,
2291 so make sure it has the proper mode.
2292 But don't forget to store the value into TARGET. */
2293 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2294 emit_move_insn (target, temp);
2297 convert_move (target, temp, unsignedp);
2300 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2302 /* Handle copying a string constant into an array.
2303 The string constant may be shorter than the array.
2304 So copy just the string's actual length, and clear the rest. */
2307 /* Get the size of the data type of the string,
2308 which is actually the size of the target. */
2309 size = expr_size (exp);
2310 if (GET_CODE (size) == CONST_INT
2311 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2312 emit_block_move (target, temp, size,
2313 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2316 /* Compute the size of the data to copy from the string. */
/* copy_size = min (sizeof target, string length).  */
2318 = fold (build (MIN_EXPR, sizetype,
2319 size_binop (CEIL_DIV_EXPR,
2320 TYPE_SIZE (TREE_TYPE (exp)),
2321 size_int (BITS_PER_UNIT)),
2323 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2324 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2328 /* Copy that much. */
2329 emit_block_move (target, temp, copy_size_rtx,
2330 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2332 /* Figure out how much is left in TARGET
2333 that we have to clear. */
2334 if (GET_CODE (copy_size_rtx) == CONST_INT)
2336 temp = plus_constant (XEXP (target, 0),
2337 TREE_STRING_LENGTH (exp));
2338 size = plus_constant (size,
2339 - TREE_STRING_LENGTH (exp));
/* Variable copy size: compute the remaining address and count
   at run time.  */
2343 enum machine_mode size_mode = Pmode;
2345 temp = force_reg (Pmode, XEXP (target, 0));
2346 temp = expand_binop (size_mode, add_optab, temp,
2347 copy_size_rtx, NULL_RTX, 0,
2350 size = expand_binop (size_mode, sub_optab, size,
2351 copy_size_rtx, NULL_RTX, 0,
/* Skip the clearing when the remaining size is negative.  */
2354 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2355 GET_MODE (size), 0, 0);
2356 label = gen_label_rtx ();
2357 emit_jump_insn (gen_blt (label));
2360 if (size != const0_rtx)
2362 #ifdef TARGET_MEM_FUNCTIONS
2363 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2364 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2366 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2367 temp, Pmode, size, Pmode);
2374 else if (GET_MODE (temp) == BLKmode)
2375 emit_block_move (target, temp, expr_size (exp),
2376 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2378 emit_move_insn (target, temp);
2380 if (dont_return_target)
2385 /* Store the value of constructor EXP into the rtx TARGET.
2386 TARGET is either a REG or a MEM. */
2389 store_constructor (exp, target)
2393 tree type = TREE_TYPE (exp);
2395 /* We know our target cannot conflict, since safe_from_p has been called. */
2397 /* Don't try copying piece by piece into a hard register
2398 since that is vulnerable to being clobbered by EXP.
2399 Instead, construct in a pseudo register and then copy it all. */
2400 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2402 rtx temp = gen_reg_rtx (GET_MODE (target));
2403 store_constructor (exp, temp);
2404 emit_move_insn (target, temp);
2409 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2413 /* Inform later passes that the whole union value is dead. */
2414 if (TREE_CODE (type) == UNION_TYPE)
2415 emit_insn (gen_rtx (CLOBBER, VOIDmode, target))
2417 /* If we are building a static constructor into a register,
2418 set the initial value as zero so we can fold the value into
2420 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2421 emit_move_insn (target, const0_rtx);
2423 /* If the constructor has fewer fields than the structure,
2424 clear the whole structure first. */
2425 else if (list_length (CONSTRUCTOR_ELTS (exp))
2426 != list_length (TYPE_FIELDS (type)))
2427 clear_storage (target, int_size_in_bytes (type));
2429 /* Inform later passes that the old value is dead. */
2430 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2432 /* Store each element of the constructor into
2433 the corresponding field of TARGET. */
2435 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2437 register tree field = TREE_PURPOSE (elt);
2438 register enum machine_mode mode;
2443 /* Just ignore missing fields.
2444 We cleared the whole structure, above,
2445 if any fields are missing. */
2449 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2450 unsignedp = TREE_UNSIGNED (field);
2451 mode = DECL_MODE (field);
2452 if (DECL_BIT_FIELD (field))
2455 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2456 /* ??? This case remains to be written. */
2459 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2461 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2462 /* The alignment of TARGET is
2463 at least what its type requires. */
2465 TYPE_ALIGN (type) / BITS_PER_UNIT,
2466 int_size_in_bytes (type));
2469 else if (TREE_CODE (type) == ARRAY_TYPE)
2473 tree domain = TYPE_DOMAIN (type);
2474 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2475 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2476 tree elttype = TREE_TYPE (type);
2478 /* If the constructor has fewer fields than the structure,
2479 clear the whole structure first. Similarly if this is
2480 static constructor of a non-BLKmode object. */
2482 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2483 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2484 clear_storage (target, maxelt - minelt + 1)
2486 /* Inform later passes that the old value is dead. */
2487 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2489 /* Store each element of the constructor into
2490 the corresponding element of TARGET, determined
2491 by counting the elements. */
2492 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2494 elt = TREE_CHAIN (elt), i++)
2496 register enum machine_mode mode;
2501 mode = TYPE_MODE (elttype);
2502 bitsize = GET_MODE_BITSIZE (mode);
2503 unsignedp = TREE_UNSIGNED (elttype);
/* Element I starts I element-sizes (in bits) from the array start.  */
2505 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2507 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2508 /* The alignment of TARGET is
2509 at least what its type requires. */
2511 TYPE_ALIGN (type) / BITS_PER_UNIT,
2512 int_size_in_bytes (type));
2520 /* Store the value of EXP (an expression tree)
2521 into a subfield of TARGET which has mode MODE and occupies
2522 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2523 If MODE is VOIDmode, it means that we are storing into a bit-field.
2525 If VALUE_MODE is VOIDmode, return nothing in particular.
2526 UNSIGNEDP is not used in this case.
2528 Otherwise, return an rtx for the value stored. This rtx
2529 has mode VALUE_MODE if that is convenient to do.
2530 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2532 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2533 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2536 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2537 unsignedp, align, total_size)
2539 int bitsize, bitpos;
2540 enum machine_mode mode;
2542 enum machine_mode value_mode;
2547 HOST_WIDE_INT width_mask = 0;
2549 if (bitsize < HOST_BITS_PER_WIDE_INT)
2550 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2552 /* If we are storing into an unaligned field of an aligned union that is
2553 in a register, we may have the mode of TARGET being an integer mode but
2554 MODE == BLKmode. In that case, get an aligned object whose size and
2555 alignment are the same as TARGET and store TARGET into it (we can avoid
2556 the store if the field being stored is the entire width of TARGET). Then
2557 call ourselves recursively to store the field into a BLKmode version of
2558 that object. Finally, load from the object into TARGET. This is not
2559 very efficient in general, but should only be slightly more expensive
2560 than the otherwise-required unaligned accesses. Perhaps this can be
2561 cleaned up later. */
2564 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2566 rtx object = assign_stack_temp (GET_MODE (target),
2567 GET_MODE_SIZE (GET_MODE (target)), 0);
2568 rtx blk_object = copy_rtx (object);
2570 PUT_MODE (blk_object, BLKmode);
2572 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2573 emit_move_insn (object, target);
2575 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2578 emit_move_insn (target, object);
2583 /* If the structure is in a register or if the component
2584 is a bit field, we cannot use addressing to access it.
2585 Use bit-field techniques or SUBREG to store in it. */
2587 if (mode == VOIDmode
2588 || (mode != BLKmode && ! direct_store[(int) mode])
2589 || GET_CODE (target) == REG
2590 || GET_CODE (target) == SUBREG)
2592 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2593 /* Store the value in the bitfield. */
2594 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2595 if (value_mode != VOIDmode)
2597 /* The caller wants an rtx for the value. */
2598 /* If possible, avoid refetching from the bitfield itself. */
2600 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2601 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2602 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2603 NULL_RTX, value_mode, 0, align,
2610 rtx addr = XEXP (target, 0);
2613 /* If a value is wanted, it must be the lhs;
2614 so make the address stable for multiple use. */
2616 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2617 && ! CONSTANT_ADDRESS_P (addr)
2618 /* A frame-pointer reference is already stable. */
2619 && ! (GET_CODE (addr) == PLUS
2620 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2621 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2622 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2623 addr = copy_to_reg (addr);
2625 /* Now build a reference to just the desired component. */
2627 to_rtx = change_address (target, mode,
2628 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2629 MEM_IN_STRUCT_P (to_rtx) = 1;
2631 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2635 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2636 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2637 ARRAY_REFs at constant positions and find the ultimate containing object,
2640 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2641 bit position, and *PUNSIGNEDP to the signedness of the field.
2642 If the position of the field is variable, we store a tree
2643 giving the variable offset (in units) in *POFFSET.
2644 This offset is in addition to the bit position.
2645 If the position is not variable, we store 0 in *POFFSET.
2647 If any of the extraction expressions is volatile,
2648 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2650 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2651 is a mode that can be used to access the field. In that case, *PBITSIZE
2654 If the field describes a variable-sized object, *PMODE is set to
2655 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2656 this case, but the address of the object can be found. */
2659 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2664 enum machine_mode *pmode;
2669 enum machine_mode mode = VOIDmode;
2672 if (TREE_CODE (exp) == COMPONENT_REF)
2674 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2675 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2676 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2677 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2679 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2681 size_tree = TREE_OPERAND (exp, 1);
2682 *punsignedp = TREE_UNSIGNED (exp);
2686 mode = TYPE_MODE (TREE_TYPE (exp));
2687 *pbitsize = GET_MODE_BITSIZE (mode);
2688 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2693 if (TREE_CODE (size_tree) != INTEGER_CST)
2694 mode = BLKmode, *pbitsize = -1;
2696 *pbitsize = TREE_INT_CST_LOW (size_tree);
2699 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2700 and find the ultimate containing object. */
2706 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2708 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2709 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2710 : TREE_OPERAND (exp, 2));
2712 if (TREE_CODE (pos) == PLUS_EXPR)
2715 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2717 constant = TREE_OPERAND (pos, 0);
2718 var = TREE_OPERAND (pos, 1);
2720 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2722 constant = TREE_OPERAND (pos, 1);
2723 var = TREE_OPERAND (pos, 0);
2727 *pbitpos += TREE_INT_CST_LOW (constant);
2729 offset = size_binop (PLUS_EXPR, offset,
2730 size_binop (FLOOR_DIV_EXPR, var,
2731 size_int (BITS_PER_UNIT)));
2733 offset = size_binop (FLOOR_DIV_EXPR, var,
2734 size_int (BITS_PER_UNIT));
2736 else if (TREE_CODE (pos) == INTEGER_CST)
2737 *pbitpos += TREE_INT_CST_LOW (pos);
2740 /* Assume here that the offset is a multiple of a unit.
2741 If not, there should be an explicitly added constant. */
2743 offset = size_binop (PLUS_EXPR, offset,
2744 size_binop (FLOOR_DIV_EXPR, pos,
2745 size_int (BITS_PER_UNIT)));
2747 offset = size_binop (FLOOR_DIV_EXPR, pos,
2748 size_int (BITS_PER_UNIT));
2752 else if (TREE_CODE (exp) == ARRAY_REF
2753 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2754 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2756 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2757 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2759 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2760 && ! ((TREE_CODE (exp) == NOP_EXPR
2761 || TREE_CODE (exp) == CONVERT_EXPR)
2762 && (TYPE_MODE (TREE_TYPE (exp))
2763 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2766 /* If any reference in the chain is volatile, the effect is volatile. */
2767 if (TREE_THIS_VOLATILE (exp))
2769 exp = TREE_OPERAND (exp, 0);
2772 /* If this was a bit-field, see if there is a mode that allows direct
2773 access in case EXP is in memory. */
2774 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2776 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2777 if (mode == BLKmode)
2784 /* We aren't finished fixing the callers to really handle nonzero offset. */
2792 /* Given an rtx VALUE that may contain additions and multiplications,
2793 return an equivalent value that just refers to a register or memory.
2794 This is done by generating instructions to perform the arithmetic
2795 and returning a pseudo-register containing the value. */
2798 force_operand (value, target)
2801 register optab binoptab = 0;
2802 /* Use a temporary to force order of execution of calls to
2806 /* Use subtarget as the target for operand 0 of a binary operation. */
2807 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2809 if (GET_CODE (value) == PLUS)
2810 binoptab = add_optab;
2811 else if (GET_CODE (value) == MINUS)
2812 binoptab = sub_optab;
2813 else if (GET_CODE (value) == MULT)
2815 op2 = XEXP (value, 1);
2816 if (!CONSTANT_P (op2)
2817 && !(GET_CODE (op2) == REG && op2 != subtarget))
2819 tmp = force_operand (XEXP (value, 0), subtarget);
2820 return expand_mult (GET_MODE (value), tmp,
2821 force_operand (op2, NULL_RTX),
2827 op2 = XEXP (value, 1);
2828 if (!CONSTANT_P (op2)
2829 && !(GET_CODE (op2) == REG && op2 != subtarget))
2831 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2833 binoptab = add_optab;
2834 op2 = negate_rtx (GET_MODE (value), op2);
2837 /* Check for an addition with OP2 a constant integer and our first
2838 operand a PLUS of a virtual register and something else. In that
2839 case, we want to emit the sum of the virtual register and the
2840 constant first and then add the other value. This allows virtual
2841 register instantiation to simply modify the constant rather than
2842 creating another one around this addition. */
2843 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2844 && GET_CODE (XEXP (value, 0)) == PLUS
2845 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2846 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2847 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2849 rtx temp = expand_binop (GET_MODE (value), binoptab,
2850 XEXP (XEXP (value, 0), 0), op2,
2851 subtarget, 0, OPTAB_LIB_WIDEN);
2852 return expand_binop (GET_MODE (value), binoptab, temp,
2853 force_operand (XEXP (XEXP (value, 0), 1), 0),
2854 target, 0, OPTAB_LIB_WIDEN);
2857 tmp = force_operand (XEXP (value, 0), subtarget);
2858 return expand_binop (GET_MODE (value), binoptab, tmp,
2859 force_operand (op2, NULL_RTX),
2860 target, 0, OPTAB_LIB_WIDEN);
2861 /* We give UNSIGNEP = 0 to expand_binop
2862 because the only operations we are expanding here are signed ones. */
2867 /* Subroutine of expand_expr:
2868 save the non-copied parts (LIST) of an expr (LHS), and return a list
2869 which can restore these values to their previous values,
2870 should something modify their storage. */
2873 save_noncopied_parts (lhs, list)
2880 for (tail = list; tail; tail = TREE_CHAIN (tail))
2881 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2882 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2885 tree part = TREE_VALUE (tail);
2886 tree part_type = TREE_TYPE (part);
2887 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2888 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2889 int_size_in_bytes (part_type), 0);
2890 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2891 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2892 parts = tree_cons (to_be_saved,
2893 build (RTL_EXPR, part_type, NULL_TREE,
2896 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2901 /* Subroutine of expand_expr:
2902 record the non-copied parts (LIST) of an expr (LHS), and return a list
2903 which specifies the initial values of these parts. */
2906 init_noncopied_parts (lhs, list)
2913 for (tail = list; tail; tail = TREE_CHAIN (tail))
2914 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2915 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2918 tree part = TREE_VALUE (tail);
2919 tree part_type = TREE_TYPE (part);
2920 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2921 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2926 /* Subroutine of expand_expr: return nonzero iff there is no way that
2927 EXP can reference X, which is being modified. */
2930 safe_from_p (x, exp)
2940 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
2941 find the underlying pseudo. */
2942 if (GET_CODE (x) == SUBREG)
2945 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2949 /* If X is a location in the outgoing argument area, it is always safe. */
2950 if (GET_CODE (x) == MEM
2951 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2952 || (GET_CODE (XEXP (x, 0)) == PLUS
2953 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2956 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2959 exp_rtl = DECL_RTL (exp);
2966 if (TREE_CODE (exp) == TREE_LIST)
2967 return ((TREE_VALUE (exp) == 0
2968 || safe_from_p (x, TREE_VALUE (exp)))
2969 && (TREE_CHAIN (exp) == 0
2970 || safe_from_p (x, TREE_CHAIN (exp))));
2975 return safe_from_p (x, TREE_OPERAND (exp, 0));
2979 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2980 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2984 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2985 the expression. If it is set, we conflict iff we are that rtx or
2986 both are in memory. Otherwise, we check all operands of the
2987 expression recursively. */
2989 switch (TREE_CODE (exp))
2992 return staticp (TREE_OPERAND (exp, 0));
2995 if (GET_CODE (x) == MEM)
3000 exp_rtl = CALL_EXPR_RTL (exp);
3003 /* Assume that the call will clobber all hard registers and
3005 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3006 || GET_CODE (x) == MEM)
3013 exp_rtl = RTL_EXPR_RTL (exp);
3015 /* We don't know what this can modify. */
3020 case WITH_CLEANUP_EXPR:
3021 exp_rtl = RTL_EXPR_RTL (exp);
3025 exp_rtl = SAVE_EXPR_RTL (exp);
3029 /* The only operand we look at is operand 1. The rest aren't
3030 part of the expression. */
3031 return safe_from_p (x, TREE_OPERAND (exp, 1));
3033 case METHOD_CALL_EXPR:
3034 /* This takes a rtx argument, but shouldn't appear here. */
3038 /* If we have an rtx, we do not need to scan our operands. */
3042 nops = tree_code_length[(int) TREE_CODE (exp)];
3043 for (i = 0; i < nops; i++)
3044 if (TREE_OPERAND (exp, i) != 0
3045 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3049 /* If we have an rtl, find any enclosed object. Then see if we conflict
3053 if (GET_CODE (exp_rtl) == SUBREG)
3055 exp_rtl = SUBREG_REG (exp_rtl);
3056 if (GET_CODE (exp_rtl) == REG
3057 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3061 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3062 are memory and EXP is not readonly. */
3063 return ! (rtx_equal_p (x, exp_rtl)
3064 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3065 && ! TREE_READONLY (exp)));
3068 /* If we reach here, it is safe. */
3072 /* Subroutine of expand_expr: return nonzero iff EXP is an
3073 expression whose type is statically determinable. */
3079 if (TREE_CODE (exp) == PARM_DECL
3080 || TREE_CODE (exp) == VAR_DECL
3081 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3082 || TREE_CODE (exp) == COMPONENT_REF
3083 || TREE_CODE (exp) == ARRAY_REF)
3088 /* expand_expr: generate code for computing expression EXP.
3089 An rtx for the computed value is returned. The value is never null.
3090 In the case of a void EXP, const0_rtx is returned.
3092 The value may be stored in TARGET if TARGET is nonzero.
3093 TARGET is just a suggestion; callers must assume that
3094 the rtx returned may not be the same as TARGET.
3096 If TARGET is CONST0_RTX, it means that the value will be ignored.
3098 If TMODE is not VOIDmode, it suggests generating the
3099 result in mode TMODE. But this is done only when convenient.
3100 Otherwise, TMODE is ignored and the value generated in its natural mode.
3101 TMODE is just a suggestion; callers must assume that
3102 the rtx returned may not have mode TMODE.
3104 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3105 with a constant address even if that address is not normally legitimate.
3106 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3108 If MODIFIER is EXPAND_SUM then when EXP is an addition
3109 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3110 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3111 products as above, or REG or MEM, or constant.
3112 Ordinarily in such cases we would output mul or add instructions
3113 and then return a pseudo reg containing the sum.
3115 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3116 it also marks a label as absolutely required (it can't be dead).
3117 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3118 This is used for outputting expressions used in initializers. */
3121 expand_expr (exp, target, tmode, modifier)
3124 enum machine_mode tmode;
3125 enum expand_modifier modifier;
3127 register rtx op0, op1, temp;
3128 tree type = TREE_TYPE (exp);
3129 int unsignedp = TREE_UNSIGNED (type);
3130 register enum machine_mode mode = TYPE_MODE (type);
3131 register enum tree_code code = TREE_CODE (exp);
3133 /* Use subtarget as the target for operand 0 of a binary operation. */
3134 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3135 rtx original_target = target;
3136 int ignore = target == const0_rtx;
3139 /* Don't use hard regs as subtargets, because the combiner
3140 can only handle pseudo regs. */
3141 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3143 /* Avoid subtargets inside loops,
3144 since they hide some invariant expressions. */
3145 if (preserve_subexpressions_p ())
3148 if (ignore) target = 0, original_target = 0;
3150 /* If will do cse, generate all results into pseudo registers
3151 since 1) that allows cse to find more things
3152 and 2) otherwise cse could produce an insn the machine
3155 if (! cse_not_expected && mode != BLKmode && target
3156 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3159 /* Ensure we reference a volatile object even if value is ignored. */
3160 if (ignore && TREE_THIS_VOLATILE (exp)
3161 && mode != VOIDmode && mode != BLKmode)
3163 target = gen_reg_rtx (mode);
3164 temp = expand_expr (exp, target, VOIDmode, modifier);
3166 emit_move_insn (target, temp);
3174 tree function = decl_function_context (exp);
3175 /* Handle using a label in a containing function. */
3176 if (function != current_function_decl && function != 0)
3178 struct function *p = find_function_data (function);
3179 /* Allocate in the memory associated with the function
3180 that the label is in. */
3181 push_obstacks (p->function_obstack,
3182 p->function_maybepermanent_obstack);
3184 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3185 label_rtx (exp), p->forced_labels);
3188 else if (modifier == EXPAND_INITIALIZER)
3189 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3190 label_rtx (exp), forced_labels);
3191 temp = gen_rtx (MEM, FUNCTION_MODE,
3192 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3193 if (function != current_function_decl && function != 0)
3194 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3199 if (DECL_RTL (exp) == 0)
3201 error_with_decl (exp, "prior parameter's size depends on `%s'");
3202 return CONST0_RTX (mode);
3208 if (DECL_RTL (exp) == 0)
3210 /* Ensure variable marked as used
3211 even if it doesn't go through a parser. */
3212 TREE_USED (exp) = 1;
3213 /* Handle variables inherited from containing functions. */
3214 context = decl_function_context (exp);
3216 /* We treat inline_function_decl as an alias for the current function
3217 because that is the inline function whose vars, types, etc.
3218 are being merged into the current function.
3219 See expand_inline_function. */
3220 if (context != 0 && context != current_function_decl
3221 && context != inline_function_decl
3222 /* If var is static, we don't need a static chain to access it. */
3223 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3224 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3228 /* Mark as non-local and addressable. */
3229 DECL_NONLOCAL (exp) = 1;
3230 mark_addressable (exp);
3231 if (GET_CODE (DECL_RTL (exp)) != MEM)
3233 addr = XEXP (DECL_RTL (exp), 0);
3234 if (GET_CODE (addr) == MEM)
3235 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3237 addr = fix_lexical_addr (addr, exp);
3238 return change_address (DECL_RTL (exp), mode, addr);
3241 /* This is the case of an array whose size is to be determined
3242 from its initializer, while the initializer is still being parsed.
3244 if (GET_CODE (DECL_RTL (exp)) == MEM
3245 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3246 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3247 XEXP (DECL_RTL (exp), 0));
3248 if (GET_CODE (DECL_RTL (exp)) == MEM
3249 && modifier != EXPAND_CONST_ADDRESS
3250 && modifier != EXPAND_SUM
3251 && modifier != EXPAND_INITIALIZER)
3253 /* DECL_RTL probably contains a constant address.
3254 On RISC machines where a constant address isn't valid,
3255 make some insns to get that address into a register. */
3256 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3258 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3259 return change_address (DECL_RTL (exp), VOIDmode,
3260 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3262 return DECL_RTL (exp);
3265 return immed_double_const (TREE_INT_CST_LOW (exp),
3266 TREE_INT_CST_HIGH (exp),
3270 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3273 /* If optimized, generate immediate CONST_DOUBLE
3274 which will be turned into memory by reload if necessary.
3276 We used to force a register so that loop.c could see it. But
3277 this does not allow gen_* patterns to perform optimizations with
3278 the constants. It also produces two insns in cases like "x = 1.0;".
3279 On most machines, floating-point constants are not permitted in
3280 many insns, so we'd end up copying it to a register in any case.
3282 Now, we do the copying in expand_binop, if appropriate. */
3283 return immed_real_const (exp);
3287 if (! TREE_CST_RTL (exp))
3288 output_constant_def (exp);
3290 /* TREE_CST_RTL probably contains a constant address.
3291 On RISC machines where a constant address isn't valid,
3292 make some insns to get that address into a register. */
3293 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3294 && modifier != EXPAND_CONST_ADDRESS
3295 && modifier != EXPAND_INITIALIZER
3296 && modifier != EXPAND_SUM
3297 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3298 return change_address (TREE_CST_RTL (exp), VOIDmode,
3299 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3300 return TREE_CST_RTL (exp);
3303 context = decl_function_context (exp);
3304 /* We treat inline_function_decl as an alias for the current function
3305 because that is the inline function whose vars, types, etc.
3306 are being merged into the current function.
3307 See expand_inline_function. */
3308 if (context == current_function_decl || context == inline_function_decl)
3311 /* If this is non-local, handle it. */
3314 temp = SAVE_EXPR_RTL (exp);
3315 if (temp && GET_CODE (temp) == REG)
3317 put_var_into_stack (exp);
3318 temp = SAVE_EXPR_RTL (exp);
3320 if (temp == 0 || GET_CODE (temp) != MEM)
3322 return change_address (temp, mode,
3323 fix_lexical_addr (XEXP (temp, 0), exp));
3325 if (SAVE_EXPR_RTL (exp) == 0)
3327 if (mode == BLKmode)
3329 = assign_stack_temp (mode,
3330 int_size_in_bytes (TREE_TYPE (exp)), 0);
3332 temp = gen_reg_rtx (mode);
3333 SAVE_EXPR_RTL (exp) = temp;
3334 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3335 if (!optimize && GET_CODE (temp) == REG)
3336 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3339 return SAVE_EXPR_RTL (exp);
3342 /* Exit the current loop if the body-expression is true. */
3344 rtx label = gen_label_rtx ();
3345 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3346 expand_exit_loop (NULL_PTR);
3352 expand_start_loop (1);
3353 expand_expr_stmt (TREE_OPERAND (exp, 0));
3360 tree vars = TREE_OPERAND (exp, 0);
3361 int vars_need_expansion = 0;
3363 /* Need to open a binding contour here because
3364 if there are any cleanups they must be contained here. */
3365 expand_start_bindings (0);
3367 /* Mark the corresponding BLOCK for output in its proper place. */
3368 if (TREE_OPERAND (exp, 2) != 0
3369 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3370 insert_block (TREE_OPERAND (exp, 2));
3372 /* If VARS have not yet been expanded, expand them now. */
3375 if (DECL_RTL (vars) == 0)
3377 vars_need_expansion = 1;
3380 expand_decl_init (vars);
3381 vars = TREE_CHAIN (vars);
3384 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3386 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3392 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3394 emit_insns (RTL_EXPR_SEQUENCE (exp));
3395 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3396 return RTL_EXPR_RTL (exp);
3399 /* All elts simple constants => refer to a constant in memory. But
3400 if this is a non-BLKmode mode, let it store a field at a time
3401 since that should make a CONST_INT or CONST_DOUBLE when we
3403 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3405 rtx constructor = output_constant_def (exp);
3406 if (modifier != EXPAND_CONST_ADDRESS
3407 && modifier != EXPAND_INITIALIZER
3408 && modifier != EXPAND_SUM
3409 && !memory_address_p (GET_MODE (constructor),
3410 XEXP (constructor, 0)))
3411 constructor = change_address (constructor, VOIDmode,
3412 XEXP (constructor, 0));
3419 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3420 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3425 if (target == 0 || ! safe_from_p (target, exp))
3427 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3428 target = gen_reg_rtx (mode);
3431 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3433 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3434 target = safe_target;
3437 store_constructor (exp, target);
3443 tree exp1 = TREE_OPERAND (exp, 0);
3446 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3447 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3448 This code has the same general effect as simply doing
3449 expand_expr on the save expr, except that the expression PTR
3450 is computed for use as a memory address. This means different
3451 code, suitable for indexing, may be generated. */
3452 if (TREE_CODE (exp1) == SAVE_EXPR
3453 && SAVE_EXPR_RTL (exp1) == 0
3454 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3455 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3456 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3458 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3459 VOIDmode, EXPAND_SUM);
3460 op0 = memory_address (mode, temp);
3461 op0 = copy_all_regs (op0);
3462 SAVE_EXPR_RTL (exp1) = op0;
3466 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3467 op0 = memory_address (mode, op0);
3470 temp = gen_rtx (MEM, mode, op0);
3471 /* If address was computed by addition,
3472 mark this as an element of an aggregate. */
3473 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3474 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3475 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3476 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3477 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3478 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3479 || (TREE_CODE (exp1) == ADDR_EXPR
3480 && (exp2 = TREE_OPERAND (exp1, 0))
3481 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3482 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3483 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3484 MEM_IN_STRUCT_P (temp) = 1;
3485 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3486 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3487 a location is accessed through a pointer to const does not mean
3488 that the value there can never change. */
3489 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3495 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3496 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3498 /* Nonconstant array index or nonconstant element size.
3499 Generate the tree for *(&array+index) and expand that,
3500 except do it in a language-independent way
3501 and don't complain about non-lvalue arrays.
3502 `mark_addressable' should already have been called
3503 for any array for which this case will be reached. */
3505 /* Don't forget the const or volatile flag from the array element. */
3506 tree variant_type = build_type_variant (type,
3507 TREE_READONLY (exp),
3508 TREE_THIS_VOLATILE (exp));
3509 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3510 TREE_OPERAND (exp, 0));
3511 tree index = TREE_OPERAND (exp, 1);
3514 /* Convert the integer argument to a type the same size as a pointer
3515 so the multiply won't overflow spuriously. */
3516 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3517 index = convert (type_for_size (POINTER_SIZE, 0), index);
3519 /* Don't think the address has side effects
3520 just because the array does.
3521 (In some cases the address might have side effects,
3522 and we fail to record that fact here. However, it should not
3523 matter, since expand_expr should not care.) */
3524 TREE_SIDE_EFFECTS (array_adr) = 0;
3526 elt = build1 (INDIRECT_REF, type,
3527 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3529 fold (build (MULT_EXPR,
3530 TYPE_POINTER_TO (variant_type),
3531 index, size_in_bytes (type))))));
3533 /* Volatility, etc., of new expression is same as old expression. */
3534 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3535 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3536 TREE_READONLY (elt) = TREE_READONLY (exp);
3538 return expand_expr (elt, target, tmode, modifier);
3541 /* Fold an expression like: "foo"[2].
3542 This is not done in fold so it won't happen inside &. */
3545 tree arg0 = TREE_OPERAND (exp, 0);
3546 tree arg1 = TREE_OPERAND (exp, 1);
3548 if (TREE_CODE (arg0) == STRING_CST
3549 && TREE_CODE (arg1) == INTEGER_CST
3550 && !TREE_INT_CST_HIGH (arg1)
3551 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3553 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3555 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3556 TREE_TYPE (exp) = integer_type_node;
3557 return expand_expr (exp, target, tmode, modifier);
3559 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3561 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3562 TREE_TYPE (exp) = integer_type_node;
3563 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3568 /* If this is a constant index into a constant array,
3569 just get the value from the array. Handle both the cases when
3570 we have an explicit constructor and when our operand is a variable
3571 that was declared const. */
3573 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3574 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3576 tree index = fold (TREE_OPERAND (exp, 1));
3577 if (TREE_CODE (index) == INTEGER_CST
3578 && TREE_INT_CST_HIGH (index) == 0)
3580 int i = TREE_INT_CST_LOW (index);
3581 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3584 elem = TREE_CHAIN (elem);
3586 return expand_expr (fold (TREE_VALUE (elem)), target,
3591 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3592 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3593 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3594 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3595 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3597 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3600 tree index = fold (TREE_OPERAND (exp, 1));
3601 if (TREE_CODE (index) == INTEGER_CST
3602 && TREE_INT_CST_HIGH (index) == 0)
3604 int i = TREE_INT_CST_LOW (index);
3605 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3607 if (TREE_CODE (init) == CONSTRUCTOR)
3609 tree elem = CONSTRUCTOR_ELTS (init);
3612 elem = TREE_CHAIN (elem);
3614 return expand_expr (fold (TREE_VALUE (elem)), target,
3617 else if (TREE_CODE (init) == STRING_CST
3618 && i < TREE_STRING_LENGTH (init))
3620 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3621 return convert_to_mode (mode, temp, 0);
3625 /* Treat array-ref with constant index as a component-ref. */
3629 /* If the operand is a CONSTRUCTOR, we can just extract the
3630 appropriate field if it is present. */
3631 if (code != ARRAY_REF
3632 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3636 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3637 elt = TREE_CHAIN (elt))
3638 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3639 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3643 enum machine_mode mode1;
3648 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3649 &mode1, &unsignedp, &volatilep);
3651 /* In some cases, we will be offsetting OP0's address by a constant.
3652 So get it as a sum, if possible. If we will be using it
3653 directly in an insn, we validate it. */
3654 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3656 /* If this is a constant, put it into a register if it is a
3657 legitimate constant and memory if it isn't. */
3658 if (CONSTANT_P (op0))
3660 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3661 if (LEGITIMATE_CONSTANT_P (op0))
3662 op0 = force_reg (mode, op0);
3664 op0 = validize_mem (force_const_mem (mode, op0));
3669 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3671 if (GET_CODE (op0) != MEM)
3673 op0 = change_address (op0, VOIDmode,
3674 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3675 force_reg (Pmode, offset_rtx)));
3678 /* Don't forget about volatility even if this is a bitfield. */
3679 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3681 op0 = copy_rtx (op0);
3682 MEM_VOLATILE_P (op0) = 1;
3685 if (mode1 == VOIDmode
3686 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3687 && modifier != EXPAND_CONST_ADDRESS
3688 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3689 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3691 /* In cases where an aligned union has an unaligned object
3692 as a field, we might be extracting a BLKmode value from
3693 an integer-mode (e.g., SImode) object. Handle this case
3694 by doing the extract into an object as wide as the field
3695 (which we know to be the width of a basic mode), then
3696 storing into memory, and changing the mode to BLKmode. */
3697 enum machine_mode ext_mode = mode;
3699 if (ext_mode == BLKmode)
3700 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3702 if (ext_mode == BLKmode)
3705 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3706 unsignedp, target, ext_mode, ext_mode,
3707 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3708 int_size_in_bytes (TREE_TYPE (tem)));
3709 if (mode == BLKmode)
3711 rtx new = assign_stack_temp (ext_mode,
3712 bitsize / BITS_PER_UNIT, 0);
3714 emit_move_insn (new, op0);
3715 op0 = copy_rtx (new);
3716 PUT_MODE (op0, BLKmode);
3722 /* Get a reference to just this component. */
3723 if (modifier == EXPAND_CONST_ADDRESS
3724 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3725 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3726 (bitpos / BITS_PER_UNIT)));
3728 op0 = change_address (op0, mode1,
3729 plus_constant (XEXP (op0, 0),
3730 (bitpos / BITS_PER_UNIT)));
3731 MEM_IN_STRUCT_P (op0) = 1;
3732 MEM_VOLATILE_P (op0) |= volatilep;
3733 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3736 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3737 convert_move (target, op0, unsignedp);
3743 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3744 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3745 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3746 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3747 MEM_IN_STRUCT_P (temp) = 1;
3748 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3749 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3750 a location is accessed through a pointer to const does not mean
3751 that the value there can never change. */
3752 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3757 /* Intended for a reference to a buffer of a file-object in Pascal.
3758 But it's not certain that a special tree code will really be
3759 necessary for these. INDIRECT_REF might work for them. */
3763 case WITH_CLEANUP_EXPR:
3764 if (RTL_EXPR_RTL (exp) == 0)
3767 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3769 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3770 /* That's it for this cleanup. */
3771 TREE_OPERAND (exp, 2) = 0;
3773 return RTL_EXPR_RTL (exp);
3776 /* Check for a built-in function. */
3777 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3778 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3779 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3780 return expand_builtin (exp, target, subtarget, tmode, ignore);
3781 /* If this call was expanded already by preexpand_calls,
3782 just return the result we got. */
3783 if (CALL_EXPR_RTL (exp) != 0)
3784 return CALL_EXPR_RTL (exp);
3785 return expand_call (exp, target, ignore);
3787 case NON_LVALUE_EXPR:
3790 case REFERENCE_EXPR:
3791 if (TREE_CODE (type) == VOID_TYPE || ignore)
3793 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3796 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3797 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3798 if (TREE_CODE (type) == UNION_TYPE)
3800 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3803 if (mode == BLKmode)
3805 if (TYPE_SIZE (type) == 0
3806 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3808 target = assign_stack_temp (BLKmode,
3809 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3810 + BITS_PER_UNIT - 1)
3811 / BITS_PER_UNIT, 0);
3814 target = gen_reg_rtx (mode);
3816 if (GET_CODE (target) == MEM)
3817 /* Store data into beginning of memory target. */
3818 store_expr (TREE_OPERAND (exp, 0),
3819 change_address (target, TYPE_MODE (valtype), 0),
3821 else if (GET_CODE (target) == REG)
3822 /* Store this field into a union of the proper type. */
3823 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3824 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3826 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3830 /* Return the entire union. */
3833 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, modifier);
3834 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3836 if (modifier == EXPAND_INITIALIZER)
3837 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
3838 if (flag_force_mem && GET_CODE (op0) == MEM)
3839 op0 = copy_to_reg (op0);
3842 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3844 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3848 /* We come here from MINUS_EXPR when the second operand is a constant. */
3850 this_optab = add_optab;
3852 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3853 something else, make sure we add the register to the constant and
3854 then to the other thing. This case can occur during strength
3855 reduction and doing it this way will produce better code if the
3856 frame pointer or argument pointer is eliminated.
3858 fold-const.c will ensure that the constant is always in the inner
3859 PLUS_EXPR, so the only case we need to do anything about is if
3860 sp, ap, or fp is our second argument, in which case we must swap
3861 the innermost first argument and our second argument. */
3863 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3864 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3865 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3866 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3867 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3868 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3870 tree t = TREE_OPERAND (exp, 1);
3872 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3873 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3876 /* If the result is to be Pmode and we are adding an integer to
3877 something, we might be forming a constant. So try to use
3878 plus_constant. If it produces a sum and we can't accept it,
3879 use force_operand. This allows P = &ARR[const] to generate
3880 efficient code on machines where a SYMBOL_REF is not a valid
3883 If this is an EXPAND_SUM call, always return the sum. */
3884 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3885 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3886 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3889 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3891 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3892 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3893 op1 = force_operand (op1, target);
3897 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3898 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3899 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3902 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3904 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3905 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3906 op0 = force_operand (op0, target);
3910 /* No sense saving up arithmetic to be done
3911 if it's all in the wrong mode to form part of an address.
3912 And force_operand won't know whether to sign-extend or
3914 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3915 || mode != Pmode) goto binop;
3917 preexpand_calls (exp);
3918 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3921 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3922 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
3924 /* Make sure any term that's a sum with a constant comes last. */
3925 if (GET_CODE (op0) == PLUS
3926 && CONSTANT_P (XEXP (op0, 1)))
3932 /* If adding to a sum including a constant,
3933 associate it to put the constant outside. */
3934 if (GET_CODE (op1) == PLUS
3935 && CONSTANT_P (XEXP (op1, 1)))
3937 rtx constant_term = const0_rtx;
3939 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3942 /* Ensure that MULT comes first if there is one. */
3943 else if (GET_CODE (op0) == MULT)
3944 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3946 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3948 /* Let's also eliminate constants from op0 if possible. */
3949 op0 = eliminate_constant_term (op0, &constant_term);
3951 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3952 their sum should be a constant. Form it into OP1, since the
3953 result we want will then be OP0 + OP1. */
3955 temp = simplify_binary_operation (PLUS, mode, constant_term,
3960 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3963 /* Put a constant term last and put a multiplication first. */
3964 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3965 temp = op1, op1 = op0, op0 = temp;
3967 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3968 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3971 /* Handle difference of two symbolic constants,
3972 for the sake of an initializer. */
3973 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3974 && really_constant_p (TREE_OPERAND (exp, 0))
3975 && really_constant_p (TREE_OPERAND (exp, 1)))
3977 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
3978 VOIDmode, modifier);
3979 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
3980 VOIDmode, modifier);
3981 return gen_rtx (MINUS, mode, op0, op1);
3983 /* Convert A - const to A + (-const). */
3984 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3986 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3987 fold (build1 (NEGATE_EXPR, type,
3988 TREE_OPERAND (exp, 1))));
3991 this_optab = sub_optab;
3995 preexpand_calls (exp);
3996 /* If first operand is constant, swap them.
3997 Thus the following special case checks need only
3998 check the second operand. */
3999 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4001 register tree t1 = TREE_OPERAND (exp, 0);
4002 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4003 TREE_OPERAND (exp, 1) = t1;
4006 /* Attempt to return something suitable for generating an
4007 indexed address, for machines that support that. */
4009 if (modifier == EXPAND_SUM && mode == Pmode
4010 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4011 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4013 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4015 /* Apply distributive law if OP0 is x+c. */
4016 if (GET_CODE (op0) == PLUS
4017 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4018 return gen_rtx (PLUS, mode,
4019 gen_rtx (MULT, mode, XEXP (op0, 0),
4020 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4021 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4022 * INTVAL (XEXP (op0, 1))));
4024 if (GET_CODE (op0) != REG)
4025 op0 = force_operand (op0, NULL_RTX);
4026 if (GET_CODE (op0) != REG)
4027 op0 = copy_to_mode_reg (mode, op0);
4029 return gen_rtx (MULT, mode, op0,
4030 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4033 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4036 /* Check for multiplying things that have been extended
4037 from a narrower type. If this machine supports multiplying
4038 in that narrower type with a result in the desired type,
4039 do it that way, and avoid the explicit type-conversion. */
4040 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4041 && TREE_CODE (type) == INTEGER_TYPE
4042 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4043 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4044 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4045 && int_fits_type_p (TREE_OPERAND (exp, 1),
4046 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4047 /* Don't use a widening multiply if a shift will do. */
4048 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4049 > HOST_BITS_PER_WIDE_INT)
4050 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4052 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4053 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4055 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4056 /* If both operands are extended, they must either both
4057 be zero-extended or both be sign-extended. */
4058 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4060 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4062 enum machine_mode innermode
4063 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4064 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4065 ? umul_widen_optab : smul_widen_optab);
4066 if (mode == GET_MODE_WIDER_MODE (innermode)
4067 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4069 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4070 NULL_RTX, VOIDmode, 0);
4071 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4072 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4075 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4076 NULL_RTX, VOIDmode, 0);
4080 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4081 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4082 return expand_mult (mode, op0, op1, target, unsignedp);
4084 case TRUNC_DIV_EXPR:
4085 case FLOOR_DIV_EXPR:
4087 case ROUND_DIV_EXPR:
4088 case EXACT_DIV_EXPR:
4089 preexpand_calls (exp);
4090 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4092 /* Possible optimization: compute the dividend with EXPAND_SUM
4093 then if the divisor is constant can optimize the case
4094 where some terms of the dividend have coeffs divisible by it. */
4095 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4096 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4097 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4100 this_optab = flodiv_optab;
4103 case TRUNC_MOD_EXPR:
4104 case FLOOR_MOD_EXPR:
4106 case ROUND_MOD_EXPR:
4107 preexpand_calls (exp);
4108 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4110 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4111 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4112 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4114 case FIX_ROUND_EXPR:
4115 case FIX_FLOOR_EXPR:
4117 abort (); /* Not used for C. */
4119 case FIX_TRUNC_EXPR:
4120 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4122 target = gen_reg_rtx (mode);
4123 expand_fix (target, op0, unsignedp);
4127 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4129 target = gen_reg_rtx (mode);
4130 /* expand_float can't figure out what to do if FROM has VOIDmode.
4131 So give it the correct mode. With -O, cse will optimize this. */
4132 if (GET_MODE (op0) == VOIDmode)
4133 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4135 expand_float (target, op0,
4136 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4140 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4141 temp = expand_unop (mode, neg_optab, op0, target, 0);
4147 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4149 /* Unsigned abs is simply the operand. Testing here means we don't
4150 risk generating incorrect code below. */
4151 if (TREE_UNSIGNED (type))
4154 /* First try to do it with a special abs instruction. */
4155 temp = expand_unop (mode, abs_optab, op0, target, 0);
4159 /* If this machine has expensive jumps, we can do integer absolute
4160 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4161 where W is the width of MODE. */
4163 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4165 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4166 size_int (GET_MODE_BITSIZE (mode) - 1),
4169 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4172 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4179 /* If that does not win, use conditional jump and negate. */
4180 target = original_target;
4181 temp = gen_label_rtx ();
4182 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4183 || (GET_CODE (target) == REG
4184 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4185 target = gen_reg_rtx (mode);
4186 emit_move_insn (target, op0);
4187 emit_cmp_insn (target,
4188 expand_expr (convert (type, integer_zero_node),
4189 NULL_RTX, VOIDmode, 0),
4190 GE, NULL_RTX, mode, 0, 0);
4192 emit_jump_insn (gen_bge (temp));
4193 op0 = expand_unop (mode, neg_optab, target, target, 0);
4195 emit_move_insn (target, op0);
4202 target = original_target;
4203 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4204 || (GET_CODE (target) == REG
4205 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4206 target = gen_reg_rtx (mode);
4207 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4208 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4210 /* First try to do it with a special MIN or MAX instruction.
4211 If that does not win, use a conditional jump to select the proper
4213 this_optab = (TREE_UNSIGNED (type)
4214 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4215 : (code == MIN_EXPR ? smin_optab : smax_optab));
4217 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4223 emit_move_insn (target, op0);
4224 op0 = gen_label_rtx ();
4225 if (code == MAX_EXPR)
4226 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4227 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4228 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4230 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4231 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4232 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4233 if (temp == const0_rtx)
4234 emit_move_insn (target, op1);
4235 else if (temp != const_true_rtx)
4237 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4238 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4241 emit_move_insn (target, op1);
4246 /* ??? Can optimize when the operand of this is a bitwise operation,
4247 by using a different bitwise operation. */
4249 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4250 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4256 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4257 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4262 /* ??? Can optimize bitwise operations with one arg constant.
4263 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4264 and (a bitwise1 b) bitwise2 b (etc)
4265 but that is probably not worth while. */
4267 /* BIT_AND_EXPR is for bitwise anding.
4268 TRUTH_AND_EXPR is for anding two boolean values
4269 when we want in all cases to compute both of them.
4270 In general it is fastest to do TRUTH_AND_EXPR by
4271 computing both operands as actual zero-or-1 values
4272 and then bitwise anding. In cases where there cannot
4273 be any side effects, better code would be made by
4274 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4275 but the question is how to recognize those cases. */
4277 case TRUTH_AND_EXPR:
4279 this_optab = and_optab;
4282 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4285 this_optab = ior_optab;
4289 this_optab = xor_optab;
4296 preexpand_calls (exp);
4297 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4299 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4300 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4303 /* Could determine the answer when only additive constants differ.
4304 Also, the addition of one can be handled by changing the condition. */
4311 preexpand_calls (exp);
4312 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4315 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4316 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4318 && GET_CODE (original_target) == REG
4319 && (GET_MODE (original_target)
4320 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4322 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4323 if (temp != original_target)
4324 temp = copy_to_reg (temp);
4325 op1 = gen_label_rtx ();
4326 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4327 GET_MODE (temp), unsignedp, 0);
4328 emit_jump_insn (gen_beq (op1));
4329 emit_move_insn (temp, const1_rtx);
4333 /* If no set-flag instruction, must generate a conditional
4334 store into a temporary variable. Drop through
4335 and handle this like && and ||. */
4337 case TRUTH_ANDIF_EXPR:
4338 case TRUTH_ORIF_EXPR:
4339 if (target == 0 || ! safe_from_p (target, exp)
4340 /* Make sure we don't have a hard reg (such as function's return
4341 value) live across basic blocks, if not optimizing. */
4342 || (!optimize && GET_CODE (target) == REG
4343 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4344 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4345 emit_clr_insn (target);
4346 op1 = gen_label_rtx ();
4347 jumpifnot (exp, op1);
4348 emit_0_to_1_insn (target);
4352 case TRUTH_NOT_EXPR:
4353 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4354 /* The parser is careful to generate TRUTH_NOT_EXPR
4355 only with operands that are always zero or one. */
4356 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4357 target, 1, OPTAB_LIB_WIDEN);
4363 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4365 return expand_expr (TREE_OPERAND (exp, 1),
4366 (ignore ? const0_rtx : target),
4371 /* Note that COND_EXPRs whose type is a structure or union
4372 are required to be constructed to contain assignments of
4373 a temporary variable, so that we can evaluate them here
4374 for side effect only. If type is void, we must do likewise. */
4376 /* If an arm of the branch requires a cleanup,
4377 only that cleanup is performed. */
4380 tree binary_op = 0, unary_op = 0;
4381 tree old_cleanups = cleanups_this_call;
4382 cleanups_this_call = 0;
4384 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4385 convert it to our mode, if necessary. */
4386 if (integer_onep (TREE_OPERAND (exp, 1))
4387 && integer_zerop (TREE_OPERAND (exp, 2))
4388 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4390 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4391 if (GET_MODE (op0) == mode)
4394 target = gen_reg_rtx (mode);
4395 convert_move (target, op0, unsignedp);
4399 /* If we are not to produce a result, we have no target. Otherwise,
4400 if a target was specified use it; it will not be used as an
4401 intermediate target unless it is safe. If no target, use a
4404 if (mode == VOIDmode || ignore)
4406 else if (original_target
4407 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4408 temp = original_target;
4409 else if (mode == BLKmode)
4411 if (TYPE_SIZE (type) == 0
4412 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4414 temp = assign_stack_temp (BLKmode,
4415 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4416 + BITS_PER_UNIT - 1)
4417 / BITS_PER_UNIT, 0);
4420 temp = gen_reg_rtx (mode);
4422 /* Check for X ? A + B : A. If we have this, we can copy
4423 A to the output and conditionally add B. Similarly for unary
4424 operations. Don't do this if X has side-effects because
4425 those side effects might affect A or B and the "?" operation is
4426 a sequence point in ANSI. (We test for side effects later.) */
4428 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4429 && operand_equal_p (TREE_OPERAND (exp, 2),
4430 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4431 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4432 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4433 && operand_equal_p (TREE_OPERAND (exp, 1),
4434 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4435 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4436 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4437 && operand_equal_p (TREE_OPERAND (exp, 2),
4438 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4439 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4440 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4441 && operand_equal_p (TREE_OPERAND (exp, 1),
4442 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4443 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4445 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4446 operation, do this as A + (X != 0). Similarly for other simple
4447 binary operators. */
4448 if (singleton && binary_op
4449 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4450 && (TREE_CODE (binary_op) == PLUS_EXPR
4451 || TREE_CODE (binary_op) == MINUS_EXPR
4452 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4453 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4454 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4455 && integer_onep (TREE_OPERAND (binary_op, 1))
4456 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4459 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4460 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4461 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4462 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4465 /* If we had X ? A : A + 1, do this as A + (X == 0).
4467 We have to invert the truth value here and then put it
4468 back later if do_store_flag fails. We cannot simply copy
4469 TREE_OPERAND (exp, 0) to another variable and modify that
4470 because invert_truthvalue can modify the tree pointed to
4472 if (singleton == TREE_OPERAND (exp, 1))
4473 TREE_OPERAND (exp, 0)
4474 = invert_truthvalue (TREE_OPERAND (exp, 0));
4476 result = do_store_flag (TREE_OPERAND (exp, 0),
4477 (safe_from_p (temp, singleton)
4479 mode, BRANCH_COST <= 1);
4483 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4484 return expand_binop (mode, boptab, op1, result, temp,
4485 unsignedp, OPTAB_LIB_WIDEN);
4487 else if (singleton == TREE_OPERAND (exp, 1))
4488 TREE_OPERAND (exp, 0)
4489 = invert_truthvalue (TREE_OPERAND (exp, 0));
4493 op0 = gen_label_rtx ();
4495 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4499 /* If the target conflicts with the other operand of the
4500 binary op, we can't use it. Also, we can't use the target
4501 if it is a hard register, because evaluating the condition
4502 might clobber it. */
4504 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4505 || (GET_CODE (temp) == REG
4506 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4507 temp = gen_reg_rtx (mode);
4508 store_expr (singleton, temp, 0);
4511 expand_expr (singleton,
4512 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4513 if (cleanups_this_call)
4515 sorry ("aggregate value in COND_EXPR");
4516 cleanups_this_call = 0;
4518 if (singleton == TREE_OPERAND (exp, 1))
4519 jumpif (TREE_OPERAND (exp, 0), op0);
4521 jumpifnot (TREE_OPERAND (exp, 0), op0);
4523 if (binary_op && temp == 0)
4524 /* Just touch the other operand. */
4525 expand_expr (TREE_OPERAND (binary_op, 1),
4526 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4528 store_expr (build (TREE_CODE (binary_op), type,
4529 make_tree (type, temp),
4530 TREE_OPERAND (binary_op, 1)),
4533 store_expr (build1 (TREE_CODE (unary_op), type,
4534 make_tree (type, temp)),
4539 /* This is now done in jump.c and is better done there because it
4540 produces shorter register lifetimes. */
4542 /* Check for both possibilities either constants or variables
4543 in registers (but not the same as the target!). If so, can
4544 save branches by assigning one, branching, and assigning the
4546 else if (temp && GET_MODE (temp) != BLKmode
4547 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4548 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4549 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4550 && DECL_RTL (TREE_OPERAND (exp, 1))
4551 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4552 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4553 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4554 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4555 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4556 && DECL_RTL (TREE_OPERAND (exp, 2))
4557 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4558 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4560 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4561 temp = gen_reg_rtx (mode);
4562 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4563 jumpifnot (TREE_OPERAND (exp, 0), op0);
4564 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4568 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4569 comparison operator. If we have one of these cases, set the
4570 output to A, branch on A (cse will merge these two references),
4571 then set the output to FOO. */
4573 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4574 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4575 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4576 TREE_OPERAND (exp, 1), 0)
4577 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4578 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4580 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4581 temp = gen_reg_rtx (mode);
4582 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4583 jumpif (TREE_OPERAND (exp, 0), op0);
4584 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4588 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4589 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4590 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4591 TREE_OPERAND (exp, 2), 0)
4592 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4593 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4595 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4596 temp = gen_reg_rtx (mode);
4597 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4598 jumpifnot (TREE_OPERAND (exp, 0), op0);
4599 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4604 op1 = gen_label_rtx ();
4605 jumpifnot (TREE_OPERAND (exp, 0), op0);
4607 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4609 expand_expr (TREE_OPERAND (exp, 1),
4610 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4611 if (cleanups_this_call)
4613 sorry ("aggregate value in COND_EXPR");
4614 cleanups_this_call = 0;
4618 emit_jump_insn (gen_jump (op1));
4622 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4624 expand_expr (TREE_OPERAND (exp, 2),
4625 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4628 if (cleanups_this_call)
4630 sorry ("aggregate value in COND_EXPR");
4631 cleanups_this_call = 0;
4637 cleanups_this_call = old_cleanups;
4643 /* Something needs to be initialized, but we didn't know
4644 where that thing was when building the tree. For example,
4645 it could be the return value of a function, or a parameter
4646 to a function which lays down in the stack, or a temporary
4647 variable which must be passed by reference.
4649 We guarantee that the expression will either be constructed
4650 or copied into our original target. */
4652 tree slot = TREE_OPERAND (exp, 0);
4655 if (TREE_CODE (slot) != VAR_DECL)
4660 if (DECL_RTL (slot) != 0)
4662 target = DECL_RTL (slot);
4663 /* If we have already expanded the slot, so don't do
4665 if (TREE_OPERAND (exp, 1) == NULL_TREE)
4670 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4671 /* All temp slots at this level must not conflict. */
4672 preserve_temp_slots (target);
4673 DECL_RTL (slot) = target;
4677 /* I bet this needs to be done, and I bet that it needs to
4678 be above, inside the else clause. The reason is
4679 simple, how else is it going to get cleaned up? (mrs)
4681 The reason it probably did not work before, and was
4682 commented out, is that this was re-expanding already
4683 expanded target_exprs (target == 0 and DECL_RTL (slot)
4684 != 0), also cleaning them up many times as well. :-( */
4686 /* Since SLOT is not known to the called function
4687 to belong to its stack frame, we must build an explicit
4688 cleanup. This case occurs when we must build up a reference
4689 to pass the reference as an argument. In this case,
4690 it is very likely that such a reference need not be
4693 if (TREE_OPERAND (exp, 2) == 0)
4694 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4695 if (TREE_OPERAND (exp, 2))
4696 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4697 cleanups_this_call);
4702 /* This case does occur, when expanding a parameter which
4703 needs to be constructed on the stack. The target
4704 is the actual stack address that we want to initialize.
4705 The function we call will perform the cleanup in this case. */
4707 DECL_RTL (slot) = target;
4710 exp1 = TREE_OPERAND (exp, 1);
4711 /* Mark it as expanded. */
4712 TREE_OPERAND (exp, 1) = NULL_TREE;
4714 return expand_expr (exp1, target, tmode, modifier);
4719 tree lhs = TREE_OPERAND (exp, 0);
4720 tree rhs = TREE_OPERAND (exp, 1);
4721 tree noncopied_parts = 0;
4722 tree lhs_type = TREE_TYPE (lhs);
4724 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4725 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4726 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4727 TYPE_NONCOPIED_PARTS (lhs_type));
4728 while (noncopied_parts != 0)
4730 expand_assignment (TREE_VALUE (noncopied_parts),
4731 TREE_PURPOSE (noncopied_parts), 0, 0);
4732 noncopied_parts = TREE_CHAIN (noncopied_parts);
4739 /* If lhs is complex, expand calls in rhs before computing it.
4740 That's so we don't compute a pointer and save it over a call.
4741 If lhs is simple, compute it first so we can give it as a
4742 target if the rhs is just a call. This avoids an extra temp and copy
4743 and that prevents a partial-subsumption which makes bad code.
4744 Actually we could treat component_ref's of vars like vars. */
4746 tree lhs = TREE_OPERAND (exp, 0);
4747 tree rhs = TREE_OPERAND (exp, 1);
4748 tree noncopied_parts = 0;
4749 tree lhs_type = TREE_TYPE (lhs);
4753 if (TREE_CODE (lhs) != VAR_DECL
4754 && TREE_CODE (lhs) != RESULT_DECL
4755 && TREE_CODE (lhs) != PARM_DECL)
4756 preexpand_calls (exp);
4758 /* Check for |= or &= of a bitfield of size one into another bitfield
4759 of size 1. In this case, (unless we need the result of the
4760 assignment) we can do this more efficiently with a
4761 test followed by an assignment, if necessary.
4763 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4764 things change so we do, this code should be enhanced to
4767 && TREE_CODE (lhs) == COMPONENT_REF
4768 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4769 || TREE_CODE (rhs) == BIT_AND_EXPR)
4770 && TREE_OPERAND (rhs, 0) == lhs
4771 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4772 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4773 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4775 rtx label = gen_label_rtx ();
4777 do_jump (TREE_OPERAND (rhs, 1),
4778 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4779 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4780 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4781 (TREE_CODE (rhs) == BIT_IOR_EXPR
4783 : integer_zero_node)),
4785 do_pending_stack_adjust ();
4790 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4791 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4792 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4793 TYPE_NONCOPIED_PARTS (lhs_type));
4795 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4796 while (noncopied_parts != 0)
4798 expand_assignment (TREE_PURPOSE (noncopied_parts),
4799 TREE_VALUE (noncopied_parts), 0, 0);
4800 noncopied_parts = TREE_CHAIN (noncopied_parts);
4805 case PREINCREMENT_EXPR:
4806 case PREDECREMENT_EXPR:
4807 return expand_increment (exp, 0);
4809 case POSTINCREMENT_EXPR:
4810 case POSTDECREMENT_EXPR:
4811 /* Faster to treat as pre-increment if result is not used. */
4812 return expand_increment (exp, ! ignore);
4815 /* Are we taking the address of a nested function? */
4816 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4817 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4819 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4820 op0 = force_operand (op0, target);
4824 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
4825 (modifier == EXPAND_INITIALIZER
4826 ? modifier : EXPAND_CONST_ADDRESS));
4827 if (GET_CODE (op0) != MEM)
4830 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4831 return XEXP (op0, 0);
4832 op0 = force_operand (XEXP (op0, 0), target);
4834 if (flag_force_addr && GET_CODE (op0) != REG)
4835 return force_reg (Pmode, op0);
4838 case ENTRY_VALUE_EXPR:
4845 return (*lang_expand_expr) (exp, target, tmode, modifier);
4848 /* Here to do an ordinary binary operator, generating an instruction
4849 from the optab already placed in `this_optab'. */
4851 preexpand_calls (exp);
4852 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4854 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4855 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4857 temp = expand_binop (mode, this_optab, op0, op1, target,
4858 unsignedp, OPTAB_LIB_WIDEN);
4864 /* Return the alignment in bits of EXP, a pointer valued expression.
4865 But don't return more than MAX_ALIGN no matter what.
4866 The alignment returned is, by default, the alignment of the thing that
4867 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4869 Otherwise, look at the expression to see if we can do better, i.e., if the
4870 expression is actually pointing at an object whose alignment is tighter. */
4873 get_pointer_alignment (exp, max_align)
4877 unsigned align, inner;
/* A non-pointer expression carries no alignment information.  */
4879 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Start from the declared alignment of the pointed-to type, capped at
   MAX_ALIGN.  */
4882 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4883 align = MIN (align, max_align);
/* Peel conversions and pointer arithmetic, tightening ALIGN where the
   expression's structure lets us.  */
4887 switch (TREE_CODE (exp))
4891 case NON_LVALUE_EXPR:
/* A conversion preserves the value; take the larger of the alignments
   implied by the outer and inner pointer types.  */
4892 exp = TREE_OPERAND (exp, 0);
4893 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4895 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4896 inner = MIN (inner, max_align);
4897 align = MAX (align, inner);
4901 /* If sum of pointer + int, restrict our maximum alignment to that
4902 imposed by the integer. If not, we can't do any better than
4904 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* Halve the cap until it divides the byte offset being added, so the
   sum is still known to be aligned to the reduced cap.  */
4907 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4912 exp = TREE_OPERAND (exp, 0);
4916 /* See what we are pointing at and look at its alignment. */
4917 exp = TREE_OPERAND (exp, 0);
4918 if (TREE_CODE (exp) == FUNCTION_DECL)
4919 align = MAX (align, FUNCTION_BOUNDARY);
4920 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4921 align = MAX (align, DECL_ALIGN (exp));
4922 #ifdef CONSTANT_ALIGNMENT
4923 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4924 align = CONSTANT_ALIGNMENT (exp, align);
4926 return MIN (align, max_align);
4934 /* Return the tree node and offset if a given argument corresponds to
4935 a string constant. */
/* ARG is the candidate expression; *PTR_OFFSET receives the byte offset
   into the string (as a tree) when a STRING_CST is found.  Returns the
   STRING_CST node, or falls through when ARG is not recognized.  */
4938 string_constant (arg, ptr_offset)
/* Direct case: ARG is the address of a string literal; offset is zero.  */
4944 if (TREE_CODE (arg) == ADDR_EXPR
4945 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4947 *ptr_offset = integer_zero_node;
4948 return TREE_OPERAND (arg, 0);
/* Otherwise look for (&string + offset) with the ADDR_EXPR in either
   operand position of the PLUS_EXPR.  */
4950 else if (TREE_CODE (arg) == PLUS_EXPR)
4952 tree arg0 = TREE_OPERAND (arg, 0);
4953 tree arg1 = TREE_OPERAND (arg, 1);
4958 if (TREE_CODE (arg0) == ADDR_EXPR
4959 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4962 return TREE_OPERAND (arg0, 0);
4964 else if (TREE_CODE (arg1) == ADDR_EXPR
4965 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4968 return TREE_OPERAND (arg1, 0);
4975 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4976 way, because it could contain a zero byte in the middle.
4977 TREE_STRING_LENGTH is the size of the character array, not the string.
4979 Unfortunately, string_constant can't access the values of const char
4980 arrays with initializers, so neither can we do so here. */
/* NOTE(review): this excerpt shows only the interior of c_strlen; the
   declaration line and the early bail-out checks are not visible here.  */
4990 src = string_constant (src, &offset_node);
/* MAX is the size of the character array; PTR points at its bytes.  */
4993 max = TREE_STRING_LENGTH (src);
4994 ptr = TREE_STRING_POINTER (src);
/* Non-constant offset: we can only fold the length if the string has no
   embedded nul byte before its terminator.  */
4995 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4997 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4998 compute the offset to the following null if we don't know where to
4999 start searching for it. */
5001 for (i = 0; i < max; i++)
5004 /* We don't know the starting offset, but we do know that the string
5005 has no internal zero bytes. We can assume that the offset falls
5006 within the bounds of the string; otherwise, the programmer deserves
5007 what he gets. Subtract the offset from the length of the string,
5009 /* This would perhaps not be valid if we were dealing with named
5010 arrays in addition to literal string constants. */
5011 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5014 /* We have a known offset into the string. Start searching there for
5015 a null character. */
5016 if (offset_node == 0)
5020 /* Did we get a long long offset? If so, punt. */
5021 if (TREE_INT_CST_HIGH (offset_node) != 0)
5023 offset = TREE_INT_CST_LOW (offset_node);
5025 /* If the offset is known to be out of bounds, warn, and call strlen at
5027 if (offset < 0 || offset > max)
5029 warning ("offset outside bounds of constant string");
5032 /* Use strlen to search for the first zero byte. Since any strings
5033 constructed with build_string will have nulls appended, we win even
5034 if we get handed something like (char[4])"abcd".
5036 Since OFFSET is our starting index into the string, no further
5037 calculation is needed. */
5038 return size_int (strlen (ptr + offset));
5041 /* Expand an expression EXP that calls a built-in function,
5042 with result going to TARGET if that's convenient
5043 (and in mode MODE if that's convenient).
5044 SUBTARGET may be used as the target for computing one of EXP's operands.
5045 IGNORE is nonzero if the value is to be ignored. */
5048 expand_builtin (exp, target, subtarget, mode, ignore)
5052 enum machine_mode mode;
/* FNDECL is the FUNCTION_DECL of the builtin; ARGLIST its argument
   TREE_LIST.  */
5055 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5056 tree arglist = TREE_OPERAND (exp, 1);
5059 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5060 optab builtin_optab;
/* Dispatch on which built-in function is being called.  Cases that fall
   through the switch end up at the expand_call at the bottom.  */
5062 switch (DECL_FUNCTION_CODE (fndecl))
5067 /* build_function_call changes these into ABS_EXPR. */
5072 case BUILT_IN_FSQRT:
5073 /* If not optimizing, call the library function. */
5078 /* Arg could be wrong type if user redeclared this fcn wrong. */
5079 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5080 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5082 /* Stabilize and compute the argument. */
5083 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5084 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5086 exp = copy_node (exp);
5087 arglist = copy_node (arglist);
5088 TREE_OPERAND (exp, 1) = arglist;
5089 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5091 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5093 /* Make a suitable register to place result in. */
5094 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
/* Pick the optab matching the specific math builtin.  */
5099 switch (DECL_FUNCTION_CODE (fndecl))
5102 builtin_optab = sin_optab; break;
5104 builtin_optab = cos_optab; break;
5105 case BUILT_IN_FSQRT:
5106 builtin_optab = sqrt_optab; break;
5111 /* Compute into TARGET.
5112 Set TARGET to wherever the result comes back. */
5113 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5114 builtin_optab, op0, target, 0);
5116 /* If we were unable to expand via the builtin, stop the
5117 sequence (without outputting the insns) and break, causing
5118 a call to the library function. */
5125 /* Check the results by default. But if flag_fast_math is turned on,
5126 then assume sqrt will always be called with valid arguments. */
5128 if (! flag_fast_math)
5130 /* Don't define the builtin FP instructions
5131 if your machine is not IEEE. */
5132 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5135 lab1 = gen_label_rtx ();
5137 /* Test the result; if it is NaN, set errno=EDOM because
5138 the argument was not in the domain. */
5139 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5140 emit_jump_insn (gen_beq (lab1));
5144 #ifdef GEN_ERRNO_RTX
5145 rtx errno_rtx = GEN_ERRNO_RTX;
5148 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5151 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5154 /* We can't set errno=EDOM directly; let the library call do it.
5155 Pop the arguments right away in case the call gets deleted. */
5157 expand_call (exp, target, 0);
5164 /* Output the entire sequence. */
5165 insns = get_insns ();
5171 case BUILT_IN_SAVEREGS:
5172 /* Don't do __builtin_saveregs more than once in a function.
5173 Save the result of the first call and reuse it. */
5174 if (saveregs_value != 0)
5175 return saveregs_value;
5177 /* When this function is called, it means that registers must be
5178 saved on entry to this function. So we migrate the
5179 call to the first insn of this function. */
5182 rtx valreg, saved_valreg;
5184 /* Now really call the function. `expand_call' does not call
5185 expand_builtin, so there is no danger of infinite recursion here. */
5188 #ifdef EXPAND_BUILTIN_SAVEREGS
5189 /* Do whatever the machine needs done in this case. */
5190 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5192 /* The register where the function returns its value
5193 is likely to have something else in it, such as an argument.
5194 So preserve that register around the call. */
5195 if (value_mode != VOIDmode)
5197 valreg = hard_libcall_value (value_mode);
5198 saved_valreg = gen_reg_rtx (value_mode);
5199 emit_move_insn (saved_valreg, valreg);
5202 /* Generate the call, putting the value in a pseudo. */
5203 temp = expand_call (exp, target, ignore);
5205 if (value_mode != VOIDmode)
5206 emit_move_insn (valreg, saved_valreg);
5212 saveregs_value = temp;
5214 /* This won't work inside a SEQUENCE--it really has to be
5215 at the start of the function. */
5216 if (in_sequence_p ())
5218 /* Better to do this than to crash. */
5219 error ("`va_start' used within `({...})'");
5223 /* Put the sequence after the NOTE that starts the function. */
5224 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5228 /* __builtin_args_info (N) returns word N of the arg space info
5229 for the current function. The number and meanings of words
5230 is controlled by the definition of CUMULATIVE_ARGS. */
5231 case BUILT_IN_ARGS_INFO:
5233 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
/* FIX: the '&' of this address-of expression had been mangled by a
   character-encoding error ("&curr" became a currency-sign entity).  */
5235 int *word_ptr = (int *) &current_function_args_info;
5236 tree type, elts, result;
5238 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5239 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5240 __FILE__, __LINE__);
5244 tree arg = TREE_VALUE (arglist);
5245 if (TREE_CODE (arg) != INTEGER_CST)
5246 error ("argument of __builtin_args_info must be constant");
5249 int wordnum = TREE_INT_CST_LOW (arg);
5251 if (wordnum < 0 || wordnum >= nwords)
5252 error ("argument of __builtin_args_info out of range");
5254 return GEN_INT (word_ptr[wordnum]);
5258 error ("missing argument in __builtin_args_info");
5263 for (i = 0; i < nwords; i++)
5264 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5266 type = build_array_type (integer_type_node,
5267 build_index_type (build_int_2 (nwords, 0)));
5268 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5269 TREE_CONSTANT (result) = 1;
5270 TREE_STATIC (result) = 1;
5271 result = build (INDIRECT_REF, build_pointer_type (type), result);
5272 TREE_CONSTANT (result) = 1;
5273 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5277 /* Return the address of the first anonymous stack arg. */
5278 case BUILT_IN_NEXT_ARG:
5280 tree fntype = TREE_TYPE (current_function_decl);
5281 if (!(TYPE_ARG_TYPES (fntype) != 0
5282 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5283 != void_type_node)))
5285 error ("`va_start' used in function with fixed args");
5290 return expand_binop (Pmode, add_optab,
5291 current_function_internal_arg_pointer,
5292 current_function_arg_offset_rtx,
5293 NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* Map the argument's type to one of the type-class constants from
   typeclass.h; returned as a compile-time integer.  */
5295 case BUILT_IN_CLASSIFY_TYPE:
5298 tree type = TREE_TYPE (TREE_VALUE (arglist));
5299 enum tree_code code = TREE_CODE (type);
5300 if (code == VOID_TYPE)
5301 return GEN_INT (void_type_class);
5302 if (code == INTEGER_TYPE)
5303 return GEN_INT (integer_type_class);
5304 if (code == CHAR_TYPE)
5305 return GEN_INT (char_type_class);
5306 if (code == ENUMERAL_TYPE)
5307 return GEN_INT (enumeral_type_class);
5308 if (code == BOOLEAN_TYPE)
5309 return GEN_INT (boolean_type_class);
5310 if (code == POINTER_TYPE)
5311 return GEN_INT (pointer_type_class);
5312 if (code == REFERENCE_TYPE)
5313 return GEN_INT (reference_type_class);
5314 if (code == OFFSET_TYPE)
5315 return GEN_INT (offset_type_class);
5316 if (code == REAL_TYPE)
5317 return GEN_INT (real_type_class);
5318 if (code == COMPLEX_TYPE)
5319 return GEN_INT (complex_type_class);
5320 if (code == FUNCTION_TYPE)
5321 return GEN_INT (function_type_class);
5322 if (code == METHOD_TYPE)
5323 return GEN_INT (method_type_class);
5324 if (code == RECORD_TYPE)
5325 return GEN_INT (record_type_class);
5326 if (code == UNION_TYPE)
5327 return GEN_INT (union_type_class);
5328 if (code == ARRAY_TYPE)
5329 return GEN_INT (array_type_class);
5330 if (code == STRING_TYPE)
5331 return GEN_INT (string_type_class);
5332 if (code == SET_TYPE)
5333 return GEN_INT (set_type_class);
5334 if (code == FILE_TYPE)
5335 return GEN_INT (file_type_class);
5336 if (code == LANG_TYPE)
5337 return GEN_INT (lang_type_class);
5339 return GEN_INT (no_type_class);
5341 case BUILT_IN_CONSTANT_P:
5345 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5346 ? const1_rtx : const0_rtx);
5348 case BUILT_IN_FRAME_ADDRESS:
5349 /* The argument must be a nonnegative integer constant.
5350 It counts the number of frames to scan up the stack.
5351 The value is the address of that frame. */
5352 case BUILT_IN_RETURN_ADDRESS:
5353 /* The argument must be a nonnegative integer constant.
5354 It counts the number of frames to scan up the stack.
5355 The value is the return address saved in that frame. */
5357 /* Warning about missing arg was already issued. */
5359 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5361 error ("invalid arg to __builtin_return_address");
5364 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5366 error ("invalid arg to __builtin_return_address");
5371 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5372 rtx tem = frame_pointer_rtx;
5375 /* Scan back COUNT frames to the specified frame. */
5376 for (i = 0; i < count; i++)
5378 /* Assume the dynamic chain pointer is in the word that
5379 the frame address points to, unless otherwise specified. */
5380 #ifdef DYNAMIC_CHAIN_ADDRESS
5381 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5383 tem = memory_address (Pmode, tem);
5384 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5387 /* For __builtin_frame_address, return what we've got. */
5388 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5391 /* For __builtin_return_address,
5392 Get the return address from that frame. */
5393 #ifdef RETURN_ADDR_RTX
5394 return RETURN_ADDR_RTX (count, tem)
5396 tem = memory_address (Pmode,
5397 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5398 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5402 case BUILT_IN_ALLOCA:
5404 /* Arg could be non-integer if user redeclared this fcn wrong. */
5405 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5407 current_function_calls_alloca = 1;
5408 /* Compute the argument. */
5409 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5411 /* Allocate the desired space. */
5412 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5414 /* Record the new stack level for nonlocal gotos. */
5415 if (nonlocal_goto_handler_slot != 0)
5416 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5420 /* If not optimizing, call the library function. */
5425 /* Arg could be non-integer if user redeclared this fcn wrong. */
5426 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5429 /* Compute the argument. */
5430 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5431 /* Compute ffs, into TARGET if possible.
5432 Set TARGET to wherever the result comes back. */
5433 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5434 ffs_optab, op0, target, 1);
5439 case BUILT_IN_STRLEN:
5440 /* If not optimizing, call the library function. */
5445 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5446 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5450 tree src = TREE_VALUE (arglist);
5451 tree len = c_strlen (src);
5454 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5456 rtx result, src_rtx, char_rtx;
5457 enum machine_mode insn_mode = value_mode, char_mode;
5458 enum insn_code icode;
5460 /* If the length is known, just return it. */
5462 return expand_expr (len, target, mode, 0);
5464 /* If SRC is not a pointer type, don't do this operation inline. */
5468 /* Call a function if we can't compute strlen in the right mode. */
/* Search for a strlen pattern in VALUE_MODE or any wider mode.  */
5470 while (insn_mode != VOIDmode)
5472 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5473 if (icode != CODE_FOR_nothing)
5476 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5478 if (insn_mode == VOIDmode)
5481 /* Make a place to write the result of the instruction. */
5484 && GET_CODE (result) == REG
5485 && GET_MODE (result) == insn_mode
5486 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5487 result = gen_reg_rtx (insn_mode);
5489 /* Make sure the operands are acceptable to the predicates. */
5491 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5492 result = gen_reg_rtx (insn_mode);
5494 src_rtx = memory_address (BLKmode,
5495 expand_expr (src, NULL_RTX, Pmode,
5497 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5498 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5500 char_rtx = const0_rtx;
5501 char_mode = insn_operand_mode[(int)icode][2];
5502 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5503 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5505 emit_insn (GEN_FCN (icode) (result,
5506 gen_rtx (MEM, BLKmode, src_rtx),
5507 char_rtx, GEN_INT (align)));
5509 /* Return the value in the proper mode for this function. */
5510 if (GET_MODE (result) == value_mode)
5512 else if (target != 0)
5514 convert_move (target, result, 0);
5518 return convert_to_mode (value_mode, result, 0);
5521 case BUILT_IN_STRCPY:
5522 /* If not optimizing, call the library function. */
5527 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5528 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5529 || TREE_CHAIN (arglist) == 0
5530 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
/* If the source length is known, turn strcpy into memcpy of length+1
   (the +1 copies the terminating nul) and fall through to MEMCPY.  */
5534 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5539 len = size_binop (PLUS_EXPR, len, integer_one_node);
5541 chainon (arglist, build_tree_list (NULL_TREE, len));
5545 case BUILT_IN_MEMCPY:
5546 /* If not optimizing, call the library function. */
5551 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5552 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5553 || TREE_CHAIN (arglist) == 0
5554 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5555 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5556 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5560 tree dest = TREE_VALUE (arglist);
5561 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5562 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5565 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5567 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5570 /* If either SRC or DEST is not a pointer type, don't do
5571 this operation in-line. */
5572 if (src_align == 0 || dest_align == 0)
5574 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5575 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5579 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5581 /* Copy word part most expediently. */
5582 emit_block_move (gen_rtx (MEM, BLKmode,
5583 memory_address (BLKmode, dest_rtx)),
5584 gen_rtx (MEM, BLKmode,
5585 memory_address (BLKmode,
5586 expand_expr (src, NULL_RTX,
5589 expand_expr (len, NULL_RTX, VOIDmode, 0),
5590 MIN (src_align, dest_align));
5594 /* These comparison functions need an instruction that returns an actual
5595 index. An ordinary compare that just sets the condition codes
5597 #ifdef HAVE_cmpstrsi
5598 case BUILT_IN_STRCMP:
5599 /* If not optimizing, call the library function. */
5604 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5605 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5606 || TREE_CHAIN (arglist) == 0
5607 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5609 else if (!HAVE_cmpstrsi)
5612 tree arg1 = TREE_VALUE (arglist);
5613 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5617 len = c_strlen (arg1);
5619 len = size_binop (PLUS_EXPR, integer_one_node, len);
5620 len2 = c_strlen (arg2);
5622 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5624 /* If we don't have a constant length for the first, use the length
5625 of the second, if we know it. We don't require a constant for
5626 this case; some cost analysis could be done if both are available
5627 but neither is constant. For now, assume they're equally cheap.
5629 If both strings have constant lengths, use the smaller. This
5630 could arise if optimization results in strcpy being called with
5631 two fixed strings, or if the code was machine-generated. We should
5632 add some code to the `memcmp' handler below to deal with such
5633 situations, someday. */
5634 if (!len || TREE_CODE (len) != INTEGER_CST)
5641 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5643 if (tree_int_cst_lt (len2, len))
/* Append the computed length and fall through to the MEMCMP code.  */
5647 chainon (arglist, build_tree_list (NULL_TREE, len));
5651 case BUILT_IN_MEMCMP:
5652 /* If not optimizing, call the library function. */
5657 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5658 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5659 || TREE_CHAIN (arglist) == 0
5660 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5661 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5662 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5664 else if (!HAVE_cmpstrsi)
5667 tree arg1 = TREE_VALUE (arglist);
5668 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5669 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5673 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5675 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5676 enum machine_mode insn_mode
5677 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5679 /* If we don't have POINTER_TYPE, call the function. */
5680 if (arg1_align == 0 || arg2_align == 0)
5682 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5683 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5687 /* Make a place to write the result of the instruction. */
5690 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5691 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5692 result = gen_reg_rtx (insn_mode);
5694 emit_insn (gen_cmpstrsi (result,
5695 gen_rtx (MEM, BLKmode,
5696 expand_expr (arg1, NULL_RTX, Pmode,
5698 gen_rtx (MEM, BLKmode,
5699 expand_expr (arg2, NULL_RTX, Pmode,
5701 expand_expr (len, NULL_RTX, VOIDmode, 0),
5702 GEN_INT (MIN (arg1_align, arg2_align))));
5704 /* Return the value in the proper mode for this function. */
5705 mode = TYPE_MODE (TREE_TYPE (exp));
5706 if (GET_MODE (result) == mode)
5708 else if (target != 0)
5710 convert_move (target, result, 0);
5714 return convert_to_mode (mode, result, 0);
5717 case BUILT_IN_STRCMP:
5718 case BUILT_IN_MEMCMP:
5722 default: /* just do library call, if unknown builtin */
5723 error ("built-in function %s not currently supported",
5724 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5727 /* The switch statement above can drop through to cause the function
5728 to be called normally. */
5730 return expand_call (exp, target, ignore);
5733 /* Expand code for a post- or pre- increment or decrement
5734 and return the RTX for the result.
5735 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
/* NOTE(review): this listing is fragmentary -- the embedded original line
   numbers jump (5735 -> 5738, 5738 -> 5742, ...), so the return type,
   parameter declarations, braces and several statements are elided from
   this copy.  Comments below describe only what the visible lines show.  */
5738 expand_increment (exp, post)
5742 register rtx op0, op1;
5743 register rtx temp, value;
5744 register tree incremented = TREE_OPERAND (exp, 0);
/* Assume addition; switched to sub_optab below for the decrement codes.  */
5745 optab this_optab = add_optab;
5747 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5748 int op0_is_copy = 0;
5750 /* Stabilize any component ref that might need to be
5751 evaluated more than once below. */
5752 if (TREE_CODE (incremented) == BIT_FIELD_REF
5753 || (TREE_CODE (incremented) == COMPONENT_REF
5754 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5755 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5756 incremented = stabilize_reference (incremented);
5758 /* Compute the operands as RTX.
5759 Note whether OP0 is the actual lvalue or a copy of it:
5760 I believe it is a copy iff it is a register or subreg
5761 and insns were generated in computing it. */
/* Snapshot the insn stream; if expand_expr emits insns and yields a
   REG/SUBREG, OP0 is a copy of the lvalue rather than the lvalue itself.  */
5762 temp = get_last_insn ();
5763 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
5764 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5765 && temp != get_last_insn ());
5766 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5768 /* Decide whether incrementing or decrementing. */
5769 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5770 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5771 this_optab = sub_optab;
5773 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5774 then we cannot just increment OP0. We must
5775 therefore contrive to increment the original value.
5776 Then we can return OP0 since it is a copy of the old value. */
5779 /* This is the easiest way to increment the value wherever it is.
5780 Problems with multiple evaluation of INCREMENTED
5781 are prevented because either (1) it is a component_ref,
5782 in which case it was stabilized above, or (2) it is an array_ref
5783 with constant index in an array in a register, which is
5784 safe to reevaluate. */
/* Build INCREMENTED +/- OP1 as a tree and assign it back through the
   ordinary assignment path.  (The middle operand of this build call,
   original line 5787-5788, is elided from this copy.)  */
5785 tree newexp = build ((this_optab == add_optab
5786 ? PLUS_EXPR : MINUS_EXPR),
5789 TREE_OPERAND (exp, 1));
5790 temp = expand_assignment (incremented, newexp, ! post, 0);
/* Postfix returns the saved old value (OP0); prefix returns the new one.  */
5791 return post ? op0 : temp;
5794 /* Convert decrement by a constant into a negative increment. */
5795 if (this_optab == sub_optab
5796 && GET_CODE (op1) == CONST_INT)
5798 op1 = GEN_INT (- INTVAL (op1));
5799 this_optab = add_optab;
5804 /* We have a true reference to the value in OP0.
5805 If there is an insn to add or subtract in this mode, queue it. */
5807 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
5808 op0 = stabilize (op0);
/* Query the optab for a single add/sub insn in this mode and check that
   OP0 satisfies the predicates for both the output and first input.  */
5811 icode = (int) this_optab->handlers[(int) mode].insn_code;
5812 if (icode != (int) CODE_FOR_nothing
5813 /* Make sure that OP0 is valid for operands 0 and 1
5814 of the insn we want to queue. */
5815 && (*insn_operand_predicate[icode][0]) (op0, mode)
5816 && (*insn_operand_predicate[icode][1]) (op0, mode))
5818 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5819 op1 = force_reg (mode, op1);
/* Defer the increment via the queue (postincrement semantics).  */
5821 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5825 /* Preincrement, or we can't increment with one simple insn. */
5827 /* Save a copy of the value before inc or dec, to return it later. */
5828 temp = value = copy_to_reg (op0);
5830 /* Arrange to return the incremented value. */
5831 /* Copy the rtx because expand_binop will protect from the queue,
5832 and the results of that would be invalid for us to return
5833 if our caller does emit_queue before using our result. */
5834 temp = copy_rtx (value = op0);
5836 /* Increment however we can. */
5837 op1 = expand_binop (mode, this_optab, value, op1, op0,
5838 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN)
5839 /* Make sure the value is stored into OP0. */
5841 emit_move_insn (op0, op1);
5846 /* Expand all function calls contained within EXP, innermost ones first.
5847 But don't look within expressions that have sequence points.
5848 For each CALL_EXPR, record the rtx for its value
5849 in the CALL_EXPR_RTL field. */
/* NOTE(review): several lines (parameter declaration, braces, some case
   labels and returns) are elided from this copy -- see the gaps in the
   embedded original line numbers.  */
5852 preexpand_calls (exp)
5855 register int nops, i;
/* Despite its name, TYPE holds the tree-code *class* character
   ('e', '<', '1', '2', 'r', ...), not a type node.  */
5856 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5858 if (! do_preexpand_calls)
5861 /* Only expressions and references can contain calls. */
5863 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5866 switch (TREE_CODE (exp))
5869 /* Do nothing if already expanded. */
5870 if (CALL_EXPR_RTL (exp) != 0)
5873 /* Do nothing to built-in functions. */
5874 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5875 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5876 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
/* Expand the call now and cache its value rtx on the CALL_EXPR node.  */
5877 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
5882 case TRUTH_ANDIF_EXPR:
5883 case TRUTH_ORIF_EXPR:
5884 /* If we find one of these, then we can be sure
5885 the adjust will be done for it (since it makes jumps).
5886 Do it now, so that if this is inside an argument
5887 of a function, we don't get the stack adjustment
5888 after some other args have already been pushed. */
5889 do_pending_stack_adjust ();
5894 case WITH_CLEANUP_EXPR:
5898 if (SAVE_EXPR_RTL (exp) != 0)
/* Default: recurse into every operand whose tree-code class can
   contain a call (same class test as at function entry).  */
5902 nops = tree_code_length[(int) TREE_CODE (exp)];
5903 for (i = 0; i < nops; i++)
5904 if (TREE_OPERAND (exp, i) != 0)
5906 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5907 if (type == 'e' || type == '<' || type == '1' || type == '2'
5909 preexpand_calls (TREE_OPERAND (exp, i));
5913 /* At the start of a function, record that we have no previously-pushed
5914 arguments waiting to be popped. */
/* Resets the file-global PENDING_STACK_ADJUST counter (declared earlier
   in this file) to zero for a fresh function.  */
5917 init_pending_stack_adjust ()
5919 pending_stack_adjust = 0;
5922 /* When exiting from function, if safe, clear out any pending stack adjust
5923 so the adjustment won't get done. */
5926 clear_pending_stack_adjust ()
/* Only discard the adjustment when the target's epilogue is known to
   restore the stack pointer itself (EXIT_IGNORE_STACK) and the function
   cannot be inlined elsewhere (inlining would splice this body into a
   caller whose stack the epilogue does not fix up).  */
5928 #ifdef EXIT_IGNORE_STACK
5929 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5930 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
5931 && ! flag_inline_functions)
5932 pending_stack_adjust = 0;
5936 /* Pop any previously-pushed arguments that have not been popped yet. */
5939 do_pending_stack_adjust ()
/* Skipped entirely while INHIBIT_DEFER_POP is nonzero (pops deferred).  */
5941 if (inhibit_defer_pop == 0)
5943 if (pending_stack_adjust != 0)
/* Emit one stack-pointer adjustment covering all deferred pops.  */
5944 adjust_stack (GEN_INT (pending_stack_adjust));
5945 pending_stack_adjust = 0;
5949 /* Expand all cleanups up to OLD_CLEANUPS.
5950 Needed here, and also for language-dependent calls. */
5953 expand_cleanups_to (old_cleanups)
/* CLEANUPS_THIS_CALL is a TREE_LIST used as a stack; pop and expand each
   cleanup expression until we reach the saved mark OLD_CLEANUPS.  */
5956 while (cleanups_this_call != old_cleanups)
5958 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
5959 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5963 /* Expand conditional expressions. */
5965 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5966 LABEL is an rtx of code CODE_LABEL, in this function and all the
/* (Tail of the comment and the parameter declarations, original lines
   5967-5973, are elided from this copy.)  */
5970 jumpifnot (exp, label)
/* Thin wrapper: LABEL becomes do_jump's if-false target; no true target.  */
5974 do_jump (exp, label, NULL_RTX);
5977 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5984 do_jump (exp, NULL_RTX, label);
5987 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5988 the result is zero, or IF_TRUE_LABEL if the result is one.
5989 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5990 meaning fall through in that case.
5992 do_jump always does any pending stack adjust except when it does not
5993 actually perform a jump. An example where there is no jump
5994 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5996 This function is responsible for optimizing cases such as
5997 &&, || and comparison operators in EXP. */
/* NOTE(review): this copy is heavily elided -- most of the switch's case
   labels, several local declarations (temp, comparison, i, type, offset,
   volatilep, ...) and the switch/brace structure itself are missing; the
   embedded original line numbers show the gaps.  Comments below annotate
   only the surviving lines.  */
6000 do_jump (exp, if_false_label, if_true_label)
6002 rtx if_false_label, if_true_label;
6004 register enum tree_code code = TREE_CODE (exp);
6005 /* Some cases need to create a label to jump to
6006 in order to properly fall through.
6007 These cases set DROP_THROUGH_LABEL nonzero. */
6008 rtx drop_through_label = 0;
/* Constant operand: the jump target is known at compile time.  */
6022 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6028 /* This is not true with #pragma weak */
6030 /* The address of something can never be zero. */
6032 emit_jump (if_true_label);
6037 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6038 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6039 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6042 /* If we are narrowing the operand, we have to do the compare in the
6044 if ((TYPE_PRECISION (TREE_TYPE (exp))
6045 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6047 case NON_LVALUE_EXPR:
6048 case REFERENCE_EXPR:
6053 /* These cannot change zero->non-zero or vice versa. */
6054 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6058 /* This is never less insns than evaluating the PLUS_EXPR followed by
6059 a test and can be longer if the test is eliminated. */
6061 /* Reduce to minus. */
/* a + b != 0  is rewritten as  a - (-b) != 0  and handled as MINUS.  */
6062 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6063 TREE_OPERAND (exp, 0),
6064 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6065 TREE_OPERAND (exp, 1))));
6066 /* Process as MINUS. */
6070 /* Non-zero iff operands of minus differ. */
6071 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6072 TREE_OPERAND (exp, 0),
6073 TREE_OPERAND (exp, 1)),
6078 /* If we are AND'ing with a small constant, do this comparison in the
6079 smallest type that fits. If the machine doesn't have comparisons
6080 that small, it will be converted back to the wider comparison.
6081 This helps if we are testing the sign bit of a narrower object.
6082 combine can't do this for us because it can't know whether a
6083 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6085 if (! SLOW_BYTE_ACCESS
6086 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6087 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6088 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6089 && (type = type_for_size (i + 1, 1)) != 0
6090 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6091 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6092 != CODE_FOR_nothing))
6094 do_jump (convert (type, exp), if_false_label, if_true_label);
6099 case TRUTH_NOT_EXPR:
/* Logical NOT: just swap the two jump targets.  */
6100 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6103 case TRUTH_ANDIF_EXPR:
/* a && b: if a is false we are done; only test b when a was true.  */
6104 if (if_false_label == 0)
6105 if_false_label = drop_through_label = gen_label_rtx ();
6106 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6107 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6110 case TRUTH_ORIF_EXPR:
/* a || b: mirror image of ANDIF with the true label.  */
6111 if (if_true_label == 0)
6112 if_true_label = drop_through_label = gen_label_rtx ();
6113 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6114 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* Comma expression: evaluate the first operand for side effects only,
   then jump on the second.  */
6118 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6121 do_pending_stack_adjust ();
6122 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6129 int bitsize, bitpos, unsignedp;
6130 enum machine_mode mode;
6135 /* Get description of this reference. We don't actually care
6136 about the underlying object here. */
6137 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6138 &mode, &unsignedp, &volatilep);
/* If the field fits a narrower type the machine can compare in,
   test in that narrower type instead.  */
6140 type = type_for_size (bitsize, unsignedp);
6141 if (! SLOW_BYTE_ACCESS
6142 && type != 0 && bitsize >= 0
6143 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6144 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6145 != CODE_FOR_nothing))
6147 do_jump (convert (type, exp), if_false_label, if_true_label);
6154 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6155 if (integer_onep (TREE_OPERAND (exp, 1))
6156 && integer_zerop (TREE_OPERAND (exp, 2)))
6157 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6159 else if (integer_zerop (TREE_OPERAND (exp, 1))
6160 && integer_onep (TREE_OPERAND (exp, 2)))
6161 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* General conditional: branch around the THEN arm to reach the ELSE.  */
6165 register rtx label1 = gen_label_rtx ();
6166 drop_through_label = gen_label_rtx ();
6167 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6168 /* Now the THEN-expression. */
6169 do_jump (TREE_OPERAND (exp, 1),
6170 if_false_label ? if_false_label : drop_through_label,
6171 if_true_label ? if_true_label : drop_through_label);
6172 /* In case the do_jump just above never jumps. */
6173 do_pending_stack_adjust ();
6174 emit_label (label1);
6175 /* Now the ELSE-expression. */
6176 do_jump (TREE_OPERAND (exp, 2),
6177 if_false_label ? if_false_label : drop_through_label,
6178 if_true_label ? if_true_label : drop_through_label);
/* EQ: comparison against zero swaps targets; multiword integers that the
   machine cannot compare directly go word-by-word.  */
6183 if (integer_zerop (TREE_OPERAND (exp, 1)))
6184 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6185 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6188 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6189 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6191 comparison = compare (exp, EQ, EQ);
/* NE: same scheme with targets the other way around.  */
6195 if (integer_zerop (TREE_OPERAND (exp, 1)))
6196 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6197 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6200 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6201 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6203 comparison = compare (exp, NE, NE);
/* LT / LE / GT / GE: wide integers use the word-by-word greater-than
   helper (SWAP and label order encode which relation is tested);
   otherwise emit a single compare with the signed/unsigned rtx code.  */
6207 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6209 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6210 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6212 comparison = compare (exp, LT, LTU);
6216 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6218 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6219 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6221 comparison = compare (exp, LE, LEU);
6225 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6227 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6228 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6230 comparison = compare (exp, GT, GTU);
6234 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6236 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6237 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6239 comparison = compare (exp, GE, GEU);
/* Default: evaluate EXP and compare the resulting rtx against zero.  */
6244 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6246 /* This is not needed any more and causes poor code since it causes
6247 comparisons and tests from non-SI objects to have different code
6249 /* Copy to register to avoid generating bad insns by cse
6250 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6251 if (!cse_not_expected && GET_CODE (temp) == MEM)
6252 temp = copy_to_reg (temp);
6254 do_pending_stack_adjust ();
6255 if (GET_CODE (temp) == CONST_INT)
6256 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6257 else if (GET_CODE (temp) == LABEL_REF)
6258 comparison = const_true_rtx;
6259 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6260 && !can_compare_p (GET_MODE (temp)))
6261 /* Note swapping the labels gives us not-equal. */
6262 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6263 else if (GET_MODE (temp) != VOIDmode)
6264 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6265 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6266 GET_MODE (temp), NULL_RTX, 0);
6271 /* Do any postincrements in the expression that was tested. */
6274 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6275 straight into a conditional jump instruction as the jump condition.
6276 Otherwise, all the work has been done already. */
6278 if (comparison == const_true_rtx)
6281 emit_jump (if_true_label);
6283 else if (comparison == const0_rtx)
6286 emit_jump (if_false_label);
6288 else if (comparison)
6289 do_jump_for_compare (comparison, if_false_label, if_true_label);
6293 if (drop_through_label)
6295 /* If do_jump produces code that might be jumped around,
6296 do any stack adjusts from that code, before the place
6297 where control merges in. */
6298 do_pending_stack_adjust ();
6299 emit_label (drop_through_label);
6303 /* Given a comparison expression EXP for values too wide to be compared
6304 with one insn, test the comparison and jump to the appropriate label.
6305 The code of EXP is ignored; we always test GT if SWAP is 0,
6306 and LT if SWAP is 1. */
/* NOTE(review): parameter declarations and some braces are elided from
   this copy (embedded line-number gaps).  */
6309 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6312 rtx if_false_label, if_true_label;
/* SWAP selects which operand is OP0, turning the fixed GT test into LT.  */
6314 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6315 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6316 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6317 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6318 rtx drop_through_label = 0;
6319 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* A missing target label means "fall through"; give it a real label so
   the per-word branches below always have somewhere to go.  */
6322 if (! if_true_label || ! if_false_label)
6323 drop_through_label = gen_label_rtx ();
6324 if (! if_true_label)
6325 if_true_label = drop_through_label;
6326 if (! if_false_label)
6327 if_false_label = drop_through_label;
6329 /* Compare a word at a time, high order first. */
6330 for (i = 0; i < nwords; i++)
6333 rtx op0_word, op1_word;
6335 if (WORDS_BIG_ENDIAN)
6337 op0_word = operand_subword_force (op0, i, mode);
6338 op1_word = operand_subword_force (op1, i, mode);
6342 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6343 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6346 /* All but high-order word must be compared as unsigned. */
6347 comp = compare_from_rtx (op0_word, op1_word,
6348 (unsignedp || i > 0) ? GTU : GT,
6349 unsignedp, word_mode, NULL_RTX, 0);
6350 if (comp == const_true_rtx)
6351 emit_jump (if_true_label);
6352 else if (comp != const0_rtx)
6353 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6355 /* Consider lower words only if these are equal. */
6356 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6358 if (comp == const_true_rtx)
6359 emit_jump (if_false_label);
6360 else if (comp != const0_rtx)
6361 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words equal => the strict comparison is false.  */
6365 emit_jump (if_false_label);
6366 if (drop_through_label)
6367 emit_label (drop_through_label);
6370 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6371 with one insn, test the comparison and jump to the appropriate label. */
6374 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6376 rtx if_false_label, if_true_label;
6378 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6379 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6380 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6381 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6383 rtx drop_through_label = 0;
/* The loop only branches to the false target, so only that label must
   exist; synthesize it (as the drop-through point) if absent.  */
6385 if (! if_false_label)
6386 drop_through_label = if_false_label = gen_label_rtx ();
/* Any unequal word proves inequality: branch to the false label.
   (The loop tests EQ and jumps on failure -- see do_jump_for_compare
   with a null true-label.)  */
6388 for (i = 0; i < nwords; i++)
6390 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6391 operand_subword_force (op1, i, mode),
6392 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6393 word_mode, NULL_RTX, 0);
6394 if (comp == const_true_rtx)
6395 emit_jump (if_false_label);
6396 else if (comp != const0_rtx)
6397 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* All words matched: the values are equal.  */
6401 emit_jump (if_true_label);
6402 if (drop_through_label)
6403 emit_label (drop_through_label);
6406 /* Jump according to whether OP0 is 0.
6407 We assume that OP0 has an integer mode that is too wide
6408 for the available compare insns. */
6411 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6413 rtx if_false_label, if_true_label;
6415 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6417 rtx drop_through_label = 0;
6419 if (! if_false_label)
6420 drop_through_label = if_false_label = gen_label_rtx ();
/* Compare each word against zero (unsigned); any nonzero word means
   OP0 != 0, i.e. the "false" outcome of the OP0 == 0 test.  */
6422 for (i = 0; i < nwords; i++)
6424 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6426 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
6427 if (comp == const_true_rtx)
6428 emit_jump (if_false_label);
6429 else if (comp != const0_rtx)
6430 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Every word was zero: OP0 == 0.  */
6434 emit_jump (if_true_label);
6435 if (drop_through_label)
6436 emit_label (drop_through_label);
6439 /* Given a comparison expression in rtl form, output conditional branches to
6440 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6443 do_jump_for_compare (comparison, if_false_label, if_true_label)
6444 rtx comparison, if_false_label, if_true_label;
/* bcc_gen_fctn[] maps an rtx comparison code to the generator for the
   corresponding conditional-branch insn on this target.  */
6448 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6449 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6454 emit_jump (if_false_label);
6456 else if (if_false_label)
/* Remember the insn before the branch so we can find what was emitted.  */
6459 rtx prev = PREV_INSN (get_last_insn ());
6462 /* Output the branch with the opposite condition. Then try to invert
6463 what is generated. If more than one insn is a branch, or if the
6464 branch is not the last insn written, abort. If we can't invert
6465 the branch, emit make a true label, redirect this jump to that,
6466 emit a jump to the false label and define the true label. */
6468 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6469 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6473 /* Here we get the insn before what was just emitted.
6474 On some machines, emitting the branch can discard
6475 the previous compare insn and emit a replacement. */
6477 /* If there's only one preceding insn... */
6478 insn = get_insns ();
6480 insn = NEXT_INSN (prev);
/* Scan forward to locate the JUMP_INSN we just emitted.  */
6482 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6483 if (GET_CODE (insn) == JUMP_INSN)
6490 if (branch != get_last_insn ())
6493 if (! invert_jump (branch, if_false_label))
/* Inversion failed: route the original branch to a fresh "true" label,
   fall through to an unconditional jump to the false label, then define
   the true label after it.  */
6495 if_true_label = gen_label_rtx ();
6496 redirect_jump (branch, if_true_label);
6497 emit_jump (if_false_label);
6498 emit_label (if_true_label);
6503 /* Generate code for a comparison expression EXP
6504 (including code to compute the values to be compared)
6505 and set (CC0) according to the result.
6506 SIGNED_CODE should be the rtx operation for this comparison for
6507 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6509 We force a stack adjustment unless there are currently
6510 things pushed on the stack that aren't yet used. */
6513 compare (exp, signed_code, unsigned_code)
6515 enum rtx_code signed_code, unsigned_code;
/* Expand both operands first; the signedness of operand 0's type picks
   which rtx comparison code is used.  */
6518 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6520 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6521 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6522 register enum machine_mode mode = TYPE_MODE (type);
6523 int unsignedp = TREE_UNSIGNED (type);
6524 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
/* For BLKmode operands the size rtx is supplied (the elided line 6527
   presumably tests mode == BLKmode -- the condition is missing from
   this copy); alignment comes from EXP's type.  */
6526 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6528 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6529 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6532 /* Like compare but expects the values to compare as two rtx's.
6533 The decision as to signed or unsigned comparison must be made by the caller.
6535 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6538 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6539 size of MODE should be used. */
6542 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6543 register rtx op0, op1;
6546 enum machine_mode mode;
6550 /* If one operand is constant, make it the second one. */
/* Swapping operands requires reversing the comparison code too.  */
6552 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6557 code = swap_condition (code);
/* Presumably guarded by a flag_force_mem test on the elided lines
   6558-6561 -- TODO confirm against the full source.  */
6562 op0 = force_not_mem (op0);
6563 op1 = force_not_mem (op1);
6566 do_pending_stack_adjust ();
/* Both operands constant: fold the comparison at compile time.  */
6568 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6569 return simplify_relational_operation (code, mode, op0, op1);
6572 /* There's no need to do this now that combine.c can eliminate lots of
6573 sign extensions. This can be less efficient in certain cases on other
6576 /* If this is a signed equality comparison, we can do it as an
6577 unsigned comparison since zero-extension is cheaper than sign
6578 extension and comparisons with zero are done as unsigned. This is
6579 the case even on machines that can do fast sign extension, since
6580 zero-extension is easier to combinen with other operations than
6581 sign-extension is. If we are comparing against a constant, we must
6582 convert it to what it would look like unsigned. */
6583 if ((code == EQ || code == NE) && ! unsignedp
6584 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6586 if (GET_CODE (op1) == CONST_INT
6587 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6588 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
/* Emit the actual compare insn, then return a (CODE cc0 0) rtx that a
   conditional branch can use as its condition.  */
6593 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6595 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6598 /* Generate code to calculate EXP using a store-flag instruction
6599 and return an rtx for the result. EXP is either a comparison
6600 or a TRUTH_NOT_EXPR whose operand is a comparison.
6602 If TARGET is nonzero, store the result there if convenient.
6604 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6607 Return zero if there is no suitable set-flag instruction
6608 available on this machine.
6610 Once expand_expr has been called on the arguments of the comparison,
6611 we are committed to doing the store flag, since it is not safe to
6612 re-evaluate the expression. We emit the store-flag insn by calling
6613 emit_store_flag, but only expand the arguments if we have a reason
6614 to believe that emit_store_flag will be successful. If we think that
6615 it will, but it isn't, we have to simulate the store-flag with a
6616 set/jump/set sequence. */
/* NOTE(review): parameter declarations, switch/case labels and several
   returns are elided from this copy (embedded line-number gaps).  */
6619 do_store_flag (exp, target, mode, only_cheap)
6622 enum machine_mode mode;
6626 tree arg0, arg1, type;
6628 enum machine_mode operand_mode;
6632 enum insn_code icode;
6633 rtx subtarget = target;
6634 rtx result, label, pattern, jump_pat;
6636 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6637 result at the end. We can't simply invert the test since it would
6638 have already been inverted if it were valid. This case occurs for
6639 some floating-point comparisons. */
6641 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6642 invert = 1, exp = TREE_OPERAND (exp, 0);
6644 arg0 = TREE_OPERAND (exp, 0);
6645 arg1 = TREE_OPERAND (exp, 1);
6646 type = TREE_TYPE (arg0);
6647 operand_mode = TYPE_MODE (type);
6648 unsignedp = TREE_UNSIGNED (type);
6650 /* We won't bother with BLKmode store-flag operations because it would mean
6651 passing a lot of information to emit_store_flag. */
6652 if (operand_mode == BLKmode)
6658 /* Get the rtx comparison code to use. We know that EXP is a comparison
6659 operation of some type. Some comparisons against 1 and -1 can be
6660 converted to comparisons with zero. Do so here so that the tests
6661 below will be aware that we have a comparison with zero. These
6662 tests will not catch constants in the first operand, but constants
6663 are rarely passed as the first operand. */
/* The switch's case labels are elided; from the rewrites these arms are
   evidently LT/LE/GE/GT: e.g. x < 1 becomes x <= 0, x <= -1 becomes
   x < 0, x >= 1 becomes x > 0, x > -1 becomes x >= 0.  */
6665 switch (TREE_CODE (exp))
6674 if (integer_onep (arg1))
6675 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6677 code = unsignedp ? LTU : LT;
6680 if (integer_all_onesp (arg1))
6681 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6683 code = unsignedp ? LEU : LE;
6686 if (integer_all_onesp (arg1))
6687 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6689 code = unsignedp ? GTU : GT;
6692 if (integer_onep (arg1))
6693 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6695 code = unsignedp ? GEU : GE;
6701 /* Put a constant second. */
6702 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6704 tem = arg0; arg0 = arg1; arg1 = tem;
6705 code = swap_condition (code);
6708 /* If this is an equality or inequality test of a single bit, we can
6709 do this by shifting the bit being tested to the low-order bit and
6710 masking the result with the constant 1. If the condition was EQ,
6711 we xor it with 1. This does not require an scc insn and is faster
6712 than an scc insn even if we have it. */
6714 if ((code == NE || code == EQ)
6715 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6716 && integer_pow2p (TREE_OPERAND (arg0, 1))
6717 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
/* Bit position of the single set bit in the AND mask.  */
6719 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6720 NULL_RTX, VOIDmode, 0)));
/* SUBTARGET is only usable if it is a register of the right mode and
   expanding ARG0 into it cannot clobber values ARG0 still needs.  */
6722 if (subtarget == 0 || GET_CODE (subtarget) != REG
6723 || GET_MODE (subtarget) != operand_mode
6724 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6727 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0 ...  */
6730 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6731 size_int (bitnum), target, 1);
6733 if (GET_MODE (op0) != mode)
6734 op0 = convert_to_mode (mode, op0, 1);
/* ... mask to a single bit (unnecessary if it was the sign bit) ...  */
6736 if (bitnum != TYPE_PRECISION (type) - 1)
6737 op0 = expand_and (op0, const1_rtx, target);
/* ... and flip it when the requested condition (after any TRUTH_NOT
   inversion) is EQ.  */
6739 if ((code == EQ && ! invert) || (code == NE && invert))
6740 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6746 /* Now see if we are likely to be able to do this. Return if not. */
6747 if (! can_compare_p (operand_mode))
6749 icode = setcc_gen_code[(int) code];
6750 if (icode == CODE_FOR_nothing
6751 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
6753 /* We can only do this if it is one of the special cases that
6754 can be handled without an scc insn. */
6755 if ((code == LT && integer_zerop (arg1))
6756 || (! only_cheap && code == GE && integer_zerop (arg1)))
6758 else if (BRANCH_COST >= 0
6759 && ! only_cheap && (code == NE || code == EQ)
6760 && TREE_CODE (type) != REAL_TYPE
6761 && ((abs_optab->handlers[(int) operand_mode].insn_code
6762 != CODE_FOR_nothing)
6763 || (ffs_optab->handlers[(int) operand_mode].insn_code
6764 != CODE_FOR_nothing)))
/* Committed: expand the operands (cannot back out after this point,
   per the header comment).  */
6770 preexpand_calls (exp);
6771 if (subtarget == 0 || GET_CODE (subtarget) != REG
6772 || GET_MODE (subtarget) != operand_mode
6773 || ! safe_from_p (subtarget, arg1))
6776 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
6777 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6780 target = gen_reg_rtx (mode);
6782 result = emit_store_flag (target, code, op0, op1, operand_mode,
/* Invert the 0/1 result with XOR when a TRUTH_NOT wrapped the test.  */
6788 result = expand_binop (mode, xor_optab, result, const1_rtx,
6789 result, 0, OPTAB_LIB_WIDEN);
6793 /* If this failed, we have to do this with set/compare/jump/set code. */
6794 if (target == 0 || GET_CODE (target) != REG
6795 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
6796 target = gen_reg_rtx (GET_MODE (target));
/* set/jump/set: preset TARGET to the "condition holds" value, branch
   over the reset when the condition holds.  */
6798 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
6799 result = compare_from_rtx (op0, op1, code, unsignedp,
6800 operand_mode, NULL_RTX, 0);
6801 if (GET_CODE (result) == CONST_INT)
6802 return (((result == const0_rtx && ! invert)
6803 || (result != const0_rtx && invert))
6804 ? const0_rtx : const1_rtx);
6806 label = gen_label_rtx ();
6807 if (bcc_gen_fctn[(int) code] == 0)
6810 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
6811 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
6817 /* Generate a tablejump instruction (used for switch statements). */
6819 #ifdef HAVE_tablejump
6821 /* INDEX is the value being switched on, with the lowest value
6822 in the table already subtracted.
6823 MODE is its expected mode (needed if INDEX is constant).
6824 RANGE is the length of the jump table.
6825 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
6827 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
6828 index value is out of range. */
6831 do_tablejump (index, mode, range, table_label, default_label)
6832 rtx index, range, table_label, default_label;
6833 enum machine_mode mode;
6835 register rtx temp, vector;
6837 /* Do an unsigned comparison (in the proper mode) between the index
6838 expression and the value which represents the length of the range.
6839 Since we just finished subtracting the lower bound of the range
6840 from the index expression, this comparison allows us to simultaneously
6841 check that the original index expression value is both greater than
6842 or equal to the minimum value of the range and less than or equal to
6843 the maximum value of the range. */
/* Note the operand order: RANGE < INDEX (unsigned) means out of range.  */
6845 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
6846 emit_jump_insn (gen_bltu (default_label));
6848 /* If index is in range, it must fit in Pmode.
6849 Convert to Pmode so we can index with it. */
6851 index = convert_to_mode (Pmode, index, 1);
6853 /* If flag_force_addr were to affect this address
6854 it could interfere with the tricky assumptions made
6855 about addresses that contain label-refs,
6856 which may be valid only very near the tablejump itself. */
6857 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
6858 GET_MODE_SIZE, because this indicates how large insns are. The other
6859 uses should all be Pmode, because they are addresses. This code
6860 could fail if addresses and insns are not the same size. */
/* Address of table entry: table_label + index * entry_size.  */
6861 index = memory_address_noforce
6863 gen_rtx (PLUS, Pmode,
6864 gen_rtx (MULT, Pmode, index,
6865 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
6866 gen_rtx (LABEL_REF, Pmode, table_label)));
6867 temp = gen_reg_rtx (CASE_VECTOR_MODE);
/* The dispatch table is read-only, hence RTX_UNCHANGING_P.  */
6868 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
6869 RTX_UNCHANGING_P (vector) = 1;
6870 convert_move (temp, vector, 0);
6872 emit_jump_insn (gen_tablejump (temp, table_label));
6874 #ifndef CASE_VECTOR_PC_RELATIVE
6875 /* If we are generating PIC code or if the table is PC-relative, the
6876 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
6882 #endif /* HAVE_tablejump */