/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
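
/* Illustration (editorial note, not in the original source): CEIL rounds
   an integer division up, so CEIL (10, 4) == 3 while 10 / 4 == 2.  It is
   used below as CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD) to count
   how many words a multiword mode occupies.  */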
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;
/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;
/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
/* This array records the insn_code of insns to perform block moves.  */
static enum insn_code movstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  rtx reg;
  int regno, num_clobbers;
  enum machine_mode mode;
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER
           && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
           regno++)
        {
          if (! HARD_REGNO_MODE_OK (regno, mode))
            continue;

          reg = gen_rtx (REG, mode, regno);

          SET_SRC (pat) = mem;
          SET_DEST (pat) = reg;
          if (recog (pat, insn, &num_clobbers) >= 0)
            direct_load[(int) mode] = 1;

          SET_SRC (pat) = reg;
          SET_DEST (pat) = mem;
          if (recog (pat, insn, &num_clobbers) >= 0)
            direct_store[(int) mode] = 1;
        }

      movstr_optab[(int) mode] = CODE_FOR_nothing;
    }

  end_sequence ();

#ifdef HAVE_movstrqi
  if (HAVE_movstrqi)
    movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
#endif
#ifdef HAVE_movstrhi
  if (HAVE_movstrhi)
    movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
#endif
#ifdef HAVE_movstrsi
  if (HAVE_movstrsi)
    movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
#endif
#ifdef HAVE_movstrdi
  if (HAVE_movstrdi)
    movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
#endif
#ifdef HAVE_movstrti
  if (HAVE_movstrti)
    movstr_optab[(int) TImode] = CODE_FOR_movstrti;
#endif
}
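
/* Editorial sketch of what the probing loop above computes (an assumed
   target, not from the original source): if recog accepts
   (set (reg:HI n) (mem:HI (reg sp))) for some hard register n, then
   direct_load[(int) HImode] becomes 1 and HImode fields may be loaded
   from memory directly; otherwise the field-access code avoids HImode.  */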
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
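
/* Usage sketch (hypothetical operands, editorial note): to expand `v++'
   where V lives in the rtx VAR, one could write

     rtx q = enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));

   and use Q wherever the pre-increment value is needed; the increment
   itself is emitted later, when emit_queue flushes pending_chain.  */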
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);
#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
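
/* Worked example (editorial note, not in the original source): moving the
   QImode constant 0xFB (-5 as a signed byte) into an SImode register
   yields 0xFFFFFFFB when UNSIGNEDP is 0 (SIGN_EXTEND) and 0x000000FB
   when UNSIGNEDP is nonzero (ZERO_EXTEND).  */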
void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);
  if (to_real != from_real)
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif
      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;
            }
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, from));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && direct_load[(int) to_mode]
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
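
/* Usage sketch (editorial note, not in the original source): narrowing a
   SImode register,

     rtx narrow = convert_to_mode (QImode, si_reg, 0);

   simply returns gen_lowpart (QImode, si_reg), with no insns emitted;
   a widening conversion instead allocates a fresh pseudo and lets
   convert_move emit the extension.  */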
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
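
/* Worked example (assumes MOVE_MAX == 4 and align == 4; an assumption
   about the target, not from the original source): for l == 7 the loop
   counts one SImode move (7 / 4, remainder 3), one HImode move (3 / 2,
   remainder 1) and one QImode move, returning 3.  emit_block_move below
   therefore copies such a block inline whenever MOVE_RATIO > 3.  */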
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
               ? gen_rtx (MEM, mode, data->from_addr)
               : change_address (data->from, mode,
                                 plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2 = convert_to_mode (mode, size, 1);
              rtx last = get_last_insn ();
              rtx pat;

              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
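
/* Usage sketch (hypothetical operands, editorial note): to copy a 64-byte
   BLKmode object with word alignment,

     emit_block_move (x, y, GEN_INT (64), UNITS_PER_WORD);

   tries move_by_pieces first (if it takes fewer than MOVE_RATIO insns),
   then the narrowest matching movstr pattern, then the library call.  */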
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
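
/* Illustration (editorial note): when the stack grows downward,
   STACK_PUSH_CODE is PRE_DEC, so gen_push_operand yields
   (pre_dec (reg sp)) and gen_rtx (MEM, mode, gen_push_operand ())
   is a memory destination that pushes as it stores.  */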
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
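
/* Worked example (editorial note, not from the original source): with
   UNITS_PER_WORD == 4, pushing a 12-byte argument with PARTIAL == 1
   copies its first word into REG at the end of this function and pushes
   only the remaining 8 bytes, the stack space used being reduced
   accordingly (subject to the PARM_BOUNDARY rounding described above).  */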
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);
  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
          goto ret;
        }
#endif /* PUSH_ROUNDING */

      /* Otherwise make space on the stack and copy the data
         to the address of that space.  */

      /* Deduct words put into registers from the size we must copy.  */
      if (partial != 0)
        {
          if (GET_CODE (size) == CONST_INT)
            size = GEN_INT (INTVAL (size) - used);
          else
            size = expand_binop (GET_MODE (size), sub_optab, size,
                                 GEN_INT (used), NULL_RTX, 0,
                                 OPTAB_LIB_WIDEN);
        }

      /* Get the address of the stack space.
         In this case, we do not deal with EXTRA separately.
         A single stack adjust will do.  */
      if (! args_addr)
        {
          temp = push_block (size, extra, where_pad == downward);
          extra = 0;
        }
      else if (GET_CODE (args_so_far) == CONST_INT)
        temp = memory_address (BLKmode,
                               plus_constant (args_addr,
                                              skip + INTVAL (args_so_far)));
      else
        temp = memory_address (BLKmode,
                               plus_constant (gen_rtx (PLUS, Pmode,
                                                       args_addr, args_so_far),
                                              skip));

      /* TEMP is the address of the block.  Copy the data there.  */
      if (GET_CODE (size) == CONST_INT
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
              < MOVE_RATIO))
        {
          move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                          INTVAL (size), align);
          goto ret;
        }

      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
      if (HAVE_movstrqi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
        {
          emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                   xinner, size, GEN_INT (align)));
          goto ret;
        }
#endif
#ifdef HAVE_movstrhi
      if (HAVE_movstrhi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
        {
          emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                   xinner, size, GEN_INT (align)));
          goto ret;
        }
#endif
#ifdef HAVE_movstrsi
      if (HAVE_movstrsi)
        {
          emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                   xinner, size, GEN_INT (align)));
          goto ret;
        }
#endif
#ifdef HAVE_movstrdi
      if (HAVE_movstrdi)
        {
          emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                   xinner, size, GEN_INT (align)));
          goto ret;
        }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
      /* If the source is referenced relative to the stack pointer,
         copy it to another register to stabilize it.  We do not need
         to do this if we know that we won't be changing sp.  */

      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
          || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
        temp = copy_to_reg (temp);
#endif

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the bcopy-arguments right away.  */
      NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                         size, Pmode);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                         size, Pmode);
#endif
      OK_DEFER_POP;
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr
            = memory_address (mode,
                              plus_constant (args_addr, INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.

   NO_QUEUE must be true for const calls, because if it isn't, then
   any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
   and will be lost if the libcall sequence is optimized away.

   NO_QUEUE must be false for non-const calls, because if it isn't, the
   call insn will have its CONST_CALL_P bit set, and it will be incorrectly
   optimized.  For instance, the instruction scheduler may incorrectly
   move memory references across the non-const call.  */
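
/* Usage sketch, modeled on the calls appearing elsewhere in this file:

     emit_library_call (memcpy_libfunc, 0,
                        VOIDmode, 3, dst_addr, Pmode,
                        src_addr, Pmode, size_rtx, Pmode);

   i.e. FUN, NO_QUEUE, OUTMODE, NARGS, then NARGS (value, mode) pairs;
   dst_addr, src_addr and size_rtx here are placeholder rtx values.  */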
void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
               struct args_size offset; struct args_size size; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);
  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree) 0, fun);

  args_size.constant = 0;
  args_size.var = 0;

  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();
      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
        val = convert_to_mode (DFmode, val), mode = DFmode;
#endif

      /* There's no need to call protect_from_queue, because
         either emit_move_insn or emit_push_insn will do that.  */

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, NULL_RTX);

      argvec[count].value = val;
      argvec[count].mode = mode;

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
        abort ();
#endif

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
        abort ();
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
        = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, NULL_TREE,
                           argvec[count].reg && argvec[count].partial == 0,
                           NULL_TREE, &args_size, &argvec[count].offset,
                           &argvec[count].size);

      if (argvec[count].size.var)
        abort ();

#ifndef REG_PARM_STACK_SPACE
      if (argvec[count].partial)
        argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
#endif

      if (argvec[count].reg == 0 || argvec[count].partial != 0
#ifdef REG_PARM_STACK_SPACE
          || 1
#endif
          )
        args_size.constant += argvec[count].size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
      /* If this arg is actually passed on the stack, it might be
         clobbering something we already put there (this library call might
         be inside the evaluation of an argument to a function whose call
         requires the stack).  This will only occur when the library call
         has sufficient args to run out of argument registers.  Abort in
         this case; if this ever occurs, code must be added to save and
         restore the arg slot.  */

      if (argvec[count].reg == 0 || argvec[count].partial != 0)
        abort ();
#endif

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
    }
  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

#ifdef STACK_BOUNDARY
  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
                         / STACK_BYTES) * STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
  args_size.constant = MAX (args_size.constant,
                            REG_PARM_STACK_SPACE ((tree) 0));
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;
  args_size.constant = 0;
#endif

#ifndef PUSH_ROUNDING
  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
#endif

#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
  inc = -1;
#else
  argnum = 0;
  inc = 1;
#endif
  /* Push the args that need to be pushed.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (! (reg != 0 && partial == 0))
        emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
                        argblock, GEN_INT (argvec[argnum].offset.constant));
      NO_DEFER_POP;
    }
#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
#else
  argnum = 0;
#endif

  /* Now load any reg parms into their regs.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);
      NO_DEFER_POP;
    }
  /* For version 1.37, try deleting this entirely.  */
  if (! no_queue)
    emit_queue ();

  /* Any regs containing parms remain in use through the call.  */
  start_sequence ();
  for (count = 0; count < nargs; count++)
    if (argvec[count].reg != 0)
      emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));

  use_insns = get_insns ();
  end_sequence ();

  fun = prepare_call_address (fun, NULL_TREE, &use_insns);

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */

  emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
               old_inhibit_defer_pop + 1, use_insns, no_queue);

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, NULL_RTX, VOIDmode, 0);

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
          && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

          if (GET_CODE (to_rtx) != MEM)
            abort ();
          to_rtx = change_address (to_rtx, VOIDmode,
                                   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
                                            force_reg (Pmode, offset_rtx)));
        }
      if (volatilep)
        {
          if (GET_CODE (to_rtx) == MEM)
            MEM_VOLATILE_P (to_rtx) = 1;
#if 0 /* This was turned off because, when a field is volatile
         in an object which is not volatile, the object may be in a register,
         and then we would abort over here.  */
          else
            abort ();
#endif
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast makes HPUX compiler happy.  */
                             ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
                             : VOIDmode),
                            unsignedp,
                            /* Required alignment of containing datum.  */
                            TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
                            int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();

      return result;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
      rtx size = expr_size (from);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                         XEXP (to_rtx, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  return result;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   Returns TARGET or an equivalent value.
   TARGET may contain a QUEUED rtx.

   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.

   If the value stored is a constant, we return the constant.  */

rtx
store_expr (exp, target, suggest_reg)
     register tree exp;
     register rtx target;
     int suggest_reg;
{
  register rtx temp;
  int dont_return_target = 0;
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;
      return target;
    }
  else if (suggest_reg && GET_CODE (target) == MEM
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, SUGGEST_REG will be nonzero for it.
       We know expand_expr will not use the target in that case.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
                          GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, it is not safe
       to use as the returned value.  It would access the wrong
       place by the time the queued increment gets output.
       So copy the value through a temporary and use that temp
       as the result.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target), 0);
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
      dont_return_target = 1;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* DO return TARGET if it's a specified hardware register.
         expand_return relies on this.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && CONSTANT_P (temp))
        dont_return_target = 1;
    }
2277 /* If value was not generated in the target, store it there.
2278 Convert the value to TARGET's type first if necessary. */
2280 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2282 target = protect_from_queue (target, 1);
2283 if (GET_MODE (temp) != GET_MODE (target)
2284 && GET_MODE (temp) != VOIDmode)
2286 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2287 if (dont_return_target)
2289 /* In this case, we will return TEMP,
2290 so make sure it has the proper mode.
2291 But don't forget to store the value into TARGET. */
2292 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2293 emit_move_insn (target, temp);
2296 convert_move (target, temp, unsignedp);
2299 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2301 /* Handle copying a string constant into an array.
2302 The string constant may be shorter than the array.
2303 So copy just the string's actual length, and clear the rest. */
2306 /* Get the size of the data type of the string,
2307 which is actually the size of the target. */
2308 size = expr_size (exp);
2309 if (GET_CODE (size) == CONST_INT
2310 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2311 emit_block_move (target, temp, size,
2312 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2315 /* Compute the size of the data to copy from the string. */
2317 = fold (build (MIN_EXPR, sizetype,
2318 size_binop (CEIL_DIV_EXPR,
2319 TYPE_SIZE (TREE_TYPE (exp)),
2320 size_int (BITS_PER_UNIT)),
2322 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2323 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2327 /* Copy that much. */
2328 emit_block_move (target, temp, copy_size_rtx,
2329 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2331 /* Figure out how much is left in TARGET
2332 that we have to clear. */
2333 if (GET_CODE (copy_size_rtx) == CONST_INT)
2335 temp = plus_constant (XEXP (target, 0),
2336 TREE_STRING_LENGTH (exp));
2337 size = plus_constant (size,
2338 - TREE_STRING_LENGTH (exp));
2342 enum machine_mode size_mode = Pmode;
2344 temp = force_reg (Pmode, XEXP (target, 0));
2345 temp = expand_binop (size_mode, add_optab, temp,
2346 copy_size_rtx, NULL_RTX, 0,
2349 size = expand_binop (size_mode, sub_optab, size,
2350 copy_size_rtx, NULL_RTX, 0,
2353 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2354 GET_MODE (size), 0, 0);
2355 label = gen_label_rtx ();
2356 emit_jump_insn (gen_blt (label));
2359 if (size != const0_rtx)
2361 #ifdef TARGET_MEM_FUNCTIONS
2362 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2363 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2365 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2366 temp, Pmode, size, Pmode);
2373 else if (GET_MODE (temp) == BLKmode)
2374 emit_block_move (target, temp, expr_size (exp),
2375 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2377 emit_move_insn (target, temp);
2379 if (dont_return_target)
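/* A minimal C sketch of the string-constant case above, assuming
   ordinary memcpy/memset semantics: copy min (target size, string
   length) bytes, then clear whatever is left of the target. This is
   illustration only, not code used by the compiler itself. */

#if 0 /* example only */
void
init_char_array (target, target_size, str, str_len)
     char *target;
     int target_size;
     char *str;
     int str_len;
{
  int copy_size = target_size < str_len ? target_size : str_len;

  memcpy (target, str, copy_size); /* copy that much */
  if (copy_size < target_size) /* clear the rest */
    memset (target + copy_size, 0, target_size - copy_size);
}
#endif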
2384 /* Store the value of constructor EXP into the rtx TARGET.
2385 TARGET is either a REG or a MEM. */
2388 store_constructor (exp, target)
2392 tree type = TREE_TYPE (exp);
2394 /* We know our target cannot conflict, since safe_from_p has been called. */
2396 /* Don't try copying piece by piece into a hard register
2397 since that is vulnerable to being clobbered by EXP.
2398 Instead, construct in a pseudo register and then copy it all. */
2399 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2401 rtx temp = gen_reg_rtx (GET_MODE (target));
2402 store_constructor (exp, temp);
2403 emit_move_insn (target, temp);
2408 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2412 /* Inform later passes that the whole union value is dead. */
2413 if (TREE_CODE (type) == UNION_TYPE)
2414 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2416 /* If we are building a static constructor into a register,
2417 set the initial value as zero so we can fold the value into a constant. */
2419 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2420 emit_move_insn (target, const0_rtx);
2422 /* If the constructor has fewer fields than the structure,
2423 clear the whole structure first. */
2424 else if (list_length (CONSTRUCTOR_ELTS (exp))
2425 != list_length (TYPE_FIELDS (type)))
2426 clear_storage (target, int_size_in_bytes (type));
2428 /* Inform later passes that the old value is dead. */
2429 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2431 /* Store each element of the constructor into
2432 the corresponding field of TARGET. */
2434 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2436 register tree field = TREE_PURPOSE (elt);
2437 register enum machine_mode mode;
2442 /* Just ignore missing fields.
2443 We cleared the whole structure, above,
2444 if any fields are missing. */
2448 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2449 unsignedp = TREE_UNSIGNED (field);
2450 mode = DECL_MODE (field);
2451 if (DECL_BIT_FIELD (field))
2454 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2455 /* ??? This case remains to be written. */
2458 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2460 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2461 /* The alignment of TARGET is
2462 at least what its type requires. */
2464 TYPE_ALIGN (type) / BITS_PER_UNIT,
2465 int_size_in_bytes (type));
2468 else if (TREE_CODE (type) == ARRAY_TYPE)
2472 tree domain = TYPE_DOMAIN (type);
2473 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2474 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2475 tree elttype = TREE_TYPE (type);
2477 /* If the constructor has fewer fields than the structure,
2478 clear the whole structure first. Similarly if this is a
2479 static constructor of a non-BLKmode object. */
2481 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2482 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2483 clear_storage (target, maxelt - minelt + 1);
2485 /* Inform later passes that the old value is dead. */
2486 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2488 /* Store each element of the constructor into
2489 the corresponding element of TARGET, determined
2490 by counting the elements. */
2491 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2493 elt = TREE_CHAIN (elt), i++)
2495 register enum machine_mode mode;
2500 mode = TYPE_MODE (elttype);
2501 bitsize = GET_MODE_BITSIZE (mode);
2502 unsignedp = TREE_UNSIGNED (elttype);
2504 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2506 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2507 /* The alignment of TARGET is
2508 at least what its type requires. */
2510 TYPE_ALIGN (type) / BITS_PER_UNIT,
2511 int_size_in_bytes (type));
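/* A hedged source-level illustration of the clearing rule used
   above: when a constructor lists fewer elements than the type has,
   the whole object is cleared first, so unmentioned fields read as
   zero. Hypothetical example input only. */

#if 0 /* example only */
struct three { int a, b, c; };

int
partial_init ()
{
  struct three x = { 1 }; /* clear x, then store 1 into x.a */
  return x.b; /* yields 0 */
}
#endif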
2519 /* Store the value of EXP (an expression tree)
2520 into a subfield of TARGET which has mode MODE and occupies
2521 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2522 If MODE is VOIDmode, it means that we are storing into a bit-field.
2524 If VALUE_MODE is VOIDmode, return nothing in particular.
2525 UNSIGNEDP is not used in this case.
2527 Otherwise, return an rtx for the value stored. This rtx
2528 has mode VALUE_MODE if that is convenient to do.
2529 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2531 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2532 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2535 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2536 unsignedp, align, total_size)
2538 int bitsize, bitpos;
2539 enum machine_mode mode;
2541 enum machine_mode value_mode;
2546 HOST_WIDE_INT width_mask = 0;
2548 if (bitsize < HOST_BITS_PER_WIDE_INT)
2549 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2551 /* If we are storing into an unaligned field of an aligned union that is
2552 in a register, we may have the mode of TARGET being an integer mode but
2553 MODE == BLKmode. In that case, get an aligned object whose size and
2554 alignment are the same as TARGET and store TARGET into it (we can avoid
2555 the store if the field being stored is the entire width of TARGET). Then
2556 call ourselves recursively to store the field into a BLKmode version of
2557 that object. Finally, load from the object into TARGET. This is not
2558 very efficient in general, but should only be slightly more expensive
2559 than the otherwise-required unaligned accesses. Perhaps this can be
2560 cleaned up later. */
2563 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2565 rtx object = assign_stack_temp (GET_MODE (target),
2566 GET_MODE_SIZE (GET_MODE (target)), 0);
2567 rtx blk_object = copy_rtx (object);
2569 PUT_MODE (blk_object, BLKmode);
2571 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2572 emit_move_insn (object, target);
2574 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2577 emit_move_insn (target, object);
2582 /* If the structure is in a register or if the component
2583 is a bit field, we cannot use addressing to access it.
2584 Use bit-field techniques or SUBREG to store in it. */
2586 if (mode == VOIDmode
2587 || (mode != BLKmode && ! direct_store[(int) mode])
2588 || GET_CODE (target) == REG
2589 || GET_CODE (target) == SUBREG)
2591 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2592 /* Store the value in the bitfield. */
2593 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2594 if (value_mode != VOIDmode)
2596 /* The caller wants an rtx for the value. */
2597 /* If possible, avoid refetching from the bitfield itself. */
2599 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2600 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2601 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2602 NULL_RTX, value_mode, 0, align,
2609 rtx addr = XEXP (target, 0);
2612 /* If a value is wanted, it must be the lhs;
2613 so make the address stable for multiple use. */
2615 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2616 && ! CONSTANT_ADDRESS_P (addr)
2617 /* A frame-pointer reference is already stable. */
2618 && ! (GET_CODE (addr) == PLUS
2619 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2620 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2621 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2622 addr = copy_to_reg (addr);
2624 /* Now build a reference to just the desired component. */
2626 to_rtx = change_address (target, mode,
2627 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2628 MEM_IN_STRUCT_P (to_rtx) = 1;
2630 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2634 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2635 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2636 ARRAY_REFs at constant positions and find the ultimate containing object, which we return.
2639 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2640 bit position, and *PUNSIGNEDP to the signedness of the field.
2641 If the position of the field is variable, we store a tree
2642 giving the variable offset (in units) in *POFFSET.
2643 This offset is in addition to the bit position.
2644 If the position is not variable, we store 0 in *POFFSET.
2646 If any of the extraction expressions is volatile,
2647 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2649 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2650 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
2653 If the field describes a variable-sized object, *PMODE is set to
2654 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2655 this case, but the address of the object can be found. */
2658 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2663 enum machine_mode *pmode;
2668 enum machine_mode mode = VOIDmode;
2671 if (TREE_CODE (exp) == COMPONENT_REF)
2673 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2674 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2675 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2676 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2678 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2680 size_tree = TREE_OPERAND (exp, 1);
2681 *punsignedp = TREE_UNSIGNED (exp);
2685 mode = TYPE_MODE (TREE_TYPE (exp));
2686 *pbitsize = GET_MODE_BITSIZE (mode);
2687 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2692 if (TREE_CODE (size_tree) != INTEGER_CST)
2693 mode = BLKmode, *pbitsize = -1;
2695 *pbitsize = TREE_INT_CST_LOW (size_tree);
2698 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2699 and find the ultimate containing object. */
2705 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2707 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2708 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2709 : TREE_OPERAND (exp, 2));
2711 if (TREE_CODE (pos) == PLUS_EXPR)
2714 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2716 constant = TREE_OPERAND (pos, 0);
2717 var = TREE_OPERAND (pos, 1);
2719 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2721 constant = TREE_OPERAND (pos, 1);
2722 var = TREE_OPERAND (pos, 0);
2726 *pbitpos += TREE_INT_CST_LOW (constant);
2728 offset = size_binop (PLUS_EXPR, offset,
2729 size_binop (FLOOR_DIV_EXPR, var,
2730 size_int (BITS_PER_UNIT)));
2732 offset = size_binop (FLOOR_DIV_EXPR, var,
2733 size_int (BITS_PER_UNIT));
2735 else if (TREE_CODE (pos) == INTEGER_CST)
2736 *pbitpos += TREE_INT_CST_LOW (pos);
2739 /* Assume here that the offset is a multiple of a unit.
2740 If not, there should be an explicitly added constant. */
2742 offset = size_binop (PLUS_EXPR, offset,
2743 size_binop (FLOOR_DIV_EXPR, pos,
2744 size_int (BITS_PER_UNIT)));
2746 offset = size_binop (FLOOR_DIV_EXPR, pos,
2747 size_int (BITS_PER_UNIT));
2751 else if (TREE_CODE (exp) == ARRAY_REF
2752 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2753 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2755 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2756 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2758 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2759 && ! ((TREE_CODE (exp) == NOP_EXPR
2760 || TREE_CODE (exp) == CONVERT_EXPR)
2761 && (TYPE_MODE (TREE_TYPE (exp))
2762 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2765 /* If any reference in the chain is volatile, the effect is volatile. */
2766 if (TREE_THIS_VOLATILE (exp))
2768 exp = TREE_OPERAND (exp, 0);
2771 /* If this was a bit-field, see if there is a mode that allows direct
2772 access in case EXP is in memory. */
2773 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2775 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2776 if (mode == BLKmode)
2783 /* We aren't finished fixing the callers to really handle nonzero offset. */
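/* A hedged sketch of how a caller typically uses get_inner_reference;
   the surrounding declarations are illustrative only. For a reference
   such as `x.f' it yields the containing object `x' together with the
   bit position and size of `f'. */

#if 0 /* example only */
      int bitsize, bitpos, unsignedp, volatilep;
      tree offset;
      enum machine_mode mode1;
      tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep);
      /* TEM is the innermost object; BITPOS and BITSIZE locate the
	 field within it, and OFFSET holds any variable part. */
#endif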
2791 /* Given an rtx VALUE that may contain additions and multiplications,
2792 return an equivalent value that just refers to a register or memory.
2793 This is done by generating instructions to perform the arithmetic
2794 and returning a pseudo-register containing the value. */
2797 force_operand (value, target)
2800 register optab binoptab = 0;
2801 /* Use a temporary to force order of execution of calls to force_operand. */
2805 /* Use subtarget as the target for operand 0 of a binary operation. */
2806 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2808 if (GET_CODE (value) == PLUS)
2809 binoptab = add_optab;
2810 else if (GET_CODE (value) == MINUS)
2811 binoptab = sub_optab;
2812 else if (GET_CODE (value) == MULT)
2814 op2 = XEXP (value, 1);
2815 if (!CONSTANT_P (op2)
2816 && !(GET_CODE (op2) == REG && op2 != subtarget))
2818 tmp = force_operand (XEXP (value, 0), subtarget);
2819 return expand_mult (GET_MODE (value), tmp,
2820 force_operand (op2, NULL_RTX),
2826 op2 = XEXP (value, 1);
2827 if (!CONSTANT_P (op2)
2828 && !(GET_CODE (op2) == REG && op2 != subtarget))
2830 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2832 binoptab = add_optab;
2833 op2 = negate_rtx (GET_MODE (value), op2);
2836 /* Check for an addition with OP2 a constant integer and our first
2837 operand a PLUS of a virtual register and something else. In that
2838 case, we want to emit the sum of the virtual register and the
2839 constant first and then add the other value. This allows virtual
2840 register instantiation to simply modify the constant rather than
2841 creating another one around this addition. */
2842 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2843 && GET_CODE (XEXP (value, 0)) == PLUS
2844 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2845 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2846 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2848 rtx temp = expand_binop (GET_MODE (value), binoptab,
2849 XEXP (XEXP (value, 0), 0), op2,
2850 subtarget, 0, OPTAB_LIB_WIDEN);
2851 return expand_binop (GET_MODE (value), binoptab, temp,
2852 force_operand (XEXP (XEXP (value, 0), 1), 0),
2853 target, 0, OPTAB_LIB_WIDEN);
2856 tmp = force_operand (XEXP (value, 0), subtarget);
2857 return expand_binop (GET_MODE (value), binoptab, tmp,
2858 force_operand (op2, NULL_RTX),
2859 target, 0, OPTAB_LIB_WIDEN);
2860 /* We give UNSIGNEDP = 0 to expand_binop
2861 because the only operations we are expanding here are signed ones. */
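/* A hedged usage sketch for force_operand: given a sum such as
   (plus (reg) (const_int 4)) that is not by itself a valid operand,
   it emits the addition and hands back a register. SOME_REG here is
   a hypothetical pseudo, for illustration only. */

#if 0 /* example only */
      rtx sum = gen_rtx (PLUS, SImode, some_reg, GEN_INT (4));
      rtx val = force_operand (sum, NULL_RTX);
      /* VAL now refers to a register (or memory) holding the sum. */
#endif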
2866 /* Subroutine of expand_expr:
2867 save the non-copied parts (LIST) of an expr (LHS), and return a list
2868 which can restore these values to their previous values,
2869 should something modify their storage. */
2872 save_noncopied_parts (lhs, list)
2879 for (tail = list; tail; tail = TREE_CHAIN (tail))
2880 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2881 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2884 tree part = TREE_VALUE (tail);
2885 tree part_type = TREE_TYPE (part);
2886 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2887 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2888 int_size_in_bytes (part_type), 0);
2889 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2890 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2891 parts = tree_cons (to_be_saved,
2892 build (RTL_EXPR, part_type, NULL_TREE,
2895 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2900 /* Subroutine of expand_expr:
2901 record the non-copied parts (LIST) of an expr (LHS), and return a list
2902 which specifies the initial values of these parts. */
2905 init_noncopied_parts (lhs, list)
2912 for (tail = list; tail; tail = TREE_CHAIN (tail))
2913 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2914 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2917 tree part = TREE_VALUE (tail);
2918 tree part_type = TREE_TYPE (part);
2919 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2920 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2925 /* Subroutine of expand_expr: return nonzero iff there is no way that
2926 EXP can reference X, which is being modified. */
2929 safe_from_p (x, exp)
2939 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
2940 find the underlying pseudo. */
2941 if (GET_CODE (x) == SUBREG)
2944 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2948 /* If X is a location in the outgoing argument area, it is always safe. */
2949 if (GET_CODE (x) == MEM
2950 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2951 || (GET_CODE (XEXP (x, 0)) == PLUS
2952 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2955 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2958 exp_rtl = DECL_RTL (exp);
2965 if (TREE_CODE (exp) == TREE_LIST)
2966 return ((TREE_VALUE (exp) == 0
2967 || safe_from_p (x, TREE_VALUE (exp)))
2968 && (TREE_CHAIN (exp) == 0
2969 || safe_from_p (x, TREE_CHAIN (exp))));
2974 return safe_from_p (x, TREE_OPERAND (exp, 0));
2978 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2979 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2983 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2984 the expression. If it is set, we conflict iff we are that rtx or
2985 both are in memory. Otherwise, we check all operands of the
2986 expression recursively. */
2988 switch (TREE_CODE (exp))
2991 return staticp (TREE_OPERAND (exp, 0));
2994 if (GET_CODE (x) == MEM)
2999 exp_rtl = CALL_EXPR_RTL (exp);
3002 /* Assume that the call will clobber all hard registers and all of memory. */
3004 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3005 || GET_CODE (x) == MEM)
3012 exp_rtl = RTL_EXPR_RTL (exp);
3014 /* We don't know what this can modify. */
3019 case WITH_CLEANUP_EXPR:
3020 exp_rtl = RTL_EXPR_RTL (exp);
3024 exp_rtl = SAVE_EXPR_RTL (exp);
3028 /* The only operand we look at is operand 1. The rest aren't
3029 part of the expression. */
3030 return safe_from_p (x, TREE_OPERAND (exp, 1));
3032 case METHOD_CALL_EXPR:
3033 /* This takes an rtx argument, but shouldn't appear here. */
3037 /* If we have an rtx, we do not need to scan our operands. */
3041 nops = tree_code_length[(int) TREE_CODE (exp)];
3042 for (i = 0; i < nops; i++)
3043 if (TREE_OPERAND (exp, i) != 0
3044 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3048 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
3052 if (GET_CODE (exp_rtl) == SUBREG)
3054 exp_rtl = SUBREG_REG (exp_rtl);
3055 if (GET_CODE (exp_rtl) == REG
3056 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3060 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3061 are memory and EXP is not readonly. */
3062 return ! (rtx_equal_p (x, exp_rtl)
3063 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3064 && ! TREE_READONLY (exp)));
3067 /* If we reach here, it is safe. */
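/* A hedged sketch of the typical use of safe_from_p, mirroring the
   calls made later in expand_expr: before letting operand 0 be
   computed into SUBTARGET, verify that computing operand 1 cannot
   clobber it. */

#if 0 /* example only */
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
#endif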
3071 /* Subroutine of expand_expr: return nonzero iff EXP is an
3072 expression whose type is statically determinable. */
3078 if (TREE_CODE (exp) == PARM_DECL
3079 || TREE_CODE (exp) == VAR_DECL
3080 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3081 || TREE_CODE (exp) == COMPONENT_REF
3082 || TREE_CODE (exp) == ARRAY_REF)
3087 /* expand_expr: generate code for computing expression EXP.
3088 An rtx for the computed value is returned. The value is never null.
3089 In the case of a void EXP, const0_rtx is returned.
3091 The value may be stored in TARGET if TARGET is nonzero.
3092 TARGET is just a suggestion; callers must assume that
3093 the rtx returned may not be the same as TARGET.
3095 If TARGET is CONST0_RTX, it means that the value will be ignored.
3097 If TMODE is not VOIDmode, it suggests generating the
3098 result in mode TMODE. But this is done only when convenient.
3099 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3100 TMODE is just a suggestion; callers must assume that
3101 the rtx returned may not have mode TMODE.
3103 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3104 with a constant address even if that address is not normally legitimate.
3105 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3107 If MODIFIER is EXPAND_SUM then when EXP is an addition
3108 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3109 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3110 products as above, or REG or MEM, or constant.
3111 Ordinarily in such cases we would output mul or add instructions
3112 and then return a pseudo reg containing the sum.
3114 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3115 it also marks a label as absolutely required (it can't be dead).
3116 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3117 This is used for outputting expressions used in initializers. */
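/* For example (a hedged illustration, not a guaranteed result):
   expanding `&arr[i]' with EXPAND_SUM may return an unfinished sum
   such as

	(plus (mult (reg) (const_int 4)) (symbol_ref arr))

   leaving the caller free to fold the pieces into an address or to
   emit the arithmetic itself. */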
3120 expand_expr (exp, target, tmode, modifier)
3123 enum machine_mode tmode;
3124 enum expand_modifier modifier;
3126 register rtx op0, op1, temp;
3127 tree type = TREE_TYPE (exp);
3128 int unsignedp = TREE_UNSIGNED (type);
3129 register enum machine_mode mode = TYPE_MODE (type);
3130 register enum tree_code code = TREE_CODE (exp);
3132 /* Use subtarget as the target for operand 0 of a binary operation. */
3133 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3134 rtx original_target = target;
3135 int ignore = target == const0_rtx;
3138 /* Don't use hard regs as subtargets, because the combiner
3139 can only handle pseudo regs. */
3140 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3142 /* Avoid subtargets inside loops,
3143 since they hide some invariant expressions. */
3144 if (preserve_subexpressions_p ())
3147 if (ignore) target = 0, original_target = 0;
3149 /* If we will do cse, generate all results into pseudo registers
3150 since 1) that allows cse to find more things
3151 and 2) otherwise cse could produce an insn the machine cannot support. */
3154 if (! cse_not_expected && mode != BLKmode && target
3155 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3158 /* Ensure we reference a volatile object even if value is ignored. */
3159 if (ignore && TREE_THIS_VOLATILE (exp)
3160 && mode != VOIDmode && mode != BLKmode)
3162 target = gen_reg_rtx (mode);
3163 temp = expand_expr (exp, target, VOIDmode, modifier);
3165 emit_move_insn (target, temp);
3173 tree function = decl_function_context (exp);
3174 /* Handle using a label in a containing function. */
3175 if (function != current_function_decl && function != 0)
3177 struct function *p = find_function_data (function);
3178 /* Allocate in the memory associated with the function
3179 that the label is in. */
3180 push_obstacks (p->function_obstack,
3181 p->function_maybepermanent_obstack);
3183 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3184 label_rtx (exp), p->forced_labels);
3187 else if (modifier == EXPAND_INITIALIZER)
3188 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3189 label_rtx (exp), forced_labels);
3190 temp = gen_rtx (MEM, FUNCTION_MODE,
3191 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3192 if (function != current_function_decl && function != 0)
3193 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3198 if (DECL_RTL (exp) == 0)
3200 error_with_decl (exp, "prior parameter's size depends on `%s'");
3201 return CONST0_RTX (mode);
3207 if (DECL_RTL (exp) == 0)
3209 /* Ensure the variable is marked as used,
3210 even if it doesn't go through a parser. */
3211 TREE_USED (exp) = 1;
3212 /* Handle variables inherited from containing functions. */
3213 context = decl_function_context (exp);
3215 /* We treat inline_function_decl as an alias for the current function
3216 because that is the inline function whose vars, types, etc.
3217 are being merged into the current function.
3218 See expand_inline_function. */
3219 if (context != 0 && context != current_function_decl
3220 && context != inline_function_decl
3221 /* If var is static, we don't need a static chain to access it. */
3222 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3223 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3227 /* Mark as non-local and addressable. */
3228 DECL_NONLOCAL (exp) = 1;
3229 mark_addressable (exp);
3230 if (GET_CODE (DECL_RTL (exp)) != MEM)
3232 addr = XEXP (DECL_RTL (exp), 0);
3233 if (GET_CODE (addr) == MEM)
3234 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3236 addr = fix_lexical_addr (addr, exp);
3237 return change_address (DECL_RTL (exp), mode, addr);
3240 /* This is the case of an array whose size is to be determined
3241 from its initializer, while the initializer is still being parsed.
3243 if (GET_CODE (DECL_RTL (exp)) == MEM
3244 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3245 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3246 XEXP (DECL_RTL (exp), 0));
3247 if (GET_CODE (DECL_RTL (exp)) == MEM
3248 && modifier != EXPAND_CONST_ADDRESS
3249 && modifier != EXPAND_SUM
3250 && modifier != EXPAND_INITIALIZER)
3252 /* DECL_RTL probably contains a constant address.
3253 On RISC machines where a constant address isn't valid,
3254 make some insns to get that address into a register. */
3255 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3257 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3258 return change_address (DECL_RTL (exp), VOIDmode,
3259 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3261 return DECL_RTL (exp);
3264 return immed_double_const (TREE_INT_CST_LOW (exp),
3265 TREE_INT_CST_HIGH (exp),
3269 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3272 /* If optimized, generate immediate CONST_DOUBLE
3273 which will be turned into memory by reload if necessary.
3275 We used to force a register so that loop.c could see it. But
3276 this does not allow gen_* patterns to perform optimizations with
3277 the constants. It also produces two insns in cases like "x = 1.0;".
3278 On most machines, floating-point constants are not permitted in
3279 many insns, so we'd end up copying it to a register in any case.
3281 Now, we do the copying in expand_binop, if appropriate. */
3282 return immed_real_const (exp);
3286 if (! TREE_CST_RTL (exp))
3287 output_constant_def (exp);
3289 /* TREE_CST_RTL probably contains a constant address.
3290 On RISC machines where a constant address isn't valid,
3291 make some insns to get that address into a register. */
3292 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3293 && modifier != EXPAND_CONST_ADDRESS
3294 && modifier != EXPAND_INITIALIZER
3295 && modifier != EXPAND_SUM
3296 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3297 return change_address (TREE_CST_RTL (exp), VOIDmode,
3298 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3299 return TREE_CST_RTL (exp);
3302 context = decl_function_context (exp);
3303 /* We treat inline_function_decl as an alias for the current function
3304 because that is the inline function whose vars, types, etc.
3305 are being merged into the current function.
3306 See expand_inline_function. */
3307 if (context == current_function_decl || context == inline_function_decl)
3310 /* If this is non-local, handle it. */
3313 temp = SAVE_EXPR_RTL (exp);
3314 if (temp && GET_CODE (temp) == REG)
3316 put_var_into_stack (exp);
3317 temp = SAVE_EXPR_RTL (exp);
3319 if (temp == 0 || GET_CODE (temp) != MEM)
3321 return change_address (temp, mode,
3322 fix_lexical_addr (XEXP (temp, 0), exp));
3324 if (SAVE_EXPR_RTL (exp) == 0)
3326 if (mode == BLKmode)
3328 = assign_stack_temp (mode,
3329 int_size_in_bytes (TREE_TYPE (exp)), 0);
3331 temp = gen_reg_rtx (mode);
3332 SAVE_EXPR_RTL (exp) = temp;
3333 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3334 if (!optimize && GET_CODE (temp) == REG)
3335 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3338 return SAVE_EXPR_RTL (exp);
3341 /* Exit the current loop if the body-expression is true. */
3343 rtx label = gen_label_rtx ();
3344 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3345 expand_exit_loop (NULL_PTR);
3351 expand_start_loop (1);
3352 expand_expr_stmt (TREE_OPERAND (exp, 0));
3359 tree vars = TREE_OPERAND (exp, 0);
3360 int vars_need_expansion = 0;
3362 /* Need to open a binding contour here because
3363 if there are any cleanups they must be contained here. */
3364 expand_start_bindings (0);
3366 /* Mark the corresponding BLOCK for output in its proper place. */
3367 if (TREE_OPERAND (exp, 2) != 0
3368 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3369 insert_block (TREE_OPERAND (exp, 2));
3371 /* If VARS have not yet been expanded, expand them now. */
3374 if (DECL_RTL (vars) == 0)
3376 vars_need_expansion = 1;
3379 expand_decl_init (vars);
3380 vars = TREE_CHAIN (vars);
3383 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3385 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3391 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3393 emit_insns (RTL_EXPR_SEQUENCE (exp));
3394 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3395 return RTL_EXPR_RTL (exp);
3398 /* All elts simple constants => refer to a constant in memory. But
3399 if this is a non-BLKmode mode, let it store a field at a time
3400 since that should make a CONST_INT or CONST_DOUBLE when we fold. */
3402 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3404 rtx constructor = output_constant_def (exp);
3405 if (modifier != EXPAND_CONST_ADDRESS
3406 && modifier != EXPAND_INITIALIZER
3407 && modifier != EXPAND_SUM
3408 && !memory_address_p (GET_MODE (constructor),
3409 XEXP (constructor, 0)))
3410 constructor = change_address (constructor, VOIDmode,
3411 XEXP (constructor, 0));
3418 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3419 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3424 if (target == 0 || ! safe_from_p (target, exp))
3426 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3427 target = gen_reg_rtx (mode);
3430 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3432 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3433 target = safe_target;
3436 store_constructor (exp, target);
3442 tree exp1 = TREE_OPERAND (exp, 0);
3445 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3446 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3447 This code has the same general effect as simply doing
3448 expand_expr on the save expr, except that the expression PTR
3449 is computed for use as a memory address. This means different
3450 code, suitable for indexing, may be generated. */
3451 if (TREE_CODE (exp1) == SAVE_EXPR
3452 && SAVE_EXPR_RTL (exp1) == 0
3453 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3454 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3455 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3457 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3458 VOIDmode, EXPAND_SUM);
3459 op0 = memory_address (mode, temp);
3460 op0 = copy_all_regs (op0);
3461 SAVE_EXPR_RTL (exp1) = op0;
3465 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3466 op0 = memory_address (mode, op0);
3469 temp = gen_rtx (MEM, mode, op0);
3470 /* If address was computed by addition,
3471 mark this as an element of an aggregate. */
3472 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3473 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3474 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3475 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3476 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3477 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3478 || (TREE_CODE (exp1) == ADDR_EXPR
3479 && (exp2 = TREE_OPERAND (exp1, 0))
3480 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3481 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3482 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3483 MEM_IN_STRUCT_P (temp) = 1;
3484 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3485 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3486 a location is accessed through a pointer to const does not mean
3487 that the value there can never change. */
3488 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3494 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3495 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3497 /* Nonconstant array index or nonconstant element size.
3498 Generate the tree for *(&array+index) and expand that,
3499 except do it in a language-independent way
3500 and don't complain about non-lvalue arrays.
3501 `mark_addressable' should already have been called
3502 for any array for which this case will be reached. */
3504 /* Don't forget the const or volatile flag from the array element. */
3505 tree variant_type = build_type_variant (type,
3506 TREE_READONLY (exp),
3507 TREE_THIS_VOLATILE (exp));
3508 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3509 TREE_OPERAND (exp, 0));
3510 tree index = TREE_OPERAND (exp, 1);
3513 /* Convert the integer argument to a type the same size as a pointer
3514 so the multiply won't overflow spuriously. */
3515 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3516 index = convert (type_for_size (POINTER_SIZE, 0), index);
3518 /* Don't think the address has side effects
3519 just because the array does.
3520 (In some cases the address might have side effects,
3521 and we fail to record that fact here. However, it should not
3522 matter, since expand_expr should not care.) */
3523 TREE_SIDE_EFFECTS (array_adr) = 0;
3525 elt = build1 (INDIRECT_REF, type,
3526 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3528 fold (build (MULT_EXPR,
3529 TYPE_POINTER_TO (variant_type),
3530 index, size_in_bytes (type))))));
3532 /* Volatility, etc., of new expression is same as old expression. */
3533 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3534 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3535 TREE_READONLY (elt) = TREE_READONLY (exp);
3537 return expand_expr (elt, target, tmode, modifier);
3540 /* Fold an expression like: "foo"[2].
3541 This is not done in fold so it won't happen inside &. */
3544 tree arg0 = TREE_OPERAND (exp, 0);
3545 tree arg1 = TREE_OPERAND (exp, 1);
3547 if (TREE_CODE (arg0) == STRING_CST
3548 && TREE_CODE (arg1) == INTEGER_CST
3549 && !TREE_INT_CST_HIGH (arg1)
3550 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3552 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3554 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3555 TREE_TYPE (exp) = integer_type_node;
3556 return expand_expr (exp, target, tmode, modifier);
3558 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3560 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3561 TREE_TYPE (exp) = integer_type_node;
3562 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3567 /* If this is a constant index into a constant array,
3568 just get the value from the array. Handle both the cases when
3569 we have an explicit constructor and when our operand is a variable
3570 that was declared const. */
3572 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3573 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3575 tree index = fold (TREE_OPERAND (exp, 1));
3576 if (TREE_CODE (index) == INTEGER_CST
3577 && TREE_INT_CST_HIGH (index) == 0)
3579 int i = TREE_INT_CST_LOW (index);
3580 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3583 elem = TREE_CHAIN (elem);
3585 return expand_expr (fold (TREE_VALUE (elem)), target,
3590 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3591 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3592 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3593 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3594 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3596 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3599 tree index = fold (TREE_OPERAND (exp, 1));
3600 if (TREE_CODE (index) == INTEGER_CST
3601 && TREE_INT_CST_HIGH (index) == 0)
3603 int i = TREE_INT_CST_LOW (index);
3604 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3606 if (TREE_CODE (init) == CONSTRUCTOR)
3608 tree elem = CONSTRUCTOR_ELTS (init);
3611 elem = TREE_CHAIN (elem);
3613 return expand_expr (fold (TREE_VALUE (elem)), target,
3616 else if (TREE_CODE (init) == STRING_CST
3617 && i < TREE_STRING_LENGTH (init))
3619 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3620 return convert_to_mode (mode, temp, 0);
3624 /* Treat array-ref with constant index as a component-ref. */
3628 /* If the operand is a CONSTRUCTOR, we can just extract the
3629 appropriate field if it is present. */
3630 if (code != ARRAY_REF
3631 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3635 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3636 elt = TREE_CHAIN (elt))
3637 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3638 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3642 enum machine_mode mode1;
3647 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3648 &mode1, &unsignedp, &volatilep);
3650 /* In some cases, we will be offsetting OP0's address by a constant.
3651 So get it as a sum, if possible. If we will be using it
3652 directly in an insn, we validate it. */
3653 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3655 /* If this is a constant, put it into a register if it is a
3656 legitimate constant, and into memory if it isn't. */
3657 if (CONSTANT_P (op0))
3659 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3660 if (LEGITIMATE_CONSTANT_P (op0))
3661 op0 = force_reg (mode, op0);
3663 op0 = validize_mem (force_const_mem (mode, op0));
3668 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3670 if (GET_CODE (op0) != MEM)
3672 op0 = change_address (op0, VOIDmode,
3673 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3674 force_reg (Pmode, offset_rtx)));
3677 /* Don't forget about volatility even if this is a bitfield. */
3678 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3680 op0 = copy_rtx (op0);
3681 MEM_VOLATILE_P (op0) = 1;
3684 if (mode1 == VOIDmode
3685 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3686 && modifier != EXPAND_CONST_ADDRESS
3687 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3688 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3690 /* In cases where an aligned union has an unaligned object
3691 as a field, we might be extracting a BLKmode value from
3692 an integer-mode (e.g., SImode) object. Handle this case
3693 by doing the extract into an object as wide as the field
3694 (which we know to be the width of a basic mode), then
3695 storing into memory, and changing the mode to BLKmode. */
3696 enum machine_mode ext_mode = mode;
3698 if (ext_mode == BLKmode)
3699 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3701 if (ext_mode == BLKmode)
3704 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3705 unsignedp, target, ext_mode, ext_mode,
3706 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3707 int_size_in_bytes (TREE_TYPE (tem)));
3708 if (mode == BLKmode)
3710 rtx new = assign_stack_temp (ext_mode,
3711 bitsize / BITS_PER_UNIT, 0);
3713 emit_move_insn (new, op0);
3714 op0 = copy_rtx (new);
3715 PUT_MODE (op0, BLKmode);
3721 /* Get a reference to just this component. */
3722 if (modifier == EXPAND_CONST_ADDRESS
3723 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3724 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3725 (bitpos / BITS_PER_UNIT)));
3727 op0 = change_address (op0, mode1,
3728 plus_constant (XEXP (op0, 0),
3729 (bitpos / BITS_PER_UNIT)));
3730 MEM_IN_STRUCT_P (op0) = 1;
3731 MEM_VOLATILE_P (op0) |= volatilep;
3732 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3735 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3736 convert_move (target, op0, unsignedp);
3742 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3743 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3744 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3745 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3746 MEM_IN_STRUCT_P (temp) = 1;
3747 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3748 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3749 a location is accessed through a pointer to const does not mean
3750 that the value there can never change. */
3751 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3756 /* Intended for a reference to a buffer of a file-object in Pascal.
3757 But it's not certain that a special tree code will really be
3758 necessary for these. INDIRECT_REF might work for them. */
3762 case WITH_CLEANUP_EXPR:
3763 if (RTL_EXPR_RTL (exp) == 0)
3766 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3768 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3769 /* That's it for this cleanup. */
3770 TREE_OPERAND (exp, 2) = 0;
3772 return RTL_EXPR_RTL (exp);
3775 /* Check for a built-in function. */
3776 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3777 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3778 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3779 return expand_builtin (exp, target, subtarget, tmode, ignore);
3780 /* If this call was expanded already by preexpand_calls,
3781 just return the result we got. */
3782 if (CALL_EXPR_RTL (exp) != 0)
3783 return CALL_EXPR_RTL (exp);
3784 return expand_call (exp, target, ignore);
3786 case NON_LVALUE_EXPR:
3789 case REFERENCE_EXPR:
3790 if (TREE_CODE (type) == VOID_TYPE || ignore)
3792 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3795 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3796 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3797 if (TREE_CODE (type) == UNION_TYPE)
3799 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3802 if (mode == BLKmode)
3804 if (TYPE_SIZE (type) == 0
3805 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3807 target = assign_stack_temp (BLKmode,
3808 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3809 + BITS_PER_UNIT - 1)
3810 / BITS_PER_UNIT, 0);
3813 target = gen_reg_rtx (mode);
3815 if (GET_CODE (target) == MEM)
3816 /* Store data into beginning of memory target. */
3817 store_expr (TREE_OPERAND (exp, 0),
3818 change_address (target, TYPE_MODE (valtype), 0),
3820 else if (GET_CODE (target) == REG)
3821 /* Store this field into a union of the proper type. */
3822 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3823 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3825 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3829 /* Return the entire union. */
3832 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, modifier);
3833 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3835 if (modifier == EXPAND_INITIALIZER)
3836 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
3837 if (flag_force_mem && GET_CODE (op0) == MEM)
3838 op0 = copy_to_reg (op0);
3841 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3843 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3847 /* We come here from MINUS_EXPR when the second operand is a constant. */
3849 this_optab = add_optab;
3851 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3852 something else, make sure we add the register to the constant and
3853 then to the other thing. This case can occur during strength
3854 reduction and doing it this way will produce better code if the
3855 frame pointer or argument pointer is eliminated.
3857 fold-const.c will ensure that the constant is always in the inner
3858 PLUS_EXPR, so the only case we need to do anything about is if
3859 sp, ap, or fp is our second argument, in which case we must swap
3860 the innermost first argument and our second argument. */
3862 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3863 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3864 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3865 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3866 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3867 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3869 tree t = TREE_OPERAND (exp, 1);
3871 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3872 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3875 /* If the result is to be Pmode and we are adding an integer to
3876 something, we might be forming a constant. So try to use
3877 plus_constant. If it produces a sum and we can't accept it,
3878 use force_operand. This allows P = &ARR[const] to generate
3879 efficient code on machines where a SYMBOL_REF is not a valid address.
3882 If this is an EXPAND_SUM call, always return the sum. */
3883 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3884 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3885 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3888 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3890 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3891 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3892 op1 = force_operand (op1, target);
3896 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3897 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3898 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3901 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3903 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3904 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3905 op0 = force_operand (op0, target);
3909 /* No sense saving up arithmetic to be done
3910 if it's all in the wrong mode to form part of an address.
3911 And force_operand won't know whether to sign-extend or zero-extend. */
3913 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3914 || mode != Pmode) goto binop;
3916 preexpand_calls (exp);
3917 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3920 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3921 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
3923 /* Make sure any term that's a sum with a constant comes last. */
3924 if (GET_CODE (op0) == PLUS
3925 && CONSTANT_P (XEXP (op0, 1)))
3931 /* If adding to a sum including a constant,
3932 associate it to put the constant outside. */
3933 if (GET_CODE (op1) == PLUS
3934 && CONSTANT_P (XEXP (op1, 1)))
3936 rtx constant_term = const0_rtx;
3938 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3941 /* Ensure that MULT comes first if there is one. */
3942 else if (GET_CODE (op0) == MULT)
3943 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3945 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3947 /* Let's also eliminate constants from op0 if possible. */
3948 op0 = eliminate_constant_term (op0, &constant_term);
3950 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3951 their sum should be a constant. Form it into OP1, since the
3952 result we want will then be OP0 + OP1. */
3954 temp = simplify_binary_operation (PLUS, mode, constant_term,
3959 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3962 /* Put a constant term last and put a multiplication first. */
3963 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3964 temp = op1, op1 = op0, op0 = temp;
3966 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3967 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
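/* A hedged illustration of the reassociation above: for source like
   `p + (i + 4)', once OP1 is (plus (reg) (const_int 4)) the variable
   parts are combined and the constant is pulled outside, yielding a
   form like (plus (plus (reg) (reg)) (const_int 4)), which
   plus_constant and address generation can exploit. */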
3970 /* Handle difference of two symbolic constants,
3971 for the sake of an initializer. */
3972 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3973 && really_constant_p (TREE_OPERAND (exp, 0))
3974 && really_constant_p (TREE_OPERAND (exp, 1)))
3976 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
3977 VOIDmode, modifier);
3978 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
3979 VOIDmode, modifier);
3980 return gen_rtx (MINUS, mode, op0, op1);
3982 /* Convert A - const to A + (-const). */
3983 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3985 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3986 fold (build1 (NEGATE_EXPR, type,
3987 TREE_OPERAND (exp, 1))));
3990 this_optab = sub_optab;
3994 preexpand_calls (exp);
3995 /* If first operand is constant, swap them.
3996 Thus the following special case checks need only
3997 check the second operand. */
3998 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4000 register tree t1 = TREE_OPERAND (exp, 0);
4001 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4002 TREE_OPERAND (exp, 1) = t1;
4005 /* Attempt to return something suitable for generating an
4006 indexed address, for machines that support that. */
4008 if (modifier == EXPAND_SUM && mode == Pmode
4009 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4010 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4012 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4014 /* Apply distributive law if OP0 is x+c. */
4015 if (GET_CODE (op0) == PLUS
4016 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4017 return gen_rtx (PLUS, mode,
4018 gen_rtx (MULT, mode, XEXP (op0, 0),
4019 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4020 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4021 * INTVAL (XEXP (op0, 1))));
4023 if (GET_CODE (op0) != REG)
4024 op0 = force_operand (op0, NULL_RTX);
4025 if (GET_CODE (op0) != REG)
4026 op0 = copy_to_mode_reg (mode, op0);
4028 return gen_rtx (MULT, mode, op0,
4029 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4032 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4035 /* Check for multiplying things that have been extended
4036 from a narrower type. If this machine supports multiplying
4037 in that narrower type with a result in the desired type,
4038 do it that way, and avoid the explicit type-conversion. */
4039 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4040 && TREE_CODE (type) == INTEGER_TYPE
4041 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4042 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4043 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4044 && int_fits_type_p (TREE_OPERAND (exp, 1),
4045 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4046 /* Don't use a widening multiply if a shift will do. */
4047 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4048 > HOST_BITS_PER_WIDE_INT)
4049 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4051 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4052 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4054 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4055 /* If both operands are extended, they must either both
4056 be zero-extended or both be sign-extended. */
4057 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4059 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4061 enum machine_mode innermode
4062 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4063 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4064 ? umul_widen_optab : smul_widen_optab);
4065 if (mode == GET_MODE_WIDER_MODE (innermode)
4066 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4068 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4069 NULL_RTX, VOIDmode, 0);
4070 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4071 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4074 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4075 NULL_RTX, VOIDmode, 0);
4079 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4080 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4081 return expand_mult (mode, op0, op1, target, unsignedp);
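/* A hedged source-level illustration of the widening-multiply case
   above. For

	short a, b;
	int c = a * b;

   both operands are NOP_EXPRs extending from a narrower mode, so on
   a machine with a widening-multiply pattern (mulhisi3-style) a
   single instruction is used instead of two extensions followed by
   a full-width multiply. */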
4083 case TRUNC_DIV_EXPR:
4084 case FLOOR_DIV_EXPR:
4086 case ROUND_DIV_EXPR:
4087 case EXACT_DIV_EXPR:
4088 preexpand_calls (exp);
4089 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4091 /* Possible optimization: compute the dividend with EXPAND_SUM
4092 then, if the divisor is constant, we can optimize the case
4093 where some terms of the dividend have coefficients divisible by it. */
4094 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4095 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4096 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4099 this_optab = flodiv_optab;
4102 case TRUNC_MOD_EXPR:
4103 case FLOOR_MOD_EXPR:
4105 case ROUND_MOD_EXPR:
4106 preexpand_calls (exp);
4107 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4109 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4110 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4111 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4113 case FIX_ROUND_EXPR:
4114 case FIX_FLOOR_EXPR:
4116 abort (); /* Not used for C. */
4118 case FIX_TRUNC_EXPR:
4119 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4121 target = gen_reg_rtx (mode);
4122 expand_fix (target, op0, unsignedp);
4126 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4128 target = gen_reg_rtx (mode);
4129 /* expand_float can't figure out what to do if FROM has VOIDmode.
4130 So give it the correct mode. With -O, cse will optimize this. */
4131 if (GET_MODE (op0) == VOIDmode)
4132 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4134 expand_float (target, op0,
4135 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4139 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4140 temp = expand_unop (mode, neg_optab, op0, target, 0);
4146 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4148 /* Unsigned abs is simply the operand. Testing here means we don't
4149 risk generating incorrect code below. */
4150 if (TREE_UNSIGNED (type))
4153 /* First try to do it with a special abs instruction. */
4154 temp = expand_unop (mode, abs_optab, op0, target, 0);
4158 /* If this machine has expensive jumps, we can do integer absolute
4159 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4160 where W is the width of MODE. */
4162 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4164 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4165 size_int (GET_MODE_BITSIZE (mode) - 1),
4166 NULL_RTX, 0);
4168 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4169 OPTAB_LIB_WIDEN);
4170 if (temp != 0)
4171 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4172 OPTAB_LIB_WIDEN);
4174 if (temp != 0)
4175 return temp;
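/* Worked example of the identity above, assuming a 32-bit mode
   (W = 32) and x = -5:

     extended = x >> 31         = -1  (all ones, since x < 0)
     temp     = extended ^ x    =  4
     temp     = temp - extended =  4 - (-1) = 5 = abs (x)

   For x >= 0, EXTENDED is zero and both operations leave X unchanged,
   so no branch is ever needed.  */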
4178 /* If that does not win, use conditional jump and negate. */
4179 target = original_target;
4180 temp = gen_label_rtx ();
4181 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4182 || (GET_CODE (target) == REG
4183 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4184 target = gen_reg_rtx (mode);
4185 emit_move_insn (target, op0);
4186 emit_cmp_insn (target,
4187 expand_expr (convert (type, integer_zero_node),
4188 NULL_RTX, VOIDmode, 0),
4189 GE, NULL_RTX, mode, 0, 0);
4191 emit_jump_insn (gen_bge (temp));
4192 op0 = expand_unop (mode, neg_optab, target, target, 0);
4194 emit_move_insn (target, op0);
4199 case MAX_EXPR:
4200 case MIN_EXPR:
4201 target = original_target;
4202 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4203 || (GET_CODE (target) == REG
4204 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4205 target = gen_reg_rtx (mode);
4206 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4207 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4209 /* First try to do it with a special MIN or MAX instruction.
4210 If that does not win, use a conditional jump to select the proper
4211 value. */
4212 this_optab = (TREE_UNSIGNED (type)
4213 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4214 : (code == MIN_EXPR ? smin_optab : smax_optab));
4216 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4222 emit_move_insn (target, op0);
4223 op0 = gen_label_rtx ();
4224 if (code == MAX_EXPR)
4225 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4226 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4227 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4228 else
4229 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4230 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4231 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4232 if (temp == const0_rtx)
4233 emit_move_insn (target, op1);
4234 else if (temp != const_true_rtx)
4236 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4237 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4238 else
4239 abort ();
4240 emit_move_insn (target, op1);
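/* The fallback above amounts to this pseudo-sequence for MAX (and
   symmetrically for MIN), sketched here only for illustration:

     target = op0;
     if (target >= op1)    (GEU when unsigned)
       goto lab;
     target = op1;
   lab:

   compare_from_rtx may fold the comparison to a constant, in which
   case the second move or the jump is omitted entirely.  */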
4245 /* ??? Can optimize when the operand of this is a bitwise operation,
4246 by using a different bitwise operation. */
4248 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4249 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4255 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4256 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4261 /* ??? Can optimize bitwise operations with one arg constant.
4262 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4263 and (a bitwise1 b) bitwise2 b (etc)
4264 but that is probably not worth while. */
4266 /* BIT_AND_EXPR is for bitwise anding.
4267 TRUTH_AND_EXPR is for anding two boolean values
4268 when we want in all cases to compute both of them.
4269 In general it is fastest to do TRUTH_AND_EXPR by
4270 computing both operands as actual zero-or-1 values
4271 and then bitwise anding. In cases where there cannot
4272 be any side effects, better code would be made by
4273 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4274 but the question is how to recognize those cases. */
4276 case TRUTH_AND_EXPR:
4278 this_optab = and_optab;
4281 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4284 this_optab = ior_optab;
4288 this_optab = xor_optab;
4295 preexpand_calls (exp);
4296 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4298 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4299 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4300 unsignedp);
4302 /* Could determine the answer when only additive constants differ.
4303 Also, the addition of one can be handled by changing the condition. */
4304 case LT_EXPR:
4305 case LE_EXPR:
4306 case GT_EXPR:
4307 case GE_EXPR:
4308 case EQ_EXPR:
4309 case NE_EXPR:
4310 preexpand_calls (exp);
4311 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4312 if (temp != 0)
4313 return temp;
4314 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4315 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4316 && original_target != 0
4317 && GET_CODE (original_target) == REG
4318 && (GET_MODE (original_target)
4319 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4321 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4322 if (temp != original_target)
4323 temp = copy_to_reg (temp);
4324 op1 = gen_label_rtx ();
4325 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4326 GET_MODE (temp), unsignedp, 0);
4327 emit_jump_insn (gen_beq (op1));
4328 emit_move_insn (temp, const1_rtx);
4329 emit_label (op1);
4330 return temp;
4332 /* If no set-flag instruction, must generate a conditional
4333 store into a temporary variable. Drop through
4334 and handle this like && and ||. */
4336 case TRUTH_ANDIF_EXPR:
4337 case TRUTH_ORIF_EXPR:
4338 if (target == 0 || ! safe_from_p (target, exp)
4339 /* Make sure we don't have a hard reg (such as function's return
4340 value) live across basic blocks, if not optimizing. */
4341 || (!optimize && GET_CODE (target) == REG
4342 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4343 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4344 emit_clr_insn (target);
4345 op1 = gen_label_rtx ();
4346 jumpifnot (exp, op1);
4347 emit_0_to_1_insn (target);
4351 case TRUTH_NOT_EXPR:
4352 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4353 /* The parser is careful to generate TRUTH_NOT_EXPR
4354 only with operands that are always zero or one. */
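/* Since the operand is known to be 0 or 1, XOR with 1 computes the
   logical negation: 0 ^ 1 = 1 and 1 ^ 1 = 0.  */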
4355 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4356 target, 1, OPTAB_LIB_WIDEN);
4357 if (temp == 0)
4358 abort ();
4359 return temp;
4361 case COMPOUND_EXPR:
4362 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4363 emit_queue ();
4364 return expand_expr (TREE_OPERAND (exp, 1),
4365 (ignore ? const0_rtx : target),
4366 VOIDmode, modifier);
4368 case COND_EXPR:
4370 /* Note that COND_EXPRs whose type is a structure or union
4371 are required to be constructed to contain assignments of
4372 a temporary variable, so that we can evaluate them here
4373 for side effect only. If type is void, we must do likewise. */
4375 /* If an arm of the branch requires a cleanup,
4376 only that cleanup is performed. */
4379 tree binary_op = 0, unary_op = 0;
4380 tree old_cleanups = cleanups_this_call;
4381 cleanups_this_call = 0;
4383 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4384 convert it to our mode, if necessary. */
4385 if (integer_onep (TREE_OPERAND (exp, 1))
4386 && integer_zerop (TREE_OPERAND (exp, 2))
4387 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4389 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4390 if (GET_MODE (op0) == mode)
4393 target = gen_reg_rtx (mode);
4394 convert_move (target, op0, unsignedp);
4398 /* If we are not to produce a result, we have no target. Otherwise,
4399 if a target was specified use it; it will not be used as an
4400 intermediate target unless it is safe. If no target, use a
4403 if (mode == VOIDmode || ignore)
4404 temp = 0;
4405 else if (original_target
4406 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4407 temp = original_target;
4408 else if (mode == BLKmode)
4410 if (TYPE_SIZE (type) == 0
4411 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4413 temp = assign_stack_temp (BLKmode,
4414 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4415 + BITS_PER_UNIT - 1)
4416 / BITS_PER_UNIT, 0);
4419 temp = gen_reg_rtx (mode);
4421 /* Check for X ? A + B : A. If we have this, we can copy
4422 A to the output and conditionally add B. Similarly for unary
4423 operations. Don't do this if X has side-effects because
4424 those side effects might affect A or B and the "?" operation is
4425 a sequence point in ANSI. (We test for side effects later.) */
4427 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4428 && operand_equal_p (TREE_OPERAND (exp, 2),
4429 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4430 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4431 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4432 && operand_equal_p (TREE_OPERAND (exp, 1),
4433 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4434 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4435 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4436 && operand_equal_p (TREE_OPERAND (exp, 2),
4437 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4438 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4439 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4440 && operand_equal_p (TREE_OPERAND (exp, 1),
4441 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4442 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4444 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4445 operation, do this as A + (X != 0). Similarly for other simple
4446 binary operators. */
4447 if (singleton && binary_op
4448 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4449 && (TREE_CODE (binary_op) == PLUS_EXPR
4450 || TREE_CODE (binary_op) == MINUS_EXPR
4451 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4452 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4453 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4454 && integer_onep (TREE_OPERAND (binary_op, 1))
4455 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4458 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4459 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4460 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4461 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4462 : and_optab);
4464 /* If we had X ? A : A + 1, do this as A + (X == 0).
4466 We have to invert the truth value here and then put it
4467 back later if do_store_flag fails. We cannot simply copy
4468 TREE_OPERAND (exp, 0) to another variable and modify that
4469 because invert_truthvalue can modify the tree pointed to
4470 by its argument. */
4471 if (singleton == TREE_OPERAND (exp, 1))
4472 TREE_OPERAND (exp, 0)
4473 = invert_truthvalue (TREE_OPERAND (exp, 0));
4475 result = do_store_flag (TREE_OPERAND (exp, 0),
4476 (safe_from_p (temp, singleton)
4477 ? temp : NULL_RTX),
4478 mode, BRANCH_COST <= 1);
4482 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4483 return expand_binop (mode, boptab, op1, result, temp,
4484 unsignedp, OPTAB_LIB_WIDEN);
4486 else if (singleton == TREE_OPERAND (exp, 1))
4487 TREE_OPERAND (exp, 0)
4488 = invert_truthvalue (TREE_OPERAND (exp, 0));
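/* Illustration (not from the original source): given

     y = x ? a + 1 : a;

   SINGLETON is `a' and BINARY_OP is `a + 1', so when the target has a
   store-flag (scc) instruction the whole conditional becomes

     y = a + (x != 0);

   with no branches at all.  The X ? A : A + 1 form is handled by
   inverting X first, giving A + (X == 0).  */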
4492 op0 = gen_label_rtx ();
4494 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4498 /* If the target conflicts with the other operand of the
4499 binary op, we can't use it. Also, we can't use the target
4500 if it is a hard register, because evaluating the condition
4501 might clobber it. */
4503 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4504 || (GET_CODE (temp) == REG
4505 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4506 temp = gen_reg_rtx (mode);
4507 store_expr (singleton, temp, 0);
4510 expand_expr (singleton,
4511 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4512 if (cleanups_this_call)
4514 sorry ("aggregate value in COND_EXPR");
4515 cleanups_this_call = 0;
4517 if (singleton == TREE_OPERAND (exp, 1))
4518 jumpif (TREE_OPERAND (exp, 0), op0);
4519 else
4520 jumpifnot (TREE_OPERAND (exp, 0), op0);
4522 if (binary_op && temp == 0)
4523 /* Just touch the other operand. */
4524 expand_expr (TREE_OPERAND (binary_op, 1),
4525 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4526 else if (binary_op)
4527 store_expr (build (TREE_CODE (binary_op), type,
4528 make_tree (type, temp),
4529 TREE_OPERAND (binary_op, 1)),
4530 temp, 0);
4531 else
4532 store_expr (build1 (TREE_CODE (unary_op), type,
4533 make_tree (type, temp)),
4534 temp, 0);
4538 /* This is now done in jump.c and is better done there because it
4539 produces shorter register lifetimes. */
4541 /* Check for both possibilities either constants or variables
4542 in registers (but not the same as the target!). If so, can
4543 save branches by assigning one, branching, and assigning the
4544 other. */
4545 else if (temp && GET_MODE (temp) != BLKmode
4546 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4547 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4548 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4549 && DECL_RTL (TREE_OPERAND (exp, 1))
4550 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4551 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4552 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4553 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4554 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4555 && DECL_RTL (TREE_OPERAND (exp, 2))
4556 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4557 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4559 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4560 temp = gen_reg_rtx (mode);
4561 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4562 jumpifnot (TREE_OPERAND (exp, 0), op0);
4563 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4567 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4568 comparison operator. If we have one of these cases, set the
4569 output to A, branch on A (cse will merge these two references),
4570 then set the output to FOO. */
4571 else if (temp
4572 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4573 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4574 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4575 TREE_OPERAND (exp, 1), 0)
4576 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4577 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4579 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4580 temp = gen_reg_rtx (mode);
4581 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4582 jumpif (TREE_OPERAND (exp, 0), op0);
4583 store_expr (TREE_OPERAND (exp, 2), temp, 0);
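/* Example of the case above: for

     z = (a != 0 ? a : b);

   the condition compares A against zero, so we can store A into the
   target, branch on that same A (cse will merge the two references),
   and store B only on the fall-through path.  */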
4586 else if (temp
4587 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4588 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4589 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4590 TREE_OPERAND (exp, 2), 0)
4591 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4592 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4594 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4595 temp = gen_reg_rtx (mode);
4596 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4597 jumpifnot (TREE_OPERAND (exp, 0), op0);
4598 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4603 op1 = gen_label_rtx ();
4604 jumpifnot (TREE_OPERAND (exp, 0), op0);
4605 if (temp != 0)
4606 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4607 else
4608 expand_expr (TREE_OPERAND (exp, 1),
4609 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4610 if (cleanups_this_call)
4612 sorry ("aggregate value in COND_EXPR");
4613 cleanups_this_call = 0;
4617 emit_jump_insn (gen_jump (op1));
4620 if (temp != 0)
4621 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4622 else
4623 expand_expr (TREE_OPERAND (exp, 2),
4624 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4627 if (cleanups_this_call)
4629 sorry ("aggregate value in COND_EXPR");
4630 cleanups_this_call = 0;
4636 cleanups_this_call = old_cleanups;
4642 /* Something needs to be initialized, but we didn't know
4643 where that thing was when building the tree. For example,
4644 it could be the return value of a function, or a parameter
4645 to a function which is laid down in the stack, or a temporary
4646 variable which must be passed by reference.
4648 We guarantee that the expression will either be constructed
4649 or copied into our original target. */
4651 tree slot = TREE_OPERAND (exp, 0);
4654 if (TREE_CODE (slot) != VAR_DECL)
4659 if (DECL_RTL (slot) != 0)
4661 target = DECL_RTL (slot);
4662 /* We have already expanded the slot, so don't do
4663 it again. */
4664 if (TREE_OPERAND (exp, 1) == NULL_TREE)
4665 return target;
4669 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4670 /* All temp slots at this level must not conflict. */
4671 preserve_temp_slots (target);
4672 DECL_RTL (slot) = target;
4676 /* I bet this needs to be done, and I bet that it needs to
4677 be above, inside the else clause. The reason is
4678 simple, how else is it going to get cleaned up? (mrs)
4680 The reason it probably did not work before, and was
4681 commented out, is that this was re-expanding already
4682 expanded target_exprs (target == 0 and DECL_RTL (slot)
4683 != 0), also cleaning them up many times as well. :-( */
4685 /* Since SLOT is not known to the called function
4686 to belong to its stack frame, we must build an explicit
4687 cleanup. This case occurs when we must build up a reference
4688 to pass the reference as an argument. In this case,
4689 it is very likely that such a reference need not be
4690 built here. */
4692 if (TREE_OPERAND (exp, 2) == 0)
4693 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4694 if (TREE_OPERAND (exp, 2))
4695 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4696 cleanups_this_call);
4701 /* This case does occur when expanding a parameter which
4702 needs to be constructed on the stack. The target
4703 is the actual stack address that we want to initialize.
4704 The function we call will perform the cleanup in this case. */
4706 DECL_RTL (slot) = target;
4709 exp1 = TREE_OPERAND (exp, 1);
4710 /* Mark it as expanded. */
4711 TREE_OPERAND (exp, 1) = NULL_TREE;
4713 return expand_expr (exp1, target, tmode, modifier);
4718 tree lhs = TREE_OPERAND (exp, 0);
4719 tree rhs = TREE_OPERAND (exp, 1);
4720 tree noncopied_parts = 0;
4721 tree lhs_type = TREE_TYPE (lhs);
4723 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4724 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4725 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4726 TYPE_NONCOPIED_PARTS (lhs_type));
4727 while (noncopied_parts != 0)
4729 expand_assignment (TREE_VALUE (noncopied_parts),
4730 TREE_PURPOSE (noncopied_parts), 0, 0);
4731 noncopied_parts = TREE_CHAIN (noncopied_parts);
4738 /* If lhs is complex, expand calls in rhs before computing it.
4739 That's so we don't compute a pointer and save it over a call.
4740 If lhs is simple, compute it first so we can give it as a
4741 target if the rhs is just a call. This avoids an extra temp and copy
4742 and that prevents a partial-subsumption which makes bad code.
4743 Actually we could treat component_ref's of vars like vars. */
4745 tree lhs = TREE_OPERAND (exp, 0);
4746 tree rhs = TREE_OPERAND (exp, 1);
4747 tree noncopied_parts = 0;
4748 tree lhs_type = TREE_TYPE (lhs);
4752 if (TREE_CODE (lhs) != VAR_DECL
4753 && TREE_CODE (lhs) != RESULT_DECL
4754 && TREE_CODE (lhs) != PARM_DECL)
4755 preexpand_calls (exp);
4757 /* Check for |= or &= of a bitfield of size one into another bitfield
4758 of size 1. In this case, (unless we need the result of the
4759 assignment) we can do this more efficiently with a
4760 test followed by an assignment, if necessary.
4762 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4763 things change so we do, this code should be enhanced to
4764 support it. */
4765 if (ignore
4766 && TREE_CODE (lhs) == COMPONENT_REF
4767 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4768 || TREE_CODE (rhs) == BIT_AND_EXPR)
4769 && TREE_OPERAND (rhs, 0) == lhs
4770 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4771 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4772 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4774 rtx label = gen_label_rtx ();
4776 do_jump (TREE_OPERAND (rhs, 1),
4777 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4778 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4779 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4780 (TREE_CODE (rhs) == BIT_IOR_EXPR
4781 ? integer_one_node
4782 : integer_zero_node)),
4783 0, 0);
4784 do_pending_stack_adjust ();
4789 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4790 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4791 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4792 TYPE_NONCOPIED_PARTS (lhs_type));
4794 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4795 while (noncopied_parts != 0)
4797 expand_assignment (TREE_PURPOSE (noncopied_parts),
4798 TREE_VALUE (noncopied_parts), 0, 0);
4799 noncopied_parts = TREE_CHAIN (noncopied_parts);
4804 case PREINCREMENT_EXPR:
4805 case PREDECREMENT_EXPR:
4806 return expand_increment (exp, 0);
4808 case POSTINCREMENT_EXPR:
4809 case POSTDECREMENT_EXPR:
4810 /* Faster to treat as pre-increment if result is not used. */
4811 return expand_increment (exp, ! ignore);
4813 case ADDR_EXPR:
4814 /* Are we taking the address of a nested function? */
4815 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4816 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4818 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4819 op0 = force_operand (op0, target);
4823 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
4824 (modifier == EXPAND_INITIALIZER
4825 ? modifier : EXPAND_CONST_ADDRESS));
4826 if (GET_CODE (op0) != MEM)
4827 abort ();
4829 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4830 return XEXP (op0, 0);
4831 op0 = force_operand (XEXP (op0, 0), target);
4833 if (flag_force_addr && GET_CODE (op0) != REG)
4834 return force_reg (Pmode, op0);
4837 case ENTRY_VALUE_EXPR:
4838 abort ();
4843 default:
4844 return (*lang_expand_expr) (exp, target, tmode, modifier);
4847 /* Here to do an ordinary binary operator, generating an instruction
4848 from the optab already placed in `this_optab'. */
4849 binop:
4850 preexpand_calls (exp);
4851 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4852 subtarget = 0;
4853 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4854 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4855 binop2:
4856 temp = expand_binop (mode, this_optab, op0, op1, target,
4857 unsignedp, OPTAB_LIB_WIDEN);
4858 if (temp == 0)
4859 abort ();
4860 return temp;
4863 /* Return the alignment in bits of EXP, a pointer valued expression.
4864 But don't return more than MAX_ALIGN no matter what.
4865 The alignment returned is, by default, the alignment of the thing that
4866 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4868 Otherwise, look at the expression to see if we can do better, i.e., if the
4869 expression is actually pointing at an object whose alignment is tighter. */
4871 static unsigned
4872 get_pointer_alignment (exp, max_align)
4873 tree exp;
4874 unsigned max_align;
4876 unsigned align, inner;
4878 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4879 return 0;
4881 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4882 align = MIN (align, max_align);
4886 switch (TREE_CODE (exp))
4888 case NOP_EXPR:
4889 case CONVERT_EXPR:
4890 case NON_LVALUE_EXPR:
4891 exp = TREE_OPERAND (exp, 0);
4892 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4893 return align;
4894 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4895 inner = MIN (inner, max_align);
4896 align = MAX (align, inner);
4897 break;
4899 case PLUS_EXPR:
4900 /* If sum of pointer + int, restrict our maximum alignment to that
4901 imposed by the integer. If not, we can't do any better than
4902 ALIGN. */
4903 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4904 return align;
4906 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4907 & (max_align - 1))
4908 != 0)
4909 max_align >>= 1;
4911 exp = TREE_OPERAND (exp, 0);
4912 break;
4914 case ADDR_EXPR:
4915 /* See what we are pointing at and look at its alignment. */
4916 exp = TREE_OPERAND (exp, 0);
4917 if (TREE_CODE (exp) == FUNCTION_DECL)
4918 align = MAX (align, FUNCTION_BOUNDARY);
4919 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4920 align = MAX (align, DECL_ALIGN (exp));
4921 #ifdef CONSTANT_ALIGNMENT
4922 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4923 align = CONSTANT_ALIGNMENT (exp, align);
4924 #endif
4925 return MIN (align, max_align);
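/* Example: for the argument `(char *) &x + 3', where X is an int
   aligned to 32 bits, the PLUS_EXPR case above reduces MAX_ALIGN to
   8 bits (an odd byte offset leaves only byte alignment: 24 & 7 == 0
   but 24 & 15 != 0), while the ADDR_EXPR case raises ALIGN to
   DECL_ALIGN (x).  The result is the smaller of the two.  */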
4933 /* Return the tree node and offset if a given argument corresponds to
4934 a string constant. */
4936 static tree
4937 string_constant (arg, ptr_offset)
4938 tree arg;
4939 tree *ptr_offset;
4943 if (TREE_CODE (arg) == ADDR_EXPR
4944 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4946 *ptr_offset = integer_zero_node;
4947 return TREE_OPERAND (arg, 0);
4949 else if (TREE_CODE (arg) == PLUS_EXPR)
4951 tree arg0 = TREE_OPERAND (arg, 0);
4952 tree arg1 = TREE_OPERAND (arg, 1);
4957 if (TREE_CODE (arg0) == ADDR_EXPR
4958 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4960 *ptr_offset = arg1;
4961 return TREE_OPERAND (arg0, 0);
4963 else if (TREE_CODE (arg1) == ADDR_EXPR
4964 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4966 *ptr_offset = arg0;
4967 return TREE_OPERAND (arg1, 0);
4974 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4975 way, because it could contain a zero byte in the middle.
4976 TREE_STRING_LENGTH is the size of the character array, not the string.
4978 Unfortunately, string_constant can't access the values of const char
4979 arrays with initializers, so neither can we do so here. */
4989 src = string_constant (src, &offset_node);
4990 if (src == 0)
4991 return 0;
4992 max = TREE_STRING_LENGTH (src);
4993 ptr = TREE_STRING_POINTER (src);
4994 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4996 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4997 compute the offset to the following null if we don't know where to
4998 start searching for it. */
5000 for (i = 0; i < max; i++)
5001 if (ptr[i] == 0)
5002 return 0;
5003 /* We don't know the starting offset, but we do know that the string
5004 has no internal zero bytes. We can assume that the offset falls
5005 within the bounds of the string; otherwise, the programmer deserves
5006 what he gets. Subtract the offset from the length of the string,
5007 and return that. */
5008 /* This would perhaps not be valid if we were dealing with named
5009 arrays in addition to literal string constants. */
5010 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5013 /* We have a known offset into the string. Start searching there for
5014 a null character. */
5015 if (offset_node == 0)
5016 offset = 0;
5017 else
5019 /* Did we get a long long offset? If so, punt. */
5020 if (TREE_INT_CST_HIGH (offset_node) != 0)
5021 return 0;
5022 offset = TREE_INT_CST_LOW (offset_node);
5024 /* If the offset is known to be out of bounds, warn, and call strlen at
5025 runtime. */
5026 if (offset < 0 || offset > max)
5028 warning ("offset outside bounds of constant string");
5029 return 0;
5031 /* Use strlen to search for the first zero byte. Since any strings
5032 constructed with build_string will have nulls appended, we win even
5033 if we get handed something like (char[4])"abcd".
5035 Since OFFSET is our starting index into the string, no further
5036 calculation is needed. */
5037 return size_int (strlen (ptr + offset));
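/* Examples: c_strlen for the argument "hello" + 2 yields size_int (3);
   for "foo\0bar" with an offset that is not known at compile time, the
   loop above finds the embedded zero byte and returns 0, so the caller
   falls back to a real call to strlen.  */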
5040 /* Expand an expression EXP that calls a built-in function,
5041 with result going to TARGET if that's convenient
5042 (and in mode MODE if that's convenient).
5043 SUBTARGET may be used as the target for computing one of EXP's operands.
5044 IGNORE is nonzero if the value is to be ignored. */
5046 static rtx
5047 expand_builtin (exp, target, subtarget, mode, ignore)
5048 tree exp;
5049 rtx target;
5050 rtx subtarget;
5051 enum machine_mode mode;
5052 int ignore;
5054 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5055 tree arglist = TREE_OPERAND (exp, 1);
5058 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5060 switch (DECL_FUNCTION_CODE (fndecl))
5062 case BUILT_IN_ABS:
5063 case BUILT_IN_LABS:
5064 case BUILT_IN_FABS:
5065 /* build_function_call changes these into ABS_EXPR. */
5066 abort ();
5068 case BUILT_IN_FSQRT:
5069 /* If not optimizing, call the library function. */
5070 if (!optimize)
5071 break;
5073 if (arglist == 0
5074 /* Arg could be wrong type if user redeclared this fcn wrong. */
5075 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5076 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5078 /* Stabilize and compute the argument. */
5079 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5080 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5082 exp = copy_node (exp);
5083 arglist = copy_node (arglist);
5084 TREE_OPERAND (exp, 1) = arglist;
5085 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5087 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5089 /* Make a suitable register to place result in. */
5090 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5092 emit_queue ();
5093 start_sequence ();
5095 /* Compute sqrt into TARGET.
5096 Set TARGET to wherever the result comes back. */
5097 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5098 sqrt_optab, op0, target, 0);
5100 /* If we were unable to expand via the builtin, stop the
5101 sequence (without outputting the insns) and break, causing
5102 a call to the library function. */
5109 /* Check the results by default. But if flag_fast_math is turned on,
5110 then assume sqrt will always be called with valid arguments. */
5112 if (! flag_fast_math)
5114 /* Don't define the sqrt instructions
5115 if your machine is not IEEE. */
5116 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5119 lab1 = gen_label_rtx ();
5121 /* Test the result; if it is NaN, set errno=EDOM because
5122 the argument was not in the domain. */
5123 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5124 emit_jump_insn (gen_beq (lab1));
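/* This relies on the IEEE rule that NaN compares unequal to
   everything, including itself: if sqrt returned NaN, TARGET == TARGET
   is false, the branch to LAB1 is not taken, and the errno-setting
   code below runs.  */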
5128 #ifdef GEN_ERRNO_RTX
5129 rtx errno_rtx = GEN_ERRNO_RTX;
5130 #else
5131 rtx errno_rtx
5132 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5133 #endif
5135 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5137 #else
5138 /* We can't set errno=EDOM directly; let the library call do it.
5139 Pop the arguments right away in case the call gets deleted. */
5140 NO_DEFER_POP;
5141 expand_call (exp, target, 0);
5142 OK_DEFER_POP;
5143 #endif
5145 emit_label (lab1);
5148 /* Output the entire sequence. */
5149 insns = get_insns ();
5150 end_sequence ();
5151 emit_insns (insns);
5153 return target;
5155 case BUILT_IN_SAVEREGS:
5156 /* Don't do __builtin_saveregs more than once in a function.
5157 Save the result of the first call and reuse it. */
5158 if (saveregs_value != 0)
5159 return saveregs_value;
5161 /* When this function is called, it means that registers must be
5162 saved on entry to this function. So we migrate the
5163 call to the first insn of this function. */
5166 rtx valreg, saved_valreg;
5168 /* Now really call the function. `expand_call' does not call
5169 expand_builtin, so there is no danger of infinite recursion here. */
5172 #ifdef EXPAND_BUILTIN_SAVEREGS
5173 /* Do whatever the machine needs done in this case. */
5174 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5175 #else
5176 /* The register where the function returns its value
5177 is likely to have something else in it, such as an argument.
5178 So preserve that register around the call. */
5179 if (value_mode != VOIDmode)
5181 valreg = hard_libcall_value (value_mode);
5182 saved_valreg = gen_reg_rtx (value_mode);
5183 emit_move_insn (saved_valreg, valreg);
5186 /* Generate the call, putting the value in a pseudo. */
5187 temp = expand_call (exp, target, ignore);
5189 if (value_mode != VOIDmode)
5190 emit_move_insn (valreg, saved_valreg);
5194 #endif
5196 saveregs_value = temp;
5198 /* This won't work inside a SEQUENCE--it really has to be
5199 at the start of the function. */
5200 if (in_sequence_p ())
5202 /* Better to do this than to crash. */
5203 error ("`va_start' used within `({...})'");
5207 /* Put the sequence after the NOTE that starts the function. */
5208 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5209 return temp;
5212 /* __builtin_args_info (N) returns word N of the arg space info
5213 for the current function. The number and meanings of words
5214 is controlled by the definition of CUMULATIVE_ARGS. */
5215 case BUILT_IN_ARGS_INFO:
5217 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5219 int *word_ptr = (int *) &current_function_args_info;
5220 tree type, elts, result;
5222 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5223 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5224 __FILE__, __LINE__);
5228 tree arg = TREE_VALUE (arglist);
5229 if (TREE_CODE (arg) != INTEGER_CST)
5230 error ("argument of __builtin_args_info must be constant");
5233 int wordnum = TREE_INT_CST_LOW (arg);
5235 if (wordnum < 0 || wordnum >= nwords)
5236 error ("argument of __builtin_args_info out of range");
5237 else
5238 return GEN_INT (word_ptr[wordnum]);
5242 error ("missing argument in __builtin_args_info");
5247 for (i = 0; i < nwords; i++)
5248 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
5250 type = build_array_type (integer_type_node,
5251 build_index_type (build_int_2 (nwords, 0)));
5252 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5253 TREE_CONSTANT (result) = 1;
5254 TREE_STATIC (result) = 1;
5255 result = build (INDIRECT_REF, build_pointer_type (type), result);
5256 TREE_CONSTANT (result) = 1;
5257 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5261 /* Return the address of the first anonymous stack arg. */
5262 case BUILT_IN_NEXT_ARG:
5264 tree fntype = TREE_TYPE (current_function_decl);
5265 if (!(TYPE_ARG_TYPES (fntype) != 0
5266 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5267 != void_type_node)))
5269 error ("`va_start' used in function with fixed args");
5274 return expand_binop (Pmode, add_optab,
5275 current_function_internal_arg_pointer,
5276 current_function_arg_offset_rtx,
5277 NULL_RTX, 0, OPTAB_LIB_WIDEN);
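/* A typical stdarg.h then defines va_start in terms of this builtin,
   e.g. (illustrative only):

     #define va_start(AP)  ((AP) = (char *) __builtin_next_arg ())

   so AP ends up pointing just past the last named argument.  */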
5279 case BUILT_IN_CLASSIFY_TYPE:
5282 tree type = TREE_TYPE (TREE_VALUE (arglist));
5283 enum tree_code code = TREE_CODE (type);
5284 if (code == VOID_TYPE)
5285 return GEN_INT (void_type_class);
5286 if (code == INTEGER_TYPE)
5287 return GEN_INT (integer_type_class);
5288 if (code == CHAR_TYPE)
5289 return GEN_INT (char_type_class);
5290 if (code == ENUMERAL_TYPE)
5291 return GEN_INT (enumeral_type_class);
5292 if (code == BOOLEAN_TYPE)
5293 return GEN_INT (boolean_type_class);
5294 if (code == POINTER_TYPE)
5295 return GEN_INT (pointer_type_class);
5296 if (code == REFERENCE_TYPE)
5297 return GEN_INT (reference_type_class);
5298 if (code == OFFSET_TYPE)
5299 return GEN_INT (offset_type_class);
5300 if (code == REAL_TYPE)
5301 return GEN_INT (real_type_class);
5302 if (code == COMPLEX_TYPE)
5303 return GEN_INT (complex_type_class);
5304 if (code == FUNCTION_TYPE)
5305 return GEN_INT (function_type_class);
5306 if (code == METHOD_TYPE)
5307 return GEN_INT (method_type_class);
5308 if (code == RECORD_TYPE)
5309 return GEN_INT (record_type_class);
5310 if (code == UNION_TYPE)
5311 return GEN_INT (union_type_class);
5312 if (code == ARRAY_TYPE)
5313 return GEN_INT (array_type_class);
5314 if (code == STRING_TYPE)
5315 return GEN_INT (string_type_class);
5316 if (code == SET_TYPE)
5317 return GEN_INT (set_type_class);
5318 if (code == FILE_TYPE)
5319 return GEN_INT (file_type_class);
5320 if (code == LANG_TYPE)
5321 return GEN_INT (lang_type_class);
5323 return GEN_INT (no_type_class);
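/* For example, __builtin_classify_type applied to a double argument
   yields real_type_class, and applied to any pointer argument yields
   pointer_type_class; in every case the result is a compile-time
   constant (a GEN_INT).  */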
5325 case BUILT_IN_CONSTANT_P:
5327 if (arglist == 0)
5328 return const0_rtx;
5329 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5330 ? const1_rtx : const0_rtx);
5332 case BUILT_IN_FRAME_ADDRESS:
5333 /* The argument must be a nonnegative integer constant.
5334 It counts the number of frames to scan up the stack.
5335 The value is the address of that frame. */
5336 case BUILT_IN_RETURN_ADDRESS:
5337 /* The argument must be a nonnegative integer constant.
5338 It counts the number of frames to scan up the stack.
5339 The value is the return address saved in that frame. */
5340 if (arglist == 0)
5341 /* Warning about missing arg was already issued. */
5342 return const0_rtx;
5343 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5345 error ("invalid arg to __builtin_return_address");
5348 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5350 error ("invalid arg to __builtin_return_address");
5355 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5356 rtx tem = frame_pointer_rtx;
5359 /* Scan back COUNT frames to the specified frame. */
5360 for (i = 0; i < count; i++)
5362 /* Assume the dynamic chain pointer is in the word that
5363 the frame address points to, unless otherwise specified. */
5364 #ifdef DYNAMIC_CHAIN_ADDRESS
5365 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5366 #endif
5367 tem = memory_address (Pmode, tem);
5368 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5371 /* For __builtin_frame_address, return what we've got. */
5372 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5373 return tem;
5375 /* For __builtin_return_address,
5376 get the return address from that frame. */
5377 #ifdef RETURN_ADDR_RTX
5378 return RETURN_ADDR_RTX (count, tem);
5379 #else
5380 tem = memory_address (Pmode,
5381 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5382 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5383 #endif
5386 case BUILT_IN_ALLOCA:
5387 if (arglist == 0
5388 /* Arg could be non-integer if user redeclared this fcn wrong. */
5389 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5390 break;
5391 current_function_calls_alloca = 1;
5392 /* Compute the argument. */
5393 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5395 /* Allocate the desired space. */
5396 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5398 /* Record the new stack level for nonlocal gotos. */
5399 if (nonlocal_goto_handler_slot != 0)
5400 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5401 return target;
5403 case BUILT_IN_FFS:
5404 /* If not optimizing, call the library function. */
5405 if (!optimize)
5406 break;
5408 if (arglist == 0
5409 /* Arg could be non-integer if user redeclared this fcn wrong. */
5410 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5411 break;
5413 /* Compute the argument. */
5414 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5415 /* Compute ffs, into TARGET if possible.
5416 Set TARGET to wherever the result comes back. */
5417 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5418 ffs_optab, op0, target, 1);
5419 if (target == 0)
5420 abort ();
5421 return target;
5423 case BUILT_IN_STRLEN:
5424 /* If not optimizing, call the library function. */
5425 if (!optimize)
5426 break;
5428 if (arglist == 0
5429 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5430 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5431 break;
5434 tree src = TREE_VALUE (arglist);
5435 tree len = c_strlen (src);
5438 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5440 rtx result, src_rtx, char_rtx;
5441 enum machine_mode insn_mode = value_mode, char_mode;
5442 enum insn_code icode;
5444 /* If the length is known, just return it. */
5445 if (len)
5446 return expand_expr (len, target, mode, 0);
5448 /* If SRC is not a pointer type, don't do this operation inline. */
5449 if (align == 0)
5450 break;
5452 /* Call a function if we can't compute strlen in the right mode. */
5454 while (insn_mode != VOIDmode)
5456 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5457 if (icode != CODE_FOR_nothing)
5458 break;
5460 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5462 if (insn_mode == VOIDmode)
5463 break;
5465 /* Make a place to write the result of the instruction. */
5466 result = target;
5467 if (! (result != 0
5468 && GET_CODE (result) == REG
5469 && GET_MODE (result) == insn_mode
5470 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5471 result = gen_reg_rtx (insn_mode);
5473 /* Make sure the operands are acceptable to the predicates. */
5475 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5476 result = gen_reg_rtx (insn_mode);
5478 src_rtx = memory_address (BLKmode,
5479 expand_expr (src, NULL_RTX, Pmode,
5481 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5482 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5484 char_rtx = const0_rtx;
5485 char_mode = insn_operand_mode[(int)icode][2];
5486 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5487 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5489 emit_insn (GEN_FCN (icode) (result,
5490 gen_rtx (MEM, BLKmode, src_rtx),
5491 char_rtx, GEN_INT (align)));
5493 /* Return the value in the proper mode for this function. */
5494 if (GET_MODE (result) == value_mode)
5495 return result;
5496 else if (target != 0)
5498 convert_move (target, result, 0);
5499 return target;
5501 else
5502 return convert_to_mode (value_mode, result, 0);
5505 case BUILT_IN_STRCPY:
5506 /* If not optimizing, call the library function. */
5507 if (!optimize)
5508 break;
5510 if (arglist == 0
5511 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5512 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5513 || TREE_CHAIN (arglist) == 0
5514 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5515 break;
5518 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5520 if (len == 0)
5521 break;
5523 len = size_binop (PLUS_EXPR, len, integer_one_node);
5525 chainon (arglist, build_tree_list (NULL_TREE, len));
5529 case BUILT_IN_MEMCPY:
5530 /* If not optimizing, call the library function. */
5531 if (!optimize)
5532 break;
5534 if (arglist == 0
5535 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5536 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5537 || TREE_CHAIN (arglist) == 0
5538 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5539 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5540 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5541 break;
5544 tree dest = TREE_VALUE (arglist);
5545 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5546 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5549 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5551 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5554 /* If either SRC or DEST is not a pointer type, don't do
5555 this operation in-line. */
5556 if (src_align == 0 || dest_align == 0)
5558 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5559 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5560 break;
5563 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5565 /* Copy word part most expediently. */
5566 emit_block_move (gen_rtx (MEM, BLKmode,
5567 memory_address (BLKmode, dest_rtx)),
5568 gen_rtx (MEM, BLKmode,
5569 memory_address (BLKmode,
5570 expand_expr (src, NULL_RTX,
5573 expand_expr (len, NULL_RTX, VOIDmode, 0),
5574 MIN (src_align, dest_align));
5575 return dest_rtx;
5578 /* These comparison functions need an instruction that returns an actual
5579 index. An ordinary compare that just sets the condition codes
5580 is not enough. */
5581 #ifdef HAVE_cmpstrsi
5582 case BUILT_IN_STRCMP:
5583 /* If not optimizing, call the library function. */
5584 if (!optimize)
5585 break;
5587 if (arglist == 0
5588 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5589 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5590 || TREE_CHAIN (arglist) == 0
5591 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5592 break;
5593 else if (!HAVE_cmpstrsi)
5594 break;
5596 tree arg1 = TREE_VALUE (arglist);
5597 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5601 len = c_strlen (arg1);
5602 if (len)
5603 len = size_binop (PLUS_EXPR, integer_one_node, len);
5604 len2 = c_strlen (arg2);
5605 if (len2)
5606 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5608 /* If we don't have a constant length for the first, use the length
5609 of the second, if we know it. We don't require a constant for
5610 this case; some cost analysis could be done if both are available
5611 but neither is constant. For now, assume they're equally cheap.
5613 If both strings have constant lengths, use the smaller. This
5614 could arise if optimization results in strcpy being called with
5615 two fixed strings, or if the code was machine-generated. We should
5616 add some code to the `memcmp' handler below to deal with such
5617 situations, someday. */
5618 if (!len || TREE_CODE (len) != INTEGER_CST)
5625 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5627 if (tree_int_cst_lt (len2, len))
5631 chainon (arglist, build_tree_list (NULL_TREE, len));
5635 case BUILT_IN_MEMCMP:
5636 /* If not optimizing, call the library function. */
5637 if (!optimize)
5638 break;
5640 if (arglist == 0
5641 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5642 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5643 || TREE_CHAIN (arglist) == 0
5644 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5645 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5646 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5647 break;
5648 else if (!HAVE_cmpstrsi)
5649 break;
5651 tree arg1 = TREE_VALUE (arglist);
5652 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5653 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5657 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5659 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5660 enum machine_mode insn_mode
5661 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5663 /* If we don't have POINTER_TYPE, call the function. */
5664 if (arg1_align == 0 || arg2_align == 0)
5666 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5667 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5668 break;
5671 /* Make a place to write the result of the instruction. */
5672 result = target;
5673 if (! (result != 0
5674 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5675 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5676 result = gen_reg_rtx (insn_mode);
5678 emit_insn (gen_cmpstrsi (result,
5679 gen_rtx (MEM, BLKmode,
5680 expand_expr (arg1, NULL_RTX, Pmode,
5682 gen_rtx (MEM, BLKmode,
5683 expand_expr (arg2, NULL_RTX, Pmode,
5685 expand_expr (len, NULL_RTX, VOIDmode, 0),
5686 GEN_INT (MIN (arg1_align, arg2_align))));
5688 /* Return the value in the proper mode for this function. */
5689 mode = TYPE_MODE (TREE_TYPE (exp));
5690 if (GET_MODE (result) == mode)
5691 return result;
5692 else if (target != 0)
5694 convert_move (target, result, 0);
5695 return target;
5697 else
5698 return convert_to_mode (mode, result, 0);
5700 #else
5701 case BUILT_IN_STRCMP:
5702 case BUILT_IN_MEMCMP:
5703 break;
5704 #endif
5706 default: /* just do library call, if unknown builtin */
5707 error ("built-in function %s not currently supported",
5708 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5709 break;
5711 /* The switch statement above can drop through to cause the function
5712 to be called normally. */
5714 return expand_call (exp, target, ignore);
5717 /* Expand code for a post- or pre- increment or decrement
5718 and return the RTX for the result.
5719 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
5721 static rtx
5722 expand_increment (exp, post)
5723 register tree exp;
5724 int post;
5726 register rtx op0, op1;
5727 register rtx temp, value;
5728 register tree incremented = TREE_OPERAND (exp, 0);
5729 optab this_optab = add_optab;
5731 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5732 int op0_is_copy = 0;
5734 /* Stabilize any component ref that might need to be
5735 evaluated more than once below. */
5736 if (TREE_CODE (incremented) == BIT_FIELD_REF
5737 || (TREE_CODE (incremented) == COMPONENT_REF
5738 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5739 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5740 incremented = stabilize_reference (incremented);
5742 /* Compute the operands as RTX.
5743 Note whether OP0 is the actual lvalue or a copy of it:
5744 I believe it is a copy iff it is a register or subreg
5745 and insns were generated in computing it. */
5746 temp = get_last_insn ();
5747 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
5748 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5749 && temp != get_last_insn ());
5750 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5752 /* Decide whether incrementing or decrementing. */
5753 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5754 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5755 this_optab = sub_optab;
5757 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5758 then we cannot just increment OP0. We must
5759 therefore contrive to increment the original value.
5760 Then we can return OP0 since it is a copy of the old value. */
5763 /* This is the easiest way to increment the value wherever it is.
5764 Problems with multiple evaluation of INCREMENTED
5765 are prevented because either (1) it is a component_ref,
5766 in which case it was stabilized above, or (2) it is an array_ref
5767 with constant index in an array in a register, which is
5768 safe to reevaluate. */
5769 tree newexp = build ((this_optab == add_optab
5770 ? PLUS_EXPR : MINUS_EXPR),
5773 TREE_OPERAND (exp, 1));
5774 temp = expand_assignment (incremented, newexp, ! post, 0);
5775 return post ? op0 : temp;
5778 /* Convert decrement by a constant into a negative increment. */
5779 if (this_optab == sub_optab
5780 && GET_CODE (op1) == CONST_INT)
5782 op1 = GEN_INT (- INTVAL (op1));
5783 this_optab = add_optab;
5788 /* We have a true reference to the value in OP0.
5789 If there is an insn to add or subtract in this mode, queue it. */
5791 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
5792 op0 = stabilize (op0);
5793 #endif
5795 icode = (int) this_optab->handlers[(int) mode].insn_code;
5796 if (icode != (int) CODE_FOR_nothing
5797 /* Make sure that OP0 is valid for operands 0 and 1
5798 of the insn we want to queue. */
5799 && (*insn_operand_predicate[icode][0]) (op0, mode)
5800 && (*insn_operand_predicate[icode][1]) (op0, mode))
5802 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5803 op1 = force_reg (mode, op1);
5805 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5809 /* Preincrement, or we can't increment with one simple insn. */
5810 if (post)
5811 /* Save a copy of the value before inc or dec, to return it later. */
5812 temp = value = copy_to_reg (op0);
5813 else
5814 /* Arrange to return the incremented value. */
5815 /* Copy the rtx because expand_binop will protect from the queue,
5816 and the results of that would be invalid for us to return
5817 if our caller does emit_queue before using our result. */
5818 temp = copy_rtx (value = op0);
5820 /* Increment however we can. */
5821 op1 = expand_binop (mode, this_optab, value, op1, op0,
5822 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5823 /* Make sure the value is stored into OP0. */
5824 if (op1 != op0)
5825 emit_move_insn (op0, op1);
5827 return temp;
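/* Worked example: for `y = x++' on a register int X, OP0 is X's
   register, POST is 1, and the copy saved in TEMP holds the old value;
   X is then incremented in place and TEMP is returned as the value of
   the expression.  For `y = ++x' no copy of the old value is needed:
   the incremented register itself is returned.  */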
5830 /* Expand all function calls contained within EXP, innermost ones first.
5831 But don't look within expressions that have sequence points.
5832 For each CALL_EXPR, record the rtx for its value
5833 in the CALL_EXPR_RTL field. */
5835 static void
5836 preexpand_calls (exp)
5837 tree exp;
5839 register int nops, i;
5840 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5842 if (! do_preexpand_calls)
5843 return;
5845 /* Only expressions and references can contain calls. */
5847 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5848 return;
5850 switch (TREE_CODE (exp))
5852 case CALL_EXPR:
5853 /* Do nothing if already expanded. */
5854 if (CALL_EXPR_RTL (exp) != 0)
5855 return;
5857 /* Do nothing to built-in functions. */
5858 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5859 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5860 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5861 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
5862 return;
5864 case COMPOUND_EXPR:
5865 case COND_EXPR:
5866 case TRUTH_ANDIF_EXPR:
5867 case TRUTH_ORIF_EXPR:
5868 /* If we find one of these, then we can be sure
5869 the adjust will be done for it (since it makes jumps).
5870 Do it now, so that if this is inside an argument
5871 of a function, we don't get the stack adjustment
5872 after some other args have already been pushed. */
5873 do_pending_stack_adjust ();
5874 break;
5878 case WITH_CLEANUP_EXPR:
5879 return;
5881 case SAVE_EXPR:
5882 if (SAVE_EXPR_RTL (exp) != 0)
5883 return;
5886 nops = tree_code_length[(int) TREE_CODE (exp)];
5887 for (i = 0; i < nops; i++)
5888 if (TREE_OPERAND (exp, i) != 0)
5890 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5891 if (type == 'e' || type == '<' || type == '1' || type == '2'
5892 || type == 'r')
5893 preexpand_calls (TREE_OPERAND (exp, i));
5897 /* At the start of a function, record that we have no previously-pushed
5898 arguments waiting to be popped. */
5900 void
5901 init_pending_stack_adjust ()
5903 pending_stack_adjust = 0;
5906 /* When exiting from function, if safe, clear out any pending stack adjust
5907 so the adjustment won't get done. */
5909 void
5910 clear_pending_stack_adjust ()
5912 #ifdef EXIT_IGNORE_STACK
5913 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5914 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
5915 && ! flag_inline_functions)
5916 pending_stack_adjust = 0;
5917 #endif
5920 /* Pop any previously-pushed arguments that have not been popped yet. */
5922 void
5923 do_pending_stack_adjust ()
5925 if (inhibit_defer_pop == 0)
5927 if (pending_stack_adjust != 0)
5928 adjust_stack (GEN_INT (pending_stack_adjust));
5929 pending_stack_adjust = 0;
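/* Example: after two calls whose pops were deferred (inhibit_defer_pop
   having been zero), each of which pushed 8 bytes of arguments,
   pending_stack_adjust is 16 and a single adjust_stack of 16 bytes
   here replaces what would otherwise be two separate 8-byte pops.  */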
5933 /* Expand all cleanups up to OLD_CLEANUPS.
5934 Needed here, and also for language-dependent calls. */
5936 void
5937 expand_cleanups_to (old_cleanups)
5938 tree old_cleanups;
5940 while (cleanups_this_call != old_cleanups)
5942 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
5943 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5947 /* Expand conditional expressions. */
5949 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5950 LABEL is an rtx of code CODE_LABEL, in this function and all the
5953 void
5954 jumpifnot (exp, label)
5955 tree exp;
5956 rtx label;
5958 do_jump (exp, label, NULL_RTX);
5961 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5963 void
5964 jumpif (exp, label)
5965 tree exp;
5966 rtx label;
5968 do_jump (exp, NULL_RTX, label);
5971 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5972 the result is zero, or IF_TRUE_LABEL if the result is one.
5973 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5974 meaning fall through in that case.
5976 do_jump always does any pending stack adjust except when it does not
5977 actually perform a jump. An example where there is no jump
5978 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5980 This function is responsible for optimizing cases such as
5981 &&, || and comparison operators in EXP. */
5983 void
5984 do_jump (exp, if_false_label, if_true_label)
5985 tree exp;
5986 rtx if_false_label, if_true_label;
5988 register enum tree_code code = TREE_CODE (exp);
5989 /* Some cases need to create a label to jump to
5990 in order to properly fall through.
5991 These cases set DROP_THROUGH_LABEL nonzero. */
5992 rtx drop_through_label = 0;
6005 case INTEGER_CST:
6006 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6007 if (temp)
6008 emit_jump (temp);
6009 break;
6011 #if 0
6012 /* This is not true with #pragma weak */
6013 case ADDR_EXPR:
6014 /* The address of something can never be zero. */
6015 if (if_true_label)
6016 emit_jump (if_true_label);
6017 break;
6018 #endif
6020 case NOP_EXPR:
6021 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6022 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6023 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6024 goto normal;
6025 case CONVERT_EXPR:
6026 /* If we are narrowing the operand, we have to do the compare in the
6027 narrower mode. */
6028 if ((TYPE_PRECISION (TREE_TYPE (exp))
6029 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6030 goto normal;
6031 case NON_LVALUE_EXPR:
6032 case REFERENCE_EXPR:
6037 /* These cannot change zero->non-zero or vice versa. */
6038 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6039 break;
6042 /* This is never less insns than evaluating the PLUS_EXPR followed by
6043 a test and can be longer if the test is eliminated. */
6044 case PLUS_EXPR:
6045 /* Reduce to minus. */
6046 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6047 TREE_OPERAND (exp, 0),
6048 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6049 TREE_OPERAND (exp, 1))));
6050 /* Process as MINUS. */
6052 case MINUS_EXPR:
6054 /* Non-zero iff operands of minus differ. */
6055 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6056 TREE_OPERAND (exp, 0),
6057 TREE_OPERAND (exp, 1)),
6058 NE, NE);
6059 break;
6061 case BIT_AND_EXPR:
6062 /* If we are AND'ing with a small constant, do this comparison in the
6063 smallest type that fits. If the machine doesn't have comparisons
6064 that small, it will be converted back to the wider comparison.
6065 This helps if we are testing the sign bit of a narrower object.
6066 combine can't do this for us because it can't know whether a
6067 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6069 if (! SLOW_BYTE_ACCESS
6070 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6071 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6072 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6073 && (type = type_for_size (i + 1, 1)) != 0
6074 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6075 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6076 != CODE_FOR_nothing))
6078 do_jump (convert (type, exp), if_false_label, if_true_label);
6079 break;
6081 goto normal;
6083 case TRUTH_NOT_EXPR:
6084 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6085 break;
6087 case TRUTH_ANDIF_EXPR:
6088 if (if_false_label == 0)
6089 if_false_label = drop_through_label = gen_label_rtx ();
6090 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6091 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6092 break;
6094 case TRUTH_ORIF_EXPR:
6095 if (if_true_label == 0)
6096 if_true_label = drop_through_label = gen_label_rtx ();
6097 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6098 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6099 break;
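/* For `if (a && b) ...' this generates: jump to the false label (or
   the drop-through label) if A is zero, then test B the same way; for
   `a || b' the first test jumps to the true label if A is nonzero.
   No boolean value is ever materialized in a register.  */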
6101 case COMPOUND_EXPR:
6102 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6105 do_pending_stack_adjust ();
6106 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6107 break;
6109 case COMPONENT_REF:
6110 case BIT_FIELD_REF:
6111 case ARRAY_REF:
6113 int bitsize, bitpos, unsignedp;
6114 enum machine_mode mode;
6115 tree type;
6116 tree offset;
6117 int volatilep = 0;
6119 /* Get description of this reference. We don't actually care
6120 about the underlying object here. */
6121 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6122 &mode, &unsignedp, &volatilep);
6124 type = type_for_size (bitsize, unsignedp);
6125 if (! SLOW_BYTE_ACCESS
6126 && type != 0 && bitsize >= 0
6127 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6128 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6129 != CODE_FOR_nothing))
6131 do_jump (convert (type, exp), if_false_label, if_true_label);
6132 break;
6134 goto normal;
6136 case COND_EXPR:
6138 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6139 if (integer_onep (TREE_OPERAND (exp, 1))
6140 && integer_zerop (TREE_OPERAND (exp, 2)))
6141 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6143 else if (integer_zerop (TREE_OPERAND (exp, 1))
6144 && integer_onep (TREE_OPERAND (exp, 2)))
6145 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6149 register rtx label1 = gen_label_rtx ();
6150 drop_through_label = gen_label_rtx ();
6151 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6152 /* Now the THEN-expression. */
6153 do_jump (TREE_OPERAND (exp, 1),
6154 if_false_label ? if_false_label : drop_through_label,
6155 if_true_label ? if_true_label : drop_through_label);
6156 /* In case the do_jump just above never jumps. */
6157 do_pending_stack_adjust ();
6158 emit_label (label1);
6159 /* Now the ELSE-expression. */
6160 do_jump (TREE_OPERAND (exp, 2),
6161 if_false_label ? if_false_label : drop_through_label,
6162 if_true_label ? if_true_label : drop_through_label);
6163 }
6164 break;
6166 case EQ_EXPR:
6167 if (integer_zerop (TREE_OPERAND (exp, 1)))
6168 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6169 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6170 == MODE_INT)
6171 &&
6172 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6173 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6174 else
6175 comparison = compare (exp, EQ, EQ);
6176 break;
6178 case NE_EXPR:
6179 if (integer_zerop (TREE_OPERAND (exp, 1)))
6180 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6181 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6182 == MODE_INT)
6183 &&
6184 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6185 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6186 else
6187 comparison = compare (exp, NE, NE);
6188 break;
6190 case LT_EXPR:
6191 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6192 == MODE_INT)
6193 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6194 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6195 else
6196 comparison = compare (exp, LT, LTU);
6197 break;
6199 case LE_EXPR:
6200 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6201 == MODE_INT)
6202 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6203 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6204 else
6205 comparison = compare (exp, LE, LEU);
6206 break;
6208 case GT_EXPR:
6209 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6210 == MODE_INT)
6211 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6212 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6213 else
6214 comparison = compare (exp, GT, GTU);
6215 break;
6217 case GE_EXPR:
6218 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6219 == MODE_INT)
6220 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6221 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6222 else
6223 comparison = compare (exp, GE, GEU);
6224 break;
6226 default:
6228 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6229 #if 0
6230 /* This is not needed any more and causes poor code since it causes
6231 comparisons and tests from non-SI objects to have different code
6232 sequences. */
6233 /* Copy to register to avoid generating bad insns by cse
6234 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6235 if (!cse_not_expected && GET_CODE (temp) == MEM)
6236 temp = copy_to_reg (temp);
6237 #endif
6238 do_pending_stack_adjust ();
6239 if (GET_CODE (temp) == CONST_INT)
6240 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6241 else if (GET_CODE (temp) == LABEL_REF)
6242 comparison = const_true_rtx;
6243 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6244 && !can_compare_p (GET_MODE (temp)))
6245 /* Note swapping the labels gives us not-equal. */
6246 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6247 else if (GET_MODE (temp) != VOIDmode)
6248 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6249 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6250 GET_MODE (temp), NULL_RTX, 0);
6255 /* Do any postincrements in the expression that was tested. */
6256 emit_queue ();
6258 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6259 straight into a conditional jump instruction as the jump condition.
6260 Otherwise, all the work has been done already. */
6262 if (comparison == const_true_rtx)
6263 {
6264 if (if_true_label)
6265 emit_jump (if_true_label);
6266 }
6267 else if (comparison == const0_rtx)
6268 {
6269 if (if_false_label)
6270 emit_jump (if_false_label);
6271 }
6272 else if (comparison)
6273 do_jump_for_compare (comparison, if_false_label, if_true_label);
6277 if (drop_through_label)
6279 /* If do_jump produces code that might be jumped around,
6280 do any stack adjusts from that code, before the place
6281 where control merges in. */
6282 do_pending_stack_adjust ();
6283 emit_label (drop_through_label);
6287 /* Given a comparison expression EXP for values too wide to be compared
6288 with one insn, test the comparison and jump to the appropriate label.
6289 The code of EXP is ignored; we always test GT if SWAP is 0,
6290 and LT if SWAP is 1. */
6292 static void
6293 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6294 tree exp;
6295 int swap;
6296 rtx if_false_label, if_true_label;
6297 {
6298 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6299 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6300 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6301 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6302 rtx drop_through_label = 0;
6303 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6304 int i;
6306 if (! if_true_label || ! if_false_label)
6307 drop_through_label = gen_label_rtx ();
6308 if (! if_true_label)
6309 if_true_label = drop_through_label;
6310 if (! if_false_label)
6311 if_false_label = drop_through_label;
6313 /* Compare a word at a time, high order first. */
6314 for (i = 0; i < nwords; i++)
6315 {
6316 rtx comp;
6317 rtx op0_word, op1_word;
6319 if (WORDS_BIG_ENDIAN)
6320 {
6321 op0_word = operand_subword_force (op0, i, mode);
6322 op1_word = operand_subword_force (op1, i, mode);
6323 }
6324 else
6325 {
6326 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6327 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6328 }
6330 /* All but high-order word must be compared as unsigned. */
6331 comp = compare_from_rtx (op0_word, op1_word,
6332 (unsignedp || i > 0) ? GTU : GT,
6333 unsignedp, word_mode, NULL_RTX, 0);
6334 if (comp == const_true_rtx)
6335 emit_jump (if_true_label);
6336 else if (comp != const0_rtx)
6337 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6339 /* Consider lower words only if these are equal. */
6340 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6341 NULL_RTX, 0);
6342 if (comp == const_true_rtx)
6343 emit_jump (if_false_label);
6344 else if (comp != const0_rtx)
6345 do_jump_for_compare (comp, NULL_RTX, if_false_label);
6346 }
6348 if (if_false_label)
6349 emit_jump (if_false_label);
6350 if (drop_through_label)
6351 emit_label (drop_through_label);
6352 }
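#if 0
/* Illustrative sketch only, not part of the compiler: what the
   word-at-a-time loop above computes, written at the source level for
   a two-word unsigned quantity.  All names here are invented.  */
static int
example_gt_by_parts (op0_hi, op0_lo, op1_hi, op1_lo)
     unsigned long op0_hi, op0_lo, op1_hi, op1_lo;
{
  if (op0_hi > op1_hi)		/* A higher word decides, if it differs.  */
    return 1;
  if (op0_hi != op1_hi)
    return 0;			/* High words differ, so op0 < op1.  */
  return op0_lo > op1_lo;	/* Lower words compare unsigned.  */
}
#endif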
6354 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6355 with one insn, test the comparison and jump to the appropriate label. */
6357 static void
6358 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6359 tree exp;
6360 rtx if_false_label, if_true_label;
6361 {
6362 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6363 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6364 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6365 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6366 int i;
6367 rtx drop_through_label = 0;
6369 if (! if_false_label)
6370 drop_through_label = if_false_label = gen_label_rtx ();
6372 for (i = 0; i < nwords; i++)
6373 {
6374 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6375 operand_subword_force (op1, i, mode),
6376 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6377 word_mode, NULL_RTX, 0);
6378 if (comp == const0_rtx)
6379 emit_jump (if_false_label);
6380 else if (comp != const_true_rtx)
6381 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6382 }
6384 if (if_true_label)
6385 emit_jump (if_true_label);
6386 if (drop_through_label)
6387 emit_label (drop_through_label);
6388 }
6390 /* Jump according to whether OP0 is 0.
6391 We assume that OP0 has an integer mode that is too wide
6392 for the available compare insns. */
6394 static void
6395 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6396 rtx op0;
6397 rtx if_false_label, if_true_label;
6398 {
6399 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6400 int i;
6401 rtx drop_through_label = 0;
6403 if (! if_false_label)
6404 drop_through_label = if_false_label = gen_label_rtx ();
6406 for (i = 0; i < nwords; i++)
6407 {
6408 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6409 GET_MODE (op0)),
6410 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
6411 if (comp == const0_rtx)
6412 emit_jump (if_false_label);
6413 else if (comp != const_true_rtx)
6414 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6415 }
6417 if (if_true_label)
6418 emit_jump (if_true_label);
6419 if (drop_through_label)
6420 emit_label (drop_through_label);
6421 }
6423 /* Given a comparison expression in rtl form, output conditional branches to
6424 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6426 static void
6427 do_jump_for_compare (comparison, if_false_label, if_true_label)
6428 rtx comparison, if_false_label, if_true_label;
6429 {
6430 if (if_true_label)
6431 {
6432 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6433 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6434 else
6435 abort ();
6437 if (if_false_label)
6438 emit_jump (if_false_label);
6439 }
6440 else if (if_false_label)
6441 {
6442 rtx insn;
6443 rtx prev = PREV_INSN (get_last_insn ());
6444 rtx branch = 0;
6446 /* Output the branch with the opposite condition. Then try to invert
6447 what is generated. If more than one insn is a branch, or if the
6448 branch is not the last insn written, abort. If we can't invert
6449 the branch, make a true label, redirect this jump to that,
6450 emit a jump to the false label and define the true label. */
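/* In other words (sketch): wanting `if (!cond) goto FALSE', we first
   emit `if (cond) goto FALSE' and try to invert it in place; if the
   inversion fails, the jump is redirected to give
	if (cond) goto TRUE;  goto FALSE;  TRUE:
   which is correct at the cost of one extra jump.  */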
6452 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6453 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6454 else
6455 abort ();
6457 /* Here we get the insn before what was just emitted.
6458 On some machines, emitting the branch can discard
6459 the previous compare insn and emit a replacement. */
6460 if (prev == 0)
6461 /* If there's only one preceding insn... */
6462 insn = get_insns ();
6463 else
6464 insn = NEXT_INSN (prev);
6466 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6467 if (GET_CODE (insn) == JUMP_INSN)
6468 {
6469 if (branch)
6470 abort ();
6471 branch = insn;
6472 }
6474 if (branch != get_last_insn ())
6475 abort ();
6477 if (! invert_jump (branch, if_false_label))
6478 {
6479 if_true_label = gen_label_rtx ();
6480 redirect_jump (branch, if_true_label);
6481 emit_jump (if_false_label);
6482 emit_label (if_true_label);
6483 }
6484 }
6485 }
6487 /* Generate code for a comparison expression EXP
6488 (including code to compute the values to be compared)
6489 and set (CC0) according to the result.
6490 SIGNED_CODE should be the rtx operation for this comparison for
6491 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6493 We force a stack adjustment unless there are currently
6494 things pushed on the stack that aren't yet used. */
6496 static rtx
6497 compare (exp, signed_code, unsigned_code)
6498 register tree exp;
6499 enum rtx_code signed_code, unsigned_code;
6500 {
6501 register rtx op0
6502 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6503 register rtx op1
6504 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6505 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6506 register enum machine_mode mode = TYPE_MODE (type);
6507 int unsignedp = TREE_UNSIGNED (type);
6508 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6510 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6511 ((mode == BLKmode)
6512 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6513 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6514 }
6516 /* Like compare but expects the values to compare as two rtx's.
6517 The decision as to signed or unsigned comparison must be made by the caller.
6519 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6520 compared.
6522 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6523 size of MODE should be used. */
6525 rtx
6526 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6527 register rtx op0, op1;
6528 enum rtx_code code;
6529 int unsignedp;
6530 enum machine_mode mode;
6531 rtx size;
6532 int align;
6533 {
6534 /* If one operand is constant, make it the second one. */
6536 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6537 {
6538 rtx tem = op0;
6539 op0 = op1;
6540 op1 = tem;
6541 code = swap_condition (code);
6542 }
6544 if (flag_force_mem)
6545 {
6546 op0 = force_not_mem (op0);
6547 op1 = force_not_mem (op1);
6548 }
6550 do_pending_stack_adjust ();
6552 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6553 return simplify_relational_operation (code, mode, op0, op1);
6555 #if 0
6556 /* There's no need to do this now that combine.c can eliminate lots of
6557 sign extensions. This can be less efficient in certain cases on other
6558 machines. */
6560 /* If this is a signed equality comparison, we can do it as an
6561 unsigned comparison since zero-extension is cheaper than sign
6562 extension and comparisons with zero are done as unsigned. This is
6563 the case even on machines that can do fast sign extension, since
6564 zero-extension is easier to combine with other operations than
6565 sign-extension is. If we are comparing against a constant, we must
6566 convert it to what it would look like unsigned. */
6567 if ((code == EQ || code == NE) && ! unsignedp
6568 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6569 {
6570 if (GET_CODE (op1) == CONST_INT
6571 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6572 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6573 unsignedp = 1;
6574 }
6575 #endif
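/* For example (illustrative): in QImode the signed test `x == -1' and
   the unsigned test `x == 0xff' are the same bit pattern; the masking
   above rewrites the constant so the unsigned form can be used.  Note
   the whole transformation is compiled out, per the comment above.  */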
6577 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6579 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6580 }
6582 /* Generate code to calculate EXP using a store-flag instruction
6583 and return an rtx for the result. EXP is either a comparison
6584 or a TRUTH_NOT_EXPR whose operand is a comparison.
6586 If TARGET is nonzero, store the result there if convenient.
6588 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6589 cheap.
6591 Return zero if there is no suitable set-flag instruction
6592 available on this machine.
6594 Once expand_expr has been called on the arguments of the comparison,
6595 we are committed to doing the store flag, since it is not safe to
6596 re-evaluate the expression. We emit the store-flag insn by calling
6597 emit_store_flag, but only expand the arguments if we have a reason
6598 to believe that emit_store_flag will be successful. If we think that
6599 it will, but it isn't, we have to simulate the store-flag with a
6600 set/jump/set sequence. */
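/* The set/jump/set fallback has this source-level shape (sketch):
	result = 1;  if (op0 OP op1) goto LAB;  result = 0;  LAB:
   i.e. two moves with a conditional jump around the second one.  */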
6602 static rtx
6603 do_store_flag (exp, target, mode, only_cheap)
6604 tree exp;
6605 rtx target;
6606 enum machine_mode mode;
6607 int only_cheap;
6608 {
6609 enum rtx_code code;
6610 tree arg0, arg1, type;
6611 tree tem;
6612 enum machine_mode operand_mode;
6613 int invert = 0;
6614 int unsignedp;
6615 rtx op0, op1;
6616 enum insn_code icode;
6617 rtx subtarget = target;
6618 rtx result, label, pattern, jump_pat;
6620 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6621 result at the end. We can't simply invert the test since it would
6622 have already been inverted if it were valid. This case occurs for
6623 some floating-point comparisons. */
6625 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6626 invert = 1, exp = TREE_OPERAND (exp, 0);
6628 arg0 = TREE_OPERAND (exp, 0);
6629 arg1 = TREE_OPERAND (exp, 1);
6630 type = TREE_TYPE (arg0);
6631 operand_mode = TYPE_MODE (type);
6632 unsignedp = TREE_UNSIGNED (type);
6634 /* We won't bother with BLKmode store-flag operations because it would mean
6635 passing a lot of information to emit_store_flag. */
6636 if (operand_mode == BLKmode)
6637 return 0;
6642 /* Get the rtx comparison code to use. We know that EXP is a comparison
6643 operation of some type. Some comparisons against 1 and -1 can be
6644 converted to comparisons with zero. Do so here so that the tests
6645 below will be aware that we have a comparison with zero. These
6646 tests will not catch constants in the first operand, but constants
6647 are rarely passed as the first operand. */
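/* For instance, the signed test `x < 1' is rewritten as `x <= 0' and
   `x > -1' as `x >= 0', so that the zero-comparison special cases
   below can apply.  */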
6649 switch (TREE_CODE (exp))
6650 {
6651 case EQ_EXPR:
6652 code = EQ;
6653 break;
6654 case NE_EXPR:
6655 code = NE;
6656 break;
6657 case LT_EXPR:
6658 if (integer_onep (arg1))
6659 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6660 else
6661 code = unsignedp ? LTU : LT;
6662 break;
6663 case LE_EXPR:
6664 if (integer_all_onesp (arg1))
6665 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6666 else
6667 code = unsignedp ? LEU : LE;
6668 break;
6669 case GT_EXPR:
6670 if (integer_all_onesp (arg1))
6671 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6672 else
6673 code = unsignedp ? GTU : GT;
6674 break;
6675 case GE_EXPR:
6676 if (integer_onep (arg1))
6677 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6678 else
6679 code = unsignedp ? GEU : GE;
6680 break;
6681 default:
6682 abort ();
6683 }
6685 /* Put a constant second. */
6686 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6687 {
6688 tem = arg0; arg0 = arg1; arg1 = tem;
6689 code = swap_condition (code);
6690 }
6692 /* If this is an equality or inequality test of a single bit, we can
6693 do this by shifting the bit being tested to the low-order bit and
6694 masking the result with the constant 1. If the condition was EQ,
6695 we xor it with 1. This does not require an scc insn and is faster
6696 than an scc insn even if we have it. */
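/* Illustrative case, not from the original sources: `(x & 8) != 0'
   becomes `(x >> 3) & 1', and the EQ form `(x & 8) == 0' gets a final
   `xor 1'.  When the tested bit is the sign bit, the logical shift
   already leaves just 0 or 1, so the masking is skipped below.  */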
6698 if ((code == NE || code == EQ)
6699 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6700 && integer_pow2p (TREE_OPERAND (arg0, 1))
6701 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
6702 {
6703 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6704 NULL_RTX, VOIDmode, 0)));
6706 if (subtarget == 0 || GET_CODE (subtarget) != REG
6707 || GET_MODE (subtarget) != operand_mode
6708 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6709 subtarget = 0;
6711 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6714 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6715 size_int (bitnum), target, 1);
6717 if (GET_MODE (op0) != mode)
6718 op0 = convert_to_mode (mode, op0, 1);
6720 if (bitnum != TYPE_PRECISION (type) - 1)
6721 op0 = expand_and (op0, const1_rtx, target);
6723 if ((code == EQ && ! invert) || (code == NE && invert))
6724 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6725 OPTAB_LIB_WIDEN);
6727 return op0;
6728 }
6730 /* Now see if we are likely to be able to do this. Return if not. */
6731 if (! can_compare_p (operand_mode))
6732 return 0;
6733 icode = setcc_gen_code[(int) code];
6734 if (icode == CODE_FOR_nothing
6735 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
6736 {
6737 /* We can only do this if it is one of the special cases that
6738 can be handled without an scc insn. */
6739 if ((code == LT && integer_zerop (arg1))
6740 || (! only_cheap && code == GE && integer_zerop (arg1)))
6741 ;
6742 else if (BRANCH_COST >= 0
6743 && ! only_cheap && (code == NE || code == EQ)
6744 && TREE_CODE (type) != REAL_TYPE
6745 && ((abs_optab->handlers[(int) operand_mode].insn_code
6746 != CODE_FOR_nothing)
6747 || (ffs_optab->handlers[(int) operand_mode].insn_code
6748 != CODE_FOR_nothing)))
6749 ;
6750 else
6751 return 0;
6752 }
6754 preexpand_calls (exp);
6755 if (subtarget == 0 || GET_CODE (subtarget) != REG
6756 || GET_MODE (subtarget) != operand_mode
6757 || ! safe_from_p (subtarget, arg1))
6758 subtarget = 0;
6760 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
6761 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6763 if (target == 0)
6764 target = gen_reg_rtx (mode);
6766 result = emit_store_flag (target, code, op0, op1, operand_mode,
6767 unsignedp, 1);
6769 if (result)
6770 {
6771 if (invert)
6772 result = expand_binop (mode, xor_optab, result, const1_rtx,
6773 result, 0, OPTAB_LIB_WIDEN);
6774 return result;
6775 }
6777 /* If this failed, we have to do this with set/compare/jump/set code. */
6778 if (target == 0 || GET_CODE (target) != REG
6779 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
6780 target = gen_reg_rtx (GET_MODE (target));
6782 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
6783 result = compare_from_rtx (op0, op1, code, unsignedp,
6784 operand_mode, NULL_RTX, 0);
6785 if (GET_CODE (result) == CONST_INT)
6786 return (((result == const0_rtx && ! invert)
6787 || (result != const0_rtx && invert))
6788 ? const0_rtx : const1_rtx);
6790 label = gen_label_rtx ();
6791 if (bcc_gen_fctn[(int) code] == 0)
6792 abort ();
6794 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
6795 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
6796 emit_label (label);
6798 return target;
6799 }
6801 /* Generate a tablejump instruction (used for switch statements). */
6803 #ifdef HAVE_tablejump
6805 /* INDEX is the value being switched on, with the lowest value
6806 in the table already subtracted.
6807 MODE is its expected mode (needed if INDEX is constant).
6808 RANGE is the length of the jump table.
6809 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
6811 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
6812 index value is out of range. */
6814 void
6815 do_tablejump (index, mode, range, table_label, default_label)
6816 rtx index, range, table_label, default_label;
6817 enum machine_mode mode;
6818 {
6819 register rtx temp, vector;
6821 /* Do an unsigned comparison (in the proper mode) between the index
6822 expression and the value which represents the length of the range.
6823 Since we just finished subtracting the lower bound of the range
6824 from the index expression, this comparison allows us to simultaneously
6825 check that the original index expression value is both greater than
6826 or equal to the minimum value of the range and less than or equal to
6827 the maximum value of the range. */
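/* Example (illustrative): for `switch' cases 5 through 9, INDEX
   arrives here as i - 5 and RANGE is 4.  The single unsigned test
   `(unsigned) (i - 5) > 4' rejects both i < 5 and i > 9, because a
   negative difference wraps around to a huge unsigned value.  */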
6829 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
6830 emit_jump_insn (gen_bltu (default_label));
6832 /* If index is in range, it must fit in Pmode.
6833 Convert to Pmode so we can index with it. */
6835 index = convert_to_mode (Pmode, index, 1);
6837 /* If flag_force_addr were to affect this address
6838 it could interfere with the tricky assumptions made
6839 about addresses that contain label-refs,
6840 which may be valid only very near the tablejump itself. */
6841 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
6842 GET_MODE_SIZE, because this indicates how large insns are. The other
6843 uses should all be Pmode, because they are addresses. This code
6844 could fail if addresses and insns are not the same size. */
6845 index = memory_address_noforce
6846 (CASE_VECTOR_MODE,
6847 gen_rtx (PLUS, Pmode,
6848 gen_rtx (MULT, Pmode, index,
6849 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
6850 gen_rtx (LABEL_REF, Pmode, table_label)));
6851 temp = gen_reg_rtx (CASE_VECTOR_MODE);
6852 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
6853 RTX_UNCHANGING_P (vector) = 1;
6854 convert_move (temp, vector, 0);
6856 emit_jump_insn (gen_tablejump (temp, table_label));
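/* The effect of the dispatch just emitted is, in source terms,
   `goto *table[index]', where each table entry occupies
   GET_MODE_SIZE (CASE_VECTOR_MODE) bytes.  */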
6858 #ifndef CASE_VECTOR_PC_RELATIVE
6859 /* If we are generating PIC code or if the table is PC-relative, the
6860 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
6861 if (! flag_pic)
6862 emit_barrier ();
6863 #endif
6864 }
6866 #endif /* HAVE_tablejump */