/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "gvarargs.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif

/* This array records the insn_code of insns to perform block moves.  */
static enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	   && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	   regno++)
	{
	  if (! HARD_REGNO_MODE_OK (regno, mode))
	    continue;

	  reg = gen_rtx (REG, mode, regno);

	  SET_SRC (pat) = mem;
	  SET_DEST (pat) = reg;
	  if (recog (pat, insn, &num_clobbers) >= 0)
	    direct_load[(int) mode] = 1;

	  SET_SRC (pat) = reg;
	  SET_DEST (pat) = mem;
	  if (recog (pat, insn, &num_clobbers) >= 0)
	    direct_store[(int) mode] = 1;
	}

      movstr_optab[(int) mode] = CODE_FOR_nothing;
    }

  end_sequence ();

#ifdef HAVE_movstrqi
  if (HAVE_movstrqi)
    movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
#endif
#ifdef HAVE_movstrhi
  if (HAVE_movstrhi)
    movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
#endif
#ifdef HAVE_movstrsi
  if (HAVE_movstrsi)
    movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
#endif
#ifdef HAVE_movstrdi
  if (HAVE_movstrdi)
    movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
#endif
#ifdef HAVE_movstrti
  if (HAVE_movstrti)
    movstr_optab[(int) TImode] = CODE_FOR_movstrti;
#endif
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
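
/* Illustrative sketch, added for exposition (not part of the original
   source): expanding `i++' as a value might queue the increment and use
   the QUEUED rtx wherever the pre-increment value of `i' is needed.
   REG here is a hypothetical rtx standing for `i'.  */
#if 0
  {
    rtx reg = gen_reg_rtx (SImode);	/* stands for the rtx of `i' */
    rtx body = gen_rtx (SET, VOIDmode, reg,
			gen_rtx (PLUS, SImode, reg, const1_rtx));
    rtx old = enqueue_insn (reg, body);	/* use OLD in place of REG */
  }
#endif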

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
	{
	  XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
	  XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
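
/* Illustrative sketch, added for exposition (not part of the original
   source): the protocol is to filter every possibly-QUEUED operand just
   before it goes into an insn, and to emit that insn immediately.  */
#if 0
  {
    rtx src = protect_from_queue (x, 0);	/* read access */
    rtx dst = protect_from_queue (target, 1);	/* write access */
    emit_insn (gen_move_insn (dst, src));	/* emit right away */
  }
#endif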

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, from));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
	   && ! MEM_VOLATILE_P (from)
	   && direct_load[(int) to_mode]
	   && ! mode_dependent_address_p (XEXP (from, 0)))
	  || GET_CODE (from) == REG
	  || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
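
/* Illustrative sketch, added for exposition (not part of the original
   source): widening a QImode value NARROW into an SImode register,
   with zero- or sign-extension selected by UNSIGNEDP.  */
#if 0
  {
    rtx wide = gen_reg_rtx (SImode);
    convert_move (wide, narrow, unsignedp);
  }
#endif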

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
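
/* Illustrative sketch, added for exposition (not part of the original
   source): unlike convert_move, convert_to_mode chooses the result rtx
   itself, so the caller just consumes the returned value.  QI_VAL here
   is a hypothetical QImode rtx.  */
#if 0
  rtx si_val = convert_to_mode (SImode, qi_val, 0);	/* signed widening */
#endif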

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
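
/* Worked example, added for exposition (not in the original source):
   with MOVE_MAX == 4 and ample alignment, l == 11 costs 11/4 = 2 SImode
   moves (3 bytes left), then 3/2 = 1 HImode move (1 byte left), then
   1 QImode move: 4 insns in all.  */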

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
	       ? gen_rtx (MEM, mode, data->from_addr)
	       : change_address (data->from, mode,
				 plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2 = size;
	      rtx last = get_last_insn ();
	      rtx pat;

	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 1,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
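
/* Illustrative sketch, added for exposition (not part of the original
   source): copying a 32-byte word-aligned BLKmode object Y into X.  */
#if 0
  emit_block_move (x, y, GEN_INT (32), UNITS_PER_WORD);
#endif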

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 1,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 1,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}
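
/* Illustrative sketch, added for exposition (not part of the original
   source): loading a constant into a fresh pseudo; emit_move_insn
   forces the constant to memory first if it is not legitimate.  */
#if 0
  {
    rtx temp = gen_reg_rtx (SImode);
    emit_move_insn (temp, GEN_INT (42));
  }
#endif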

/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
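
/* Illustrative sketch, added for exposition (not part of the original
   source): reserving 16 bytes of stack space, with no padding, and
   addressing it as a BLKmode memory reference.  */
#if 0
  {
    rtx addr = push_block (GEN_INT (16), 0, 0);
    rtx blk = gen_rtx (MEM, BLKmode, addr);
  }
#endif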

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  register rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	  goto ret;
	}
#endif /* PUSH_ROUNDING */

      /* Otherwise make space on the stack and copy the data
	 to the address of that space.  */

      /* Deduct words put into registers from the size we must copy.  */
      if (partial != 0)
	{
	  if (GET_CODE (size) == CONST_INT)
	    size = GEN_INT (INTVAL (size) - used);
	  else
	    size = expand_binop (GET_MODE (size), sub_optab, size,
				 GEN_INT (used), NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
	}

      /* Get the address of the stack space.
	 In this case, we do not deal with EXTRA separately.
	 A single stack adjust will do.  */
      if (! args_addr)
	{
	  temp = push_block (size, extra, where_pad == downward);
	  extra = 0;
	}
      else if (GET_CODE (args_so_far) == CONST_INT)
	temp = memory_address (BLKmode,
			       plus_constant (args_addr,
					      skip + INTVAL (args_so_far)));
      else
	temp = memory_address (BLKmode,
			       plus_constant (gen_rtx (PLUS, Pmode,
						       args_addr, args_so_far),
					      skip));

      /* TEMP is the address of the block.  Copy the data there.  */
      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
	      < MOVE_RATIO))
	{
	  move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			  INTVAL (size), align);
	  goto ret;
	}
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
      if (HAVE_movstrqi
	  && GET_CODE (size) == CONST_INT
	  && ((unsigned) INTVAL (size)
	      < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	{
	  emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				   xinner, size, GEN_INT (align)));
	  goto ret;
	}
#endif
#ifdef HAVE_movstrhi
      if (HAVE_movstrhi
	  && GET_CODE (size) == CONST_INT
	  && ((unsigned) INTVAL (size)
	      < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	{
	  emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				   xinner, size, GEN_INT (align)));
	  goto ret;
	}
#endif
#ifdef HAVE_movstrsi
      if (HAVE_movstrsi)
	{
	  emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				   xinner, size, GEN_INT (align)));
	  goto ret;
	}
#endif
#ifdef HAVE_movstrdi
      if (HAVE_movstrdi)
	{
	  emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				   xinner, size, GEN_INT (align)));
	  goto ret;
	}
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
      /* If the source is referenced relative to the stack pointer,
	 copy it to another register to stabilize it.  We do not need
	 to do this if we know that we won't be changing sp.  */

      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	temp = copy_to_reg (temp);
#endif

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the bcopy-arguments right away.  */
      NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
			 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			 size, Pmode);
#else
      emit_library_call (bcopy_libfunc, 1,
			 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			 size, Pmode);
#endif
      OK_DEFER_POP;
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr, INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
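
/* Illustrative sketch, added for exposition (not part of the original
   source): pushing a word-sized scalar VAL entirely on the stack (no
   register part, no padding) on a machine with real push insns, where
   ARGS_ADDR is 0.  */
#if 0
  emit_push_insn (val, SImode, NULL_TREE, NULL_RTX,
		  GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx);
#endif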

/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.  */

void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
	       struct args_size offset; struct args_size size; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree) 0, fun);

  args_size.constant = 0;
  args_size.var = 0;

  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
	 must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
	  || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
	abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
	 Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
	val = convert_to_mode (DFmode, val, 0), mode = DFmode;
#endif

      /* There's no need to call protect_from_queue, because
	 either emit_move_insn or emit_push_insn will do that.  */

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
	  && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
	val = force_operand (val, NULL_RTX);

      argvec[count].value = val;
      argvec[count].mode = mode;

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
	abort ();
#endif

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
	abort ();
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
	= FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, NULL_TREE,
			   argvec[count].reg && argvec[count].partial == 0,
			   NULL_TREE, &args_size, &argvec[count].offset,
			   &argvec[count].size);

      if (argvec[count].size.var)
	abort ();

#ifndef REG_PARM_STACK_SPACE
      if (argvec[count].partial)
	argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
#endif

      if (argvec[count].reg == 0 || argvec[count].partial != 0
#ifdef REG_PARM_STACK_SPACE
	  || 1
#endif
	  )
	args_size.constant += argvec[count].size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
      /* If this arg is actually passed on the stack, it might be
	 clobbering something we already put there (this library call might
	 be inside the evaluation of an argument to a function whose call
	 requires the stack).  This will only occur when the library call
	 has sufficient args to run out of argument registers.  Abort in
	 this case; if this ever occurs, code must be added to save and
	 restore the arg slot.  */

      if (argvec[count].reg == 0 || argvec[count].partial != 0)
	abort ();
#endif

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
    }
  va_end (p);

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

#ifdef STACK_BOUNDARY
  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
			 / STACK_BYTES) * STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
  args_size.constant = MAX (args_size.constant,
			    REG_PARM_STACK_SPACE ((tree) 0));
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;
  args_size.constant = 0;
#endif

#ifndef PUSH_ROUNDING
  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
#endif

#ifdef PUSH_ARGS_REVERSED
  inc = -1;
  argnum = nargs - 1;
#else
  inc = 1;
  argnum = 0;
#endif

  /* Push the args that need to be pushed.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (! (reg != 0 && partial == 0))
	emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
			argblock, GEN_INT (argvec[argnum].offset.constant));
    }

#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
#else
  argnum = 0;
#endif

  /* Now load any reg parms into their regs.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (reg != 0 && partial == 0)
	emit_move_insn (reg, val);
    }

  /* For version 1.37, try deleting this entirely.  */
  if (! no_queue)
    emit_queue ();

  /* Any regs containing parms remain in use through the call.  */
  start_sequence ();
  for (count = 0; count < nargs; count++)
    if (argvec[count].reg != 0)
      emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));

  use_insns = get_insns ();
  end_sequence ();

  fun = prepare_call_address (fun, NULL_TREE, &use_insns);

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */

  emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
	       FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
	       outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
	       old_inhibit_defer_pop + 1, use_insns, no_queue);

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
}
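
/* Illustrative sketch, added for exposition (not part of the original
   source): the alternating value/mode varargs convention, as used by
   the memcpy calls elsewhere in this file.  DST_ADDR, SRC_ADDR and
   SIZE_RTX are hypothetical Pmode rtx's.  */
#if 0
  emit_library_call (memcpy_libfunc, 1, VOIDmode, 3,
		     dst_addr, Pmode, src_addr, Pmode, size_rtx, Pmode);
#endif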

/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, NULL_RTX, VOIDmode, 0);

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
					    force_reg (Pmode, offset_rtx)));
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    MEM_VOLATILE_P (to_rtx) = 1;
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();

      return result;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
      rtx size = expr_size (from);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 1,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  return result;
}
2175 /* Generate code for computing expression EXP,
2176 and storing the value into TARGET.
2177 Returns TARGET or an equivalent value.
2178 TARGET may contain a QUEUED rtx.
2180 If SUGGEST_REG is nonzero, copy the value through a register
2181 and return that register, if that is possible.
2183 If the value stored is a constant, we return the constant. */
2186 store_expr (exp, target, suggest_reg)
2188 register rtx target;
2192 int dont_return_target = 0;
2194 if (TREE_CODE (exp) == COMPOUND_EXPR)
2196 /* Perform first part of compound expression, then assign from second
2198 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2200 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2202 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2204 /* For conditional expression, get safe form of the target. Then
2205 test the condition, doing the appropriate assignment on either
2206 side. This avoids the creation of unnecessary temporaries.
2207 For non-BLKmode, it is more efficient not to do this. */
2209 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2212 target = protect_from_queue (target, 1);
2215 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2216 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2218 emit_jump_insn (gen_jump (lab2));
2221 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2227 else if (suggest_reg && GET_CODE (target) == MEM
2228 && GET_MODE (target) != BLKmode)
2229 /* If target is in memory and caller wants value in a register instead,
2230 arrange that. Pass TARGET as target for expand_expr so that,
2231 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2232 We know expand_expr will not use the target in that case. */
2234 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2235 GET_MODE (target), 0);
2236 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2237 temp = copy_to_reg (temp);
2238 dont_return_target = 1;
2240 else if (queued_subexp_p (target))
2241 /* If target contains a postincrement, it is not safe
2242 to use as the returned value. It would access the wrong
2243 place by the time the queued increment gets output.
2244 So copy the value through a temporary and use that temp
2247 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2249 /* Expand EXP into a new pseudo. */
2250 temp = gen_reg_rtx (GET_MODE (target));
2251 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2254 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2255 dont_return_target = 1;
2259 temp = expand_expr (exp, target, GET_MODE (target), 0);
2260 /* DO return TARGET if it's a specified hardware register.
2261 expand_return relies on this. */
2262 if (!(target && GET_CODE (target) == REG
2263 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2264 && CONSTANT_P (temp))
2265 dont_return_target = 1;
2268 /* If value was not generated in the target, store it there.
2269     Convert the value to TARGET's type first if necessary.  */
2271 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2273 target = protect_from_queue (target, 1);
2274 if (GET_MODE (temp) != GET_MODE (target)
2275 && GET_MODE (temp) != VOIDmode)
2277 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2278 if (dont_return_target)
2280 /* In this case, we will return TEMP,
2281 so make sure it has the proper mode.
2282 But don't forget to store the value into TARGET. */
2283 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2284 emit_move_insn (target, temp);
2287 convert_move (target, temp, unsignedp);
2290 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2292 /* Handle copying a string constant into an array.
2293 The string constant may be shorter than the array.
2294 So copy just the string's actual length, and clear the rest. */
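	 For example, with `char buf[8] = "abc";' the string constant
	 supplies 4 bytes ("abc" plus the terminating null), so those
	 4 bytes are copied and the remaining 4 are cleared.  */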
2297 /* Get the size of the data type of the string,
2298 which is actually the size of the target. */
2299 size = expr_size (exp);
2300 if (GET_CODE (size) == CONST_INT
2301 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2302 emit_block_move (target, temp, size,
2303 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2306 /* Compute the size of the data to copy from the string. */
2308 = fold (build (MIN_EXPR, sizetype,
2309 size_binop (CEIL_DIV_EXPR,
2310 TYPE_SIZE (TREE_TYPE (exp)),
2311 size_int (BITS_PER_UNIT)),
2313 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2314 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2318 /* Copy that much. */
2319 emit_block_move (target, temp, copy_size_rtx,
2320 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2322 /* Figure out how much is left in TARGET
2323 that we have to clear. */
2324 if (GET_CODE (copy_size_rtx) == CONST_INT)
2326 temp = plus_constant (XEXP (target, 0),
2327 TREE_STRING_LENGTH (exp));
2328 size = plus_constant (size,
2329 - TREE_STRING_LENGTH (exp));
2333 enum machine_mode size_mode = Pmode;
2335 temp = force_reg (Pmode, XEXP (target, 0));
2336 temp = expand_binop (size_mode, add_optab, temp,
2337 copy_size_rtx, NULL_RTX, 0,
2340 size = expand_binop (size_mode, sub_optab, size,
2341 copy_size_rtx, NULL_RTX, 0,
2344 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2345 GET_MODE (size), 0, 0);
2346 label = gen_label_rtx ();
2347 emit_jump_insn (gen_blt (label));
2350 if (size != const0_rtx)
2352 #ifdef TARGET_MEM_FUNCTIONS
2353 emit_library_call (memset_libfunc, 1, VOIDmode, 3,
2354 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2356 emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
2357 temp, Pmode, size, Pmode);
2364 else if (GET_MODE (temp) == BLKmode)
2365 emit_block_move (target, temp, expr_size (exp),
2366 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2368 emit_move_insn (target, temp);
2370 if (dont_return_target)
2375 /* Store the value of constructor EXP into the rtx TARGET.
2376 TARGET is either a REG or a MEM. */
2379 store_constructor (exp, target)
2383 tree type = TREE_TYPE (exp);
2385 /* We know our target cannot conflict, since safe_from_p has been called. */
2387 /* Don't try copying piece by piece into a hard register
2388 since that is vulnerable to being clobbered by EXP.
2389 Instead, construct in a pseudo register and then copy it all. */
2390 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2392 rtx temp = gen_reg_rtx (GET_MODE (target));
2393 store_constructor (exp, temp);
2394 emit_move_insn (target, temp);
2399 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2403 /* Inform later passes that the whole union value is dead. */
2404 if (TREE_CODE (type) == UNION_TYPE)
2405 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2407 /* If we are building a static constructor into a register,
2408 set the initial value as zero so we can fold the value into
2410 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2411 emit_move_insn (target, const0_rtx);
2413 /* If the constructor has fewer fields than the structure,
2414 clear the whole structure first. */
2415 else if (list_length (CONSTRUCTOR_ELTS (exp))
2416 != list_length (TYPE_FIELDS (type)))
2417 clear_storage (target, int_size_in_bytes (type));
2419 /* Inform later passes that the old value is dead. */
2420 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2422 /* Store each element of the constructor into
2423 the corresponding field of TARGET. */
2425 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2427 register tree field = TREE_PURPOSE (elt);
2428 register enum machine_mode mode;
2433 /* Just ignore missing fields.
2434 We cleared the whole structure, above,
2435 if any fields are missing. */
2439 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2440 unsignedp = TREE_UNSIGNED (field);
2441 mode = DECL_MODE (field);
2442 if (DECL_BIT_FIELD (field))
2445 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2446 /* ??? This case remains to be written. */
2449 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2451 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2452 /* The alignment of TARGET is
2453 at least what its type requires. */
2455 TYPE_ALIGN (type) / BITS_PER_UNIT,
2456 int_size_in_bytes (type));
2459 else if (TREE_CODE (type) == ARRAY_TYPE)
2463 tree domain = TYPE_DOMAIN (type);
2464 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2465 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2466 tree elttype = TREE_TYPE (type);
2468 /* If the constructor has fewer fields than the structure,
2469	 clear the whole structure first.  Similarly if this is a
2470 static constructor of a non-BLKmode object. */
2472 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2473 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2474 clear_storage (target, maxelt - minelt + 1);
2476 /* Inform later passes that the old value is dead. */
2477 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2479 /* Store each element of the constructor into
2480 the corresponding element of TARGET, determined
2481 by counting the elements. */
2482 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2484 elt = TREE_CHAIN (elt), i++)
2486 register enum machine_mode mode;
2491 mode = TYPE_MODE (elttype);
2492 bitsize = GET_MODE_BITSIZE (mode);
2493 unsignedp = TREE_UNSIGNED (elttype);
2495 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2497 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2498 /* The alignment of TARGET is
2499 at least what its type requires. */
2501 TYPE_ALIGN (type) / BITS_PER_UNIT,
2502 int_size_in_bytes (type));
2510 /* Store the value of EXP (an expression tree)
2511 into a subfield of TARGET which has mode MODE and occupies
2512 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2513 If MODE is VOIDmode, it means that we are storing into a bit-field.
2515 If VALUE_MODE is VOIDmode, return nothing in particular.
2516 UNSIGNEDP is not used in this case.
2518 Otherwise, return an rtx for the value stored. This rtx
2519 has mode VALUE_MODE if that is convenient to do.
2520 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2522 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2523 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
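
   For example (illustrative only), storing EXP into a 5-bit bit-field
   that begins 3 bits into TARGET might be a call like
	store_field (target, 5, 3, VOIDmode, exp, VOIDmode, 0,
		     TYPE_ALIGN (type) / BITS_PER_UNIT,
		     int_size_in_bytes (type));
   where TYPE is the type of the containing structure; MODE is
   VOIDmode because the field is a bit-field, and VALUE_MODE is
   VOIDmode because no value is wanted back.  */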
2526 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2527 unsignedp, align, total_size)
2529 int bitsize, bitpos;
2530 enum machine_mode mode;
2532 enum machine_mode value_mode;
2537 HOST_WIDE_INT width_mask = 0;
2539 if (bitsize < HOST_BITS_PER_WIDE_INT)
2540 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2542 /* If we are storing into an unaligned field of an aligned union that is
2543 in a register, we may have the mode of TARGET being an integer mode but
2544 MODE == BLKmode. In that case, get an aligned object whose size and
2545 alignment are the same as TARGET and store TARGET into it (we can avoid
2546 the store if the field being stored is the entire width of TARGET). Then
2547 call ourselves recursively to store the field into a BLKmode version of
2548 that object. Finally, load from the object into TARGET. This is not
2549 very efficient in general, but should only be slightly more expensive
2550 than the otherwise-required unaligned accesses. Perhaps this can be
2551 cleaned up later. */
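  /* Concretely: if TARGET is an SImode pseudo and we must store a
     24-bit BLKmode field into it, we spill TARGET to an SImode stack
     temporary, store the field through a BLKmode copy of that slot,
     and then move the slot back into TARGET.  */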
2554 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2556 rtx object = assign_stack_temp (GET_MODE (target),
2557 GET_MODE_SIZE (GET_MODE (target)), 0);
2558 rtx blk_object = copy_rtx (object);
2560 PUT_MODE (blk_object, BLKmode);
2562 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2563 emit_move_insn (object, target);
2565 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2568 emit_move_insn (target, object);
2573 /* If the structure is in a register or if the component
2574 is a bit field, we cannot use addressing to access it.
2575 Use bit-field techniques or SUBREG to store in it. */
2577 if (mode == VOIDmode
2578 || (mode != BLKmode && ! direct_store[(int) mode])
2579 || GET_CODE (target) == REG
2580 || GET_CODE (target) == SUBREG)
2582 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2583 /* Store the value in the bitfield. */
2584 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2585 if (value_mode != VOIDmode)
2587 /* The caller wants an rtx for the value. */
2588 /* If possible, avoid refetching from the bitfield itself. */
2590 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2591 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2592 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2593 NULL_RTX, value_mode, 0, align,
2600 rtx addr = XEXP (target, 0);
2603 /* If a value is wanted, it must be the lhs;
2604 so make the address stable for multiple use. */
2606 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2607 && ! CONSTANT_ADDRESS_P (addr)
2608 /* A frame-pointer reference is already stable. */
2609 && ! (GET_CODE (addr) == PLUS
2610 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2611 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2612 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2613 addr = copy_to_reg (addr);
2615 /* Now build a reference to just the desired component. */
2617 to_rtx = change_address (target, mode,
2618 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2619 MEM_IN_STRUCT_P (to_rtx) = 1;
2621 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2625 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2626 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2627   ARRAY_REFs at constant positions and find the ultimate containing object, which we return.
2630 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2631 bit position, and *PUNSIGNEDP to the signedness of the field.
2632 If the position of the field is variable, we store a tree
2633 giving the variable offset (in units) in *POFFSET.
2634 This offset is in addition to the bit position.
2635 If the position is not variable, we store 0 in *POFFSET.
2637 If any of the extraction expressions is volatile,
2638 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2640 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2641 is a mode that can be used to access the field. In that case, *PBITSIZE
2644 If the field describes a variable-sized object, *PMODE is set to
2645 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2646 this case, but the address of the object can be found. */
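
   For example, for a reference like `s.f' where F is a 16-bit field
   that starts 32 bits into S, this returns the tree for S and sets
   *PBITSIZE to 16, *PBITPOS to 32, and *POFFSET to 0.  */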
2649 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2654 enum machine_mode *pmode;
2659 enum machine_mode mode = VOIDmode;
2662 if (TREE_CODE (exp) == COMPONENT_REF)
2664 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2665 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2666 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2667 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2669 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2671 size_tree = TREE_OPERAND (exp, 1);
2672 *punsignedp = TREE_UNSIGNED (exp);
2676 mode = TYPE_MODE (TREE_TYPE (exp));
2677 *pbitsize = GET_MODE_BITSIZE (mode);
2678 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2683 if (TREE_CODE (size_tree) != INTEGER_CST)
2684 mode = BLKmode, *pbitsize = -1;
2686 *pbitsize = TREE_INT_CST_LOW (size_tree);
2689 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2690 and find the ultimate containing object. */
2696 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2698 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2699 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2700 : TREE_OPERAND (exp, 2));
2702 if (TREE_CODE (pos) == PLUS_EXPR)
2705 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2707 constant = TREE_OPERAND (pos, 0);
2708 var = TREE_OPERAND (pos, 1);
2710 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2712 constant = TREE_OPERAND (pos, 1);
2713 var = TREE_OPERAND (pos, 0);
2717 *pbitpos += TREE_INT_CST_LOW (constant);
2719 offset = size_binop (PLUS_EXPR, offset,
2720 size_binop (FLOOR_DIV_EXPR, var,
2721 size_int (BITS_PER_UNIT)));
2723 offset = size_binop (FLOOR_DIV_EXPR, var,
2724 size_int (BITS_PER_UNIT));
2726 else if (TREE_CODE (pos) == INTEGER_CST)
2727 *pbitpos += TREE_INT_CST_LOW (pos);
2730 /* Assume here that the offset is a multiple of a unit.
2731 If not, there should be an explicitly added constant. */
2733 offset = size_binop (PLUS_EXPR, offset,
2734 size_binop (FLOOR_DIV_EXPR, pos,
2735 size_int (BITS_PER_UNIT)));
2737 offset = size_binop (FLOOR_DIV_EXPR, pos,
2738 size_int (BITS_PER_UNIT));
2742 else if (TREE_CODE (exp) == ARRAY_REF
2743 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2744 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2746 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2747 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2749 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2750 && ! ((TREE_CODE (exp) == NOP_EXPR
2751 || TREE_CODE (exp) == CONVERT_EXPR)
2752 && (TYPE_MODE (TREE_TYPE (exp))
2753 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2756 /* If any reference in the chain is volatile, the effect is volatile. */
2757 if (TREE_THIS_VOLATILE (exp))
2759 exp = TREE_OPERAND (exp, 0);
2762 /* If this was a bit-field, see if there is a mode that allows direct
2763 access in case EXP is in memory. */
2764 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2766 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2767 if (mode == BLKmode)
2774 /* We aren't finished fixing the callers to really handle nonzero offset. */
2782 /* Given an rtx VALUE that may contain additions and multiplications,
2783 return an equivalent value that just refers to a register or memory.
2784 This is done by generating instructions to perform the arithmetic
2785 and returning a pseudo-register containing the value. */
2788 force_operand (value, target)
2791 register optab binoptab = 0;
2792 /* Use a temporary to force order of execution of calls to
2796 /* Use subtarget as the target for operand 0 of a binary operation. */
2797 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2799 if (GET_CODE (value) == PLUS)
2800 binoptab = add_optab;
2801 else if (GET_CODE (value) == MINUS)
2802 binoptab = sub_optab;
2803 else if (GET_CODE (value) == MULT)
2805 op2 = XEXP (value, 1);
2806 if (!CONSTANT_P (op2)
2807 && !(GET_CODE (op2) == REG && op2 != subtarget))
2809 tmp = force_operand (XEXP (value, 0), subtarget);
2810 return expand_mult (GET_MODE (value), tmp,
2811 force_operand (op2, NULL_RTX),
2817 op2 = XEXP (value, 1);
2818 if (!CONSTANT_P (op2)
2819 && !(GET_CODE (op2) == REG && op2 != subtarget))
2821 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2823 binoptab = add_optab;
2824 op2 = negate_rtx (GET_MODE (value), op2);
2827 /* Check for an addition with OP2 a constant integer and our first
2828 operand a PLUS of a virtual register and something else. In that
2829 case, we want to emit the sum of the virtual register and the
2830 constant first and then add the other value. This allows virtual
2831 register instantiation to simply modify the constant rather than
2832 creating another one around this addition. */
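  /* E.g., given (plus (plus VIRTUAL-STACK-VARS R) 16), we emit
     VIRTUAL-STACK-VARS + 16 first; instantiation can then rewrite
     that sum by just adjusting the constant, and R is added to the
     result afterward.  */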
2833 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2834 && GET_CODE (XEXP (value, 0)) == PLUS
2835 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2836 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2837 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2839 rtx temp = expand_binop (GET_MODE (value), binoptab,
2840 XEXP (XEXP (value, 0), 0), op2,
2841 subtarget, 0, OPTAB_LIB_WIDEN);
2842 return expand_binop (GET_MODE (value), binoptab, temp,
2843 force_operand (XEXP (XEXP (value, 0), 1), 0),
2844 target, 0, OPTAB_LIB_WIDEN);
2847 tmp = force_operand (XEXP (value, 0), subtarget);
2848 return expand_binop (GET_MODE (value), binoptab, tmp,
2849 force_operand (op2, NULL_RTX),
2850 target, 0, OPTAB_LIB_WIDEN);
2851   /* We give UNSIGNEDP = 0 to expand_binop
2852 because the only operations we are expanding here are signed ones. */
2857 /* Subroutine of expand_expr:
2858 save the non-copied parts (LIST) of an expr (LHS), and return a list
2859 which can restore these values to their previous values,
2860 should something modify their storage. */
2863 save_noncopied_parts (lhs, list)
2870 for (tail = list; tail; tail = TREE_CHAIN (tail))
2871 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2872 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2875 tree part = TREE_VALUE (tail);
2876 tree part_type = TREE_TYPE (part);
2877 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2878 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2879 int_size_in_bytes (part_type), 0);
2880 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2881 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2882 parts = tree_cons (to_be_saved,
2883 build (RTL_EXPR, part_type, NULL_TREE,
2886 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2891 /* Subroutine of expand_expr:
2892 record the non-copied parts (LIST) of an expr (LHS), and return a list
2893 which specifies the initial values of these parts. */
2896 init_noncopied_parts (lhs, list)
2903 for (tail = list; tail; tail = TREE_CHAIN (tail))
2904 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2905 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2908 tree part = TREE_VALUE (tail);
2909 tree part_type = TREE_TYPE (part);
2910 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2911 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2916 /* Subroutine of expand_expr: return nonzero iff there is no way that
2917 EXP can reference X, which is being modified. */
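
   For example, if X is a MEM and EXP contains a CALL_EXPR, the call
   must be assumed able to clobber any memory, so EXP is not safe;
   whereas a constant EXP can never reference X at all.  */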
2920 safe_from_p (x, exp)
2930 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
2931 find the underlying pseudo. */
2932 if (GET_CODE (x) == SUBREG)
2935 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2939 /* If X is a location in the outgoing argument area, it is always safe. */
2940 if (GET_CODE (x) == MEM
2941 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2942 || (GET_CODE (XEXP (x, 0)) == PLUS
2943 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2946 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2949 exp_rtl = DECL_RTL (exp);
2956 if (TREE_CODE (exp) == TREE_LIST)
2957 return ((TREE_VALUE (exp) == 0
2958 || safe_from_p (x, TREE_VALUE (exp)))
2959 && (TREE_CHAIN (exp) == 0
2960 || safe_from_p (x, TREE_CHAIN (exp))));
2965 return safe_from_p (x, TREE_OPERAND (exp, 0));
2969 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2970 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2974 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2975 the expression. If it is set, we conflict iff we are that rtx or
2976 both are in memory. Otherwise, we check all operands of the
2977 expression recursively. */
2979 switch (TREE_CODE (exp))
2982 return staticp (TREE_OPERAND (exp, 0));
2985 if (GET_CODE (x) == MEM)
2990 exp_rtl = CALL_EXPR_RTL (exp);
2993       /* Assume that the call will clobber all hard registers and all of memory.  */
2995 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2996 || GET_CODE (x) == MEM)
3003 exp_rtl = RTL_EXPR_RTL (exp);
3005 /* We don't know what this can modify. */
3010 case WITH_CLEANUP_EXPR:
3011 exp_rtl = RTL_EXPR_RTL (exp);
3015 exp_rtl = SAVE_EXPR_RTL (exp);
3019 /* The only operand we look at is operand 1. The rest aren't
3020 part of the expression. */
3021 return safe_from_p (x, TREE_OPERAND (exp, 1));
3023 case METHOD_CALL_EXPR:
3024 /* This takes a rtx argument, but shouldn't appear here. */
3028 /* If we have an rtx, we do not need to scan our operands. */
3032 nops = tree_code_length[(int) TREE_CODE (exp)];
3033 for (i = 0; i < nops; i++)
3034 if (TREE_OPERAND (exp, i) != 0
3035 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3039   /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
3043 if (GET_CODE (exp_rtl) == SUBREG)
3045 exp_rtl = SUBREG_REG (exp_rtl);
3046 if (GET_CODE (exp_rtl) == REG
3047 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3051 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3052 are memory and EXP is not readonly. */
3053 return ! (rtx_equal_p (x, exp_rtl)
3054 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3055 && ! TREE_READONLY (exp)));
3058 /* If we reach here, it is safe. */
3062 /* Subroutine of expand_expr: return nonzero iff EXP is an
3063 expression whose type is statically determinable. */
3069 if (TREE_CODE (exp) == PARM_DECL
3070 || TREE_CODE (exp) == VAR_DECL
3071 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3072 || TREE_CODE (exp) == COMPONENT_REF
3073 || TREE_CODE (exp) == ARRAY_REF)
3078 /* expand_expr: generate code for computing expression EXP.
3079 An rtx for the computed value is returned. The value is never null.
3080 In the case of a void EXP, const0_rtx is returned.
3082 The value may be stored in TARGET if TARGET is nonzero.
3083 TARGET is just a suggestion; callers must assume that
3084 the rtx returned may not be the same as TARGET.
3086 If TARGET is CONST0_RTX, it means that the value will be ignored.
3088 If TMODE is not VOIDmode, it suggests generating the
3089 result in mode TMODE. But this is done only when convenient.
3090   Otherwise, TMODE is ignored and the value is generated in its natural mode.
3091 TMODE is just a suggestion; callers must assume that
3092 the rtx returned may not have mode TMODE.
3094 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3095 with a constant address even if that address is not normally legitimate.
3096 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3098 If MODIFIER is EXPAND_SUM then when EXP is an addition
3099 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3100 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3101 products as above, or REG or MEM, or constant.
3102 Ordinarily in such cases we would output mul or add instructions
3103 and then return a pseudo reg containing the sum.
3105 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3106 it also marks a label as absolutely required (it can't be dead).
3107 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3108 This is used for outputting expressions used in initializers. */
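
   A typical call, as used throughout this file, expands an expression
   for its value and lets this function choose the mode and target:
	temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
   while `expand_expr (exp, const0_rtx, VOIDmode, 0)' evaluates EXP
   only for its side effects.  */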
3111 expand_expr (exp, target, tmode, modifier)
3114 enum machine_mode tmode;
3115 enum expand_modifier modifier;
3117 register rtx op0, op1, temp;
3118 tree type = TREE_TYPE (exp);
3119 int unsignedp = TREE_UNSIGNED (type);
3120 register enum machine_mode mode = TYPE_MODE (type);
3121 register enum tree_code code = TREE_CODE (exp);
3123 /* Use subtarget as the target for operand 0 of a binary operation. */
3124 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3125 rtx original_target = target;
3126 int ignore = target == const0_rtx;
3129 /* Don't use hard regs as subtargets, because the combiner
3130 can only handle pseudo regs. */
3131 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3133 /* Avoid subtargets inside loops,
3134 since they hide some invariant expressions. */
3135 if (preserve_subexpressions_p ())
3138 if (ignore) target = 0, original_target = 0;
3140   /* If we will do cse, generate all results into pseudo registers
3141      since 1) that allows cse to find more things
3142      and 2) otherwise cse could produce an insn the machine cannot support.  */
3145 if (! cse_not_expected && mode != BLKmode && target
3146 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3149 /* Ensure we reference a volatile object even if value is ignored. */
3150 if (ignore && TREE_THIS_VOLATILE (exp)
3151 && mode != VOIDmode && mode != BLKmode)
3153 target = gen_reg_rtx (mode);
3154 temp = expand_expr (exp, target, VOIDmode, modifier);
3156 emit_move_insn (target, temp);
3164 tree function = decl_function_context (exp);
3165 /* Handle using a label in a containing function. */
3166 if (function != current_function_decl && function != 0)
3168 struct function *p = find_function_data (function);
3169 /* Allocate in the memory associated with the function
3170 that the label is in. */
3171 push_obstacks (p->function_obstack,
3172 p->function_maybepermanent_obstack);
3174 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3175 label_rtx (exp), p->forced_labels);
3178 else if (modifier == EXPAND_INITIALIZER)
3179 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3180 label_rtx (exp), forced_labels);
3181 temp = gen_rtx (MEM, FUNCTION_MODE,
3182 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3183 if (function != current_function_decl && function != 0)
3184 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3189 if (DECL_RTL (exp) == 0)
3191 error_with_decl (exp, "prior parameter's size depends on `%s'");
3192 return CONST0_RTX (mode);
3198 if (DECL_RTL (exp) == 0)
3200 /* Ensure variable marked as used
3201 even if it doesn't go through a parser. */
3202 TREE_USED (exp) = 1;
3203 /* Handle variables inherited from containing functions. */
3204 context = decl_function_context (exp);
3206 /* We treat inline_function_decl as an alias for the current function
3207 because that is the inline function whose vars, types, etc.
3208 are being merged into the current function.
3209 See expand_inline_function. */
3210 if (context != 0 && context != current_function_decl
3211 && context != inline_function_decl
3212 /* If var is static, we don't need a static chain to access it. */
3213 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3214 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3218 /* Mark as non-local and addressable. */
3219 DECL_NONLOCAL (exp) = 1;
3220 mark_addressable (exp);
3221 if (GET_CODE (DECL_RTL (exp)) != MEM)
3223 addr = XEXP (DECL_RTL (exp), 0);
3224 if (GET_CODE (addr) == MEM)
3225 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3227 addr = fix_lexical_addr (addr, exp);
3228 return change_address (DECL_RTL (exp), mode, addr);
3231 /* This is the case of an array whose size is to be determined
3232 from its initializer, while the initializer is still being parsed.
3234 if (GET_CODE (DECL_RTL (exp)) == MEM
3235 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3236 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3237 XEXP (DECL_RTL (exp), 0));
3238 if (GET_CODE (DECL_RTL (exp)) == MEM
3239 && modifier != EXPAND_CONST_ADDRESS
3240 && modifier != EXPAND_SUM
3241 && modifier != EXPAND_INITIALIZER)
3243 /* DECL_RTL probably contains a constant address.
3244 On RISC machines where a constant address isn't valid,
3245 make some insns to get that address into a register. */
3246 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3248 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3249 return change_address (DECL_RTL (exp), VOIDmode,
3250 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3252 return DECL_RTL (exp);
3255 return immed_double_const (TREE_INT_CST_LOW (exp),
3256 TREE_INT_CST_HIGH (exp),
3260 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3263 /* If optimized, generate immediate CONST_DOUBLE
3264 which will be turned into memory by reload if necessary.
3266 We used to force a register so that loop.c could see it. But
3267 this does not allow gen_* patterns to perform optimizations with
3268 the constants. It also produces two insns in cases like "x = 1.0;".
3269 On most machines, floating-point constants are not permitted in
3270 many insns, so we'd end up copying it to a register in any case.
3272 Now, we do the copying in expand_binop, if appropriate. */
3273 return immed_real_const (exp);
3277 if (! TREE_CST_RTL (exp))
3278 output_constant_def (exp);
3280 /* TREE_CST_RTL probably contains a constant address.
3281 On RISC machines where a constant address isn't valid,
3282 make some insns to get that address into a register. */
3283 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3284 && modifier != EXPAND_CONST_ADDRESS
3285 && modifier != EXPAND_INITIALIZER
3286 && modifier != EXPAND_SUM
3287 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3288 return change_address (TREE_CST_RTL (exp), VOIDmode,
3289 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3290 return TREE_CST_RTL (exp);
3293 context = decl_function_context (exp);
3294 /* We treat inline_function_decl as an alias for the current function
3295 because that is the inline function whose vars, types, etc.
3296 are being merged into the current function.
3297 See expand_inline_function. */
3298 if (context == current_function_decl || context == inline_function_decl)
3301 /* If this is non-local, handle it. */
3304 temp = SAVE_EXPR_RTL (exp);
3305 if (temp && GET_CODE (temp) == REG)
3307 put_var_into_stack (exp);
3308 temp = SAVE_EXPR_RTL (exp);
3310 if (temp == 0 || GET_CODE (temp) != MEM)
3312 return change_address (temp, mode,
3313 fix_lexical_addr (XEXP (temp, 0), exp));
3315 if (SAVE_EXPR_RTL (exp) == 0)
3317 if (mode == BLKmode)
3319 = assign_stack_temp (mode,
3320 int_size_in_bytes (TREE_TYPE (exp)), 0);
3322 temp = gen_reg_rtx (mode);
3323 SAVE_EXPR_RTL (exp) = temp;
3324 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3325 if (!optimize && GET_CODE (temp) == REG)
3326 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3329 return SAVE_EXPR_RTL (exp);
3332 /* Exit the current loop if the body-expression is true. */
3334 rtx label = gen_label_rtx ();
3335 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3336 expand_exit_loop (NULL_PTR);
3342 expand_start_loop (1);
3343 expand_expr_stmt (TREE_OPERAND (exp, 0));
3350 tree vars = TREE_OPERAND (exp, 0);
3351 int vars_need_expansion = 0;
3353 /* Need to open a binding contour here because
3354	   if there are any cleanups they must be contained here.  */
3355 expand_start_bindings (0);
3357 /* Mark the corresponding BLOCK for output. */
3358 if (TREE_OPERAND (exp, 2) != 0)
3359 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3361 /* If VARS have not yet been expanded, expand them now. */
3364 if (DECL_RTL (vars) == 0)
3366 vars_need_expansion = 1;
3369 expand_decl_init (vars);
3370 vars = TREE_CHAIN (vars);
3373 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3375 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3381 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3383 emit_insns (RTL_EXPR_SEQUENCE (exp));
3384 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3385 return RTL_EXPR_RTL (exp);
3388 /* All elts simple constants => refer to a constant in memory. But
3389 if this is a non-BLKmode mode, let it store a field at a time
3390	 since that should make a CONST_INT or CONST_DOUBLE when we fold.  */
3392 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3394 rtx constructor = output_constant_def (exp);
3395 if (modifier != EXPAND_CONST_ADDRESS
3396 && modifier != EXPAND_INITIALIZER
3397 && modifier != EXPAND_SUM
3398 && !memory_address_p (GET_MODE (constructor),
3399 XEXP (constructor, 0)))
3400 constructor = change_address (constructor, VOIDmode,
3401 XEXP (constructor, 0));
3408 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3409 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3414 if (target == 0 || ! safe_from_p (target, exp))
3416 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3417 target = gen_reg_rtx (mode);
3420 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3422 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3423 target = safe_target;
3426 store_constructor (exp, target);
3432 tree exp1 = TREE_OPERAND (exp, 0);
3435 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3436 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3437 This code has the same general effect as simply doing
3438 expand_expr on the save expr, except that the expression PTR
3439 is computed for use as a memory address. This means different
3440 code, suitable for indexing, may be generated. */
3441 if (TREE_CODE (exp1) == SAVE_EXPR
3442 && SAVE_EXPR_RTL (exp1) == 0
3443 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3444 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3445 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3447 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3448 VOIDmode, EXPAND_SUM);
3449 op0 = memory_address (mode, temp);
3450 op0 = copy_all_regs (op0);
3451 SAVE_EXPR_RTL (exp1) = op0;
3455 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3456 op0 = memory_address (mode, op0);
3459 temp = gen_rtx (MEM, mode, op0);
3460 /* If address was computed by addition,
3461 mark this as an element of an aggregate. */
3462 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3463 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3464 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3465 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3466 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3467 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3468 || (TREE_CODE (exp1) == ADDR_EXPR
3469 && (exp2 = TREE_OPERAND (exp1, 0))
3470 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3471 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3472 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3473 MEM_IN_STRUCT_P (temp) = 1;
3474 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3475 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3476 a location is accessed through a pointer to const does not mean
3477 that the value there can never change. */
3478 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3484 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3485 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3487 /* Nonconstant array index or nonconstant element size.
3488 Generate the tree for *(&array+index) and expand that,
3489 except do it in a language-independent way
3490 and don't complain about non-lvalue arrays.
3491 `mark_addressable' should already have been called
3492 for any array for which this case will be reached. */
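	  /* For example, with `int a[n];' a reference `a[i]' is expanded
	     here as if written `*(&a + i * sizeof (int))', with the
	     multiplication done in a pointer-sized type.  */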
3494 /* Don't forget the const or volatile flag from the array element. */
3495 tree variant_type = build_type_variant (type,
3496 TREE_READONLY (exp),
3497 TREE_THIS_VOLATILE (exp));
3498 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3499 TREE_OPERAND (exp, 0));
3500 tree index = TREE_OPERAND (exp, 1);
3503 /* Convert the integer argument to a type the same size as a pointer
3504 so the multiply won't overflow spuriously. */
3505 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3506 index = convert (type_for_size (POINTER_SIZE, 0), index);
3508 /* Don't think the address has side effects
3509 just because the array does.
3510 (In some cases the address might have side effects,
3511 and we fail to record that fact here. However, it should not
3512 matter, since expand_expr should not care.) */
3513 TREE_SIDE_EFFECTS (array_adr) = 0;
3515 elt = build1 (INDIRECT_REF, type,
3516 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3518 fold (build (MULT_EXPR,
3519 TYPE_POINTER_TO (variant_type),
3520 index, size_in_bytes (type))))));
3522 /* Volatility, etc., of new expression is same as old expression. */
3523 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3524 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3525 TREE_READONLY (elt) = TREE_READONLY (exp);
3527 return expand_expr (elt, target, tmode, modifier);
3530 /* Fold an expression like: "foo"[2].
3531 This is not done in fold so it won't happen inside &. */
3534 tree arg0 = TREE_OPERAND (exp, 0);
3535 tree arg1 = TREE_OPERAND (exp, 1);
3537 if (TREE_CODE (arg0) == STRING_CST
3538 && TREE_CODE (arg1) == INTEGER_CST
3539 && !TREE_INT_CST_HIGH (arg1)
3540 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3542 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3544 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3545 TREE_TYPE (exp) = integer_type_node;
3546 return expand_expr (exp, target, tmode, modifier);
3548 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3550 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3551 TREE_TYPE (exp) = integer_type_node;
3552 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3557 /* If this is a constant index into a constant array,
3558 just get the value from the array. Handle both the cases when
3559 we have an explicit constructor and when our operand is a variable
3560 that was declared const. */
3562 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3563 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3565 tree index = fold (TREE_OPERAND (exp, 1));
3566 if (TREE_CODE (index) == INTEGER_CST
3567 && TREE_INT_CST_HIGH (index) == 0)
3569 int i = TREE_INT_CST_LOW (index);
3570 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3573 elem = TREE_CHAIN (elem);
3575 return expand_expr (fold (TREE_VALUE (elem)), target,
3580 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3581 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3582 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3583 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3584 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3586 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3589 tree index = fold (TREE_OPERAND (exp, 1));
3590 if (TREE_CODE (index) == INTEGER_CST
3591 && TREE_INT_CST_HIGH (index) == 0)
3593 int i = TREE_INT_CST_LOW (index);
3594 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3596 if (TREE_CODE (init) == CONSTRUCTOR)
3598 tree elem = CONSTRUCTOR_ELTS (init);
3601 elem = TREE_CHAIN (elem);
3603 return expand_expr (fold (TREE_VALUE (elem)), target,
3606 else if (TREE_CODE (init) == STRING_CST
3607 && i < TREE_STRING_LENGTH (init))
3609 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3610 return convert_to_mode (mode, temp, 0);
3614 /* Treat array-ref with constant index as a component-ref. */
3618 /* If the operand is a CONSTRUCTOR, we can just extract the
3619 appropriate field if it is present. */
3620 if (code != ARRAY_REF
3621 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3625 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3626 elt = TREE_CHAIN (elt))
3627 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3628 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3632 enum machine_mode mode1;
3637 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3638 &mode1, &unsignedp, &volatilep);
3640 /* In some cases, we will be offsetting OP0's address by a constant.
3641 So get it as a sum, if possible. If we will be using it
3642 directly in an insn, we validate it. */
3643 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3645 /* If this is a constant, put it into a register if it is a
3646	   legitimate constant and memory if it isn't.  */
3647 if (CONSTANT_P (op0))
3649 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3650 if (LEGITIMATE_CONSTANT_P (op0))
3651 op0 = force_reg (mode, op0);
3653 op0 = validize_mem (force_const_mem (mode, op0));
3658 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3660 if (GET_CODE (op0) != MEM)
3662 op0 = change_address (op0, VOIDmode,
3663 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3664 force_reg (Pmode, offset_rtx)));
3667 /* Don't forget about volatility even if this is a bitfield. */
3668 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3670 op0 = copy_rtx (op0);
3671 MEM_VOLATILE_P (op0) = 1;
3674 if (mode1 == VOIDmode
3675 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3676 && modifier != EXPAND_CONST_ADDRESS
3677 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3678 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3680 /* In cases where an aligned union has an unaligned object
3681 as a field, we might be extracting a BLKmode value from
3682 an integer-mode (e.g., SImode) object. Handle this case
3683 by doing the extract into an object as wide as the field
3684 (which we know to be the width of a basic mode), then
3685 storing into memory, and changing the mode to BLKmode. */
3686 enum machine_mode ext_mode = mode;
3688 if (ext_mode == BLKmode)
3689 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3691 if (ext_mode == BLKmode)
3694 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3695 unsignedp, target, ext_mode, ext_mode,
3696 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3697 int_size_in_bytes (TREE_TYPE (tem)));
3698 if (mode == BLKmode)
3700 rtx new = assign_stack_temp (ext_mode,
3701 bitsize / BITS_PER_UNIT, 0);
3703 emit_move_insn (new, op0);
3704 op0 = copy_rtx (new);
3705 PUT_MODE (op0, BLKmode);
3711 /* Get a reference to just this component. */
3712 if (modifier == EXPAND_CONST_ADDRESS
3713 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3714 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3715 (bitpos / BITS_PER_UNIT)));
3717 op0 = change_address (op0, mode1,
3718 plus_constant (XEXP (op0, 0),
3719 (bitpos / BITS_PER_UNIT)));
3720 MEM_IN_STRUCT_P (op0) = 1;
3721 MEM_VOLATILE_P (op0) |= volatilep;
3722 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3725 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3726 convert_move (target, op0, unsignedp);
3732 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3733 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3734 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3735 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3736 MEM_IN_STRUCT_P (temp) = 1;
3737 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3738 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3739 a location is accessed through a pointer to const does not mean
3740 that the value there can never change. */
3741 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3746 /* Intended for a reference to a buffer of a file-object in Pascal.
3747 But it's not certain that a special tree code will really be
3748 necessary for these. INDIRECT_REF might work for them. */
3752 case WITH_CLEANUP_EXPR:
3753 if (RTL_EXPR_RTL (exp) == 0)
3756 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3758 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3759 /* That's it for this cleanup. */
3760 TREE_OPERAND (exp, 2) = 0;
3762 return RTL_EXPR_RTL (exp);
3765 /* Check for a built-in function. */
3766 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3767 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3768 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3769 return expand_builtin (exp, target, subtarget, tmode, ignore);
3770 /* If this call was expanded already by preexpand_calls,
3771 just return the result we got. */
3772 if (CALL_EXPR_RTL (exp) != 0)
3773 return CALL_EXPR_RTL (exp);
3774 return expand_call (exp, target, ignore);
3776 case NON_LVALUE_EXPR:
3779 case REFERENCE_EXPR:
3780 if (TREE_CODE (type) == VOID_TYPE || ignore)
3782 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3785 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3786 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3787 if (TREE_CODE (type) == UNION_TYPE)
3789 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3792 if (mode == BLKmode)
3794 if (TYPE_SIZE (type) == 0
3795 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3797 target = assign_stack_temp (BLKmode,
3798 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3799 + BITS_PER_UNIT - 1)
3800 / BITS_PER_UNIT, 0);
3803 target = gen_reg_rtx (mode);
3805 if (GET_CODE (target) == MEM)
3806 /* Store data into beginning of memory target. */
3807 store_expr (TREE_OPERAND (exp, 0),
3808 change_address (target, TYPE_MODE (valtype), 0),
3810 else if (GET_CODE (target) == REG)
3811 /* Store this field into a union of the proper type. */
3812 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3813 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3815 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3819 /* Return the entire union. */
3822 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, modifier);
3823 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3825 if (modifier == EXPAND_INITIALIZER)
3826 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
3827 if (flag_force_mem && GET_CODE (op0) == MEM)
3828 op0 = copy_to_reg (op0);
3831 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3833 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3837 /* We come here from MINUS_EXPR when the second operand is a constant. */
3839 this_optab = add_optab;
3841 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3842 something else, make sure we add the register to the constant and
3843 then to the other thing. This case can occur during strength
3844 reduction and doing it this way will produce better code if the
3845 frame pointer or argument pointer is eliminated.
3847 fold-const.c will ensure that the constant is always in the inner
3848 PLUS_EXPR, so the only case we need to do anything about is if
3849 sp, ap, or fp is our second argument, in which case we must swap
3850 the innermost first argument and our second argument. */
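      /* E.g., (PLUS (PLUS X C) FP) is rearranged here into
	 (PLUS (PLUS FP C) X), so the pointer register and the
	 constant are combined first.  */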
3852 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3853 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3854 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3855 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3856 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3857 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3859 tree t = TREE_OPERAND (exp, 1);
3861 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3862 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3865 /* If the result is to be Pmode and we are adding an integer to
3866 something, we might be forming a constant. So try to use
3867 plus_constant. If it produces a sum and we can't accept it,
3868 use force_operand. This allows P = &ARR[const] to generate
3869 efficient code on machines where a SYMBOL_REF is not a valid
3872 If this is an EXPAND_SUM call, always return the sum. */
3873 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3874 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3875 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3878 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3880 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3881 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3882 op1 = force_operand (op1, target);
3886 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3887 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3888 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3891 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3893 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3894 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3895 op0 = force_operand (op0, target);
3899 /* No sense saving up arithmetic to be done
3900 if it's all in the wrong mode to form part of an address.
3901	 And force_operand won't know whether to sign-extend or zero-extend.  */
3903 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3904 || mode != Pmode) goto binop;
3906 preexpand_calls (exp);
3907 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3910 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3911 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
3913 /* Make sure any term that's a sum with a constant comes last. */
3914 if (GET_CODE (op0) == PLUS
3915 && CONSTANT_P (XEXP (op0, 1)))
3921 /* If adding to a sum including a constant,
3922 associate it to put the constant outside. */
3923 if (GET_CODE (op1) == PLUS
3924 && CONSTANT_P (XEXP (op1, 1)))
3926 rtx constant_term = const0_rtx;
3928 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3931 /* Ensure that MULT comes first if there is one. */
3932 else if (GET_CODE (op0) == MULT)
3933 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3935 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3937 /* Let's also eliminate constants from op0 if possible. */
3938 op0 = eliminate_constant_term (op0, &constant_term);
3940 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3941 their sum should be a constant. Form it into OP1, since the
3942 result we want will then be OP0 + OP1. */
3944 temp = simplify_binary_operation (PLUS, mode, constant_term,
3949 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3952 /* Put a constant term last and put a multiplication first. */
3953 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3954 temp = op1, op1 = op0, op0 = temp;
3956 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3957 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3960 /* Handle difference of two symbolic constants,
3961 for the sake of an initializer. */
3962 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3963 && really_constant_p (TREE_OPERAND (exp, 0))
3964 && really_constant_p (TREE_OPERAND (exp, 1)))
3966 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
3967 VOIDmode, modifier);
3968 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
3969 VOIDmode, modifier);
3970 return gen_rtx (MINUS, mode, op0, op1);
3972 /* Convert A - const to A + (-const). */
3973 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3975 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3976 fold (build1 (NEGATE_EXPR, type,
3977 TREE_OPERAND (exp, 1))));
3980 this_optab = sub_optab;
3984 preexpand_calls (exp);
3985 /* If first operand is constant, swap them.
3986 Thus the following special case checks need only
3987 check the second operand. */
3988 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3990 register tree t1 = TREE_OPERAND (exp, 0);
3991 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3992 TREE_OPERAND (exp, 1) = t1;
3995 /* Attempt to return something suitable for generating an
3996 indexed address, for machines that support that. */
3998 if (modifier == EXPAND_SUM && mode == Pmode
3999 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4000 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4002 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4004 /* Apply distributive law if OP0 is x+c. */
4005 if (GET_CODE (op0) == PLUS
4006 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4007 return gen_rtx (PLUS, mode,
4008 gen_rtx (MULT, mode, XEXP (op0, 0),
4009 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4010 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4011 * INTVAL (XEXP (op0, 1))));
4013 if (GET_CODE (op0) != REG)
4014 op0 = force_operand (op0, NULL_RTX);
4015 if (GET_CODE (op0) != REG)
4016 op0 = copy_to_mode_reg (mode, op0);
4018 return gen_rtx (MULT, mode, op0,
4019 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4022 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4025 /* Check for multiplying things that have been extended
4026 from a narrower type. If this machine supports multiplying
4027 in that narrower type with a result in the desired type,
4028 do it that way, and avoid the explicit type-conversion. */
4029 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4030 && TREE_CODE (type) == INTEGER_TYPE
4031 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4032 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4033 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4034 && int_fits_type_p (TREE_OPERAND (exp, 1),
4035 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4036 /* Don't use a widening multiply if a shift will do. */
4037 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4038 > HOST_BITS_PER_WIDE_INT)
4039 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4041 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4042 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4044 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4045 /* If both operands are extended, they must either both
4046 be zero-extended or both be sign-extended. */
4047 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4049 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4051 enum machine_mode innermode
4052 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4053 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4054 ? umul_widen_optab : smul_widen_optab);
4055 if (mode == GET_MODE_WIDER_MODE (innermode)
4056 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4058 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4059 NULL_RTX, VOIDmode, 0);
4060 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4061 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4064 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4065 NULL_RTX, VOIDmode, 0);
4069 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4070 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4071 return expand_mult (mode, op0, op1, target, unsignedp);
4073 case TRUNC_DIV_EXPR:
4074 case FLOOR_DIV_EXPR:
4076 case ROUND_DIV_EXPR:
4077 case EXACT_DIV_EXPR:
4078 preexpand_calls (exp);
4079 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4081 /* Possible optimization: compute the dividend with EXPAND_SUM
4082	 then, if the divisor is constant, we can optimize the case
4083 where some terms of the dividend have coeffs divisible by it. */
4084 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4085 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4086 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4089 this_optab = flodiv_optab;
4092 case TRUNC_MOD_EXPR:
4093 case FLOOR_MOD_EXPR:
4095 case ROUND_MOD_EXPR:
4096 preexpand_calls (exp);
4097 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4099 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4100 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4101 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4103 case FIX_ROUND_EXPR:
4104 case FIX_FLOOR_EXPR:
4106 abort (); /* Not used for C. */
4108 case FIX_TRUNC_EXPR:
4109 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4111 target = gen_reg_rtx (mode);
4112 expand_fix (target, op0, unsignedp);
4116 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4118 target = gen_reg_rtx (mode);
4119 /* expand_float can't figure out what to do if FROM has VOIDmode.
4120 So give it the correct mode. With -O, cse will optimize this. */
4121 if (GET_MODE (op0) == VOIDmode)
4122 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4124 expand_float (target, op0,
4125 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4129 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4130 temp = expand_unop (mode, neg_optab, op0, target, 0);
4136 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4138 /* Unsigned abs is simply the operand. Testing here means we don't
4139 risk generating incorrect code below. */
4140 if (TREE_UNSIGNED (type))
4143 /* First try to do it with a special abs instruction. */
4144 temp = expand_unop (mode, abs_optab, op0, target, 0);
4148 /* If this machine has expensive jumps, we can do integer absolute
4149 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4150 where W is the width of MODE. */
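
      /* Worked example for W == 32 and X == -5: the arithmetic shift
	 gives -1 (all ones), (-5 ^ -1) == ~-5 == 4, and 4 - (-1) == 5.
	 For X == 5 the shift gives 0, so the result is 5 - 0 == 5.  */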
4152 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4154 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4155 size_int (GET_MODE_BITSIZE (mode) - 1),
4158 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4161 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4168 /* If that does not win, use conditional jump and negate. */
4169 target = original_target;
4170 temp = gen_label_rtx ();
4171 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4172 || (GET_CODE (target) == REG
4173 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4174 target = gen_reg_rtx (mode);
4175 emit_move_insn (target, op0);
4176 emit_cmp_insn (target,
4177 expand_expr (convert (type, integer_zero_node),
4178 NULL_RTX, VOIDmode, 0),
4179 GE, NULL_RTX, mode, 0, 0);
4181 emit_jump_insn (gen_bge (temp));
4182 op0 = expand_unop (mode, neg_optab, target, target, 0);
4184 emit_move_insn (target, op0);
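
      /* Illustrative sketch, not part of GNU CC, of the branch-free absolute
         value used above: with MASK = x >> (W-1) (an arithmetic shift, so 0
         for non-negative x and -1 for negative x), (x ^ MASK) - MASK negates
         x exactly when it was negative.  Assumes two's complement;
         `demo_abs' is a hypothetical name.  */
#if 0
static int
demo_abs (x)
     int x;
{
  int mask = x >> (sizeof (int) * 8 - 1);   /* 0 or -1 */
  return (x ^ mask) - mask;                 /* identity when mask == 0 */
}
#endif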
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      if (target != op0)
        emit_move_insn (target, op0);
      op0 = gen_label_rtx ();
      if (code == MAX_EXPR)
        temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
                ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
                : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
      else
        temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
                ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
                : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
      if (temp == const0_rtx)
        emit_move_insn (target, op1);
      else if (temp != const_true_rtx)
        {
          if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
            emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
          else
            abort ();
          emit_move_insn (target, op1);
        }
      emit_label (op0);
      return target;
    case BIT_NOT_EXPR:
      /* ??? Can optimize when the operand of this is a bitwise operation,
         by using a different bitwise operation.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc),
         but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding
         two boolean values when we want in all cases to compute both of them.
         In general it is fastest to do TRUTH_AND_EXPR by computing both
         operands as actual zero-or-1 values and then bitwise anding.  In
         cases where there cannot be any side effects, better code would be
         made by treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the
         question is how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

      /* See comment above about TRUTH_AND_EXPR; it applies here too.  */
    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
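
      /* Illustrative sketch, not part of GNU CC, of the distinction drawn
         in the comment above.  Both function names are hypothetical.  */
#if 0
static int
demo_truth_and (a, b)
     int a, b;
{
  /* TRUTH_AND_EXPR: both operands are reduced to 0-or-1 values and
     combined with a bitwise AND; no branches, both sides evaluated.  */
  return (a != 0) & (b != 0);
}

static int
demo_truth_andif (a, b)
     int a, b;
{
  /* TRUTH_ANDIF_EXPR: short-circuit; b is not evaluated when a is zero,
     at the cost of a conditional jump.  */
  return a != 0 && b != 0;
}
#endif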
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);
      /* Could determine the answer when only additive constants differ.
         Also, the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target != 0
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
          if (temp != original_target)
            temp = copy_to_reg (temp);
          op1 = gen_label_rtx ();
          emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
                         GET_MODE (temp), unsignedp, 0);
          emit_jump_insn (gen_beq (op1));
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }
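
      /* Illustrative sketch, not part of GNU CC: what the NE-against-zero
         special case above generates when there is no set-flag instruction.
         `demo_ne_zero' is a hypothetical name.  */
#if 0
static int
demo_ne_zero (foo)
     int foo;
{
  /* Load FOO; if it is nonzero, overwrite it with 1.  The result is the
     0-or-1 value of `foo != 0' without a store-flag insn.  */
  int temp = foo;
  if (temp != 0)
    temp = 1;
  return temp;
}
#endif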
      /* If there is no set-flag instruction, we must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (target == 0 || ! safe_from_p (target, exp)
          /* Make sure we don't have a hard reg (such as function's return
             value) live across basic blocks, if not optimizing.  */
          || (!optimize && GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
      emit_clr_insn (target);
      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);
      emit_0_to_1_insn (target);
      emit_label (op1);
      return target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;
    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, modifier);
    case COND_EXPR:
      {
        /* Note that COND_EXPRs whose type is a structure or union
           are required to be constructed to contain assignments of
           a temporary variable, so that we can evaluate them here
           for side effect only.  If type is void, we must do likewise.  */

        /* If an arm of the branch requires a cleanup,
           only that cleanup is performed.  */

        tree singleton = 0;
        tree binary_op = 0, unary_op = 0;
        tree old_cleanups = cleanups_this_call;
        cleanups_this_call = 0;

        /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
           convert it to our mode, if necessary.  */
        if (integer_onep (TREE_OPERAND (exp, 1))
            && integer_zerop (TREE_OPERAND (exp, 2))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
            if (GET_MODE (op0) == mode)
              return op0;
            if (target == 0)
              target = gen_reg_rtx (mode);
            convert_move (target, op0, unsignedp);
            return target;
          }
        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified, use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary and see if we can combine.  */

        if (mode == VOIDmode || ignore)
          temp = 0;
        else if (original_target
                 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
          temp = original_target;
        else if (mode == BLKmode)
          {
            if (TYPE_SIZE (type) == 0
                || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
              abort ();
            temp = assign_stack_temp (BLKmode,
                                      (TREE_INT_CST_LOW (TYPE_SIZE (type))
                                       + BITS_PER_UNIT - 1)
                                      / BITS_PER_UNIT, 0);
          }
        else
          temp = gen_reg_rtx (mode);
        /* Check for X ? A + B : A.  If we have this, we can copy
           A to the output and conditionally add B.  Similarly for unary
           operations.  Don't do this if X has side-effects because
           those side effects might affect A or B and the "?" operation is
           a sequence point in ANSI.  (We test for side effects later.)  */

        if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
            && operand_equal_p (TREE_OPERAND (exp, 2),
                                TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 2),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
        /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
           operation, do this as A + (X != 0).  Similarly for other simple
           binary operators.  */
        if (singleton && binary_op
            && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
            && (TREE_CODE (binary_op) == PLUS_EXPR
                || TREE_CODE (binary_op) == MINUS_EXPR
                || TREE_CODE (binary_op) == BIT_IOR_EXPR
                || TREE_CODE (binary_op) == BIT_XOR_EXPR
                || TREE_CODE (binary_op) == BIT_AND_EXPR)
            && integer_onep (TREE_OPERAND (binary_op, 1))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            rtx result;
            optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
                            : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
                            : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
                            : and_optab);

            /* If we had X ? A : A + 1, do this as A + (X == 0).

               We have to invert the truth value here and then put it
               back later if do_store_flag fails.  We cannot simply copy
               TREE_OPERAND (exp, 0) to another variable and modify that
               because invert_truthvalue can modify the tree pointed to
               by its argument.  */
            if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));

            result = do_store_flag (TREE_OPERAND (exp, 0),
                                    (safe_from_p (temp, singleton)
                                     ? temp : NULL_RTX),
                                    mode, BRANCH_COST <= 1);

            if (result)
              {
                op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
                return expand_binop (mode, boptab, op1, result, temp,
                                     unsignedp, OPTAB_LIB_WIDEN);
              }
            else if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));
          }
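
        /* Illustrative sketch, not part of GNU CC, of the transformation
           above: when the condition can be computed as a 0-or-1 store-flag
           value, `x ? a + 1 : a' needs no branch at all.  The name
           `demo_cond_add' is hypothetical.  */
#if 0
static int
demo_cond_add (x, a)
     int x, a;
{
  /* x ? a + 1 : a  becomes  a + (x != 0).  */
  return a + (x != 0);
}
#endif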
        NO_DEFER_POP;
        op0 = gen_label_rtx ();

        if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
          {
            if (temp != 0)
              {
                /* If the target conflicts with the other operand of the
                   binary op, we can't use it.  Also, we can't use the target
                   if it is a hard register, because evaluating the condition
                   might clobber it.  */
                if ((binary_op
                     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
                    || (GET_CODE (temp) == REG
                        && REGNO (temp) < FIRST_PSEUDO_REGISTER))
                  temp = gen_reg_rtx (mode);
                store_expr (singleton, temp, 0);
              }
            else
              expand_expr (singleton,
                           ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
            if (cleanups_this_call)
              {
                sorry ("aggregate value in COND_EXPR");
                cleanups_this_call = 0;
              }
            if (singleton == TREE_OPERAND (exp, 1))
              jumpif (TREE_OPERAND (exp, 0), op0);
            else
              jumpifnot (TREE_OPERAND (exp, 0), op0);

            if (binary_op && temp == 0)
              /* Just touch the other operand.  */
              expand_expr (TREE_OPERAND (binary_op, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            else if (binary_op)
              store_expr (build (TREE_CODE (binary_op), type,
                                 make_tree (type, temp),
                                 TREE_OPERAND (binary_op, 1)),
                          temp, 0);
            else
              store_expr (build1 (TREE_CODE (unary_op), type,
                                  make_tree (type, temp)),
                          temp, 0);
            op1 = op0;
          }
#if 0
        /* This is now done in jump.c and is better done there because it
           produces shorter register lifetimes.  */

        /* Check for both possibilities, either constants or variables
           in registers (but not the same as the target!).  If so, can
           save branches by assigning one, branching, and assigning the
           other.  */
        else if (temp && GET_MODE (temp) != BLKmode
                 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
                     || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
                          || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
                         && DECL_RTL (TREE_OPERAND (exp, 1))
                         && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
                         && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
                 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
                     || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
                          || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
                         && DECL_RTL (TREE_OPERAND (exp, 2))
                         && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
                         && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            jumpifnot (TREE_OPERAND (exp, 0), op0);
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            op1 = op0;
          }
#endif
        /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
           comparison operator.  If we have one of these cases, set the
           output to A, branch on A (cse will merge these two references),
           then set the output to FOO.  */
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1), 0)
                 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            jumpif (TREE_OPERAND (exp, 0), op0);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            op1 = op0;
          }
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 2), 0)
                 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            jumpifnot (TREE_OPERAND (exp, 0), op0);
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            op1 = op0;
          }
        else
          {
            op1 = gen_label_rtx ();
            jumpifnot (TREE_OPERAND (exp, 0), op0);
            if (temp != 0)
              store_expr (TREE_OPERAND (exp, 1), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            if (cleanups_this_call)
              {
                sorry ("aggregate value in COND_EXPR");
                cleanups_this_call = 0;
              }

            emit_queue ();
            emit_jump_insn (gen_jump (op1));
            emit_barrier ();
            emit_label (op0);
            if (temp != 0)
              store_expr (TREE_OPERAND (exp, 2), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 2),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
          }

        if (cleanups_this_call)
          {
            sorry ("aggregate value in COND_EXPR");
            cleanups_this_call = 0;
          }

        emit_queue ();
        emit_label (op1);
        OK_DEFER_POP;
        cleanups_this_call = old_cleanups;
        return temp;
      }
    case TARGET_EXPR:
      {
        /* Something needs to be initialized, but we didn't know
           where that thing was when building the tree.  For example,
           it could be the return value of a function, or a parameter
           to a function which lays down in the stack, or a temporary
           variable which must be passed by reference.

           We guarantee that the expression will either be constructed
           or copied into our original target.  */

        tree slot = TREE_OPERAND (exp, 0);
        tree exp1;

        if (TREE_CODE (slot) != VAR_DECL)
          abort ();

        if (target == 0)
          {
            if (DECL_RTL (slot) != 0)
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
                /* All temp slots at this level must not conflict.  */
                preserve_temp_slots (target);
                DECL_RTL (slot) = target;
              }

#if 0
            /* I bet this needs to be done, and I bet that it needs to
               be above, inside the else clause.  The reason is
               simple: how else is it going to get cleaned up? (mrs)

               The reason it probably did not work before, and was
               commented out, is because this was re-expanding already
               expanded target_exprs (target == 0 and DECL_RTL (slot)
               != 0) and also cleaning them up many times as well.  :-( */

            /* Since SLOT is not known to the called function
               to belong to its stack frame, we must build an explicit
               cleanup.  This case occurs when we must build up a reference
               to pass the reference as an argument.  In this case,
               it is very likely that such a reference need not be
               built here.  */

            if (TREE_OPERAND (exp, 2) == 0)
              TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
            if (TREE_OPERAND (exp, 2))
              cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
                                              cleanups_this_call);
#endif
          }
        else
          {
            /* This case does occur, when expanding a parameter which
               needs to be constructed on the stack.  The target
               is the actual stack address that we want to initialize.
               The function we call will perform the cleanup in this case.  */

            DECL_RTL (slot) = target;
          }

        exp1 = TREE_OPERAND (exp, 1);
        /* Mark it as expanded.  */
        TREE_OPERAND (exp, 1) = NULL_TREE;

        return expand_expr (exp1, target, tmode, modifier);
      }
    case INIT_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
          noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_VALUE (noncopied_parts),
                               TREE_PURPOSE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }
    case MODIFY_EXPR:
      {
        /* If lhs is complex, expand calls in rhs before computing it.
           That's so we don't compute a pointer and save it over a call.
           If lhs is simple, compute it first so we can give it as a
           target if the rhs is just a call.  This avoids an extra temp and copy
           and that prevents a partial-subsumption which makes bad code.
           Actually we could treat component_ref's of vars like vars.  */

        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        temp = 0;

        if (TREE_CODE (lhs) != VAR_DECL
            && TREE_CODE (lhs) != RESULT_DECL
            && TREE_CODE (lhs) != PARM_DECL)
          preexpand_calls (exp);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (ignore
            && TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
            && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)),
                               0, 0);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }
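
        /* Illustrative sketch, not part of GNU CC, of the source pattern
           handled above: |= between two one-bit fields, when the result is
           unused, becomes a test and a conditional store of the constant 1.
           The struct and function names are hypothetical.  */
#if 0
struct demo_flags { unsigned a : 1, b : 1; };

static void
demo_bitfield_ior (p)
     struct demo_flags *p;
{
  /* p->a |= p->b;  is expanded as:  */
  if (p->b)
    p->a = 1;
}
#endif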
        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
            && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
          noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_PURPOSE (noncopied_parts),
                               TREE_VALUE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore);
    case ADDR_EXPR:
      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
          && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
        {
          op0 = trampoline_address (TREE_OPERAND (exp, 0));
          op0 = force_operand (op0, target);
        }
      else
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
                             (modifier == EXPAND_INITIALIZER
                              ? modifier : EXPAND_CONST_ADDRESS));
          if (GET_CODE (op0) != MEM)
            abort ();

          if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
            return XEXP (op0, 0);
          op0 = force_operand (XEXP (op0, 0), target);
        }
      if (flag_force_addr && GET_CODE (op0) != REG)
        return force_reg (Pmode, op0);
      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();

    default:
      return (*lang_expand_expr) (exp, target, tmode, modifier);
    }
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case NON_LVALUE_EXPR:
          exp = TREE_OPERAND (exp, 0);
          if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
            return align;
          inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
          inner = MIN (inner, max_align);
          align = MAX (align, inner);
          break;

        case PLUS_EXPR:
          /* If sum of pointer + int, restrict our maximum alignment to that
             imposed by the integer.  If not, we can't do any better than
             ALIGN.  */
          if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
            return align;

          while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
                  & (max_align - 1))
                 != 0)
            max_align >>= 1;

          exp = TREE_OPERAND (exp, 0);
          break;

        case ADDR_EXPR:
          /* See what we are pointing at and look at its alignment.  */
          exp = TREE_OPERAND (exp, 0);
          if (TREE_CODE (exp) == FUNCTION_DECL)
            align = MAX (align, FUNCTION_BOUNDARY);
          else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
            align = MAX (align, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
          else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
            align = CONSTANT_ALIGNMENT (exp, align);
#endif
          return MIN (align, max_align);

        default:
          return align;
        }
    }
}
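
/* Illustrative sketch, not part of GNU CC, of the PLUS_EXPR rule above:
   an offset of 6 bytes (48 bits) from a pointer aligned to MAX_ALIGN can
   only be guaranteed 16-bit alignment, since 48 is divisible by 16 but
   not by 32.  `demo_offset_align' is a hypothetical host-side rework of
   the same loop.  */
#if 0
static unsigned
demo_offset_align (offset_bytes, max_align)
     unsigned offset_bytes;
     unsigned max_align;        /* in bits; assumed a power of two */
{
  while (((offset_bytes * 8) & (max_align - 1)) != 0)
    max_align >>= 1;
  return max_align;             /* e.g. (6, 64) yields 16 */
}
#endif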
/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = arg1;
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = arg0;
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max, i;
  char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return 0;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  */
      /* This would perhaps not be valid if we were dealing with named
         arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
        return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
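
/* Illustrative sketch, not part of GNU CC, of why TREE_STRING_LENGTH is
   not the string length: the array below has size 8 (counting the implicit
   terminating null), but its strlen is 3.  c_strlen therefore searches for
   the first zero byte rather than trusting the array size.  The function
   name is hypothetical.  */
#if 0
static int
demo_embedded_nul ()
{
  static char s[] = "foo\0bar";
  /* sizeof s == 8 (the whole array), strlen (s) == 3 (stops at the
     first embedded zero byte).  */
  return sizeof s - strlen (s);         /* 5 */
}
#endif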
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
        break;

      if (arglist == 0
          /* Arg could be wrong type if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
        return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
          && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
        {
          exp = copy_node (exp);
          arglist = copy_node (arglist);
          TREE_OPERAND (exp, 1) = arglist;
          TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
        }
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      /* Compute sqrt into TARGET.
         Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
                            sqrt_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
         sequence (without outputting the insns) and break, causing
         a call to the library function.  */
      if (target == 0)
        {
          end_sequence ();
          break;
        }

      /* Check the results by default.  But if flag_fast_math is turned on,
         then assume sqrt will always be called with valid arguments.  */

      if (! flag_fast_math)
        {
          /* Don't define the sqrt instructions
             if your machine is not IEEE.  */
          if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
            abort ();

          lab1 = gen_label_rtx ();

          /* Test the result; if it is NaN, set errno=EDOM because
             the argument was not in the domain.  */
          emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
          emit_jump_insn (gen_beq (lab1));

#ifdef TARGET_EDOM
          {
#ifdef GEN_ERRNO_RTX
            rtx errno_rtx = GEN_ERRNO_RTX;
#else
            rtx errno_rtx
              = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
#endif

            emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
          }
#else
          /* We can't set errno=EDOM directly; let the library call do it.
             Pop the arguments right away in case the call gets deleted.  */
          NO_DEFER_POP;
          expand_call (exp, target, 0);
          OK_DEFER_POP;
#endif

          emit_label (lab1);
        }

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;
    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
         Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
        return saveregs_value;
      {
        /* When this function is called, it means that registers must be
           saved on entry to this function.  So we migrate the
           call to the first insn of this function.  */
        rtx temp;
        rtx seq;
        rtx valreg, saved_valreg;

        /* Now really call the function.  `expand_call' does not call
           expand_builtin, so there is no danger of infinite recursion here.  */
        start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
        /* Do whatever the machine needs done in this case.  */
        temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
        /* The register where the function returns its value
           is likely to have something else in it, such as an argument.
           So preserve that register around the call.  */
        if (value_mode != VOIDmode)
          {
            valreg = hard_libcall_value (value_mode);
            saved_valreg = gen_reg_rtx (value_mode);
            emit_move_insn (saved_valreg, valreg);
          }

        /* Generate the call, putting the value in a pseudo.  */
        temp = expand_call (exp, target, ignore);

        if (value_mode != VOIDmode)
          emit_move_insn (valreg, saved_valreg);
#endif

        seq = get_insns ();
        end_sequence ();

        saveregs_value = temp;

        /* This won't work inside a SEQUENCE--it really has to be
           at the start of the function.  */
        if (in_sequence_p ())
          {
            /* Better to do this than to crash.  */
            error ("`va_start' used within `({...})'");
            return temp;
          }

        /* Put the sequence after the NOTE that starts the function.  */
        emit_insns_before (seq, NEXT_INSN (get_insns ()));
        return temp;
      }
      /* __builtin_args_info (N) returns word N of the arg space info
         for the current function.  The number and meanings of words
         is controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
        int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
        int i;
        int *word_ptr = (int *) &current_function_args_info;
        tree type, elts, result;

        if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
          fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
                 __FILE__, __LINE__);

        if (arglist != 0)
          {
            tree arg = TREE_VALUE (arglist);
            if (TREE_CODE (arg) != INTEGER_CST)
              error ("argument of __builtin_args_info must be constant");
            else
              {
                int wordnum = TREE_INT_CST_LOW (arg);

                if (wordnum < 0 || wordnum >= nwords)
                  error ("argument of __builtin_args_info out of range");
                else
                  return GEN_INT (word_ptr[wordnum]);
              }
          }
        else
          error ("missing argument in __builtin_args_info");

        return const0_rtx;

#if 0
        for (i = 0; i < nwords; i++)
          elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);

        type = build_array_type (integer_type_node,
                                 build_index_type (build_int_2 (nwords, 0)));
        result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
        TREE_CONSTANT (result) = 1;
        TREE_STATIC (result) = 1;
        result = build (INDIRECT_REF, build_pointer_type (type), result);
        TREE_CONSTANT (result) = 1;
        return expand_expr (result, NULL_RTX, VOIDmode, 0);
#endif
      }
      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
        tree fntype = TREE_TYPE (current_function_decl);
        if (!(TYPE_ARG_TYPES (fntype) != 0
              && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
                  != void_type_node)))
          {
            error ("`va_start' used in function with fixed args");
            return const0_rtx;
          }
      }

      return expand_binop (Pmode, add_optab,
                           current_function_internal_arg_pointer,
                           current_function_arg_offset_rtx,
                           NULL_RTX, 0, OPTAB_LIB_WIDEN);
    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
        {
          tree type = TREE_TYPE (TREE_VALUE (arglist));
          enum tree_code code = TREE_CODE (type);
          if (code == VOID_TYPE)
            return GEN_INT (void_type_class);
          if (code == INTEGER_TYPE)
            return GEN_INT (integer_type_class);
          if (code == CHAR_TYPE)
            return GEN_INT (char_type_class);
          if (code == ENUMERAL_TYPE)
            return GEN_INT (enumeral_type_class);
          if (code == BOOLEAN_TYPE)
            return GEN_INT (boolean_type_class);
          if (code == POINTER_TYPE)
            return GEN_INT (pointer_type_class);
          if (code == REFERENCE_TYPE)
            return GEN_INT (reference_type_class);
          if (code == OFFSET_TYPE)
            return GEN_INT (offset_type_class);
          if (code == REAL_TYPE)
            return GEN_INT (real_type_class);
          if (code == COMPLEX_TYPE)
            return GEN_INT (complex_type_class);
          if (code == FUNCTION_TYPE)
            return GEN_INT (function_type_class);
          if (code == METHOD_TYPE)
            return GEN_INT (method_type_class);
          if (code == RECORD_TYPE)
            return GEN_INT (record_type_class);
          if (code == UNION_TYPE)
            return GEN_INT (union_type_class);
          if (code == ARRAY_TYPE)
            return GEN_INT (array_type_class);
          if (code == STRING_TYPE)
            return GEN_INT (string_type_class);
          if (code == SET_TYPE)
            return GEN_INT (set_type_class);
          if (code == FILE_TYPE)
            return GEN_INT (file_type_class);
          if (code == LANG_TYPE)
            return GEN_INT (lang_type_class);
        }
      return GEN_INT (no_type_class);
    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
        return const0_rtx;
      else
        return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
                ? const1_rtx : const0_rtx);
    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
         It counts the number of frames to scan up the stack.
         The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
         It counts the number of frames to scan up the stack.
         The value is the return address saved in that frame.  */
      if (arglist == 0)
        /* Warning about missing arg was already issued.  */
        return const0_rtx;
      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
        {
          error ("invalid arg to __builtin_return_address");
          return const0_rtx;
        }
      else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
        {
          error ("invalid arg to __builtin_return_address");
          return const0_rtx;
        }
      else
        {
          int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
          rtx tem = frame_pointer_rtx;
          int i;

          /* Scan back COUNT frames to the specified frame.  */
          for (i = 0; i < count; i++)
            {
              /* Assume the dynamic chain pointer is in the word that
                 the frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
              tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
              tem = memory_address (Pmode, tem);
              tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
            }

          /* For __builtin_frame_address, return what we've got.  */
          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
            return tem;

          /* For __builtin_return_address,
             get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
          return RETURN_ADDR_RTX (count, tem);
#else
          tem = memory_address (Pmode,
                                plus_constant (tem, GET_MODE_SIZE (Pmode)));
          return copy_to_reg (gen_rtx (MEM, Pmode, tem));
#endif
        }
    case BUILT_IN_ALLOCA:
      if (arglist == 0
          /* Arg could be non-integer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
        return const0_rtx;
      current_function_calls_alloca = 1;
      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);

      /* Record the new stack level for nonlocal gotos.  */
      if (nonlocal_goto_handler_slot != 0)
        emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
      return target;
    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize)
        break;

      if (arglist == 0
          /* Arg could be non-integer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
        return const0_rtx;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
         Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
                            ffs_optab, op0, target, 1);
      if (target == 0)
        abort ();
      return target;
    case BUILT_IN_STRLEN:
      /* If not optimizing, call the library function.  */
      if (!optimize)
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
        return const0_rtx;
      else
        {
          tree src = TREE_VALUE (arglist);
          tree len = c_strlen (src);

          int align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

          rtx result, src_rtx, char_rtx;
          enum machine_mode insn_mode = value_mode, char_mode;
          enum insn_code icode;

          /* If the length is known, just return it.  */
          if (len != 0)
            return expand_expr (len, target, mode, 0);

          /* If SRC is not a pointer type, don't do this operation inline.  */
          if (align == 0)
            break;

          /* Call a function if we can't compute strlen in the right mode.  */
          while (insn_mode != VOIDmode)
            {
              icode = strlen_optab->handlers[(int) insn_mode].insn_code;
              if (icode != CODE_FOR_nothing)
                break;

              insn_mode = GET_MODE_WIDER_MODE (insn_mode);
            }
          if (insn_mode == VOIDmode)
            break;

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && GET_CODE (result) == REG
                 && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          /* Make sure the operands are acceptable to the predicates.  */

          if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
            result = gen_reg_rtx (insn_mode);

          src_rtx = memory_address (BLKmode,
                                    expand_expr (src, NULL_RTX, Pmode,
                                                 EXPAND_NORMAL));
          if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
            src_rtx = copy_to_mode_reg (Pmode, src_rtx);

          char_rtx = const0_rtx;
          char_mode = insn_operand_mode[(int)icode][2];
          if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
            char_rtx = copy_to_mode_reg (char_mode, char_rtx);

          emit_insn (GEN_FCN (icode) (result,
                                      gen_rtx (MEM, BLKmode, src_rtx),
                                      char_rtx, GEN_INT (align)));

          /* Return the value in the proper mode for this function.  */
          if (GET_MODE (result) == value_mode)
            return result;
          else if (target != 0)
            {
              convert_move (target, result, 0);
              return target;
            }
          else
            return convert_to_mode (value_mode, result, 0);
        }
    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize)
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
        return const0_rtx;
      else
        {
          tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

          if (len == 0)
            break;

          len = size_binop (PLUS_EXPR, len, integer_one_node);

          chainon (arglist, build_tree_list (NULL_TREE, len));
        }
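
      /* Illustrative sketch, not part of GNU CC: the effect of the strcpy
         case above at the source level.  When the source is a string
         constant, the copy has a known size and falls through to the
         memcpy case below.  `demo_strcpy_const' is a hypothetical name.  */
#if 0
static void
demo_strcpy_const (buf)
     char *buf;
{
  /* strcpy (buf, "abc") is given a third, hidden length argument of
     strlen ("abc") + 1 = 4 and then handled exactly like
     memcpy (buf, "abc", 4).  */
  strcpy (buf, "abc");
}
#endif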
      /* Drops in.  */
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize)
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        return const0_rtx;
      else
        {
          tree dest = TREE_VALUE (arglist);
          tree src = TREE_VALUE (TREE_CHAIN (arglist));
          tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

          int src_align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          int dest_align
            = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          rtx dest_rtx;

          /* If either SRC or DEST is not a pointer type, don't do
             this operation in-line.  */
          if (src_align == 0 || dest_align == 0)
            {
              if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
                TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
              break;
            }

          dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);

          /* Copy word part most expediently.  */
          emit_block_move (gen_rtx (MEM, BLKmode,
                                    memory_address (BLKmode, dest_rtx)),
                           gen_rtx (MEM, BLKmode,
                                    memory_address (BLKmode,
                                                    expand_expr (src, NULL_RTX,
                                                                 Pmode,
                                                                 EXPAND_NORMAL))),
                           expand_expr (len, NULL_RTX, VOIDmode, 0),
                           MIN (src_align, dest_align));
          return dest_rtx;
        }
      /* These comparison functions need an instruction that returns an actual
         index.  An ordinary compare that just sets the condition codes
         is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize)
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
        return const0_rtx;
      else if (!HAVE_cmpstrsi)
        break;
      {
        tree arg1 = TREE_VALUE (arglist);
        tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
        tree len, len2;

        len = c_strlen (arg1);
        if (len)
          len = size_binop (PLUS_EXPR, integer_one_node, len);
        len2 = c_strlen (arg2);
        if (len2)
          len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

        /* If we don't have a constant length for the first, use the length
           of the second, if we know it.  We don't require a constant for
           this case; some cost analysis could be done if both are available
           but neither is constant.  For now, assume they're equally cheap.

           If both strings have constant lengths, use the smaller.  This
           could arise if optimization results in strcpy being called with
           two fixed strings, or if the code was machine-generated.  We should
           add some code to the `memcmp' handler below to deal with such
           situations, someday.  */
        if (!len || TREE_CODE (len) != INTEGER_CST)
          {
            if (len2)
              len = len2;
            else
              break;
          }
        else if (len2 && TREE_CODE (len2) == INTEGER_CST)
          {
            if (tree_int_cst_lt (len2, len))
              len = len2;
          }

        chainon (arglist, build_tree_list (NULL_TREE, len));
      }
      /* Drops in.  */
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize)
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        return const0_rtx;
      else if (!HAVE_cmpstrsi)
        break;
      {
        tree arg1 = TREE_VALUE (arglist);
        tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
        tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
        rtx result;

        int arg1_align
          = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
        int arg2_align
          = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
        enum machine_mode insn_mode
          = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

        /* If we don't have POINTER_TYPE, call the function.  */
        if (arg1_align == 0 || arg2_align == 0)
          {
            if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
              TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
            break;
          }

        /* Make a place to write the result of the instruction.  */
        result = target;
        if (! (result != 0
               && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
               && REGNO (result) >= FIRST_PSEUDO_REGISTER))
          result = gen_reg_rtx (insn_mode);

        emit_insn (gen_cmpstrsi (result,
                                 gen_rtx (MEM, BLKmode,
                                          expand_expr (arg1, NULL_RTX, Pmode,
                                                       EXPAND_NORMAL)),
                                 gen_rtx (MEM, BLKmode,
                                          expand_expr (arg2, NULL_RTX, Pmode,
                                                       EXPAND_NORMAL)),
                                 expand_expr (len, NULL_RTX, VOIDmode, 0),
                                 GEN_INT (MIN (arg1_align, arg2_align))));

        /* Return the value in the proper mode for this function.  */
        mode = TYPE_MODE (TREE_TYPE (exp));
        if (GET_MODE (result) == mode)
          return result;
        else if (target != 0)
          {
            convert_move (target, result, 0);
            return target;
          }
        else
          return convert_to_mode (mode, result, 0);
      }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif

    default:                    /* just do library call, if unknown builtin */
      error ("built-in function %s not currently supported",
             IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */

  return expand_call (exp, target, ignore);
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post)
     register tree exp;
     int post;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */
  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must
     therefore contrive to increment the original value.
     Then we can return OP0 since it is a copy of the old value.  */
  if (op0_is_copy)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED
         are prevented because either (1) it is a component_ref,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build ((this_optab == add_optab
                            ? PLUS_EXPR : MINUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));
      temp = expand_assignment (incremented, newexp, ! post, 0);
      return post ? op0 : temp;
    }
  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.  */

#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode))
        {
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
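
/* Illustrative sketch, not part of GNU CC, of the value returned for a
   postincrement when no single add insn can be queued: the old value is
   copied out first, the operand is incremented in place, and the copy is
   what the expression yields.  `demo_postinc' is a hypothetical name.  */
#if 0
static int
demo_postinc (p)
     int *p;
{
  int old = *p;         /* temp = value = copy_to_reg (op0) */
  *p = old + 1;         /* increment stored back into OP0 */
  return old;           /* a postincrement yields the old value */
}
#endif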
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0)
        return;

      /* Do nothing to built-in functions.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
          || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
          || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;
    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
         the adjust will be done for it (since it makes jumps).
         Do it now, so that if this is inside an argument
         of a function, we don't get the stack adjustment
         after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
        type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
        if (type == 'e' || type == '<' || type == '1' || type == '2'
            || type == 'r')
          preexpand_calls (TREE_OPERAND (exp, i));
      }
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}
/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand all cleanups up to OLD_CLEANUPS.
   Needed here, and also for language-dependent calls.  */

void
expand_cleanups_to (old_cleanups)
     tree old_cleanups;
{
  while (cleanups_this_call != old_cleanups)
    {
      expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif
    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;
#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
                                   TREE_OPERAND (exp, 0),
                                   TREE_OPERAND (exp, 1)),
                            NE, NE);
      break;
    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
          && (type = type_for_size (i + 1, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;
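
      /* Illustrative sketch, not part of GNU CC, of the narrowing above:
         when testing `x & 0x80', the constant fits in 8 bits, so the test
         can be done as a QImode (byte) comparison instead of a full-word
         one.  `demo_narrow_test' is a hypothetical name.  */
#if 0
static int
demo_narrow_test (x)
     int x;
{
  /* do_jump rewrites this as a test of (unsigned char) (x & 0x80),
     provided the machine has byte compares and !SLOW_BYTE_ACCESS.  */
  return (x & 0x80) != 0;
}
#endif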
    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;
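
      /* Illustrative sketch, not part of GNU CC, of the jump threading done
         for TRUTH_ANDIF_EXPR above: each operand jumps straight to the
         shared false label, so no boolean value is ever materialized.
         `demo_andif' and `work' are hypothetical names.  */
#if 0
extern int work ();

static void
demo_andif (a, b)
     int a, b;
{
  /* `if (a && b) work ();' is expanded roughly as:
        if (a == 0) goto false_label;
        if (b == 0) goto false_label;
        work ();
      false_label:;  */
  if (a && b)
    work ();
}
#endif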
    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
        int bitsize, bitpos, unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep);

        type = type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }
    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          register rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);
          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
        }
      break;
    case EQ_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
                == MODE_INT)
               &&
               !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_equality (exp, if_false_label, if_true_label);
      else
        comparison = compare (exp, EQ, EQ);
      break;

    case NE_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
                == MODE_INT)
               &&
               !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_equality (exp, if_true_label, if_false_label);
      else
        comparison = compare (exp, NE, NE);
      break;
6181 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6183 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6184 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6186 comparison = compare (exp, LT, LTU);
6190 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6192 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6193 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6195 comparison = compare (exp, LE, LEU);
6199 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6201 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6202 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6204 comparison = compare (exp, GT, GTU);
6208 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6210 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6211 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6213 comparison = compare (exp, GE, GEU);
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      else
        abort ();
      break;
    }
  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  free_temp_slots ();

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
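/* Illustrative sketch, assuming two-word operands (e.g. DImode with
   32-bit words) and a test of A > B: the loop below emits, roughly,
        if (A.high >  B.high) goto if_true_label;
        if (A.high != B.high) goto if_false_label;
        if (A.low  >  B.low)  goto if_true_label;   (always unsigned)
        goto if_false_label;
   Only the most significant word is compared with the operands' own
   signedness; all lower words are compared unsigned.  */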
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */
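/* Illustrative sketch for two-word operands: the loop below emits,
   roughly,
        if (A.word0 != B.word0) goto if_false_label;
        if (A.word1 != B.word1) goto if_false_label;
   reaching the final jump to if_true_label only when every word pair
   matches.  */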
static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      /* A constant result means this word pair is known unequal
         (const0_rtx) or known equal (const_true_rtx).  */
      if (comp == const0_rtx)
        emit_jump (if_false_label);
      else if (comp != const_true_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */
static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      /* A constant result means this word is known nonzero (const0_rtx)
         or known zero (const_true_rtx).  */
      if (comp == const0_rtx)
        emit_jump (if_false_label);
      else if (comp != const_true_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */
static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = PREV_INSN (get_last_insn ());
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the insn before what was just emitted.
         On some machines, emitting the branch can discard
         the previous compare insn and emit a replacement.  */
      if (prev == 0)
        /* If there's only one preceding insn...  */
        insn = get_insns ();
      else
        insn = NEXT_INSN (prev);

      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
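/* (The result is thus either the folded constant comparison --
   const0_rtx or const_true_rtx -- when both operands are CONST_INTs,
   or an rtx of the form (CODE cc0 0) describing the condition left in
   the condition-code register by the compare insn just emitted.)  */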
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  /* If one operand is constant, make it the second one.  */

  if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
    return simplify_relational_operation (code, mode, op0, op1);

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
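  /* Illustrative example (assuming a 32-bit int): `(x & 8) != 0' has
     bitnum == 3 and is computed as `(x >> 3) & 1'; for `(x & 8) == 0'
     that value is then XORed with 1.  When the tested bit is the sign
     bit, the final AND is skipped since the logical shift already
     leaves just 0 or 1.  */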
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
                                                    NULL_RTX, VOIDmode, 0)));

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), target, 1);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, 1);

      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, target);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
                            OPTAB_LIB_WIDEN);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1, operand_mode,
                            unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
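  /* Schematically, the fallback emitted below is:
         target = 1;                    -- 0 if INVERT
         if (op0 <code> op1) goto label;
         target = 0;                    -- 1 if INVERT
     label:
     This is why TARGET must not be mentioned in OP0 or OP1: the first
     move would clobber an operand before the comparison reads it.  */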
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
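  /* Worked example: for case values 5..8, the lower bound 5 has
     already been subtracted, so INDEX holds i - 5 and RANGE holds 3.
     For i == 4 the subtraction wraps around to a huge unsigned value,
     and for i == 9 it yields 4; in both cases RANGE < INDEX when
     compared unsigned, so the single LTU branch below rejects both
     out-of-range sides at once.  */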
  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
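  /* The address computed below is, schematically,
         table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)
     i.e. the address of entry INDEX in a vector of label-refs, each
     one case-vector element wide.  */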
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
              gen_rtx (MULT, Pmode, index,
                       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
              gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */