1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
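/* For example, CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD) rounds a
   size in bytes up to a whole number of words, as used below when
   computing how many words a multiword value occupies.  */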
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
40 #ifdef STACK_GROWS_DOWNWARD
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
50 #define STACK_PUSH_CODE PRE_INC
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85   The value is the pseudoreg containing the value __builtin_saveregs returned.  */
87 static rtx saveregs_value;
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
112 /* MOVE_RATIO is the number of move instructions that is better than a block move.  */
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
119 /* A value of around 6 would minimize code size; infinity would minimize execution time.  */
121 #define MOVE_RATIO 15
125 /* This array records the insn_code of insns to perform block moves. */
126 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
128 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
130 #ifndef SLOW_UNALIGNED_ACCESS
131 #define SLOW_UNALIGNED_ACCESS 0
134 /* This is run once per compilation to set up which modes can be used
135 directly in memory and to initialize the block move optab. */
141 enum machine_mode mode;
142 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
145 insn = emit_insn (gen_rtx (SET, 0, 0));
146 pat = PATTERN (insn);
148 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
149 mode = (enum machine_mode) ((int) mode + 1))
155 direct_load[(int) mode] = direct_store[(int) mode] = 0;
156 PUT_MODE (mem, mode);
158 /* Find a register that can be used in this mode, if any. */
159 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
160 if (HARD_REGNO_MODE_OK (regno, mode))
163 if (regno == FIRST_PSEUDO_REGISTER)
166 reg = gen_rtx (REG, mode, regno);
169 SET_DEST (pat) = reg;
170 direct_load[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;
173 SET_DEST (pat) = mem;
174 direct_store[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;
176 movstr_optab[(int) mode] = CODE_FOR_nothing;
183 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
187 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
191 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
195 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
199 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
203 /* This is run at the start of compiling a function. */
210 pending_stack_adjust = 0;
211 inhibit_defer_pop = 0;
212 cleanups_this_call = 0;
217 /* Save all variables describing the current status into the structure *P.
218 This is used before starting a nested function. */
224 /* Instead of saving the postincrement queue, empty it. */
227 p->pending_stack_adjust = pending_stack_adjust;
228 p->inhibit_defer_pop = inhibit_defer_pop;
229 p->cleanups_this_call = cleanups_this_call;
230 p->saveregs_value = saveregs_value;
231 p->forced_labels = forced_labels;
233 pending_stack_adjust = 0;
234 inhibit_defer_pop = 0;
235 cleanups_this_call = 0;
240 /* Restore all variables describing the current status from the structure *P.
241 This is used after a nested function. */
244 restore_expr_status (p)
247 pending_stack_adjust = p->pending_stack_adjust;
248 inhibit_defer_pop = p->inhibit_defer_pop;
249 cleanups_this_call = p->cleanups_this_call;
250 saveregs_value = p->saveregs_value;
251 forced_labels = p->forced_labels;
254 /* Manage the queue of increment instructions to be output
255 for POSTINCREMENT_EXPR expressions, etc. */
257 static rtx pending_chain;
259 /* Queue up to increment (or change) VAR later. BODY says how:
260 BODY should be the same thing you would pass to emit_insn
261 to increment right away. It will go to emit_insn later on.
263 The value is a QUEUED expression to be used in place of VAR
264 where you want to guarantee the pre-incrementation value of VAR. */
267 enqueue_insn (var, body)
270 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
271 var, NULL_RTX, NULL_RTX, body, pending_chain);
272 return pending_chain;
275 /* Use protect_from_queue to convert a QUEUED expression
276 into something that you can put immediately into an instruction.
277 If the queued incrementation has not happened yet,
278 protect_from_queue returns the variable itself.
279 If the incrementation has happened, protect_from_queue returns a temp
280 that contains a copy of the old value of the variable.
282 Any time an rtx which might possibly be a QUEUED is to be put
283 into an instruction, it must be passed through protect_from_queue first.
284 QUEUED expressions are not meaningful in instructions.
286 Do not pass a value through protect_from_queue and then hold
287 on to it for a while before putting it in an instruction!
288 If the queue is flushed in between, incorrect code will result. */
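/* For instance (an illustrative sketch only; OP0 and TARGET stand for
   hypothetical rtx locals of a caller), code that is about to emit an
   insn involving possibly-queued operands would write

       op0 = protect_from_queue (op0, 0);
       target = protect_from_queue (target, 1);
       emit_insn (gen_move_insn (target, op0));

   calling protect_from_queue immediately before emitting, so that no
   flush of the queue can intervene.  */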
291 protect_from_queue (x, modify)
295 register RTX_CODE code = GET_CODE (x);
297 #if 0 /* A QUEUED can hang around after the queue is forced out. */
298 /* Shortcut for most common case. */
299 if (pending_chain == 0)
305 /* A special hack for read access to (MEM (QUEUED ...))
306 to facilitate use of autoincrement.
307 Make a copy of the contents of the memory location
308 rather than a copy of the address, but not
309 if the value is of mode BLKmode. */
310 if (code == MEM && GET_MODE (x) != BLKmode
311 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
313 register rtx y = XEXP (x, 0);
314 XEXP (x, 0) = QUEUED_VAR (y);
317 register rtx temp = gen_reg_rtx (GET_MODE (x));
318 emit_insn_before (gen_move_insn (temp, x),
324 /* Otherwise, recursively protect the subexpressions of all
325 the kinds of rtx's that can contain a QUEUED. */
327 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
328 else if (code == PLUS || code == MULT)
330 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
331 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
335 /* If the increment has not happened, use the variable itself. */
336 if (QUEUED_INSN (x) == 0)
337 return QUEUED_VAR (x);
338   /* If the increment has happened and a pre-increment copy exists, use that copy.  */
340 if (QUEUED_COPY (x) != 0)
341 return QUEUED_COPY (x);
342 /* The increment has happened but we haven't set up a pre-increment copy.
343 Set one up now, and use it. */
344 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
345 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
347 return QUEUED_COPY (x);
350 /* Return nonzero if X contains a QUEUED expression:
351 if it contains anything that will be altered by a queued increment.
352 We handle only combinations of MEM, PLUS, MINUS and MULT operators
353 since memory addresses generally contain only those. */
359 register enum rtx_code code = GET_CODE (x);
365 return queued_subexp_p (XEXP (x, 0));
369 return queued_subexp_p (XEXP (x, 0))
370 || queued_subexp_p (XEXP (x, 1));
375 /* Perform all the pending incrementations. */
381 while (p = pending_chain)
383 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
384 pending_chain = QUEUED_NEXT (p);
395 /* Copy data from FROM to TO, where the machine modes are not the same.
396 Both modes may be integer, or both may be floating.
397 UNSIGNEDP should be nonzero if FROM is an unsigned type.
398 This causes zero-extension instead of sign-extension. */
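/* For example (a hedged sketch; FROM is assumed to be an existing SImode
   rtx in some caller), sign-extending that value into a fresh DImode
   pseudo-register could be written

       rtx to = gen_reg_rtx (DImode);
       convert_move (to, from, 0);

   while passing 1 for UNSIGNEDP would request zero-extension instead.  */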
401 convert_move (to, from, unsignedp)
402 register rtx to, from;
405 enum machine_mode to_mode = GET_MODE (to);
406 enum machine_mode from_mode = GET_MODE (from);
407 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
408 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
412 /* rtx code for making an equivalent value. */
413 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
415 to = protect_from_queue (to, 1);
416 from = protect_from_queue (from, 0);
418 if (to_real != from_real)
421 if (to_mode == from_mode
422 || (from_mode == VOIDmode && CONSTANT_P (from)))
424 emit_move_insn (to, from);
430 #ifdef HAVE_extendsfdf2
431 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
433 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
437 #ifdef HAVE_extendsfxf2
438 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
440 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
444 #ifdef HAVE_extendsftf2
445 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
447 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
451 #ifdef HAVE_extenddfxf2
452 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
454 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
458 #ifdef HAVE_extenddftf2
459 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
461 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
465 #ifdef HAVE_truncdfsf2
466 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
468 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
472 #ifdef HAVE_truncxfsf2
473 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
475 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
479 #ifdef HAVE_trunctfsf2
480 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
482 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
486 #ifdef HAVE_truncxfdf2
487 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
489 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
493 #ifdef HAVE_trunctfdf2
494 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
496 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
508 libcall = extendsfdf2_libfunc;
512 libcall = extendsfxf2_libfunc;
516 libcall = extendsftf2_libfunc;
525 libcall = truncdfsf2_libfunc;
529 libcall = extenddfxf2_libfunc;
533 libcall = extenddftf2_libfunc;
542 libcall = truncxfsf2_libfunc;
546 libcall = truncxfdf2_libfunc;
555 libcall = trunctfsf2_libfunc;
559 libcall = trunctfdf2_libfunc;
565 if (libcall == (rtx) 0)
566 /* This conversion is not implemented yet. */
569 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
570 emit_move_insn (to, hard_libcall_value (to_mode));
574 /* Now both modes are integers. */
576 /* Handle expanding beyond a word. */
577 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
578 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
585 enum machine_mode lowpart_mode;
586 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
588 /* Try converting directly if the insn is supported. */
589 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
592 emit_unop_insn (code, to, from, equiv_code);
595 /* Next, try converting via full word. */
596 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
597 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
598 != CODE_FOR_nothing))
600 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
601 emit_unop_insn (code, to,
602 gen_lowpart (word_mode, to), equiv_code);
606 /* No special multiword conversion insn; do it by hand. */
609 /* Get a copy of FROM widened to a word, if necessary. */
610 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
611 lowpart_mode = word_mode;
613 lowpart_mode = from_mode;
615 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
617 lowpart = gen_lowpart (lowpart_mode, to);
618 emit_move_insn (lowpart, lowfrom);
620 /* Compute the value to put in each remaining word. */
622 fill_value = const0_rtx;
627 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
628 && STORE_FLAG_VALUE == -1)
630 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
632 fill_value = gen_reg_rtx (word_mode);
633 emit_insn (gen_slt (fill_value));
639 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
640 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
642 fill_value = convert_to_mode (word_mode, fill_value, 1);
646 /* Fill the remaining words. */
647 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
649 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
650 rtx subword = operand_subword (to, index, 1, to_mode);
655 if (fill_value != subword)
656 emit_move_insn (subword, fill_value);
659 insns = get_insns ();
662 emit_no_conflict_block (insns, to, from, NULL_RTX,
663 gen_rtx (equiv_code, to_mode, from));
667 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
669 convert_move (to, gen_lowpart (word_mode, from), 0);
673 /* Handle pointer conversion */ /* SPEE 900220 */
674 if (to_mode == PSImode)
676 if (from_mode != SImode)
677 from = convert_to_mode (SImode, from, unsignedp);
679 #ifdef HAVE_truncsipsi
682 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
685 #endif /* HAVE_truncsipsi */
689 if (from_mode == PSImode)
691 if (to_mode != SImode)
693 from = convert_to_mode (SImode, from, unsignedp);
698 #ifdef HAVE_extendpsisi
699 if (HAVE_extendpsisi)
701 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
704 #endif /* HAVE_extendpsisi */
709 /* Now follow all the conversions between integers
710 no more than a word long. */
712 /* For truncation, usually we can just refer to FROM in a narrower mode. */
713 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
714 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
715 GET_MODE_BITSIZE (from_mode))
716 && ((GET_CODE (from) == MEM
717 && ! MEM_VOLATILE_P (from)
718 && direct_load[(int) to_mode]
719 && ! mode_dependent_address_p (XEXP (from, 0)))
720 || GET_CODE (from) == REG
721 || GET_CODE (from) == SUBREG))
723 emit_move_insn (to, gen_lowpart (to_mode, from));
727   /* Handle extension.  */
728 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
730 /* Convert directly if that works. */
731 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
734 /* If FROM is a SUBREG, put it into a register. Do this
735 so that we always generate the same set of insns for
736 better cse'ing; if an intermediate assignment occurred,
737 we won't be doing the operation directly on the SUBREG. */
738 if (optimize > 0 && GET_CODE (from) == SUBREG)
739 from = force_reg (from_mode, from);
740 emit_unop_insn (code, to, from, equiv_code);
745 enum machine_mode intermediate;
747 /* Search for a mode to convert via. */
748 for (intermediate = from_mode; intermediate != VOIDmode;
749 intermediate = GET_MODE_WIDER_MODE (intermediate))
750 if ((can_extend_p (to_mode, intermediate, unsignedp)
752 && (can_extend_p (intermediate, from_mode, unsignedp)
753 != CODE_FOR_nothing))
755 convert_move (to, convert_to_mode (intermediate, from,
756 unsignedp), unsignedp);
760 /* No suitable intermediate mode. */
765 /* Support special truncate insns for certain modes. */
767 if (from_mode == DImode && to_mode == SImode)
769 #ifdef HAVE_truncdisi2
772 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
776 convert_move (to, force_reg (from_mode, from), unsignedp);
780 if (from_mode == DImode && to_mode == HImode)
782 #ifdef HAVE_truncdihi2
785 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
789 convert_move (to, force_reg (from_mode, from), unsignedp);
793 if (from_mode == DImode && to_mode == QImode)
795 #ifdef HAVE_truncdiqi2
798 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
802 convert_move (to, force_reg (from_mode, from), unsignedp);
806 if (from_mode == SImode && to_mode == HImode)
808 #ifdef HAVE_truncsihi2
811 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
815 convert_move (to, force_reg (from_mode, from), unsignedp);
819 if (from_mode == SImode && to_mode == QImode)
821 #ifdef HAVE_truncsiqi2
824 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
828 convert_move (to, force_reg (from_mode, from), unsignedp);
832 if (from_mode == HImode && to_mode == QImode)
834 #ifdef HAVE_trunchiqi2
837 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
841 convert_move (to, force_reg (from_mode, from), unsignedp);
845 /* Handle truncation of volatile memrefs, and so on;
846 the things that couldn't be truncated directly,
847 and for which there was no special instruction. */
848 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
850 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
851 emit_move_insn (to, temp);
855 /* Mode combination is not recognized. */
859 /* Return an rtx for a value that would result
860 from converting X to mode MODE.
861 Both X and MODE may be floating, or both integer.
862 UNSIGNEDP is nonzero if X is an unsigned value.
863 This can be done by referring to a part of X in place
864 or by copying to a new temporary with conversion.
866 This function *must not* call protect_from_queue
867 except when putting X into an insn (in which case convert_move does it). */
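/* For example (illustrative only; X is assumed to be some QImode rtx of
   a caller), widening it as an unsigned value to SImode would be

       rtx wide = convert_to_mode (SImode, x, 1);

   where WIDE is either a low part of X referred to in place or a new
   pseudo holding the converted copy.  */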
870 convert_to_mode (mode, x, unsignedp)
871 enum machine_mode mode;
877 if (mode == GET_MODE (x))
880 /* There is one case that we must handle specially: If we are converting
881 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
882 we are to interpret the constant as unsigned, gen_lowpart will do
883      the wrong thing if the constant appears negative.  What we want to do is
884 make the high-order word of the constant zero, not all ones. */
886 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
887 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
888 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
889 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
891 /* We can do this with a gen_lowpart if both desired and current modes
892 are integer, and this is either a constant integer, a register, or a
893      non-volatile MEM.  Except for the constant case, we must be narrowing the operand.  */
896 if (GET_CODE (x) == CONST_INT
897 || (GET_MODE_CLASS (mode) == MODE_INT
898 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
899 && (GET_CODE (x) == CONST_DOUBLE
900 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
901 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
902 && direct_load[(int) mode]
903 || GET_CODE (x) == REG)))))
904 return gen_lowpart (mode, x);
906 temp = gen_reg_rtx (mode);
907 convert_move (temp, x, unsignedp);
911 /* Generate several move instructions to copy LEN bytes
912 from block FROM to block TO. (These are MEM rtx's with BLKmode).
913 The caller must pass FROM and TO
914 through protect_from_queue before calling.
915 ALIGN (in bytes) is maximum alignment we can assume. */
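/* For example (a sketch; TO and FROM are assumed to be BLKmode MEM rtx's
   already passed through protect_from_queue), copying 16 bytes known to
   be word-aligned could be requested as

       move_by_pieces (to, from, 16, UNITS_PER_WORD);

   which emits a short sequence of ordinary move insns rather than a
   block-move library call.  */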
917 struct move_by_pieces
926 int explicit_inc_from;
932 static void move_by_pieces_1 ();
933 static int move_by_pieces_ninsns ();
936 move_by_pieces (to, from, len, align)
940 struct move_by_pieces data;
941 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
942 int max_size = MOVE_MAX + 1;
945 data.to_addr = to_addr;
946 data.from_addr = from_addr;
950 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
951 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
953 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
954 || GET_CODE (from_addr) == POST_INC
955 || GET_CODE (from_addr) == POST_DEC);
957 data.explicit_inc_from = 0;
958 data.explicit_inc_to = 0;
960 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
961 if (data.reverse) data.offset = len;
964 /* If copying requires more than two move insns,
965 copy addresses to registers (to make displacements shorter)
966 and use post-increment if available. */
967 if (!(data.autinc_from && data.autinc_to)
968 && move_by_pieces_ninsns (len, align) > 2)
970 #ifdef HAVE_PRE_DECREMENT
971 if (data.reverse && ! data.autinc_from)
973 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
974 data.autinc_from = 1;
975 data.explicit_inc_from = -1;
978 #ifdef HAVE_POST_INCREMENT
979 if (! data.autinc_from)
981 data.from_addr = copy_addr_to_reg (from_addr);
982 data.autinc_from = 1;
983 data.explicit_inc_from = 1;
986 if (!data.autinc_from && CONSTANT_P (from_addr))
987 data.from_addr = copy_addr_to_reg (from_addr);
988 #ifdef HAVE_PRE_DECREMENT
989 if (data.reverse && ! data.autinc_to)
991 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
993 data.explicit_inc_to = -1;
996 #ifdef HAVE_POST_INCREMENT
997 if (! data.reverse && ! data.autinc_to)
999 data.to_addr = copy_addr_to_reg (to_addr);
1001 data.explicit_inc_to = 1;
1004 if (!data.autinc_to && CONSTANT_P (to_addr))
1005 data.to_addr = copy_addr_to_reg (to_addr);
1008 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1009 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1012 /* First move what we can in the largest integer mode, then go to
1013 successively smaller modes. */
1015 while (max_size > 1)
1017 enum machine_mode mode = VOIDmode, tmode;
1018 enum insn_code icode;
1020 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1021 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1022 if (GET_MODE_SIZE (tmode) < max_size)
1025 if (mode == VOIDmode)
1028 icode = mov_optab->handlers[(int) mode].insn_code;
1029 if (icode != CODE_FOR_nothing
1030 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1031 GET_MODE_SIZE (mode)))
1032 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1034 max_size = GET_MODE_SIZE (mode);
1037 /* The code above should have handled everything. */
1042 /* Return number of insns required to move L bytes by pieces.
1043 ALIGN (in bytes) is maximum alignment we can assume. */
1046 move_by_pieces_ninsns (l, align)
1050 register int n_insns = 0;
1051 int max_size = MOVE_MAX + 1;
1053 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1054 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1057 while (max_size > 1)
1059 enum machine_mode mode = VOIDmode, tmode;
1060 enum insn_code icode;
1062 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1063 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1064 if (GET_MODE_SIZE (tmode) < max_size)
1067 if (mode == VOIDmode)
1070 icode = mov_optab->handlers[(int) mode].insn_code;
1071 if (icode != CODE_FOR_nothing
1072 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1073 GET_MODE_SIZE (mode)))
1074 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1076 max_size = GET_MODE_SIZE (mode);
1082 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1083 with move instructions for mode MODE. GENFUN is the gen_... function
1084 to make a move insn for that mode. DATA has all the other info. */
1087 move_by_pieces_1 (genfun, mode, data)
1089 enum machine_mode mode;
1090 struct move_by_pieces *data;
1092 register int size = GET_MODE_SIZE (mode);
1093 register rtx to1, from1;
1095 while (data->len >= size)
1097 if (data->reverse) data->offset -= size;
1099 to1 = (data->autinc_to
1100 ? gen_rtx (MEM, mode, data->to_addr)
1101 : change_address (data->to, mode,
1102 plus_constant (data->to_addr, data->offset)));
1105 ? gen_rtx (MEM, mode, data->from_addr)
1106 : change_address (data->from, mode,
1107 plus_constant (data->from_addr, data->offset)));
1109 #ifdef HAVE_PRE_DECREMENT
1110 if (data->explicit_inc_to < 0)
1111 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1112 if (data->explicit_inc_from < 0)
1113 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1116 emit_insn ((*genfun) (to1, from1));
1117 #ifdef HAVE_POST_INCREMENT
1118 if (data->explicit_inc_to > 0)
1119 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1120 if (data->explicit_inc_from > 0)
1121 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1124 if (! data->reverse) data->offset += size;
1130 /* Emit code to move a block Y to a block X.
1131 This may be done with string-move instructions,
1132 with multiple scalar move instructions, or with a library call.
1134    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1136 SIZE is an rtx that says how long they are.
1137 ALIGN is the maximum alignment we can assume they have,
1138 measured in bytes. */
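/* For example (illustrative only; X and Y are assumed to be BLKmode MEM
   rtx's in a caller), copying 24 bytes with at most 4-byte alignment
   could be written

       emit_block_move (x, y, GEN_INT (24), 4);

   and this function then chooses between move_by_pieces, a movstr
   insn, and a library call.  */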
1141 emit_block_move (x, y, size, align)
1146 if (GET_MODE (x) != BLKmode)
1149 if (GET_MODE (y) != BLKmode)
1152 x = protect_from_queue (x, 1);
1153 y = protect_from_queue (y, 0);
1154 size = protect_from_queue (size, 0);
1156 if (GET_CODE (x) != MEM)
1158 if (GET_CODE (y) != MEM)
1163 if (GET_CODE (size) == CONST_INT
1164 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1165 move_by_pieces (x, y, INTVAL (size), align);
1168 /* Try the most limited insn first, because there's no point
1169 including more than one in the machine description unless
1170 the more limited one has some advantage. */
1172 enum machine_mode mode;
1174 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1175 mode = GET_MODE_WIDER_MODE (mode))
1177 enum insn_code code = movstr_optab[(int) mode];
1178 rtx opalign = GEN_INT (align);
1180 if (code != CODE_FOR_nothing
1181 && GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT
1182 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1183 && (*insn_operand_predicate[(int) code][0]) (x, Pmode)
1184 && (*insn_operand_predicate[(int) code][1]) (y, Pmode)
1185 && (*insn_operand_predicate[(int) code][3]) (opalign, VOIDmode))
1188 rtx last = get_last_insn ();
1191 if (! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1192 op2 = copy_to_mode_reg (mode, op2);
1194 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1201 delete_insns_since (last);
1205 #ifdef TARGET_MEM_FUNCTIONS
1206 emit_library_call (memcpy_libfunc, 1,
1207 VOIDmode, 3, XEXP (x, 0), Pmode,
1209 convert_to_mode (Pmode, size, 1), Pmode);
1211 emit_library_call (bcopy_libfunc, 1,
1212 VOIDmode, 3, XEXP (y, 0), Pmode,
1214 convert_to_mode (Pmode, size, 1), Pmode);
1219 /* Copy all or part of a value X into registers starting at REGNO.
1220 The number of registers to be filled is NREGS. */
1223 move_block_to_reg (regno, x, nregs, mode)
1227 enum machine_mode mode;
1232 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1233 x = validize_mem (force_const_mem (mode, x));
1235 /* See if the machine can do this with a load multiple insn. */
1236 #ifdef HAVE_load_multiple
1237 last = get_last_insn ();
1238 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1246 delete_insns_since (last);
1249 for (i = 0; i < nregs; i++)
1250 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1251 operand_subword_force (x, i, mode));
1254 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1255 The number of registers to be filled is NREGS. */
1258 move_block_from_reg (regno, x, nregs)
1266 /* See if the machine can do this with a store multiple insn. */
1267 #ifdef HAVE_store_multiple
1268 last = get_last_insn ();
1269 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1277 delete_insns_since (last);
1280 for (i = 0; i < nregs; i++)
1282 rtx tem = operand_subword (x, i, 1, BLKmode);
1287 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1291 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1294 use_regs (regno, nregs)
1300 for (i = 0; i < nregs; i++)
1301 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1304 /* Write zeros through the storage of OBJECT.
1305 If OBJECT has BLKmode, SIZE is its length in bytes. */
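/* For example (a sketch; OBJECT is assumed to be a BLKmode MEM rtx of a
   caller), zeroing a 32-byte block would be

       clear_storage (object, 32);

   for a non-BLKmode OBJECT, the SIZE argument is unused and a single
   move of const0_rtx is emitted instead.  */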
1308 clear_storage (object, size)
1312 if (GET_MODE (object) == BLKmode)
1314 #ifdef TARGET_MEM_FUNCTIONS
1315 emit_library_call (memset_libfunc, 1,
1317 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1318 GEN_INT (size), Pmode);
1320 emit_library_call (bzero_libfunc, 1,
1322 XEXP (object, 0), Pmode,
1323 GEN_INT (size), Pmode);
1327 emit_move_insn (object, const0_rtx);
1330 /* Generate code to copy Y into X.
1331 Both Y and X must have the same mode, except that
1332 Y can be a constant with VOIDmode.
1333 This mode cannot be BLKmode; use emit_block_move for that.
1335 Return the last instruction emitted. */
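/* For example (illustrative only), loading the integer constant 1 into
   a fresh SImode pseudo-register would be

       rtx reg = gen_reg_rtx (SImode);
       emit_move_insn (reg, const1_rtx);

   relying on the rule above that Y may be a constant with VOIDmode.  */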
1338 emit_move_insn (x, y)
1341 enum machine_mode mode = GET_MODE (x);
1344 x = protect_from_queue (x, 1);
1345 y = protect_from_queue (y, 0);
1347 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1350 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1351 y = force_const_mem (mode, y);
1353   /* If X or Y are memory references, verify that their addresses are valid for the machine.
1355 if (GET_CODE (x) == MEM
1356 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1357 && ! push_operand (x, GET_MODE (x)))
1359 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1360 x = change_address (x, VOIDmode, XEXP (x, 0));
1362 if (GET_CODE (y) == MEM
1363 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1365 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1366 y = change_address (y, VOIDmode, XEXP (y, 0));
1368 if (mode == BLKmode)
1371 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1373 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1375 /* This will handle any multi-word mode that lacks a move_insn pattern.
1376 However, you will get better code if you define such patterns,
1377 even if they must turn into multiple assembler instructions. */
1378 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1383 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1386 rtx xpart = operand_subword (x, i, 1, mode);
1387 rtx ypart = operand_subword (y, i, 1, mode);
1389 /* If we can't get a part of Y, put Y into memory if it is a
1390 constant. Otherwise, force it into a register. If we still
1391 can't get a part of Y, abort. */
1392 if (ypart == 0 && CONSTANT_P (y))
1394 y = force_const_mem (mode, y);
1395 ypart = operand_subword (y, i, 1, mode);
1397 else if (ypart == 0)
1398 ypart = operand_subword_force (y, i, mode);
1400 if (xpart == 0 || ypart == 0)
1403 last_insn = emit_move_insn (xpart, ypart);
1411 /* Pushing data onto the stack. */
1413 /* Push a block of length SIZE (perhaps variable)
1414 and return an rtx to address the beginning of the block.
1415 Note that it is not possible for the value returned to be a QUEUED.
1416 The value may be virtual_outgoing_args_rtx.
1418 EXTRA is the number of bytes of padding to push in addition to SIZE.
1419 BELOW nonzero means this padding comes at low addresses;
1420 otherwise, the padding comes at high addresses. */
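/* For example (a sketch), reserving 64 bytes of stack space with no
   extra padding could be written

       rtx addr = push_block (GEN_INT (64), 0, 0);

   and ADDR then addresses the beginning of the newly pushed block.  */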
1423 push_block (size, extra, below)
1428 if (CONSTANT_P (size))
1429 anti_adjust_stack (plus_constant (size, extra));
1430 else if (GET_CODE (size) == REG && extra == 0)
1431 anti_adjust_stack (size);
1434 rtx temp = copy_to_mode_reg (Pmode, size);
1436 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1437 temp, 0, OPTAB_LIB_WIDEN);
1438 anti_adjust_stack (temp);
1441 #ifdef STACK_GROWS_DOWNWARD
1442 temp = virtual_outgoing_args_rtx;
1443 if (extra != 0 && below)
1444 temp = plus_constant (temp, extra);
1446 if (GET_CODE (size) == CONST_INT)
1447 temp = plus_constant (virtual_outgoing_args_rtx,
1448 - INTVAL (size) - (below ? 0 : extra));
1449 else if (extra != 0 && !below)
1450 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1451 negate_rtx (Pmode, plus_constant (size, extra)));
1453 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1454 negate_rtx (Pmode, size));
1457 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1463 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1466 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
1468 MODE is redundant except when X is a CONST_INT (since they don't
1470 SIZE is an rtx for the size of data to be copied (in bytes),
1471 needed only if X is BLKmode.
1473 ALIGN (in bytes) is maximum alignment we can assume.
1475 If PARTIAL is nonzero, then copy that many of the first words
1476 of X into registers starting with REG, and push the rest of X.
1477 The amount of space pushed is decreased by PARTIAL words,
1478 rounded *down* to a multiple of PARM_BOUNDARY.
1479 REG must be a hard register in this case.
1481 EXTRA is the amount in bytes of extra space to leave next to this arg.
1482 This is ignored if an argument block has already been allocated.
1484 On a machine that lacks real push insns, ARGS_ADDR is the address of
1485 the bottom of the argument block for this call. We use indexing off there
1486    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
1487 argument block has not been preallocated.
1489 ARGS_SO_FAR is the size of args previously pushed for this call. */
1492 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1493 args_addr, args_so_far)
1495 enum machine_mode mode;
1506 enum direction stack_direction
1507 #ifdef STACK_GROWS_DOWNWARD
1513 /* Decide where to pad the argument: `downward' for below,
1514 `upward' for above, or `none' for don't pad it.
1515 Default is below for small data on big-endian machines; else above. */
1516 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1518 /* Invert direction if stack is post-update. */
1519 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1520 if (where_pad != none)
1521 where_pad = (where_pad == downward ? upward : downward);
1523 xinner = x = protect_from_queue (x, 0);
1525 if (mode == BLKmode)
1527 /* Copy a block into the stack, entirely or partially. */
1530 int used = partial * UNITS_PER_WORD;
1531 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1539 /* USED is now the # of bytes we need not copy to the stack
1540 because registers will take care of them. */
1543 xinner = change_address (xinner, BLKmode,
1544 plus_constant (XEXP (xinner, 0), used));
1546 /* If the partial register-part of the arg counts in its stack size,
1547 skip the part of stack space corresponding to the registers.
1548 Otherwise, start copying to the beginning of the stack space,
1549 by setting SKIP to 0. */
1550 #ifndef REG_PARM_STACK_SPACE
1556 #ifdef PUSH_ROUNDING
1557 /* Do it with several push insns if that doesn't take lots of insns
1558 and if there is no difficulty with push insns that skip bytes
1559 on the stack for alignment purposes. */
1561 && GET_CODE (size) == CONST_INT
1563 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1565 /* Here we avoid the case of a structure whose weak alignment
1566 forces many pushes of a small amount of data,
1567 and such small pushes do rounding that causes trouble. */
1568 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1569 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1570 || PUSH_ROUNDING (align) == align)
1571 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1573 /* Push padding now if padding above and stack grows down,
1574 or if padding below and stack grows up.
1575 But if space already allocated, this has already been done. */
1576 if (extra && args_addr == 0
1577 && where_pad != none && where_pad != stack_direction)
1578 anti_adjust_stack (GEN_INT (extra));
1580 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1581 INTVAL (size) - used, align);
1584 #endif /* PUSH_ROUNDING */
1586 /* Otherwise make space on the stack and copy the data
1587 to the address of that space. */
1589 /* Deduct words put into registers from the size we must copy. */
1592 if (GET_CODE (size) == CONST_INT)
1593 size = GEN_INT (INTVAL (size) - used);
1595 size = expand_binop (GET_MODE (size), sub_optab, size,
1596 GEN_INT (used), NULL_RTX, 0,
1600 /* Get the address of the stack space.
1601 In this case, we do not deal with EXTRA separately.
1602 A single stack adjust will do. */
1605 temp = push_block (size, extra, where_pad == downward);
1608 else if (GET_CODE (args_so_far) == CONST_INT)
1609 temp = memory_address (BLKmode,
1610 plus_constant (args_addr,
1611 skip + INTVAL (args_so_far)));
1613 temp = memory_address (BLKmode,
1614 plus_constant (gen_rtx (PLUS, Pmode,
1615 args_addr, args_so_far),
1618 /* TEMP is the address of the block. Copy the data there. */
1619 if (GET_CODE (size) == CONST_INT
1620 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1623 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1624 INTVAL (size), align);
1627 /* Try the most limited insn first, because there's no point
1628 including more than one in the machine description unless
1629 the more limited one has some advantage. */
1630 #ifdef HAVE_movstrqi
1632 && GET_CODE (size) == CONST_INT
1633 && ((unsigned) INTVAL (size)
1634 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1636 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1637 xinner, size, GEN_INT (align)));
1641 #ifdef HAVE_movstrhi
1643 && GET_CODE (size) == CONST_INT
1644 && ((unsigned) INTVAL (size)
1645 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1647 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1648 xinner, size, GEN_INT (align)));
1652 #ifdef HAVE_movstrsi
1655 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1656 xinner, size, GEN_INT (align)));
1660 #ifdef HAVE_movstrdi
1663 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1664 xinner, size, GEN_INT (align)));
1669 #ifndef ACCUMULATE_OUTGOING_ARGS
1670 /* If the source is referenced relative to the stack pointer,
1671 copy it to another register to stabilize it. We do not need
1672 to do this if we know that we won't be changing sp. */
1674 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1675 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1676 temp = copy_to_reg (temp);
1679 /* Make inhibit_defer_pop nonzero around the library call
1680 to force it to pop the bcopy-arguments right away. */
1682 #ifdef TARGET_MEM_FUNCTIONS
1683 emit_library_call (memcpy_libfunc, 1,
1684 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1687 emit_library_call (bcopy_libfunc, 1,
1688 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1694 else if (partial > 0)
1696 /* Scalar partly in registers. */
1698 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1701 /* # words of start of argument
1702 that we must make space for but need not store. */
1703 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1704 int args_offset = INTVAL (args_so_far);
1707 /* Push padding now if padding above and stack grows down,
1708 or if padding below and stack grows up.
1709 But if space already allocated, this has already been done. */
1710 if (extra && args_addr == 0
1711 && where_pad != none && where_pad != stack_direction)
1712 anti_adjust_stack (GEN_INT (extra));
1714 /* If we make space by pushing it, we might as well push
1715 the real data. Otherwise, we can leave OFFSET nonzero
1716 and leave the space uninitialized. */
1720 /* Now NOT_STACK gets the number of words that we don't need to
1721 allocate on the stack. */
1722 not_stack = partial - offset;
1724 /* If the partial register-part of the arg counts in its stack size,
1725 skip the part of stack space corresponding to the registers.
1726 Otherwise, start copying to the beginning of the stack space,
1727 by setting SKIP to 0. */
1728 #ifndef REG_PARM_STACK_SPACE
1734 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1735 x = validize_mem (force_const_mem (mode, x));
1737 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1738 SUBREGs of such registers are not allowed. */
1739 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1740 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1741 x = copy_to_reg (x);
1743 /* Loop over all the words allocated on the stack for this arg. */
1744 /* We can do it by words, because any scalar bigger than a word
1745 has a size a multiple of a word. */
1746 #ifndef PUSH_ARGS_REVERSED
1747 for (i = not_stack; i < size; i++)
1749 for (i = size - 1; i >= not_stack; i--)
1751 if (i >= not_stack + offset)
1752 emit_push_insn (operand_subword_force (x, i, mode),
1753 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
1755 GEN_INT (args_offset + ((i - not_stack + skip)
1756 * UNITS_PER_WORD)));
1762 /* Push padding now if padding above and stack grows down,
1763 or if padding below and stack grows up.
1764 But if space already allocated, this has already been done. */
1765 if (extra && args_addr == 0
1766 && where_pad != none && where_pad != stack_direction)
1767 anti_adjust_stack (GEN_INT (extra));
1769 #ifdef PUSH_ROUNDING
1771 addr = gen_push_operand ();
1774 if (GET_CODE (args_so_far) == CONST_INT)
1776 = memory_address (mode,
1777 plus_constant (args_addr, INTVAL (args_so_far)));
1779 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
1782 emit_move_insn (gen_rtx (MEM, mode, addr), x);
1786 /* If part should go in registers, copy that part
1787 into the appropriate registers. Do this now, at the end,
1788 since mem-to-mem copies above may do function calls. */
1790 move_block_to_reg (REGNO (reg), x, partial, mode);
1792 if (extra && args_addr == 0 && where_pad == stack_direction)
1793 anti_adjust_stack (GEN_INT (extra));
1796 /* Output a library call to function FUN (a SYMBOL_REF rtx)
1797 (emitting the queue unless NO_QUEUE is nonzero),
1798 for a value of mode OUTMODE,
1799 with NARGS different arguments, passed as alternating rtx values
1800 and machine_modes to convert them to.
1801 The rtx values should have been passed through protect_from_queue already.
1803 NO_QUEUE will be true if and only if the library call is a `const' call
1804 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1805 to the variable is_const in expand_call. */
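/* For example (illustrative; PTR and LEN stand for hypothetical Pmode
   and SImode rtx's of a caller), the memset_libfunc calls elsewhere in
   this file follow the pattern

       emit_library_call (memset_libfunc, 1, VOIDmode, 3,
			  ptr, Pmode, const0_rtx, Pmode, len, Pmode);

   that is: the function rtx, NO_QUEUE, OUTMODE, NARGS, and then a
   value/mode pair for each argument.  */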
1808 emit_library_call (va_alist)
1812 struct args_size args_size;
1813 register int argnum;
1814 enum machine_mode outmode;
1821 CUMULATIVE_ARGS args_so_far;
1822 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1823 struct args_size offset; struct args_size size; };
1825 int old_inhibit_defer_pop = inhibit_defer_pop;
1830 orgfun = fun = va_arg (p, rtx);
1831 no_queue = va_arg (p, int);
1832 outmode = va_arg (p, enum machine_mode);
1833 nargs = va_arg (p, int);
1835 /* Copy all the libcall-arguments out of the varargs data
1836 and into a vector ARGVEC.
1838 Compute how to pass each argument. We only support a very small subset
1839 of the full argument passing conventions to limit complexity here since
1840 library functions shouldn't have many args. */
1842 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1844 INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1846 args_size.constant = 0;
1849 for (count = 0; count < nargs; count++)
1851 rtx val = va_arg (p, rtx);
1852 enum machine_mode mode = va_arg (p, enum machine_mode);
1854 /* We cannot convert the arg value to the mode the library wants here;
1855 must do it earlier where we know the signedness of the arg. */
1857 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1860 /* On some machines, there's no way to pass a float to a library fcn.
1861 Pass it as a double instead. */
1862 #ifdef LIBGCC_NEEDS_DOUBLE
1863 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
1864 	val = convert_to_mode (DFmode, val, 0), mode = DFmode;
1867 /* There's no need to call protect_from_queue, because
1868 either emit_move_insn or emit_push_insn will do that. */
1870 /* Make sure it is a reasonable operand for a move or push insn. */
1871 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1872 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
1873 val = force_operand (val, NULL_RTX);
1875 argvec[count].value = val;
1876 argvec[count].mode = mode;
1878 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1879 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
1883 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1884 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
1886 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1887 argvec[count].partial
1888 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
1890 argvec[count].partial = 0;
1893 locate_and_pad_parm (mode, NULL_TREE,
1894 argvec[count].reg && argvec[count].partial == 0,
1895 NULL_TREE, &args_size, &argvec[count].offset,
1896 &argvec[count].size);
1898 if (argvec[count].size.var)
1901 #ifndef REG_PARM_STACK_SPACE
1902 if (argvec[count].partial)
1903 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
1906 if (argvec[count].reg == 0 || argvec[count].partial != 0
1907 #ifdef REG_PARM_STACK_SPACE
1911 args_size.constant += argvec[count].size.constant;
1913 #ifdef ACCUMULATE_OUTGOING_ARGS
1914 /* If this arg is actually passed on the stack, it might be
1915 clobbering something we already put there (this library call might
1916 be inside the evaluation of an argument to a function whose call
1917 requires the stack). This will only occur when the library call
1918 has sufficient args to run out of argument registers. Abort in
1919 this case; if this ever occurs, code must be added to save and
1920 restore the arg slot. */
1922 if (argvec[count].reg == 0 || argvec[count].partial != 0)
1926 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
1930 /* If this machine requires an external definition for library
1931 functions, write one out. */
1932 assemble_external_libcall (fun);
1934 #ifdef STACK_BOUNDARY
1935 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1936 / STACK_BYTES) * STACK_BYTES);
1939 #ifdef REG_PARM_STACK_SPACE
1940 args_size.constant = MAX (args_size.constant,
1941 REG_PARM_STACK_SPACE ((tree) 0));
1944 #ifdef ACCUMULATE_OUTGOING_ARGS
1945 if (args_size.constant > current_function_outgoing_args_size)
1946 current_function_outgoing_args_size = args_size.constant;
1947 args_size.constant = 0;
1950 #ifndef PUSH_ROUNDING
1951 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
1954 #ifdef PUSH_ARGS_REVERSED
1962 /* Push the args that need to be pushed. */
1964 for (count = 0; count < nargs; count++, argnum += inc)
1966 register enum machine_mode mode = argvec[argnum].mode;
1967 register rtx val = argvec[argnum].value;
1968 rtx reg = argvec[argnum].reg;
1969 int partial = argvec[argnum].partial;
1971 if (! (reg != 0 && partial == 0))
1972 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
1973 			argblock, GEN_INT (argvec[argnum].offset.constant));
1977 #ifdef PUSH_ARGS_REVERSED
1983 /* Now load any reg parms into their regs. */
1985 for (count = 0; count < nargs; count++, argnum += inc)
1987 register enum machine_mode mode = argvec[argnum].mode;
1988 register rtx val = argvec[argnum].value;
1989 rtx reg = argvec[argnum].reg;
1990 int partial = argvec[argnum].partial;
1992 if (reg != 0 && partial == 0)
1993 emit_move_insn (reg, val);
1997 /* For version 1.37, try deleting this entirely. */
2001 /* Any regs containing parms remain in use through the call. */
2003 for (count = 0; count < nargs; count++)
2004 if (argvec[count].reg != 0)
2005 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2007 use_insns = get_insns ();
2010 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2012 /* Don't allow popping to be deferred, since then
2013 cse'ing of library calls could delete a call and leave the pop. */
2016 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2017 will set inhibit_defer_pop to that value. */
2019 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2020 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2021 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2022 old_inhibit_defer_pop + 1, use_insns, no_queue);
2024 /* Now restore inhibit_defer_pop to its actual original value. */
2028 /* Expand an assignment that stores the value of FROM into TO.
2029 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2030 (This may contain a QUEUED rtx.)
2031 Otherwise, the returned value is not meaningful.
2033 SUGGEST_REG is no longer actually used.
2034 It used to mean, copy the value through a register
2035 and return that register, if that is possible.
2036 But now we do this if WANT_VALUE.
2038 If the value stored is a constant, we return the constant. */
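/* For example (a sketch; TO and FROM are assumed to be trees built by
   the front end for an assignment statement), a statement-level use
   that does not need the resulting value would be

       expand_assignment (to, from, 0, 0);

   while a nonzero WANT_VALUE asks for an rtx usable as the value of
   the whole assignment expression.  */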
2041 expand_assignment (to, from, want_value, suggest_reg)
2046 register rtx to_rtx = 0;
2049 /* Don't crash if the lhs of the assignment was erroneous. */
2051 if (TREE_CODE (to) == ERROR_MARK)
2052 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2054 /* Assignment of a structure component needs special treatment
2055 if the structure component's rtx is not simply a MEM.
2056 Assignment of an array element at a constant index
2057 has the same problem. */
2059 if (TREE_CODE (to) == COMPONENT_REF
2060 || TREE_CODE (to) == BIT_FIELD_REF
2061 || (TREE_CODE (to) == ARRAY_REF
2062 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2063 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2065 enum machine_mode mode1;
2071 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2072 &mode1, &unsignedp, &volatilep);
2074 /* If we are going to use store_bit_field and extract_bit_field,
2075 make sure to_rtx will be safe for multiple use. */
2077 if (mode1 == VOIDmode && want_value)
2078 tem = stabilize_reference (tem);
2080 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2083 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2085 if (GET_CODE (to_rtx) != MEM)
2087 to_rtx = change_address (to_rtx, VOIDmode,
2088 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2089 force_reg (Pmode, offset_rtx)));
2093 if (GET_CODE (to_rtx) == MEM)
2094 MEM_VOLATILE_P (to_rtx) = 1;
2095 #if 0 /* This was turned off because, when a field is volatile
2096 in an object which is not volatile, the object may be in a register,
2097 and then we would abort over here. */
2103 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2105 /* Spurious cast makes HPUX compiler happy. */
2106 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2109 /* Required alignment of containing datum. */
2110 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2111 int_size_in_bytes (TREE_TYPE (tem)));
2112 preserve_temp_slots (result);
2118 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2119 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2122 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2124 /* In case we are returning the contents of an object which overlaps
2125 the place the value is being stored, use a safe function when copying
2126 a value through a pointer into a structure value return block. */
2127 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2128 && current_function_returns_struct
2129 && !current_function_returns_pcc_struct)
2131 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2132 rtx size = expr_size (from);
2134 #ifdef TARGET_MEM_FUNCTIONS
2135 emit_library_call (memcpy_libfunc, 1,
2136 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2137 XEXP (from_rtx, 0), Pmode,
2140 emit_library_call (bcopy_libfunc, 1,
2141 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2142 XEXP (to_rtx, 0), Pmode,
2146 preserve_temp_slots (to_rtx);
2151 /* Compute FROM and store the value in the rtx we got. */
2153 result = store_expr (from, to_rtx, want_value);
2154 preserve_temp_slots (result);
2159 /* Generate code for computing expression EXP,
2160 and storing the value into TARGET.
2161 Returns TARGET or an equivalent value.
2162 TARGET may contain a QUEUED rtx.
2164 If SUGGEST_REG is nonzero, copy the value through a register
2165 and return that register, if that is possible.
2167 If the value stored is a constant, we return the constant. */
2170 store_expr (exp, target, suggest_reg)
2172 register rtx target;
2176 int dont_return_target = 0;
2178 if (TREE_CODE (exp) == COMPOUND_EXPR)
2180       /* Perform first part of compound expression, then assign from second part.  */
2182 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2184 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2186 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2188 /* For conditional expression, get safe form of the target. Then
2189 test the condition, doing the appropriate assignment on either
2190 side. This avoids the creation of unnecessary temporaries.
2191 For non-BLKmode, it is more efficient not to do this. */
2193 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2196 target = protect_from_queue (target, 1);
2199 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2200 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2202 emit_jump_insn (gen_jump (lab2));
2205 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2211 else if (suggest_reg && GET_CODE (target) == MEM
2212 && GET_MODE (target) != BLKmode)
2213 /* If target is in memory and caller wants value in a register instead,
2214 arrange that. Pass TARGET as target for expand_expr so that,
2215 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2216 We know expand_expr will not use the target in that case. */
2218 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2219 GET_MODE (target), 0);
2220 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2221 temp = copy_to_reg (temp);
2222 dont_return_target = 1;
2224 else if (queued_subexp_p (target))
2225 /* If target contains a postincrement, it is not safe
2226 to use as the returned value. It would access the wrong
2227 place by the time the queued increment gets output.
2228        So copy the value through a temporary and use that temp as the result.  */
2231 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2233 /* Expand EXP into a new pseudo. */
2234 temp = gen_reg_rtx (GET_MODE (target));
2235 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2238 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2239 dont_return_target = 1;
2243 temp = expand_expr (exp, target, GET_MODE (target), 0);
2244 /* DO return TARGET if it's a specified hardware register.
2245 expand_return relies on this. */
2246 if (!(target && GET_CODE (target) == REG
2247 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2248 && CONSTANT_P (temp))
2249 dont_return_target = 1;
2252 /* If value was not generated in the target, store it there.
2253      Convert the value to TARGET's type first if necessary.  */
2255 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2257 target = protect_from_queue (target, 1);
2258 if (GET_MODE (temp) != GET_MODE (target)
2259 && GET_MODE (temp) != VOIDmode)
2261 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2262 if (dont_return_target)
2264 /* In this case, we will return TEMP,
2265 so make sure it has the proper mode.
2266 But don't forget to store the value into TARGET. */
2267 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2268 emit_move_insn (target, temp);
2271 convert_move (target, temp, unsignedp);
2274 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2276 /* Handle copying a string constant into an array.
2277 The string constant may be shorter than the array.
2278 So copy just the string's actual length, and clear the rest. */
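/* For example, with `char buf[8] = "abc";' the constant "abc" is shorter
   than buf, so only the string's own bytes are copied and the remaining
   bytes of buf are cleared to zero.  (Illustrative example only.)  */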
2281 /* Get the size of the data type of the string,
2282 which is actually the size of the target. */
2283 size = expr_size (exp);
2284 if (GET_CODE (size) == CONST_INT
2285 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2286 emit_block_move (target, temp, size,
2287 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2290 /* Compute the size of the data to copy from the string. */
2292 = fold (build (MIN_EXPR, sizetype,
2293 size_binop (CEIL_DIV_EXPR,
2294 TYPE_SIZE (TREE_TYPE (exp)),
2295 size_int (BITS_PER_UNIT)),
2297 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2298 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2302 /* Copy that much. */
2303 emit_block_move (target, temp, copy_size_rtx,
2304 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2306 /* Figure out how much is left in TARGET
2307 that we have to clear. */
2308 if (GET_CODE (copy_size_rtx) == CONST_INT)
2310 temp = plus_constant (XEXP (target, 0),
2311 TREE_STRING_LENGTH (exp));
2312 size = plus_constant (size,
2313 - TREE_STRING_LENGTH (exp));
2317 enum machine_mode size_mode = Pmode;
2319 temp = force_reg (Pmode, XEXP (target, 0));
2320 temp = expand_binop (size_mode, add_optab, temp,
2321 copy_size_rtx, NULL_RTX, 0,
2324 size = expand_binop (size_mode, sub_optab, size,
2325 copy_size_rtx, NULL_RTX, 0,
2328 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2329 GET_MODE (size), 0, 0);
2330 label = gen_label_rtx ();
2331 emit_jump_insn (gen_blt (label));
2334 if (size != const0_rtx)
2336 #ifdef TARGET_MEM_FUNCTIONS
2337 emit_library_call (memset_libfunc, 1, VOIDmode, 3,
2338 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2340 emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
2341 temp, Pmode, size, Pmode);
2348 else if (GET_MODE (temp) == BLKmode)
2349 emit_block_move (target, temp, expr_size (exp),
2350 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2352 emit_move_insn (target, temp);
2354 if (dont_return_target)
2359 /* Store the value of constructor EXP into the rtx TARGET.
2360 TARGET is either a REG or a MEM. */
2363 store_constructor (exp, target)
2367 tree type = TREE_TYPE (exp);
2369 /* We know our target cannot conflict, since safe_from_p has been called. */
2371 /* Don't try copying piece by piece into a hard register
2372 since that is vulnerable to being clobbered by EXP.
2373 Instead, construct in a pseudo register and then copy it all. */
2374 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2376 rtx temp = gen_reg_rtx (GET_MODE (target));
2377 store_constructor (exp, temp);
2378 emit_move_insn (target, temp);
2383 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2387 /* Inform later passes that the whole union value is dead. */
2388 if (TREE_CODE (type) == UNION_TYPE)
2389 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2391 /* If we are building a static constructor into a register,
2392 set the initial value as zero so we can fold the value into
2394 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2395 emit_move_insn (target, const0_rtx);
2397 /* If the constructor has fewer fields than the structure,
2398 clear the whole structure first. */
2399 else if (list_length (CONSTRUCTOR_ELTS (exp))
2400 != list_length (TYPE_FIELDS (type)))
2401 clear_storage (target, int_size_in_bytes (type));
2403 /* Inform later passes that the old value is dead. */
2404 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2406 /* Store each element of the constructor into
2407 the corresponding field of TARGET. */
2409 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2411 register tree field = TREE_PURPOSE (elt);
2412 register enum machine_mode mode;
2417 /* Just ignore missing fields.
2418 We cleared the whole structure, above,
2419 if any fields are missing. */
2423 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2424 unsignedp = TREE_UNSIGNED (field);
2425 mode = DECL_MODE (field);
2426 if (DECL_BIT_FIELD (field))
2429 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2430 /* ??? This case remains to be written. */
2433 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2435 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2436 /* The alignment of TARGET is
2437 at least what its type requires. */
2439 TYPE_ALIGN (type) / BITS_PER_UNIT,
2440 int_size_in_bytes (type));
2443 else if (TREE_CODE (type) == ARRAY_TYPE)
2447 tree domain = TYPE_DOMAIN (type);
2448 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2449 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2450 tree elttype = TREE_TYPE (type);
2452 /* If the constructor has fewer fields than the structure,
2453 clear the whole structure first. Similarly if this is a
2454 static constructor of a non-BLKmode object. */
2456 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2457 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2458 clear_storage (target, maxelt - minelt + 1);
2460 /* Inform later passes that the old value is dead. */
2461 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2463 /* Store each element of the constructor into
2464 the corresponding element of TARGET, determined
2465 by counting the elements. */
2466 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2468 elt = TREE_CHAIN (elt), i++)
2470 register enum machine_mode mode;
2475 mode = TYPE_MODE (elttype);
2476 bitsize = GET_MODE_BITSIZE (mode);
2477 unsignedp = TREE_UNSIGNED (elttype);
2479 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2481 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2482 /* The alignment of TARGET is
2483 at least what its type requires. */
2485 TYPE_ALIGN (type) / BITS_PER_UNIT,
2486 int_size_in_bytes (type));
2494 /* Store the value of EXP (an expression tree)
2495 into a subfield of TARGET which has mode MODE and occupies
2496 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2497 If MODE is VOIDmode, it means that we are storing into a bit-field.
2499 If VALUE_MODE is VOIDmode, return nothing in particular.
2500 UNSIGNEDP is not used in this case.
2502 Otherwise, return an rtx for the value stored. This rtx
2503 has mode VALUE_MODE if that is convenient to do.
2504 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2506 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2507 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
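/* Hypothetical example for illustration: storing into a 3-bit bit-field that
   begins 5 bits into its containing structure reaches this function with
   BITSIZE == 3, BITPOS == 5 and MODE == VOIDmode, selecting the bit-field
   path below.  */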
2510 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2511 unsignedp, align, total_size)
2513 int bitsize, bitpos;
2514 enum machine_mode mode;
2516 enum machine_mode value_mode;
2521 HOST_WIDE_INT width_mask = 0;
2523 if (bitsize < HOST_BITS_PER_WIDE_INT)
2524 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
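/* For example, bitsize == 3 gives width_mask == 0x7, the low three bits.  */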
2526 /* If we are storing into an unaligned field of an aligned union that is
2527 in a register, we may have the mode of TARGET being an integer mode but
2528 MODE == BLKmode. In that case, get an aligned object whose size and
2529 alignment are the same as TARGET and store TARGET into it (we can avoid
2530 the store if the field being stored is the entire width of TARGET). Then
2531 call ourselves recursively to store the field into a BLKmode version of
2532 that object. Finally, load from the object into TARGET. This is not
2533 very efficient in general, but should only be slightly more expensive
2534 than the otherwise-required unaligned accesses. Perhaps this can be
2535 cleaned up later. */
2538 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2540 rtx object = assign_stack_temp (GET_MODE (target),
2541 GET_MODE_SIZE (GET_MODE (target)), 0);
2542 rtx blk_object = copy_rtx (object);
2544 PUT_MODE (blk_object, BLKmode);
2546 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2547 emit_move_insn (object, target);
2549 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2552 emit_move_insn (target, object);
2557 /* If the structure is in a register or if the component
2558 is a bit field, we cannot use addressing to access it.
2559 Use bit-field techniques or SUBREG to store in it. */
2561 if (mode == VOIDmode
2562 || (mode != BLKmode && ! direct_store[(int) mode])
2563 || GET_CODE (target) == REG
2564 || GET_CODE (target) == SUBREG)
2566 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2567 /* Store the value in the bitfield. */
2568 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2569 if (value_mode != VOIDmode)
2571 /* The caller wants an rtx for the value. */
2572 /* If possible, avoid refetching from the bitfield itself. */
2574 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2575 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2576 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2577 NULL_RTX, value_mode, 0, align,
2584 rtx addr = XEXP (target, 0);
2587 /* If a value is wanted, it must be the lhs;
2588 so make the address stable for multiple use. */
2590 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2591 && ! CONSTANT_ADDRESS_P (addr)
2592 /* A frame-pointer reference is already stable. */
2593 && ! (GET_CODE (addr) == PLUS
2594 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2595 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2596 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2597 addr = copy_to_reg (addr);
2599 /* Now build a reference to just the desired component. */
2601 to_rtx = change_address (target, mode,
2602 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2603 MEM_IN_STRUCT_P (to_rtx) = 1;
2605 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2609 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2610 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2611 ARRAY_REFs at constant positions and find the ultimate containing object,
2614 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2615 bit position, and *PUNSIGNEDP to the signedness of the field.
2616 If the position of the field is variable, we store a tree
2617 giving the variable offset (in units) in *POFFSET.
2618 This offset is in addition to the bit position.
2619 If the position is not variable, we store 0 in *POFFSET.
2621 If any of the extraction expressions is volatile,
2622 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2624 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2625 is a mode that can be used to access the field. In that case, *PBITSIZE
2628 If the field describes a variable-sized object, *PMODE is set to
2629 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2630 this case, but the address of the object can be found. */
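/* Illustrative sketch: for a reference such as `s.a.b' where both fields sit
   at constant positions, the value returned is the tree for `s', *PBITPOS is
   the sum of the two field offsets in bits, and *POFFSET is 0.  */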
2633 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2638 enum machine_mode *pmode;
2643 enum machine_mode mode = VOIDmode;
2646 if (TREE_CODE (exp) == COMPONENT_REF)
2648 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2649 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2650 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2651 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2653 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2655 size_tree = TREE_OPERAND (exp, 1);
2656 *punsignedp = TREE_UNSIGNED (exp);
2660 mode = TYPE_MODE (TREE_TYPE (exp));
2661 *pbitsize = GET_MODE_BITSIZE (mode);
2662 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2667 if (TREE_CODE (size_tree) != INTEGER_CST)
2668 mode = BLKmode, *pbitsize = -1;
2670 *pbitsize = TREE_INT_CST_LOW (size_tree);
2673 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2674 and find the ultimate containing object. */
2680 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2682 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2683 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2684 : TREE_OPERAND (exp, 2));
2686 if (TREE_CODE (pos) == PLUS_EXPR)
2689 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2691 constant = TREE_OPERAND (pos, 0);
2692 var = TREE_OPERAND (pos, 1);
2694 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2696 constant = TREE_OPERAND (pos, 1);
2697 var = TREE_OPERAND (pos, 0);
2701 *pbitpos += TREE_INT_CST_LOW (constant);
2703 offset = size_binop (PLUS_EXPR, offset,
2704 size_binop (FLOOR_DIV_EXPR, var,
2705 size_int (BITS_PER_UNIT)));
2707 offset = size_binop (FLOOR_DIV_EXPR, var,
2708 size_int (BITS_PER_UNIT));
2710 else if (TREE_CODE (pos) == INTEGER_CST)
2711 *pbitpos += TREE_INT_CST_LOW (pos);
2714 /* Assume here that the offset is a multiple of a unit.
2715 If not, there should be an explicitly added constant. */
2717 offset = size_binop (PLUS_EXPR, offset,
2718 size_binop (FLOOR_DIV_EXPR, pos,
2719 size_int (BITS_PER_UNIT)));
2721 offset = size_binop (FLOOR_DIV_EXPR, pos,
2722 size_int (BITS_PER_UNIT));
2726 else if (TREE_CODE (exp) == ARRAY_REF
2727 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2728 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2730 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2731 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2733 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2734 && ! ((TREE_CODE (exp) == NOP_EXPR
2735 || TREE_CODE (exp) == CONVERT_EXPR)
2736 && (TYPE_MODE (TREE_TYPE (exp))
2737 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2740 /* If any reference in the chain is volatile, the effect is volatile. */
2741 if (TREE_THIS_VOLATILE (exp))
2743 exp = TREE_OPERAND (exp, 0);
2746 /* If this was a bit-field, see if there is a mode that allows direct
2747 access in case EXP is in memory. */
2748 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2750 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2751 if (mode == BLKmode)
2758 /* We aren't finished fixing the callers to really handle nonzero offset. */
2766 /* Given an rtx VALUE that may contain additions and multiplications,
2767 return an equivalent value that just refers to a register or memory.
2768 This is done by generating instructions to perform the arithmetic
2769 and returning a pseudo-register containing the value. */
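/* Illustrative sketch with hypothetical operands: given the rtx
   (plus (mult (reg 65) (const_int 4)) (reg 66)), force_operand emits the
   multiply and the add and returns a pseudo register holding the result,
   so the caller sees a plain REG.  */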
2772 force_operand (value, target)
2775 register optab binoptab = 0;
2776 /* Use a temporary to force order of execution of calls to
2780 /* Use subtarget as the target for operand 0 of a binary operation. */
2781 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2783 if (GET_CODE (value) == PLUS)
2784 binoptab = add_optab;
2785 else if (GET_CODE (value) == MINUS)
2786 binoptab = sub_optab;
2787 else if (GET_CODE (value) == MULT)
2789 op2 = XEXP (value, 1);
2790 if (!CONSTANT_P (op2)
2791 && !(GET_CODE (op2) == REG && op2 != subtarget))
2793 tmp = force_operand (XEXP (value, 0), subtarget);
2794 return expand_mult (GET_MODE (value), tmp,
2795 force_operand (op2, NULL_RTX),
2801 op2 = XEXP (value, 1);
2802 if (!CONSTANT_P (op2)
2803 && !(GET_CODE (op2) == REG && op2 != subtarget))
2805 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2807 binoptab = add_optab;
2808 op2 = negate_rtx (GET_MODE (value), op2);
2811 /* Check for an addition with OP2 a constant integer and our first
2812 operand a PLUS of a virtual register and something else. In that
2813 case, we want to emit the sum of the virtual register and the
2814 constant first and then add the other value. This allows virtual
2815 register instantiation to simply modify the constant rather than
2816 creating another one around this addition. */
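/* Illustrative example with hypothetical rtl: for
   (plus (plus (reg virtual-stack-vars) (reg 66)) (const_int 4))
   we first form the sum of the virtual register and 4, which instantiation
   can later fold into a single frame-pointer offset, and only then add
   (reg 66).  */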
2817 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2818 && GET_CODE (XEXP (value, 0)) == PLUS
2819 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2820 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2821 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2823 rtx temp = expand_binop (GET_MODE (value), binoptab,
2824 XEXP (XEXP (value, 0), 0), op2,
2825 subtarget, 0, OPTAB_LIB_WIDEN);
2826 return expand_binop (GET_MODE (value), binoptab, temp,
2827 force_operand (XEXP (XEXP (value, 0), 1), 0),
2828 target, 0, OPTAB_LIB_WIDEN);
2831 tmp = force_operand (XEXP (value, 0), subtarget);
2832 return expand_binop (GET_MODE (value), binoptab, tmp,
2833 force_operand (op2, NULL_RTX),
2834 target, 0, OPTAB_LIB_WIDEN);
2835 /* We give UNSIGNEDP = 0 to expand_binop
2836 because the only operations we are expanding here are signed ones. */
2841 /* Subroutine of expand_expr:
2842 save the non-copied parts (LIST) of an expr (LHS), and return a list
2843 which can restore these values to their previous values,
2844 should something modify their storage. */
2847 save_noncopied_parts (lhs, list)
2854 for (tail = list; tail; tail = TREE_CHAIN (tail))
2855 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2856 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2859 tree part = TREE_VALUE (tail);
2860 tree part_type = TREE_TYPE (part);
2861 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2862 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2863 int_size_in_bytes (part_type), 0);
2864 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2865 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2866 parts = tree_cons (to_be_saved,
2867 build (RTL_EXPR, part_type, NULL_TREE,
2870 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2875 /* Subroutine of expand_expr:
2876 record the non-copied parts (LIST) of an expr (LHS), and return a list
2877 which specifies the initial values of these parts. */
2880 init_noncopied_parts (lhs, list)
2887 for (tail = list; tail; tail = TREE_CHAIN (tail))
2888 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2889 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2892 tree part = TREE_VALUE (tail);
2893 tree part_type = TREE_TYPE (part);
2894 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2895 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2900 /* Subroutine of expand_expr: return nonzero iff there is no way that
2901 EXP can reference X, which is being modified. */
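/* For instance, before storing a CONSTRUCTOR directly into TARGET, the
   CONSTRUCTOR case of expand_expr below asks safe_from_p (target, exp);
   if some element of the constructor might read TARGET, a fresh register
   or stack temporary is used instead.  (Illustrative cross-reference.)  */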
2904 safe_from_p (x, exp)
2914 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
2915 find the underlying pseudo. */
2916 if (GET_CODE (x) == SUBREG)
2919 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2923 /* If X is a location in the outgoing argument area, it is always safe. */
2924 if (GET_CODE (x) == MEM
2925 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2926 || (GET_CODE (XEXP (x, 0)) == PLUS
2927 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2930 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2933 exp_rtl = DECL_RTL (exp);
2940 if (TREE_CODE (exp) == TREE_LIST)
2941 return ((TREE_VALUE (exp) == 0
2942 || safe_from_p (x, TREE_VALUE (exp)))
2943 && (TREE_CHAIN (exp) == 0
2944 || safe_from_p (x, TREE_CHAIN (exp))));
2949 return safe_from_p (x, TREE_OPERAND (exp, 0));
2953 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2954 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2958 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2959 the expression. If it is set, we conflict iff we are that rtx or
2960 both are in memory. Otherwise, we check all operands of the
2961 expression recursively. */
2963 switch (TREE_CODE (exp))
2966 return staticp (TREE_OPERAND (exp, 0));
2969 if (GET_CODE (x) == MEM)
2974 exp_rtl = CALL_EXPR_RTL (exp);
2977 /* Assume that the call will clobber all hard registers and
2979 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2980 || GET_CODE (x) == MEM)
2987 exp_rtl = RTL_EXPR_RTL (exp);
2989 /* We don't know what this can modify. */
2994 case WITH_CLEANUP_EXPR:
2995 exp_rtl = RTL_EXPR_RTL (exp);
2999 exp_rtl = SAVE_EXPR_RTL (exp);
3003 /* The only operand we look at is operand 1. The rest aren't
3004 part of the expression. */
3005 return safe_from_p (x, TREE_OPERAND (exp, 1));
3007 case METHOD_CALL_EXPR:
3008 /* This takes an rtx argument, but shouldn't appear here. */
3012 /* If we have an rtx, we do not need to scan our operands. */
3016 nops = tree_code_length[(int) TREE_CODE (exp)];
3017 for (i = 0; i < nops; i++)
3018 if (TREE_OPERAND (exp, i) != 0
3019 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3023 /* If we have an rtl, find any enclosed object. Then see if we conflict
3027 if (GET_CODE (exp_rtl) == SUBREG)
3029 exp_rtl = SUBREG_REG (exp_rtl);
3030 if (GET_CODE (exp_rtl) == REG
3031 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3035 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3036 are memory and EXP is not readonly. */
3037 return ! (rtx_equal_p (x, exp_rtl)
3038 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3039 && ! TREE_READONLY (exp)));
3042 /* If we reach here, it is safe. */
3046 /* Subroutine of expand_expr: return nonzero iff EXP is an
3047 expression whose type is statically determinable. */
3053 if (TREE_CODE (exp) == PARM_DECL
3054 || TREE_CODE (exp) == VAR_DECL
3055 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3056 || TREE_CODE (exp) == COMPONENT_REF
3057 || TREE_CODE (exp) == ARRAY_REF)
3062 /* expand_expr: generate code for computing expression EXP.
3063 An rtx for the computed value is returned. The value is never null.
3064 In the case of a void EXP, const0_rtx is returned.
3066 The value may be stored in TARGET if TARGET is nonzero.
3067 TARGET is just a suggestion; callers must assume that
3068 the rtx returned may not be the same as TARGET.
3070 If TARGET is CONST0_RTX, it means that the value will be ignored.
3072 If TMODE is not VOIDmode, it suggests generating the
3073 result in mode TMODE. But this is done only when convenient.
3074 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3075 TMODE is just a suggestion; callers must assume that
3076 the rtx returned may not have mode TMODE.
3078 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3079 with a constant address even if that address is not normally legitimate.
3080 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3082 If MODIFIER is EXPAND_SUM then when EXP is an addition
3083 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3084 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3085 products as above, or REG or MEM, or constant.
3086 Ordinarily in such cases we would output mul or add instructions
3087 and then return a pseudo reg containing the sum.
3089 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3090 it also marks a label as absolutely required (it can't be dead).
3091 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3092 This is used for outputting expressions used in initializers. */
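/* A typical call, shown only as an illustrative sketch:
       rtx value = expand_expr (exp, NULL_RTX, VOIDmode, 0);
   generates the insns for EXP and returns its value in the expression's
   natural mode, leaving the choice of result rtx entirely to the expander.  */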
3095 expand_expr (exp, target, tmode, modifier)
3098 enum machine_mode tmode;
3099 enum expand_modifier modifier;
3101 register rtx op0, op1, temp;
3102 tree type = TREE_TYPE (exp);
3103 int unsignedp = TREE_UNSIGNED (type);
3104 register enum machine_mode mode = TYPE_MODE (type);
3105 register enum tree_code code = TREE_CODE (exp);
3107 /* Use subtarget as the target for operand 0 of a binary operation. */
3108 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3109 rtx original_target = target;
3110 int ignore = target == const0_rtx;
3113 /* Don't use hard regs as subtargets, because the combiner
3114 can only handle pseudo regs. */
3115 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3117 /* Avoid subtargets inside loops,
3118 since they hide some invariant expressions. */
3119 if (preserve_subexpressions_p ())
3122 if (ignore) target = 0, original_target = 0;
3124 /* If we will do cse, generate all results into pseudo registers
3125 since 1) that allows cse to find more things
3126 and 2) otherwise cse could produce an insn the machine
3129 if (! cse_not_expected && mode != BLKmode && target
3130 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3133 /* Ensure we reference a volatile object even if value is ignored. */
3134 if (ignore && TREE_THIS_VOLATILE (exp)
3135 && mode != VOIDmode && mode != BLKmode)
3137 target = gen_reg_rtx (mode);
3138 temp = expand_expr (exp, target, VOIDmode, modifier);
3140 emit_move_insn (target, temp);
3148 tree function = decl_function_context (exp);
3149 /* Handle using a label in a containing function. */
3150 if (function != current_function_decl && function != 0)
3152 struct function *p = find_function_data (function);
3153 /* Allocate in the memory associated with the function
3154 that the label is in. */
3155 push_obstacks (p->function_obstack,
3156 p->function_maybepermanent_obstack);
3158 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3159 label_rtx (exp), p->forced_labels);
3162 else if (modifier == EXPAND_INITIALIZER)
3163 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3164 label_rtx (exp), forced_labels);
3165 temp = gen_rtx (MEM, FUNCTION_MODE,
3166 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3167 if (function != current_function_decl && function != 0)
3168 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3173 if (DECL_RTL (exp) == 0)
3175 error_with_decl (exp, "prior parameter's size depends on `%s'");
3176 return CONST0_RTX (mode);
3182 if (DECL_RTL (exp) == 0)
3184 /* Ensure the variable is marked as used
3185 even if it doesn't go through a parser. */
3186 TREE_USED (exp) = 1;
3187 /* Handle variables inherited from containing functions. */
3188 context = decl_function_context (exp);
3190 /* We treat inline_function_decl as an alias for the current function
3191 because that is the inline function whose vars, types, etc.
3192 are being merged into the current function.
3193 See expand_inline_function. */
3194 if (context != 0 && context != current_function_decl
3195 && context != inline_function_decl
3196 /* If var is static, we don't need a static chain to access it. */
3197 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3198 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3202 /* Mark as non-local and addressable. */
3203 DECL_NONLOCAL (exp) = 1;
3204 mark_addressable (exp);
3205 if (GET_CODE (DECL_RTL (exp)) != MEM)
3207 addr = XEXP (DECL_RTL (exp), 0);
3208 if (GET_CODE (addr) == MEM)
3209 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3211 addr = fix_lexical_addr (addr, exp);
3212 return change_address (DECL_RTL (exp), mode, addr);
3215 /* This is the case of an array whose size is to be determined
3216 from its initializer, while the initializer is still being parsed.
3218 if (GET_CODE (DECL_RTL (exp)) == MEM
3219 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3220 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3221 XEXP (DECL_RTL (exp), 0));
3222 if (GET_CODE (DECL_RTL (exp)) == MEM
3223 && modifier != EXPAND_CONST_ADDRESS
3224 && modifier != EXPAND_SUM
3225 && modifier != EXPAND_INITIALIZER)
3227 /* DECL_RTL probably contains a constant address.
3228 On RISC machines where a constant address isn't valid,
3229 make some insns to get that address into a register. */
3230 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3232 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3233 return change_address (DECL_RTL (exp), VOIDmode,
3234 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3236 return DECL_RTL (exp);
3239 return immed_double_const (TREE_INT_CST_LOW (exp),
3240 TREE_INT_CST_HIGH (exp),
3244 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3247 /* If optimized, generate immediate CONST_DOUBLE
3248 which will be turned into memory by reload if necessary.
3250 We used to force a register so that loop.c could see it. But
3251 this does not allow gen_* patterns to perform optimizations with
3252 the constants. It also produces two insns in cases like "x = 1.0;".
3253 On most machines, floating-point constants are not permitted in
3254 many insns, so we'd end up copying it to a register in any case.
3256 Now, we do the copying in expand_binop, if appropriate. */
3257 return immed_real_const (exp);
3261 if (! TREE_CST_RTL (exp))
3262 output_constant_def (exp);
3264 /* TREE_CST_RTL probably contains a constant address.
3265 On RISC machines where a constant address isn't valid,
3266 make some insns to get that address into a register. */
3267 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3268 && modifier != EXPAND_CONST_ADDRESS
3269 && modifier != EXPAND_INITIALIZER
3270 && modifier != EXPAND_SUM
3271 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3272 return change_address (TREE_CST_RTL (exp), VOIDmode,
3273 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3274 return TREE_CST_RTL (exp);
3277 context = decl_function_context (exp);
3278 /* We treat inline_function_decl as an alias for the current function
3279 because that is the inline function whose vars, types, etc.
3280 are being merged into the current function.
3281 See expand_inline_function. */
3282 if (context == current_function_decl || context == inline_function_decl)
3285 /* If this is non-local, handle it. */
3288 temp = SAVE_EXPR_RTL (exp);
3289 if (temp && GET_CODE (temp) == REG)
3291 put_var_into_stack (exp);
3292 temp = SAVE_EXPR_RTL (exp);
3294 if (temp == 0 || GET_CODE (temp) != MEM)
3296 return change_address (temp, mode,
3297 fix_lexical_addr (XEXP (temp, 0), exp));
3299 if (SAVE_EXPR_RTL (exp) == 0)
3301 if (mode == BLKmode)
3303 = assign_stack_temp (mode,
3304 int_size_in_bytes (TREE_TYPE (exp)), 0);
3306 temp = gen_reg_rtx (mode);
3307 SAVE_EXPR_RTL (exp) = temp;
3308 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3309 if (!optimize && GET_CODE (temp) == REG)
3310 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3313 return SAVE_EXPR_RTL (exp);
3316 /* Exit the current loop if the body-expression is true. */
3318 rtx label = gen_label_rtx ();
3319 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3320 expand_exit_loop (NULL_PTR);
3326 expand_start_loop (1);
3327 expand_expr_stmt (TREE_OPERAND (exp, 0));
3334 tree vars = TREE_OPERAND (exp, 0);
3335 int vars_need_expansion = 0;
3337 /* Need to open a binding contour here because
3338 if there are any cleanups they must be contained here. */
3339 expand_start_bindings (0);
3341 /* Mark the corresponding BLOCK for output. */
3342 if (TREE_OPERAND (exp, 2) != 0)
3343 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3345 /* If VARS have not yet been expanded, expand them now. */
3348 if (DECL_RTL (vars) == 0)
3350 vars_need_expansion = 1;
3353 expand_decl_init (vars);
3354 vars = TREE_CHAIN (vars);
3357 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3359 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3365 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3367 emit_insns (RTL_EXPR_SEQUENCE (exp));
3368 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3369 return RTL_EXPR_RTL (exp);
3372 /* All elts simple constants => refer to a constant in memory. But
3373 if this is a non-BLKmode mode, let it store a field at a time
3374 since that should make a CONST_INT or CONST_DOUBLE when we
3376 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3378 rtx constructor = output_constant_def (exp);
3379 if (modifier != EXPAND_CONST_ADDRESS
3380 && modifier != EXPAND_INITIALIZER
3381 && modifier != EXPAND_SUM
3382 && !memory_address_p (GET_MODE (constructor),
3383 XEXP (constructor, 0)))
3384 constructor = change_address (constructor, VOIDmode,
3385 XEXP (constructor, 0));
3392 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3393 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3398 if (target == 0 || ! safe_from_p (target, exp))
3400 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3401 target = gen_reg_rtx (mode);
3404 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3406 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3407 target = safe_target;
3410 store_constructor (exp, target);
3416 tree exp1 = TREE_OPERAND (exp, 0);
3419 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3420 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3421 This code has the same general effect as simply doing
3422 expand_expr on the save expr, except that the expression PTR
3423 is computed for use as a memory address. This means different
3424 code, suitable for indexing, may be generated. */
3425 if (TREE_CODE (exp1) == SAVE_EXPR
3426 && SAVE_EXPR_RTL (exp1) == 0
3427 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3428 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3429 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3431 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3432 VOIDmode, EXPAND_SUM);
3433 op0 = memory_address (mode, temp);
3434 op0 = copy_all_regs (op0);
3435 SAVE_EXPR_RTL (exp1) = op0;
3439 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3440 op0 = memory_address (mode, op0);
3443 temp = gen_rtx (MEM, mode, op0);
3444 /* If address was computed by addition,
3445 mark this as an element of an aggregate. */
3446 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3447 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3448 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3449 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3450 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3451 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3452 || (TREE_CODE (exp1) == ADDR_EXPR
3453 && (exp2 = TREE_OPERAND (exp1, 0))
3454 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3455 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3456 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3457 MEM_IN_STRUCT_P (temp) = 1;
3458 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3459 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3460 a location is accessed through a pointer to const does not mean
3461 that the value there can never change. */
3462 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3468 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3469 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3471 /* Nonconstant array index or nonconstant element size.
3472 Generate the tree for *(&array+index) and expand that,
3473 except do it in a language-independent way
3474 and don't complain about non-lvalue arrays.
3475 `mark_addressable' should already have been called
3476 for any array for which this case will be reached. */
3478 /* Don't forget the const or volatile flag from the array element. */
3479 tree variant_type = build_type_variant (type,
3480 TREE_READONLY (exp),
3481 TREE_THIS_VOLATILE (exp));
3482 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3483 TREE_OPERAND (exp, 0));
3484 tree index = TREE_OPERAND (exp, 1);
3487 /* Convert the integer argument to a type the same size as a pointer
3488 so the multiply won't overflow spuriously. */
3489 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3490 index = convert (type_for_size (POINTER_SIZE, 0), index);
3492 /* Don't think the address has side effects
3493 just because the array does.
3494 (In some cases the address might have side effects,
3495 and we fail to record that fact here. However, it should not
3496 matter, since expand_expr should not care.) */
3497 TREE_SIDE_EFFECTS (array_adr) = 0;
3499 elt = build1 (INDIRECT_REF, type,
3500 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3502 fold (build (MULT_EXPR,
3503 TYPE_POINTER_TO (variant_type),
3504 index, size_in_bytes (type))))));
3506 /* Volatility, etc., of new expression is same as old expression. */
3507 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3508 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3509 TREE_READONLY (elt) = TREE_READONLY (exp);
3511 return expand_expr (elt, target, tmode, modifier);
3514 /* Fold an expression like: "foo"[2].
3515 This is not done in fold so it won't happen inside &. */
3518 tree arg0 = TREE_OPERAND (exp, 0);
3519 tree arg1 = TREE_OPERAND (exp, 1);
3521 if (TREE_CODE (arg0) == STRING_CST
3522 && TREE_CODE (arg1) == INTEGER_CST
3523 && !TREE_INT_CST_HIGH (arg1)
3524 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3526 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3528 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3529 TREE_TYPE (exp) = integer_type_node;
3530 return expand_expr (exp, target, tmode, modifier);
3532 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3534 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3535 TREE_TYPE (exp) = integer_type_node;
3536 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3541 /* If this is a constant index into a constant array,
3542 just get the value from the array. Handle both the cases when
3543 we have an explicit constructor and when our operand is a variable
3544 that was declared const. */
3546 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3547 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3549 tree index = fold (TREE_OPERAND (exp, 1));
3550 if (TREE_CODE (index) == INTEGER_CST
3551 && TREE_INT_CST_HIGH (index) == 0)
3553 int i = TREE_INT_CST_LOW (index);
3554 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3557 elem = TREE_CHAIN (elem);
3559 return expand_expr (fold (TREE_VALUE (elem)), target,
3564 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3565 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3566 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3567 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3568 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3570 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3573 tree index = fold (TREE_OPERAND (exp, 1));
3574 if (TREE_CODE (index) == INTEGER_CST
3575 && TREE_INT_CST_HIGH (index) == 0)
3577 int i = TREE_INT_CST_LOW (index);
3578 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3580 if (TREE_CODE (init) == CONSTRUCTOR)
3582 tree elem = CONSTRUCTOR_ELTS (init);
3585 elem = TREE_CHAIN (elem);
3587 return expand_expr (fold (TREE_VALUE (elem)), target,
3590 else if (TREE_CODE (init) == STRING_CST
3591 && i < TREE_STRING_LENGTH (init))
3593 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3594 return convert_to_mode (mode, temp, 0);
3598 /* Treat array-ref with constant index as a component-ref. */
3602 /* If the operand is a CONSTRUCTOR, we can just extract the
3603 appropriate field if it is present. */
3604 if (code != ARRAY_REF
3605 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3609 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3610 elt = TREE_CHAIN (elt))
3611 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3612 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3616 enum machine_mode mode1;
3621 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3622 &mode1, &unsignedp, &volatilep);
3624 /* In some cases, we will be offsetting OP0's address by a constant.
3625 So get it as a sum, if possible. If we will be using it
3626 directly in an insn, we validate it. */
3627 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3629 /* If this is a constant, put it into a register if it is a
3630 legitimate constant, and into memory if it isn't. */
3631 if (CONSTANT_P (op0))
3633 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3634 if (LEGITIMATE_CONSTANT_P (op0))
3635 op0 = force_reg (mode, op0);
3637 op0 = validize_mem (force_const_mem (mode, op0));
3642 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3644 if (GET_CODE (op0) != MEM)
3646 op0 = change_address (op0, VOIDmode,
3647 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3648 force_reg (Pmode, offset_rtx)));
3651 /* Don't forget about volatility even if this is a bitfield. */
3652 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3654 op0 = copy_rtx (op0);
3655 MEM_VOLATILE_P (op0) = 1;
3658 if (mode1 == VOIDmode
3659 || (mode1 != BLKmode && ! direct_load[(int) mode1])
3660 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3662 /* In cases where an aligned union has an unaligned object
3663 as a field, we might be extracting a BLKmode value from
3664 an integer-mode (e.g., SImode) object. Handle this case
3665 by doing the extract into an object as wide as the field
3666 (which we know to be the width of a basic mode), then
3667 storing into memory, and changing the mode to BLKmode. */
3668 enum machine_mode ext_mode = mode;
3670 if (ext_mode == BLKmode)
3671 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3673 if (ext_mode == BLKmode)
3676 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3677 unsignedp, target, ext_mode, ext_mode,
3678 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3679 int_size_in_bytes (TREE_TYPE (tem)));
3680 if (mode == BLKmode)
3682 rtx new = assign_stack_temp (ext_mode,
3683 bitsize / BITS_PER_UNIT, 0);
3685 emit_move_insn (new, op0);
3686 op0 = copy_rtx (new);
3687 PUT_MODE (op0, BLKmode);
3693 /* Get a reference to just this component. */
3694 if (modifier == EXPAND_CONST_ADDRESS
3695 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3696 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3697 (bitpos / BITS_PER_UNIT)));
3699 op0 = change_address (op0, mode1,
3700 plus_constant (XEXP (op0, 0),
3701 (bitpos / BITS_PER_UNIT)));
3702 MEM_IN_STRUCT_P (op0) = 1;
3703 MEM_VOLATILE_P (op0) |= volatilep;
3704 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3707 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3708 convert_move (target, op0, unsignedp);
3714 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3715 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3716 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3717 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3718 MEM_IN_STRUCT_P (temp) = 1;
3719 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3720 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3721 a location is accessed through a pointer to const does not mean
3722 that the value there can never change. */
3723 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3728 /* Intended for a reference to a buffer of a file-object in Pascal.
3729 But it's not certain that a special tree code will really be
3730 necessary for these. INDIRECT_REF might work for them. */
3734 case WITH_CLEANUP_EXPR:
3735 if (RTL_EXPR_RTL (exp) == 0)
3738 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3740 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3741 /* That's it for this cleanup. */
3742 TREE_OPERAND (exp, 2) = 0;
3744 return RTL_EXPR_RTL (exp);
3747 /* Check for a built-in function. */
3748 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3749 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3750 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3751 return expand_builtin (exp, target, subtarget, tmode, ignore);
3752 /* If this call was expanded already by preexpand_calls,
3753 just return the result we got. */
3754 if (CALL_EXPR_RTL (exp) != 0)
3755 return CALL_EXPR_RTL (exp);
3756 return expand_call (exp, target, ignore);
3758 case NON_LVALUE_EXPR:
3761 case REFERENCE_EXPR:
3762 if (TREE_CODE (type) == VOID_TYPE || ignore)
3764 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3767 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3768 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3769 if (TREE_CODE (type) == UNION_TYPE)
3771 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3774 if (mode == BLKmode)
3776 if (TYPE_SIZE (type) == 0
3777 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3779 target = assign_stack_temp (BLKmode,
3780 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3781 + BITS_PER_UNIT - 1)
3782 / BITS_PER_UNIT, 0);
3785 target = gen_reg_rtx (mode);
3787 if (GET_CODE (target) == MEM)
3788 /* Store data into beginning of memory target. */
3789 store_expr (TREE_OPERAND (exp, 0),
3790 change_address (target, TYPE_MODE (valtype), 0),
3792 else if (GET_CODE (target) == REG)
3793 /* Store this field into a union of the proper type. */
3794 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3795 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3797 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3801 /* Return the entire union. */
3804 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, modifier);
3805 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3807 if (modifier == EXPAND_INITIALIZER)
3808 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
3809 if (flag_force_mem && GET_CODE (op0) == MEM)
3810 op0 = copy_to_reg (op0);
3813 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3815 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3819 /* We come here from MINUS_EXPR when the second operand is a constant. */
3821 this_optab = add_optab;
3823 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3824 something else, make sure we add the register to the constant and
3825 then to the other thing. This case can occur during strength
3826 reduction and doing it this way will produce better code if the
3827 frame pointer or argument pointer is eliminated.
3829 fold-const.c will ensure that the constant is always in the inner
3830 PLUS_EXPR, so the only case we need to do anything about is if
3831 sp, ap, or fp is our second argument, in which case we must swap
3832 the innermost first argument and our second argument. */
3834 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3835 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3836 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3837 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3838 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3839 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3841 tree t = TREE_OPERAND (exp, 1);
3843 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3844 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3847 /* If the result is to be Pmode and we are adding an integer to
3848 something, we might be forming a constant. So try to use
3849 plus_constant. If it produces a sum and we can't accept it,
3850 use force_operand. This allows P = &ARR[const] to generate
3851 efficient code on machines where a SYMBOL_REF is not a valid
3854 If this is an EXPAND_SUM call, always return the sum. */
3855 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3856 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3857 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3860 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3862 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3863 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3864 op1 = force_operand (op1, target);
3868 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3869 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3870 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3873 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3875 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3876 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3877 op0 = force_operand (op0, target);
3881 /* No sense saving up arithmetic to be done
3882 if it's all in the wrong mode to form part of an address.
3883 And force_operand won't know whether to sign-extend or
3885 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3886 || mode != Pmode) goto binop;
3888 preexpand_calls (exp);
3889 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3892 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3893 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
3895 /* Make sure any term that's a sum with a constant comes last. */
3896 if (GET_CODE (op0) == PLUS
3897 && CONSTANT_P (XEXP (op0, 1)))
3903 /* If adding to a sum including a constant,
3904 associate it to put the constant outside. */
3905 if (GET_CODE (op1) == PLUS
3906 && CONSTANT_P (XEXP (op1, 1)))
3908 rtx constant_term = const0_rtx;
3910 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3913 /* Ensure that MULT comes first if there is one. */
3914 else if (GET_CODE (op0) == MULT)
3915 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3917 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3919 /* Let's also eliminate constants from op0 if possible. */
3920 op0 = eliminate_constant_term (op0, &constant_term);
3922 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3923 their sum should be a constant. Form it into OP1, since the
3924 result we want will then be OP0 + OP1. */
3926 temp = simplify_binary_operation (PLUS, mode, constant_term,
3931 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3934 /* Put a constant term last and put a multiplication first. */
3935 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3936 temp = op1, op1 = op0, op0 = temp;
3938 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3939 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3942 /* Handle difference of two symbolic constants,
3943 for the sake of an initializer. */
3944 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3945 && really_constant_p (TREE_OPERAND (exp, 0))
3946 && really_constant_p (TREE_OPERAND (exp, 1)))
3948 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
3949 VOIDmode, modifier);
3950 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
3951 VOIDmode, modifier);
3952 return gen_rtx (MINUS, mode, op0, op1);
3954 /* Convert A - const to A + (-const). */
3955 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3957 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3958 fold (build1 (NEGATE_EXPR, type,
3959 TREE_OPERAND (exp, 1))));
3962 this_optab = sub_optab;
3966 preexpand_calls (exp);
3967 /* If first operand is constant, swap them.
3968 Thus the following special case checks need only
3969 check the second operand. */
3970 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3972 register tree t1 = TREE_OPERAND (exp, 0);
3973 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3974 TREE_OPERAND (exp, 1) = t1;
3977 /* Attempt to return something suitable for generating an
3978 indexed address, for machines that support that. */
3980 if (modifier == EXPAND_SUM && mode == Pmode
3981 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3982 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3984 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3986 /* Apply distributive law if OP0 is x+c. */
3987 if (GET_CODE (op0) == PLUS
3988 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
3989 return gen_rtx (PLUS, mode,
3990 gen_rtx (MULT, mode, XEXP (op0, 0),
3991 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
3992 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3993 * INTVAL (XEXP (op0, 1))));
3995 if (GET_CODE (op0) != REG)
3996 op0 = force_operand (op0, NULL_RTX);
3997 if (GET_CODE (op0) != REG)
3998 op0 = copy_to_mode_reg (mode, op0);
4000 return gen_rtx (MULT, mode, op0,
4001 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4004 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4007 /* Check for multiplying things that have been extended
4008 from a narrower type. If this machine supports multiplying
4009 in that narrower type with a result in the desired type,
4010 do it that way, and avoid the explicit type-conversion. */
4011 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4012 && TREE_CODE (type) == INTEGER_TYPE
4013 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4014 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4015 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4016 && int_fits_type_p (TREE_OPERAND (exp, 1),
4017 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4018 /* Don't use a widening multiply if a shift will do. */
4019 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4020 > HOST_BITS_PER_WIDE_INT)
4021 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4023 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4024 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4026 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4027 /* If both operands are extended, they must either both
4028 be zero-extended or both be sign-extended. */
4029 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4031 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4033 enum machine_mode innermode
4034 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4035 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4036 ? umul_widen_optab : smul_widen_optab);
4037 if (mode == GET_MODE_WIDER_MODE (innermode)
4038 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4040 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4041 NULL_RTX, VOIDmode, 0);
4042 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4043 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4046 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4047 NULL_RTX, VOIDmode, 0);
4051 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4052 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4053 return expand_mult (mode, op0, op1, target, unsignedp);
4055 case TRUNC_DIV_EXPR:
4056 case FLOOR_DIV_EXPR:
4058 case ROUND_DIV_EXPR:
4059 case EXACT_DIV_EXPR:
4060 preexpand_calls (exp);
4061 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4063 /* Possible optimization: compute the dividend with EXPAND_SUM
4064 then, if the divisor is constant, optimize the case
4065 where some terms of the dividend have coefficients divisible by it. */
4066 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4067 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4068 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4071 this_optab = flodiv_optab;
4074 case TRUNC_MOD_EXPR:
4075 case FLOOR_MOD_EXPR:
4077 case ROUND_MOD_EXPR:
4078 preexpand_calls (exp);
4079 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4081 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4082 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4083 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4085 case FIX_ROUND_EXPR:
4086 case FIX_FLOOR_EXPR:
4088 abort (); /* Not used for C. */
4090 case FIX_TRUNC_EXPR:
4091 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4093 target = gen_reg_rtx (mode);
4094 expand_fix (target, op0, unsignedp);
4098 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4100 target = gen_reg_rtx (mode);
4101 /* expand_float can't figure out what to do if FROM has VOIDmode.
4102 So give it the correct mode. With -O, cse will optimize this. */
4103 if (GET_MODE (op0) == VOIDmode)
4104 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4106 expand_float (target, op0,
4107 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4111 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4112 temp = expand_unop (mode, neg_optab, op0, target, 0);
4118 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4120 /* Unsigned abs is simply the operand. Testing here means we don't
4121 risk generating incorrect code below. */
4122 if (TREE_UNSIGNED (type))
4125 /* First try to do it with a special abs instruction. */
4126 temp = expand_unop (mode, abs_optab, op0, target, 0);
4130 /* If this machine has expensive jumps, we can do integer absolute
4131 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4132 where W is the width of MODE. */
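/* Worked example, for illustration: with W == 32 and x == -5,
   x >> 31 is -1 (all one bits); (-1 ^ -5) is 4, and 4 - (-1) is 5 == abs (-5).
   For nonnegative x the shift yields 0 and the result is x unchanged.  */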
4134 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4136 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4137 size_int (GET_MODE_BITSIZE (mode) - 1),
4140 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4143 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4150 /* If that does not win, use conditional jump and negate. */
4151 target = original_target;
4152 temp = gen_label_rtx ();
4153 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4154 || (GET_CODE (target) == REG
4155 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4156 target = gen_reg_rtx (mode);
4157 emit_move_insn (target, op0);
4158 emit_cmp_insn (target,
4159 expand_expr (convert (type, integer_zero_node),
4160 NULL_RTX, VOIDmode, 0),
4161 GE, NULL_RTX, mode, 0, 0);
4163 emit_jump_insn (gen_bge (temp));
4164 op0 = expand_unop (mode, neg_optab, target, target, 0);
4166 emit_move_insn (target, op0);
4173 target = original_target;
4174 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4175 || (GET_CODE (target) == REG
4176 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4177 target = gen_reg_rtx (mode);
4178 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4179 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4181 /* First try to do it with a special MIN or MAX instruction.
4182 If that does not win, use a conditional jump to select the proper value. */
4184 this_optab = (TREE_UNSIGNED (type)
4185 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4186 : (code == MIN_EXPR ? smin_optab : smax_optab));
4188 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4194 emit_move_insn (target, op0);
4195 op0 = gen_label_rtx ();
4196 if (code == MAX_EXPR)
4197 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4198 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4199 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4201 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4202 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4203 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4204 if (temp == const0_rtx)
4205 emit_move_insn (target, op1);
4206 else if (temp != const_true_rtx)
4208 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4209 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4212 emit_move_insn (target, op1);
4217 /* ??? Can optimize when the operand of this is a bitwise operation,
4218 by using a different bitwise operation. */
4220 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4221 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4227 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4228 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4233 /* ??? Can optimize bitwise operations with one arg constant.
4234 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4235 and (a bitwise1 b) bitwise2 b (etc)
4236 but that is probably not worth while. */
4238 /* BIT_AND_EXPR is for bitwise anding.
4239 TRUTH_AND_EXPR is for anding two boolean values
4240 when we want in all cases to compute both of them.
4241 In general it is fastest to do TRUTH_AND_EXPR by
4242 computing both operands as actual zero-or-1 values
4243 and then bitwise anding. In cases where there cannot
4244 be any side effects, better code would be made by
4245 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4246 but the question is how to recognize those cases. */
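/* For instance, `a && b' written as TRUTH_AND_EXPR is compiled roughly as
   (a != 0) & (b != 0), always evaluating both operands, whereas
   TRUTH_ANDIF_EXPR would skip B entirely whenever A is false.  */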
4248 case TRUTH_AND_EXPR:
4250 this_optab = and_optab;
4253 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4256 this_optab = ior_optab;
4260 this_optab = xor_optab;
4267 preexpand_calls (exp);
4268 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4270 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4271 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4274 /* Could determine the answer when only additive constants differ.
4275 Also, the addition of one can be handled by changing the condition. */
4282 preexpand_calls (exp);
4283 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4286 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4287 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4289 && GET_CODE (original_target) == REG
4290 && (GET_MODE (original_target)
4291 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4293 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4294 if (temp != original_target)
4295 temp = copy_to_reg (temp);
4296 op1 = gen_label_rtx ();
4297 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4298 GET_MODE (temp), unsignedp, 0);
4299 emit_jump_insn (gen_beq (op1));
4300 emit_move_insn (temp, const1_rtx);
4304 /* If no set-flag instruction, must generate a conditional
4305 store into a temporary variable. Drop through
4306 and handle this like && and ||. */
4308 case TRUTH_ANDIF_EXPR:
4309 case TRUTH_ORIF_EXPR:
4310 if (target == 0 || ! safe_from_p (target, exp)
4311 /* Make sure we don't have a hard reg (such as function's return
4312 value) live across basic blocks, if not optimizing. */
4313 || (!optimize && GET_CODE (target) == REG
4314 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4315 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4316 emit_clr_insn (target);
4317 op1 = gen_label_rtx ();
4318 jumpifnot (exp, op1);
4319 emit_0_to_1_insn (target);
4323 case TRUTH_NOT_EXPR:
4324 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4325 /* The parser is careful to generate TRUTH_NOT_EXPR
4326 only with operands that are always zero or one. */
4327 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4328 target, 1, OPTAB_LIB_WIDEN);
4334 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4336 return expand_expr (TREE_OPERAND (exp, 1),
4337 (ignore ? const0_rtx : target),
4342 /* Note that COND_EXPRs whose type is a structure or union
4343 are required to be constructed to contain assignments of
4344 a temporary variable, so that we can evaluate them here
4345 for side effect only. If type is void, we must do likewise. */
4347 /* If an arm of the branch requires a cleanup,
4348 only that cleanup is performed. */
4351 tree binary_op = 0, unary_op = 0;
4352 tree old_cleanups = cleanups_this_call;
4353 cleanups_this_call = 0;
4355 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4356 convert it to our mode, if necessary. */
4357 if (integer_onep (TREE_OPERAND (exp, 1))
4358 && integer_zerop (TREE_OPERAND (exp, 2))
4359 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4361 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4362 if (GET_MODE (op0) == mode)
4365 target = gen_reg_rtx (mode);
4366 convert_move (target, op0, unsignedp);
4370 /* If we are not to produce a result, we have no target. Otherwise,
4371 if a target was specified use it; it will not be used as an
4372 intermediate target unless it is safe. If no target, use a temporary. */
4375 if (mode == VOIDmode || ignore)
4377 else if (original_target
4378 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4379 temp = original_target;
4380 else if (mode == BLKmode)
4382 if (TYPE_SIZE (type) == 0
4383 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4385 temp = assign_stack_temp (BLKmode,
4386 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4387 + BITS_PER_UNIT - 1)
4388 / BITS_PER_UNIT, 0);
4391 temp = gen_reg_rtx (mode);
4393 /* Check for X ? A + B : A. If we have this, we can copy
4394 A to the output and conditionally add B. Similarly for unary
4395 operations. Don't do this if X has side-effects because
4396 those side effects might affect A or B and the "?" operation is
4397 a sequence point in ANSI. (We test for side effects later.) */
4399 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4400 && operand_equal_p (TREE_OPERAND (exp, 2),
4401 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4402 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4403 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4404 && operand_equal_p (TREE_OPERAND (exp, 1),
4405 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4406 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4407 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4408 && operand_equal_p (TREE_OPERAND (exp, 2),
4409 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4410 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4411 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4412 && operand_equal_p (TREE_OPERAND (exp, 1),
4413 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4414 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4416 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4417 operation, do this as A + (X != 0). Similarly for other simple
4418 binary operators. */
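/* For instance, `x ? n + 1 : n' can be computed as n + (x != 0), using
   do_store_flag to produce the 0-or-1 value of the condition, so no branch
   is needed at all.  */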
4419 if (singleton && binary_op
4420 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4421 && (TREE_CODE (binary_op) == PLUS_EXPR
4422 || TREE_CODE (binary_op) == MINUS_EXPR
4423 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4424 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4425 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4426 && integer_onep (TREE_OPERAND (binary_op, 1))
4427 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4430 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4431 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4432 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4433 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4436 /* If we had X ? A : A + 1, do this as A + (X == 0).
4438 We have to invert the truth value here and then put it
4439 back later if do_store_flag fails. We cannot simply copy
4440 TREE_OPERAND (exp, 0) to another variable and modify that
4441 because invert_truthvalue can modify the tree pointed to by its argument. */
4443 if (singleton == TREE_OPERAND (exp, 1))
4444 TREE_OPERAND (exp, 0)
4445 = invert_truthvalue (TREE_OPERAND (exp, 0));
4447 result = do_store_flag (TREE_OPERAND (exp, 0),
4448 (safe_from_p (temp, singleton)
4450 mode, BRANCH_COST <= 1);
4454 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4455 return expand_binop (mode, boptab, op1, result, temp,
4456 unsignedp, OPTAB_LIB_WIDEN);
4458 else if (singleton == TREE_OPERAND (exp, 1))
4459 TREE_OPERAND (exp, 0)
4460 = invert_truthvalue (TREE_OPERAND (exp, 0));
4464 op0 = gen_label_rtx ();
4466 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4470 /* If the target conflicts with the other operand of the
4471 binary op, we can't use it. Also, we can't use the target
4472 if it is a hard register, because evaluating the condition
4473 might clobber it. */
4475 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4476 || (GET_CODE (temp) == REG
4477 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4478 temp = gen_reg_rtx (mode);
4479 store_expr (singleton, temp, 0);
4482 expand_expr (singleton,
4483 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4484 if (cleanups_this_call)
4486 sorry ("aggregate value in COND_EXPR");
4487 cleanups_this_call = 0;
4489 if (singleton == TREE_OPERAND (exp, 1))
4490 jumpif (TREE_OPERAND (exp, 0), op0);
4492 jumpifnot (TREE_OPERAND (exp, 0), op0);
4494 if (binary_op && temp == 0)
4495 /* Just touch the other operand. */
4496 expand_expr (TREE_OPERAND (binary_op, 1),
4497 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4499 store_expr (build (TREE_CODE (binary_op), type,
4500 make_tree (type, temp),
4501 TREE_OPERAND (binary_op, 1)),
4504 store_expr (build1 (TREE_CODE (unary_op), type,
4505 make_tree (type, temp)),
4510 /* This is now done in jump.c and is better done there because it
4511 produces shorter register lifetimes. */
4513 /* Check for both possibilities, either constants or variables
4514 in registers (but not the same as the target!). If so, can
4515 save branches by assigning one, branching, and assigning the other. */
4517 else if (temp && GET_MODE (temp) != BLKmode
4518 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4519 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4520 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4521 && DECL_RTL (TREE_OPERAND (exp, 1))
4522 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4523 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4524 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4525 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4526 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4527 && DECL_RTL (TREE_OPERAND (exp, 2))
4528 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4529 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4531 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4532 temp = gen_reg_rtx (mode);
4533 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4534 jumpifnot (TREE_OPERAND (exp, 0), op0);
4535 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4539 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4540 comparison operator. If we have one of these cases, set the
4541 output to A, branch on A (cse will merge these two references),
4542 then set the output to FOO. */
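/* For instance, for `p != 0 ? p : q' we store P in the output, branch on
   P itself (cse merges that reference with the store just made), and store Q
   only on the false path.  */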
4544 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4545 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4546 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4547 TREE_OPERAND (exp, 1), 0)
4548 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4549 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4551 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4552 temp = gen_reg_rtx (mode);
4553 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4554 jumpif (TREE_OPERAND (exp, 0), op0);
4555 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4559 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4560 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4561 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4562 TREE_OPERAND (exp, 2), 0)
4563 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4564 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4566 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4567 temp = gen_reg_rtx (mode);
4568 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4569 jumpifnot (TREE_OPERAND (exp, 0), op0);
4570 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4575 op1 = gen_label_rtx ();
4576 jumpifnot (TREE_OPERAND (exp, 0), op0);
4578 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4580 expand_expr (TREE_OPERAND (exp, 1),
4581 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4582 if (cleanups_this_call)
4584 sorry ("aggregate value in COND_EXPR");
4585 cleanups_this_call = 0;
4589 emit_jump_insn (gen_jump (op1));
4593 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4595 expand_expr (TREE_OPERAND (exp, 2),
4596 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4599 if (cleanups_this_call)
4601 sorry ("aggregate value in COND_EXPR");
4602 cleanups_this_call = 0;
4608 cleanups_this_call = old_cleanups;
4614 /* Something needs to be initialized, but we didn't know
4615 where that thing was when building the tree. For example,
4616 it could be the return value of a function, or a parameter
4617 to a function which is laid down in the stack, or a temporary
4618 variable which must be passed by reference.
4620 We guarantee that the expression will either be constructed
4621 or copied into our original target. */
4623 tree slot = TREE_OPERAND (exp, 0);
4625 if (TREE_CODE (slot) != VAR_DECL)
4630 if (DECL_RTL (slot) != 0)
4631 target = DECL_RTL (slot);
4634 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4635 /* All temp slots at this level must not conflict. */
4636 preserve_temp_slots (target);
4637 DECL_RTL (slot) = target;
4641 /* Since SLOT is not known to the called function
4642 to belong to its stack frame, we must build an explicit
4643 cleanup. This case occurs when we must build up a reference
4644 to pass the reference as an argument. In this case,
4645 it is very likely that such a reference need not be built here. */
4648 if (TREE_OPERAND (exp, 2) == 0)
4649 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4650 if (TREE_OPERAND (exp, 2))
4651 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4652 cleanups_this_call);
4657 /* This case does occur, when expanding a parameter which
4658 needs to be constructed on the stack. The target
4659 is the actual stack address that we want to initialize.
4660 The function we call will perform the cleanup in this case. */
4662 DECL_RTL (slot) = target;
4665 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4670 tree lhs = TREE_OPERAND (exp, 0);
4671 tree rhs = TREE_OPERAND (exp, 1);
4672 tree noncopied_parts = 0;
4673 tree lhs_type = TREE_TYPE (lhs);
4675 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4676 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4677 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4678 TYPE_NONCOPIED_PARTS (lhs_type));
4679 while (noncopied_parts != 0)
4681 expand_assignment (TREE_VALUE (noncopied_parts),
4682 TREE_PURPOSE (noncopied_parts), 0, 0);
4683 noncopied_parts = TREE_CHAIN (noncopied_parts);
4690 /* If lhs is complex, expand calls in rhs before computing it.
4691 That's so we don't compute a pointer and save it over a call.
4692 If lhs is simple, compute it first so we can give it as a
4693 target if the rhs is just a call. This avoids an extra temp and copy
4694 and that prevents a partial-subsumption which makes bad code.
4695 Actually we could treat component_ref's of vars like vars. */
4697 tree lhs = TREE_OPERAND (exp, 0);
4698 tree rhs = TREE_OPERAND (exp, 1);
4699 tree noncopied_parts = 0;
4700 tree lhs_type = TREE_TYPE (lhs);
4704 if (TREE_CODE (lhs) != VAR_DECL
4705 && TREE_CODE (lhs) != RESULT_DECL
4706 && TREE_CODE (lhs) != PARM_DECL)
4707 preexpand_calls (exp);
4709 /* Check for |= or &= of a bitfield of size one into another bitfield
4710 of size 1. In this case, (unless we need the result of the
4711 assignment) we can do this more efficiently with a
4712 test followed by an assignment, if necessary.
4714 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4715 things change so we do, this code should be enhanced to support it. */
4718 && TREE_CODE (lhs) == COMPONENT_REF
4719 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4720 || TREE_CODE (rhs) == BIT_AND_EXPR)
4721 && TREE_OPERAND (rhs, 0) == lhs
4722 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4723 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4724 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4726 rtx label = gen_label_rtx ();
4728 do_jump (TREE_OPERAND (rhs, 1),
4729 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4730 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4731 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4732 (TREE_CODE (rhs) == BIT_IOR_EXPR
4734 : integer_zero_node)),
4736 do_pending_stack_adjust ();
4741 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4742 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4743 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4744 TYPE_NONCOPIED_PARTS (lhs_type));
4746 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4747 while (noncopied_parts != 0)
4749 expand_assignment (TREE_PURPOSE (noncopied_parts),
4750 TREE_VALUE (noncopied_parts), 0, 0);
4751 noncopied_parts = TREE_CHAIN (noncopied_parts);
4756 case PREINCREMENT_EXPR:
4757 case PREDECREMENT_EXPR:
4758 return expand_increment (exp, 0);
4760 case POSTINCREMENT_EXPR:
4761 case POSTDECREMENT_EXPR:
4762 /* Faster to treat as pre-increment if result is not used. */
4763 return expand_increment (exp, ! ignore);
4766 /* Are we taking the address of a nested function? */
4767 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4768 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4770 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4771 op0 = force_operand (op0, target);
4775 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
4776 (modifier == EXPAND_INITIALIZER
4777 ? modifier : EXPAND_CONST_ADDRESS));
4778 if (GET_CODE (op0) != MEM)
4781 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4782 return XEXP (op0, 0);
4783 op0 = force_operand (XEXP (op0, 0), target);
4785 if (flag_force_addr && GET_CODE (op0) != REG)
4786 return force_reg (Pmode, op0);
4789 case ENTRY_VALUE_EXPR:
4796 return (*lang_expand_expr) (exp, target, tmode, modifier);
4799 /* Here to do an ordinary binary operator, generating an instruction
4800 from the optab already placed in `this_optab'. */
4802 preexpand_calls (exp);
4803 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4805 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4806 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4808 temp = expand_binop (mode, this_optab, op0, op1, target,
4809 unsignedp, OPTAB_LIB_WIDEN);
4815 /* Return the alignment in bits of EXP, a pointer valued expression.
4816 But don't return more than MAX_ALIGN no matter what.
4817 The alignment returned is, by default, the alignment of the thing that
4818 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4820 Otherwise, look at the expression to see if we can do better, i.e., if the
4821 expression is actually pointing at an object whose alignment is tighter. */
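/* For instance, for `(char *) &d' where D is a double aligned to 64 bits,
   the pointer type alone only promises byte alignment, but peeling the
   conversion and the ADDR_EXPR below recovers the full 64-bit alignment
   (still capped at MAX_ALIGN).  */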
4824 get_pointer_alignment (exp, max_align)
4828 unsigned align, inner;
4830 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4833 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4834 align = MIN (align, max_align);
4838 switch (TREE_CODE (exp))
4842 case NON_LVALUE_EXPR:
4843 exp = TREE_OPERAND (exp, 0);
4844 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4846 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4847 inner = MIN (inner, max_align);
4848 align = MAX (align, inner);
4852 /* If sum of pointer + int, restrict our maximum alignment to that
4853 imposed by the integer. If not, we can't do any better than ALIGN. */
4855 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4858 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4863 exp = TREE_OPERAND (exp, 0);
4867 /* See what we are pointing at and look at its alignment. */
4868 exp = TREE_OPERAND (exp, 0);
4869 if (TREE_CODE (exp) == FUNCTION_DECL)
4870 align = MAX (align, FUNCTION_BOUNDARY);
4871 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4872 align = MAX (align, DECL_ALIGN (exp));
4873 #ifdef CONSTANT_ALIGNMENT
4874 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4875 align = CONSTANT_ALIGNMENT (exp, align);
4877 return MIN (align, max_align);
4885 /* Return the tree node and offset if a given argument corresponds to
4886 a string constant. */
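/* For instance, an ADDR_EXPR of the STRING_CST "abc" yields that node with a
   zero offset, while the PLUS_EXPR form (the address of "abc" plus some
   expression) yields the STRING_CST with the other operand as *PTR_OFFSET.  */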
4889 string_constant (arg, ptr_offset)
4895 if (TREE_CODE (arg) == ADDR_EXPR
4896 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4898 *ptr_offset = integer_zero_node;
4899 return TREE_OPERAND (arg, 0);
4901 else if (TREE_CODE (arg) == PLUS_EXPR)
4903 tree arg0 = TREE_OPERAND (arg, 0);
4904 tree arg1 = TREE_OPERAND (arg, 1);
4909 if (TREE_CODE (arg0) == ADDR_EXPR
4910 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4913 return TREE_OPERAND (arg0, 0);
4915 else if (TREE_CODE (arg1) == ADDR_EXPR
4916 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4919 return TREE_OPERAND (arg1, 0);
4926 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4927 way, because it could contain a zero byte in the middle.
4928 TREE_STRING_LENGTH is the size of the character array, not the string.
4930 Unfortunately, string_constant can't access the values of const char
4931 arrays with initializers, so neither can we do so here. */
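/* For instance, given the address of "abcd" plus the constant offset 1, the
   code below returns size_int (strlen (ptr + 1)), i.e. 3; with a variable
   offset it can only subtract the offset from the array length, and only
   when the string has no embedded zero byte.  */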
4941 src = string_constant (src, &offset_node);
4944 max = TREE_STRING_LENGTH (src);
4945 ptr = TREE_STRING_POINTER (src);
4946 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4948 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4949 compute the offset to the following null if we don't know where to
4950 start searching for it. */
4952 for (i = 0; i < max; i++)
4955 /* We don't know the starting offset, but we do know that the string
4956 has no internal zero bytes. We can assume that the offset falls
4957 within the bounds of the string; otherwise, the programmer deserves
4958 what he gets. Subtract the offset from the length of the string, and return that. */
4960 /* This would perhaps not be valid if we were dealing with named
4961 arrays in addition to literal string constants. */
4962 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4965 /* We have a known offset into the string. Start searching there for
4966 a null character. */
4967 if (offset_node == 0)
4971 /* Did we get a long long offset? If so, punt. */
4972 if (TREE_INT_CST_HIGH (offset_node) != 0)
4974 offset = TREE_INT_CST_LOW (offset_node);
4976 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
4978 if (offset < 0 || offset > max)
4980 warning ("offset outside bounds of constant string");
4983 /* Use strlen to search for the first zero byte. Since any strings
4984 constructed with build_string will have nulls appended, we win even
4985 if we get handed something like (char[4])"abcd".
4987 Since OFFSET is our starting index into the string, no further
4988 calculation is needed. */
4989 return size_int (strlen (ptr + offset));
4992 /* Expand an expression EXP that calls a built-in function,
4993 with result going to TARGET if that's convenient
4994 (and in mode MODE if that's convenient).
4995 SUBTARGET may be used as the target for computing one of EXP's operands.
4996 IGNORE is nonzero if the value is to be ignored. */
4999 expand_builtin (exp, target, subtarget, mode, ignore)
5003 enum machine_mode mode;
5006 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5007 tree arglist = TREE_OPERAND (exp, 1);
5010 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5012 switch (DECL_FUNCTION_CODE (fndecl))
5017 /* build_function_call changes these into ABS_EXPR. */
5020 case BUILT_IN_FSQRT:
5021 /* If not optimizing, call the library function. */
5026 /* Arg could be wrong type if user redeclared this fcn wrong. */
5027 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5028 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5030 /* Stabilize and compute the argument. */
5031 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5032 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5034 exp = copy_node (exp);
5035 arglist = copy_node (arglist);
5036 TREE_OPERAND (exp, 1) = arglist;
5037 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5039 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5041 /* Make a suitable register to place result in. */
5042 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5047 /* Compute sqrt into TARGET.
5048 Set TARGET to wherever the result comes back. */
5049 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5050 sqrt_optab, op0, target, 0);
5052 /* If we were unable to expand via the builtin, stop the
5053 sequence (without outputting the insns) and break, causing
5054 a call to the library function. */
5061 /* Check the results by default. But if flag_fast_math is turned on,
5062 then assume sqrt will always be called with valid arguments. */
5064 if (! flag_fast_math)
5066 /* Don't define the sqrt instructions
5067 if your machine is not IEEE. */
5068 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5071 lab1 = gen_label_rtx ();
5073 /* Test the result; if it is NaN, set errno=EDOM because
5074 the argument was not in the domain. */
5075 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5076 emit_jump_insn (gen_beq (lab1));
5080 #ifdef GEN_ERRNO_RTX
5081 rtx errno_rtx = GEN_ERRNO_RTX;
5084 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5087 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5090 /* We can't set errno=EDOM directly; let the library call do it.
5091 Pop the arguments right away in case the call gets deleted. */
5093 expand_call (exp, target, 0);
5100 /* Output the entire sequence. */
5101 insns = get_insns ();
5107 case BUILT_IN_SAVEREGS:
5108 /* Don't do __builtin_saveregs more than once in a function.
5109 Save the result of the first call and reuse it. */
5110 if (saveregs_value != 0)
5111 return saveregs_value;
5113 /* When this function is called, it means that registers must be
5114 saved on entry to this function. So we migrate the
5115 call to the first insn of this function. */
5118 rtx valreg, saved_valreg;
5120 /* Now really call the function. `expand_call' does not call
5121 expand_builtin, so there is no danger of infinite recursion here. */
5124 #ifdef EXPAND_BUILTIN_SAVEREGS
5125 /* Do whatever the machine needs done in this case. */
5126 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5128 /* The register where the function returns its value
5129 is likely to have something else in it, such as an argument.
5130 So preserve that register around the call. */
5131 if (value_mode != VOIDmode)
5133 valreg = hard_libcall_value (value_mode);
5134 saved_valreg = gen_reg_rtx (value_mode);
5135 emit_move_insn (saved_valreg, valreg);
5138 /* Generate the call, putting the value in a pseudo. */
5139 temp = expand_call (exp, target, ignore);
5141 if (value_mode != VOIDmode)
5142 emit_move_insn (valreg, saved_valreg);
5148 saveregs_value = temp;
5150 /* This won't work inside a SEQUENCE--it really has to be
5151 at the start of the function. */
5152 if (in_sequence_p ())
5154 /* Better to do this than to crash. */
5155 error ("`va_start' used within `({...})'");
5159 /* Put the sequence after the NOTE that starts the function. */
5160 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5164 /* __builtin_args_info (N) returns word N of the arg space info
5165 for the current function. The number and meanings of words
5166 are controlled by the definition of CUMULATIVE_ARGS. */
5167 case BUILT_IN_ARGS_INFO:
5169 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5171 int *word_ptr = (int *) &current_function_args_info;
5172 tree type, elts, result;
5174 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5175 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5176 __FILE__, __LINE__);
5180 tree arg = TREE_VALUE (arglist);
5181 if (TREE_CODE (arg) != INTEGER_CST)
5182 error ("argument of __builtin_args_info must be constant");
5185 int wordnum = TREE_INT_CST_LOW (arg);
5187 if (wordnum < 0 || wordnum >= nwords)
5188 error ("argument of __builtin_args_info out of range");
5190 return GEN_INT (word_ptr[wordnum]);
5194 error ("missing argument in __builtin_args_info");
5199 for (i = 0; i < nwords; i++)
5200 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
5202 type = build_array_type (integer_type_node,
5203 build_index_type (build_int_2 (nwords, 0)));
5204 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5205 TREE_CONSTANT (result) = 1;
5206 TREE_STATIC (result) = 1;
5207 result = build (INDIRECT_REF, build_pointer_type (type), result);
5208 TREE_CONSTANT (result) = 1;
5209 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5213 /* Return the address of the first anonymous stack arg. */
5214 case BUILT_IN_NEXT_ARG:
5216 tree fntype = TREE_TYPE (current_function_decl);
5217 if (!(TYPE_ARG_TYPES (fntype) != 0
5218 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5219 != void_type_node)))
5221 error ("`va_start' used in function with fixed args");
5226 return expand_binop (Pmode, add_optab,
5227 current_function_internal_arg_pointer,
5228 current_function_arg_offset_rtx,
5229 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5231 case BUILT_IN_CLASSIFY_TYPE:
5234 tree type = TREE_TYPE (TREE_VALUE (arglist));
5235 enum tree_code code = TREE_CODE (type);
5236 if (code == VOID_TYPE)
5237 return GEN_INT (void_type_class);
5238 if (code == INTEGER_TYPE)
5239 return GEN_INT (integer_type_class);
5240 if (code == CHAR_TYPE)
5241 return GEN_INT (char_type_class);
5242 if (code == ENUMERAL_TYPE)
5243 return GEN_INT (enumeral_type_class);
5244 if (code == BOOLEAN_TYPE)
5245 return GEN_INT (boolean_type_class);
5246 if (code == POINTER_TYPE)
5247 return GEN_INT (pointer_type_class);
5248 if (code == REFERENCE_TYPE)
5249 return GEN_INT (reference_type_class);
5250 if (code == OFFSET_TYPE)
5251 return GEN_INT (offset_type_class);
5252 if (code == REAL_TYPE)
5253 return GEN_INT (real_type_class);
5254 if (code == COMPLEX_TYPE)
5255 return GEN_INT (complex_type_class);
5256 if (code == FUNCTION_TYPE)
5257 return GEN_INT (function_type_class);
5258 if (code == METHOD_TYPE)
5259 return GEN_INT (method_type_class);
5260 if (code == RECORD_TYPE)
5261 return GEN_INT (record_type_class);
5262 if (code == UNION_TYPE)
5263 return GEN_INT (union_type_class);
5264 if (code == ARRAY_TYPE)
5265 return GEN_INT (array_type_class);
5266 if (code == STRING_TYPE)
5267 return GEN_INT (string_type_class);
5268 if (code == SET_TYPE)
5269 return GEN_INT (set_type_class);
5270 if (code == FILE_TYPE)
5271 return GEN_INT (file_type_class);
5272 if (code == LANG_TYPE)
5273 return GEN_INT (lang_type_class);
5275 return GEN_INT (no_type_class);
5277 case BUILT_IN_CONSTANT_P:
5281 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5282 ? const1_rtx : const0_rtx);
5284 case BUILT_IN_FRAME_ADDRESS:
5285 /* The argument must be a nonnegative integer constant.
5286 It counts the number of frames to scan up the stack.
5287 The value is the address of that frame. */
5288 case BUILT_IN_RETURN_ADDRESS:
5289 /* The argument must be a nonnegative integer constant.
5290 It counts the number of frames to scan up the stack.
5291 The value is the return address saved in that frame. */
5293 /* Warning about missing arg was already issued. */
5295 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5297 error ("invalid arg to __builtin_return_address");
5300 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5302 error ("invalid arg to __builtin_return_address");
5307 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5308 rtx tem = frame_pointer_rtx;
5311 /* Scan back COUNT frames to the specified frame. */
5312 for (i = 0; i < count; i++)
5314 /* Assume the dynamic chain pointer is in the word that
5315 the frame address points to, unless otherwise specified. */
5316 #ifdef DYNAMIC_CHAIN_ADDRESS
5317 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5319 tem = memory_address (Pmode, tem);
5320 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5323 /* For __builtin_frame_address, return what we've got. */
5324 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5327 /* For __builtin_return_address,
5328 get the return address from that frame. */
5329 #ifdef RETURN_ADDR_RTX
5330 return RETURN_ADDR_RTX (count, tem);
5332 tem = memory_address (Pmode,
5333 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5334 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5338 case BUILT_IN_ALLOCA:
5340 /* Arg could be non-integer if user redeclared this fcn wrong. */
5341 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5343 current_function_calls_alloca = 1;
5344 /* Compute the argument. */
5345 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5347 /* Allocate the desired space. */
5348 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5350 /* Record the new stack level for nonlocal gotos. */
5351 if (nonlocal_goto_handler_slot != 0)
5352 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5356 /* If not optimizing, call the library function. */
5361 /* Arg could be non-integer if user redeclared this fcn wrong. */
5362 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5365 /* Compute the argument. */
5366 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5367 /* Compute ffs, into TARGET if possible.
5368 Set TARGET to wherever the result comes back. */
5369 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5370 ffs_optab, op0, target, 1);
5375 case BUILT_IN_STRLEN:
5376 /* If not optimizing, call the library function. */
5381 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5382 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5386 tree src = TREE_VALUE (arglist);
5387 tree len = c_strlen (src);
5390 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5392 rtx result, src_rtx, char_rtx;
5393 enum machine_mode insn_mode = value_mode, char_mode;
5394 enum insn_code icode;
5396 /* If the length is known, just return it. */
5398 return expand_expr (len, target, mode, 0);
5400 /* If SRC is not a pointer type, don't do this operation inline. */
5404 /* Call a function if we can't compute strlen in the right mode. */
5406 while (insn_mode != VOIDmode)
5408 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5409 if (icode != CODE_FOR_nothing)
5412 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5414 if (insn_mode == VOIDmode)
5417 /* Make a place to write the result of the instruction. */
5420 && GET_CODE (result) == REG
5421 && GET_MODE (result) == insn_mode
5422 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5423 result = gen_reg_rtx (insn_mode);
5425 /* Make sure the operands are acceptable to the predicates. */
5427 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5428 result = gen_reg_rtx (insn_mode);
5430 src_rtx = memory_address (BLKmode,
5431 expand_expr (src, NULL_RTX, Pmode,
5433 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5434 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5436 char_rtx = const0_rtx;
5437 char_mode = insn_operand_mode[(int)icode][2];
5438 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5439 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5441 emit_insn (GEN_FCN (icode) (result,
5442 gen_rtx (MEM, BLKmode, src_rtx),
5443 char_rtx, GEN_INT (align)));
5445 /* Return the value in the proper mode for this function. */
5446 if (GET_MODE (result) == value_mode)
5448 else if (target != 0)
5450 convert_move (target, result, 0);
5454 return convert_to_mode (value_mode, result, 0);
5457 case BUILT_IN_STRCPY:
5458 /* If not optimizing, call the library function. */
5463 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5464 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5465 || TREE_CHAIN (arglist) == 0
5466 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5470 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5475 len = size_binop (PLUS_EXPR, len, integer_one_node);
5477 chainon (arglist, build_tree_list (NULL_TREE, len));
5481 case BUILT_IN_MEMCPY:
5482 /* If not optimizing, call the library function. */
5487 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5488 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5489 || TREE_CHAIN (arglist) == 0
5490 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5491 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5492 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5496 tree dest = TREE_VALUE (arglist);
5497 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5498 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5501 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5503 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5506 /* If either SRC or DEST is not a pointer type, don't do
5507 this operation in-line. */
5508 if (src_align == 0 || dest_align == 0)
5510 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5511 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5515 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5517 /* Copy word part most expediently. */
5518 emit_block_move (gen_rtx (MEM, BLKmode,
5519 memory_address (BLKmode, dest_rtx)),
5520 gen_rtx (MEM, BLKmode,
5521 memory_address (BLKmode,
5522 expand_expr (src, NULL_RTX,
5525 expand_expr (len, NULL_RTX, VOIDmode, 0),
5526 MIN (src_align, dest_align));
5530 /* These comparison functions need an instruction that returns an actual
5531 index. An ordinary compare that just sets the condition codes is not enough. */
5533 #ifdef HAVE_cmpstrsi
5534 case BUILT_IN_STRCMP:
5535 /* If not optimizing, call the library function. */
5540 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5541 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5542 || TREE_CHAIN (arglist) == 0
5543 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5545 else if (!HAVE_cmpstrsi)
5548 tree arg1 = TREE_VALUE (arglist);
5549 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5553 len = c_strlen (arg1);
5555 len = size_binop (PLUS_EXPR, integer_one_node, len);
5556 len2 = c_strlen (arg2);
5558 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5560 /* If we don't have a constant length for the first, use the length
5561 of the second, if we know it. We don't require a constant for
5562 this case; some cost analysis could be done if both are available
5563 but neither is constant. For now, assume they're equally cheap.
5565 If both strings have constant lengths, use the smaller. This
5566 could arise if optimization results in strcpy being called with
5567 two fixed strings, or if the code was machine-generated. We should
5568 add some code to the `memcmp' handler below to deal with such
5569 situations, someday. */
5570 if (!len || TREE_CODE (len) != INTEGER_CST)
5577 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5579 if (tree_int_cst_lt (len2, len))
5583 chainon (arglist, build_tree_list (NULL_TREE, len));
5587 case BUILT_IN_MEMCMP:
5588 /* If not optimizing, call the library function. */
5593 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5594 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5595 || TREE_CHAIN (arglist) == 0
5596 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5597 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5598 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5600 else if (!HAVE_cmpstrsi)
5603 tree arg1 = TREE_VALUE (arglist);
5604 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5605 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5609 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5611 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5612 enum machine_mode insn_mode
5613 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5615 /* If we don't have POINTER_TYPE, call the function. */
5616 if (arg1_align == 0 || arg2_align == 0)
5618 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5619 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5623 /* Make a place to write the result of the instruction. */
5626 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5627 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5628 result = gen_reg_rtx (insn_mode);
5630 emit_insn (gen_cmpstrsi (result,
5631 gen_rtx (MEM, BLKmode,
5632 expand_expr (arg1, NULL_RTX, Pmode,
5634 gen_rtx (MEM, BLKmode,
5635 expand_expr (arg2, NULL_RTX, Pmode,
5637 expand_expr (len, NULL_RTX, VOIDmode, 0),
5638 GEN_INT (MIN (arg1_align, arg2_align))));
5640 /* Return the value in the proper mode for this function. */
5641 mode = TYPE_MODE (TREE_TYPE (exp));
5642 if (GET_MODE (result) == mode)
5644 else if (target != 0)
5646 convert_move (target, result, 0);
5650 return convert_to_mode (mode, result, 0);
5653 case BUILT_IN_STRCMP:
5654 case BUILT_IN_MEMCMP:
5658 default: /* just do library call, if unknown builtin */
5659 error ("built-in function %s not currently supported",
5660 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5663 /* The switch statement above can drop through to cause the function
5664 to be called normally. */
5666 return expand_call (exp, target, ignore);
5669 /* Expand code for a post- or pre- increment or decrement
5670 and return the RTX for the result.
5671 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
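/* For instance, for `i++' whose value is used, POST is 1 and the rtx
   returned holds the value of I before the increment; for `++i', or for an
   `i++' whose value is ignored, POST is 0 and the new value is returned.  */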
5674 expand_increment (exp, post)
5678 register rtx op0, op1;
5679 register rtx temp, value;
5680 register tree incremented = TREE_OPERAND (exp, 0);
5681 optab this_optab = add_optab;
5683 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5684 int op0_is_copy = 0;
5686 /* Stabilize any component ref that might need to be
5687 evaluated more than once below. */
5688 if (TREE_CODE (incremented) == BIT_FIELD_REF
5689 || (TREE_CODE (incremented) == COMPONENT_REF
5690 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5691 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5692 incremented = stabilize_reference (incremented);
5694 /* Compute the operands as RTX.
5695 Note whether OP0 is the actual lvalue or a copy of it:
5696 I believe it is a copy iff it is a register or subreg
5697 and insns were generated in computing it. */
5698 temp = get_last_insn ();
5699 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
5700 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5701 && temp != get_last_insn ());
5702 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5704 /* Decide whether incrementing or decrementing. */
5705 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5706 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5707 this_optab = sub_optab;
5709 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5710 then we cannot just increment OP0. We must
5711 therefore contrive to increment the original value.
5712 Then we can return OP0 since it is a copy of the old value. */
5715 /* This is the easiest way to increment the value wherever it is.
5716 Problems with multiple evaluation of INCREMENTED
5717 are prevented because either (1) it is a component_ref,
5718 in which case it was stabilized above, or (2) it is an array_ref
5719 with constant index in an array in a register, which is
5720 safe to reevaluate. */
5721 tree newexp = build ((this_optab == add_optab
5722 ? PLUS_EXPR : MINUS_EXPR),
5725 TREE_OPERAND (exp, 1));
5726 temp = expand_assignment (incremented, newexp, ! post, 0);
5727 return post ? op0 : temp;
5730 /* Convert decrement by a constant into a negative increment. */
5731 if (this_optab == sub_optab
5732 && GET_CODE (op1) == CONST_INT)
5734 op1 = GEN_INT (- INTVAL (op1));
5735 this_optab = add_optab;
5740 /* We have a true reference to the value in OP0.
5741 If there is an insn to add or subtract in this mode, queue it. */
5743 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
5744 op0 = stabilize (op0);
5747 icode = (int) this_optab->handlers[(int) mode].insn_code;
5748 if (icode != (int) CODE_FOR_nothing
5749 /* Make sure that OP0 is valid for operands 0 and 1
5750 of the insn we want to queue. */
5751 && (*insn_operand_predicate[icode][0]) (op0, mode)
5752 && (*insn_operand_predicate[icode][1]) (op0, mode))
5754 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5755 op1 = force_reg (mode, op1);
5757 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5761 /* Preincrement, or we can't increment with one simple insn. */
5763 /* Save a copy of the value before inc or dec, to return it later. */
5764 temp = value = copy_to_reg (op0);
5766 /* Arrange to return the incremented value. */
5767 /* Copy the rtx because expand_binop will protect from the queue,
5768 and the results of that would be invalid for us to return
5769 if our caller does emit_queue before using our result. */
5770 temp = copy_rtx (value = op0);
5772 /* Increment however we can. */
5773 op1 = expand_binop (mode, this_optab, value, op1, op0,
5774 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5775 /* Make sure the value is stored into OP0. */
5777 emit_move_insn (op0, op1);
5782 /* Expand all function calls contained within EXP, innermost ones first.
5783 But don't look within expressions that have sequence points.
5784 For each CALL_EXPR, record the rtx for its value
5785 in the CALL_EXPR_RTL field. */
5788 preexpand_calls (exp)
5791 register int nops, i;
5792 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5794 if (! do_preexpand_calls)
5797 /* Only expressions and references can contain calls. */
5799 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5802 switch (TREE_CODE (exp))
5805 /* Do nothing if already expanded. */
5806 if (CALL_EXPR_RTL (exp) != 0)
5809 /* Do nothing to built-in functions. */
5810 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5811 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5812 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5813 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
5818 case TRUTH_ANDIF_EXPR:
5819 case TRUTH_ORIF_EXPR:
5820 /* If we find one of these, then we can be sure
5821 the adjust will be done for it (since it makes jumps).
5822 Do it now, so that if this is inside an argument
5823 of a function, we don't get the stack adjustment
5824 after some other args have already been pushed. */
5825 do_pending_stack_adjust ();
5830 case WITH_CLEANUP_EXPR:
5834 if (SAVE_EXPR_RTL (exp) != 0)
5838 nops = tree_code_length[(int) TREE_CODE (exp)];
5839 for (i = 0; i < nops; i++)
5840 if (TREE_OPERAND (exp, i) != 0)
5842 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5843 if (type == 'e' || type == '<' || type == '1' || type == '2'
5845 preexpand_calls (TREE_OPERAND (exp, i));
5849 /* At the start of a function, record that we have no previously-pushed
5850 arguments waiting to be popped. */
5853 init_pending_stack_adjust ()
5855 pending_stack_adjust = 0;
5858 /* When exiting from function, if safe, clear out any pending stack adjust
5859 so the adjustment won't get done. */
5862 clear_pending_stack_adjust ()
5864 #ifdef EXIT_IGNORE_STACK
5865 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5866 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
5867 && ! flag_inline_functions)
5868 pending_stack_adjust = 0;
5872 /* Pop any previously-pushed arguments that have not been popped yet. */
5875 do_pending_stack_adjust ()
5877 if (inhibit_defer_pop == 0)
5879 if (pending_stack_adjust != 0)
5880 adjust_stack (GEN_INT (pending_stack_adjust));
5881 pending_stack_adjust = 0;
5885 /* Expand all cleanups up to OLD_CLEANUPS.
5886 Needed here, and also for language-dependent calls. */
5889 expand_cleanups_to (old_cleanups)
5892 while (cleanups_this_call != old_cleanups)
5894 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
5895 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5899 /* Expand conditional expressions. */
5901 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5902 LABEL is an rtx of code CODE_LABEL, in this function and all the
5906 jumpifnot (exp, label)
5910 do_jump (exp, label, NULL_RTX);
5913 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5920 do_jump (exp, NULL_RTX, label);
5923 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5924 the result is zero, or IF_TRUE_LABEL if the result is one.
5925 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5926 meaning fall through in that case.
5928 do_jump always does any pending stack adjust except when it does not
5929 actually perform a jump. An example where there is no jump
5930 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5932 This function is responsible for optimizing cases such as
5933 &&, || and comparison operators in EXP. */
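/* For instance, for `a && b' (TRUTH_ANDIF_EXPR) with only IF_FALSE_LABEL
   given, the code below jumps to that label as soon as A is found false,
   so B is never evaluated on that path.  */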
5936 do_jump (exp, if_false_label, if_true_label)
5938 rtx if_false_label, if_true_label;
5940 register enum tree_code code = TREE_CODE (exp);
5941 /* Some cases need to create a label to jump to
5942 in order to properly fall through.
5943 These cases set DROP_THROUGH_LABEL nonzero. */
5944 rtx drop_through_label = 0;
5958 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5964 /* This is not true with #pragma weak */
5966 /* The address of something can never be zero. */
5968 emit_jump (if_true_label);
5973 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
5974 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
5975 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
5978 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
5980 if ((TYPE_PRECISION (TREE_TYPE (exp))
5981 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5983 case NON_LVALUE_EXPR:
5984 case REFERENCE_EXPR:
5989 /* These cannot change zero->non-zero or vice versa. */
5990 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5994 /* This is never less insns than evaluating the PLUS_EXPR followed by
5995 a test and can be longer if the test is eliminated. */
5997 /* Reduce to minus. */
5998 exp = build (MINUS_EXPR, TREE_TYPE (exp),
5999 TREE_OPERAND (exp, 0),
6000 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6001 TREE_OPERAND (exp, 1))));
6002 /* Process as MINUS. */
6006 /* Non-zero iff operands of minus differ. */
6007 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6008 TREE_OPERAND (exp, 0),
6009 TREE_OPERAND (exp, 1)),
6014 /* If we are AND'ing with a small constant, do this comparison in the
6015 smallest type that fits. If the machine doesn't have comparisons
6016 that small, it will be converted back to the wider comparison.
6017 This helps if we are testing the sign bit of a narrower object.
6018 combine can't do this for us because it can't know whether a
6019 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
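/* For instance, a jump on `x & 0x80' can be narrowed to a QImode test of
   (unsigned char) (x & 0x80), provided the machine has a QImode compare,
   so only the low byte need be fetched.  */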
6021 if (! SLOW_BYTE_ACCESS
6022 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6023 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6024 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6025 && (type = type_for_size (i + 1, 1)) != 0
6026 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6027 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6028 != CODE_FOR_nothing))
6030 do_jump (convert (type, exp), if_false_label, if_true_label);
6035 case TRUTH_NOT_EXPR:
6036 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6039 case TRUTH_ANDIF_EXPR:
6040 if (if_false_label == 0)
6041 if_false_label = drop_through_label = gen_label_rtx ();
6042 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6043 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6046 case TRUTH_ORIF_EXPR:
6047 if (if_true_label == 0)
6048 if_true_label = drop_through_label = gen_label_rtx ();
6049 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6050 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6054 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6057 do_pending_stack_adjust ();
6058 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6065 int bitsize, bitpos, unsignedp;
6066 enum machine_mode mode;
6071 /* Get description of this reference. We don't actually care
6072 about the underlying object here. */
6073 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6074 &mode, &unsignedp, &volatilep);
6076 type = type_for_size (bitsize, unsignedp);
6077 if (! SLOW_BYTE_ACCESS
6078 && type != 0 && bitsize >= 0
6079 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6080 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6081 != CODE_FOR_nothing))
6083 do_jump (convert (type, exp), if_false_label, if_true_label);
6090 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6091 if (integer_onep (TREE_OPERAND (exp, 1))
6092 && integer_zerop (TREE_OPERAND (exp, 2)))
6093 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6095 else if (integer_zerop (TREE_OPERAND (exp, 1))
6096 && integer_onep (TREE_OPERAND (exp, 2)))
6097 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6101 register rtx label1 = gen_label_rtx ();
6102 drop_through_label = gen_label_rtx ();
6103 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6104 /* Now the THEN-expression. */
6105 do_jump (TREE_OPERAND (exp, 1),
6106 if_false_label ? if_false_label : drop_through_label,
6107 if_true_label ? if_true_label : drop_through_label);
6108 /* In case the do_jump just above never jumps. */
6109 do_pending_stack_adjust ();
6110 emit_label (label1);
6111 /* Now the ELSE-expression. */
6112 do_jump (TREE_OPERAND (exp, 2),
6113 if_false_label ? if_false_label : drop_through_label,
6114 if_true_label ? if_true_label : drop_through_label);
6119 if (integer_zerop (TREE_OPERAND (exp, 1)))
6120 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6121 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6124 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6125 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6127 comparison = compare (exp, EQ, EQ);
6131 if (integer_zerop (TREE_OPERAND (exp, 1)))
6132 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6133 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6136 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6137 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6139 comparison = compare (exp, NE, NE);
6143 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6145 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6146 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6148 comparison = compare (exp, LT, LTU);
6152 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6154 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6155 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6157 comparison = compare (exp, LE, LEU);
6161 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6163 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6164 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6166 comparison = compare (exp, GT, GTU);
6170 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6172 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6173 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6175 comparison = compare (exp, GE, GEU);
6180 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6182 /* This is not needed any more and causes poor code since it causes
6183 comparisons and tests from non-SI objects to have different code on some machines.  */
6185 /* Copy to register to avoid generating bad insns by cse
6186 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6187 if (!cse_not_expected && GET_CODE (temp) == MEM)
6188 temp = copy_to_reg (temp);
6190 do_pending_stack_adjust ();
6191 if (GET_CODE (temp) == CONST_INT)
6192 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6193 else if (GET_CODE (temp) == LABEL_REF)
6194 comparison = const_true_rtx;
6195 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6196 && !can_compare_p (GET_MODE (temp)))
6197 /* Note swapping the labels gives us not-equal. */
6198 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6199 else if (GET_MODE (temp) != VOIDmode)
6200 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6201 NE, 1, GET_MODE (temp), NULL_RTX, 0);
6206 /* Do any postincrements in the expression that was tested. */
6209 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6210 straight into a conditional jump instruction as the jump condition.
6211 Otherwise, all the work has been done already. */
6213 if (comparison == const_true_rtx)
6216 emit_jump (if_true_label);
6218 else if (comparison == const0_rtx)
6221 emit_jump (if_false_label);
6223 else if (comparison)
6224 do_jump_for_compare (comparison, if_false_label, if_true_label);
6228 if (drop_through_label)
6230 /* If do_jump produces code that might be jumped around,
6231 do any stack adjusts from that code, before the place
6232 where control merges in. */
6233 do_pending_stack_adjust ();
6234 emit_label (drop_through_label);
6238 /* Given a comparison expression EXP for values too wide to be compared
6239 with one insn, test the comparison and jump to the appropriate label.
6240 The code of EXP is ignored; we always test GT if SWAP is 0,
6241 and LT if SWAP is 1. */
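/* Added example, assuming 32-bit words: a DImode comparison is done as two
   SImode compares, high-order word first.  If the high words differ, the
   result is decided there (strictly greater jumps to the true label, any
   other difference to the false label); the low words are examined only
   when the high words are equal, and they are always compared unsigned,
   even for a signed comparison.  */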
6244 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6247 rtx if_false_label, if_true_label;
6249 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6250 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6251 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6252 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6253 rtx drop_through_label = 0;
6254 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6257 if (! if_true_label || ! if_false_label)
6258 drop_through_label = gen_label_rtx ();
6259 if (! if_true_label)
6260 if_true_label = drop_through_label;
6261 if (! if_false_label)
6262 if_false_label = drop_through_label;
6264 /* Compare a word at a time, high order first. */
6265 for (i = 0; i < nwords; i++)
6268 rtx op0_word, op1_word;
6270 if (WORDS_BIG_ENDIAN)
6272 op0_word = operand_subword_force (op0, i, mode);
6273 op1_word = operand_subword_force (op1, i, mode);
6277 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6278 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6281 /* All but high-order word must be compared as unsigned. */
6282 comp = compare_from_rtx (op0_word, op1_word,
6283 (unsignedp || i > 0) ? GTU : GT,
6284 unsignedp, word_mode, NULL_RTX, 0);
6285 if (comp == const_true_rtx)
6286 emit_jump (if_true_label);
6287 else if (comp != const0_rtx)
6288 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6290 /* Consider lower words only if these are equal. */
6291 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6293 if (comp == const_true_rtx)
6294 emit_jump (if_false_label);
6295 else if (comp != const0_rtx)
6296 do_jump_for_compare (comp, NULL_RTX, if_false_label);
6300 emit_jump (if_false_label);
6301 if (drop_through_label)
6302 emit_label (drop_through_label);
6305 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6306 with one insn, test the comparison and jump to the appropriate label. */
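/* Added note: equality of multiword values is tested one word at a time;
   any mismatching pair of words branches to IF_FALSE_LABEL, and only when
   every word has matched does control reach the unconditional jump to
   IF_TRUE_LABEL emitted after the loop.  */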
6309 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6311 rtx if_false_label, if_true_label;
6313 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6314 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6315 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6316 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6318 rtx drop_through_label = 0;
6320 if (! if_false_label)
6321 drop_through_label = if_false_label = gen_label_rtx ();
6323 for (i = 0; i < nwords; i++)
6325 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6326 operand_subword_force (op1, i, mode),
6327 EQ, 0, word_mode, NULL_RTX, 0);
6328 if (comp == const_true_rtx)
6329 emit_jump (if_false_label);
6330 else if (comp != const0_rtx)
6331 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6335 emit_jump (if_true_label);
6336 if (drop_through_label)
6337 emit_label (drop_through_label);
6340 /* Jump according to whether OP0 is 0.
6341 We assume that OP0 has an integer mode that is too wide
6342 for the available compare insns. */
6345 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6347 rtx if_false_label, if_true_label;
6349 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6351 rtx drop_through_label = 0;
6353 if (! if_false_label)
6354 drop_through_label = if_false_label = gen_label_rtx ();
6356 for (i = 0; i < nwords; i++)
6358 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6360 const0_rtx, EQ, 0, word_mode, NULL_RTX, 0);
6361 if (comp == const_true_rtx)
6362 emit_jump (if_false_label);
6363 else if (comp != const0_rtx)
6364 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6368 emit_jump (if_true_label);
6369 if (drop_through_label)
6370 emit_label (drop_through_label);
6373 /* Given a comparison expression in rtl form, output conditional branches to
6374 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6377 do_jump_for_compare (comparison, if_false_label, if_true_label)
6378 rtx comparison, if_false_label, if_true_label;
6382 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6383 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6388 emit_jump (if_false_label);
6390 else if (if_false_label)
6393 rtx prev = PREV_INSN (get_last_insn ());
6396 /* Output the branch with the opposite condition. Then try to invert
6397 what is generated. If more than one insn is a branch, or if the
6398 branch is not the last insn written, abort. If we can't invert
6399 the branch, make a true label, redirect this jump to that,
6400 emit a jump to the false label and define the true label. */
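/* Added illustration, using beq/bne as stand-ins for whatever branch insns
   the target provides: with condition EQ and only IF_FALSE_LABEL given, we
   emit `beq IF_FALSE_LABEL' and then invert it in place to
   `bne IF_FALSE_LABEL'.  If the inversion fails, the branch is redirected
   to a fresh label instead:
	beq Ltrue
	jmp IF_FALSE_LABEL
     Ltrue:
   which has the same effect.  */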
6402 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6403 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6407 /* Here we get the insn before what was just emitted.
6408 On some machines, emitting the branch can discard
6409 the previous compare insn and emit a replacement. */
6411 /* If there's only one preceding insn... */
6412 insn = get_insns ();
6414 insn = NEXT_INSN (prev);
6416 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6417 if (GET_CODE (insn) == JUMP_INSN)
6424 if (branch != get_last_insn ())
6427 if (! invert_jump (branch, if_false_label))
6429 if_true_label = gen_label_rtx ();
6430 redirect_jump (branch, if_true_label);
6431 emit_jump (if_false_label);
6432 emit_label (if_true_label);
6437 /* Generate code for a comparison expression EXP
6438 (including code to compute the values to be compared)
6439 and set (CC0) according to the result.
6440 SIGNED_CODE should be the rtx operation for this comparison for
6441 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6443 We force a stack adjustment unless there are currently
6444 things pushed on the stack that aren't yet used. */
6447 compare (exp, signed_code, unsigned_code)
6449 enum rtx_code signed_code, unsigned_code;
6452 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6454 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6455 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6456 register enum machine_mode mode = TYPE_MODE (type);
6457 int unsignedp = TREE_UNSIGNED (type);
6458 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6460 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6462 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6463 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6466 /* Like compare but expects the values to compare as two rtx's.
6467 The decision as to signed or unsigned comparison must be made by the caller.
6469 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.
6472 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6473 size of MODE should be used. */
6476 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6477 register rtx op0, op1;
6480 enum machine_mode mode;
6484 /* If one operand is constant, make it the second one. */
6486 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6491 code = swap_condition (code);
6496 op0 = force_not_mem (op0);
6497 op1 = force_not_mem (op1);
6500 do_pending_stack_adjust ();
6502 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6503 return simplify_relational_operation (code, mode, op0, op1);
6505 /* If this is a signed equality comparison, we can do it as an
6506 unsigned comparison since zero-extension is cheaper than sign
6507 extension and comparisons with zero are done as unsigned. This is
6508 the case even on machines that can do fast sign extension, since
6509 zero-extension is easier to combine with other operations than
6510 sign-extension is. If we are comparing against a constant, we must
6511 convert it to what it would look like unsigned. */
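/* Added example: when comparing a signed QImode value for equality with the
   constant -1, the constant is masked with GET_MODE_MASK (QImode), so -1
   becomes 0xff and the unsigned comparison tests exactly the same bits.  */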
6512 if ((code == EQ || code == NE) && ! unsignedp
6513 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6515 if (GET_CODE (op1) == CONST_INT
6516 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6517 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6521 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6523 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6526 /* Generate code to calculate EXP using a store-flag instruction
6527 and return an rtx for the result. EXP is either a comparison
6528 or a TRUTH_NOT_EXPR whose operand is a comparison.
6530 If TARGET is nonzero, store the result there if convenient.
6532 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
6535 Return zero if there is no suitable set-flag instruction
6536 available on this machine.
6538 Once expand_expr has been called on the arguments of the comparison,
6539 we are committed to doing the store flag, since it is not safe to
6540 re-evaluate the expression. We emit the store-flag insn by calling
6541 emit_store_flag, but only expand the arguments if we have a reason
6542 to believe that emit_store_flag will be successful. If we think that
6543 it will, but it isn't, we have to simulate the store-flag with a
6544 set/jump/set sequence. */
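/* Added note: a successful store-flag expansion turns, e.g., `flag = (a < b);'
   into a compare followed by a single scc-style insn that deposits 0 or 1
   in the target (normalized by the caller below), instead of a compare,
   a conditional jump and two moves.  */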
6547 do_store_flag (exp, target, mode, only_cheap)
6550 enum machine_mode mode;
6554 tree arg0, arg1, type;
6556 enum machine_mode operand_mode;
6560 enum insn_code icode;
6561 rtx subtarget = target;
6562 rtx result, label, pattern, jump_pat;
6564 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6565 result at the end. We can't simply invert the test since it would
6566 have already been inverted if it were valid. This case occurs for
6567 some floating-point comparisons. */
6569 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6570 invert = 1, exp = TREE_OPERAND (exp, 0);
6572 arg0 = TREE_OPERAND (exp, 0);
6573 arg1 = TREE_OPERAND (exp, 1);
6574 type = TREE_TYPE (arg0);
6575 operand_mode = TYPE_MODE (type);
6576 unsignedp = TREE_UNSIGNED (type);
6578 /* We won't bother with BLKmode store-flag operations because it would mean
6579 passing a lot of information to emit_store_flag. */
6580 if (operand_mode == BLKmode)
6586 /* Get the rtx comparison code to use. We know that EXP is a comparison
6587 operation of some type. Some comparisons against 1 and -1 can be
6588 converted to comparisons with zero. Do so here so that the tests
6589 below will be aware that we have a comparison with zero. These
6590 tests will not catch constants in the first operand, but constants
6591 are rarely passed as the first operand. */
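/* Added examples of the conversions below: a signed `x < 1' becomes
   `x <= 0', `x >= 1' becomes `x > 0', and a signed `x > -1' becomes
   `x >= 0', so the later tests need only recognize comparisons against
   zero.  */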
6593 switch (TREE_CODE (exp))
6602 if (integer_onep (arg1))
6603 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6605 code = unsignedp ? LTU : LT;
6608 if (integer_all_onesp (arg1))
6609 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6611 code = unsignedp ? LEU : LE;
6614 if (integer_all_onesp (arg1))
6615 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6617 code = unsignedp ? GTU : GT;
6620 if (integer_onep (arg1))
6621 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6623 code = unsignedp ? GEU : GE;
6629 /* Put a constant second. */
6630 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6632 tem = arg0; arg0 = arg1; arg1 = tem;
6633 code = swap_condition (code);
6636 /* If this is an equality or inequality test of a single bit, we can
6637 do this by shifting the bit being tested to the low-order bit and
6638 masking the result with the constant 1. If the condition was EQ,
6639 we xor it with 1. This does not require an scc insn and is faster
6640 than an scc insn even if we have it. */
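/* Added example: for `(x & 8) != 0' the bit number is 3, so we emit
   `(x >> 3) & 1' using a logical shift; for the EQ form `(x & 8) == 0'
   the result is then xored with 1.  The final `& 1' is omitted when the
   tested bit is already the most significant bit, since the logical shift
   leaves only that bit.  */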
6642 if ((code == NE || code == EQ)
6643 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6644 && integer_pow2p (TREE_OPERAND (arg0, 1))
6645 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
6647 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6648 NULL_RTX, VOIDmode, 0)));
6650 if (subtarget == 0 || GET_CODE (subtarget) != REG
6651 || GET_MODE (subtarget) != operand_mode
6652 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6655 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6658 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6659 size_int (bitnum), target, 1);
6661 if (GET_MODE (op0) != mode)
6662 op0 = convert_to_mode (mode, op0, 1);
6664 if (bitnum != TYPE_PRECISION (type) - 1)
6665 op0 = expand_and (op0, const1_rtx, target);
6667 if ((code == EQ && ! invert) || (code == NE && invert))
6668 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6674 /* Now see if we are likely to be able to do this. Return if not. */
6675 if (! can_compare_p (operand_mode))
6677 icode = setcc_gen_code[(int) code];
6678 if (icode == CODE_FOR_nothing
6679 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
6681 /* We can only do this if it is one of the special cases that
6682 can be handled without an scc insn. */
6683 if ((code == LT && integer_zerop (arg1))
6684 || (! only_cheap && code == GE && integer_zerop (arg1)))
6686 else if (BRANCH_COST >= 0
6687 && ! only_cheap && (code == NE || code == EQ)
6688 && TREE_CODE (type) != REAL_TYPE
6689 && ((abs_optab->handlers[(int) operand_mode].insn_code
6690 != CODE_FOR_nothing)
6691 || (ffs_optab->handlers[(int) operand_mode].insn_code
6692 != CODE_FOR_nothing)))
6698 preexpand_calls (exp);
6699 if (subtarget == 0 || GET_CODE (subtarget) != REG
6700 || GET_MODE (subtarget) != operand_mode
6701 || ! safe_from_p (subtarget, arg1))
6704 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
6705 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6708 target = gen_reg_rtx (mode);
6710 result = emit_store_flag (target, code, op0, op1, operand_mode,
6716 result = expand_binop (mode, xor_optab, result, const1_rtx,
6717 result, 0, OPTAB_LIB_WIDEN);
6721 /* If this failed, we have to do this with set/compare/jump/set code. */
6722 if (target == 0 || GET_CODE (target) != REG
6723 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
6724 target = gen_reg_rtx (GET_MODE (target));
6726 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
6727 result = compare_from_rtx (op0, op1, code, unsignedp,
6728 operand_mode, NULL_RTX, 0);
6729 if (GET_CODE (result) == CONST_INT)
6730 return (((result == const0_rtx && ! invert)
6731 || (result != const0_rtx && invert))
6732 ? const0_rtx : const1_rtx);
6734 label = gen_label_rtx ();
6735 if (bcc_gen_fctn[(int) code] == 0)
6738 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
6739 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
6745 /* Generate a tablejump instruction (used for switch statements). */
6747 #ifdef HAVE_tablejump
6749 /* INDEX is the value being switched on, with the lowest value
6750 in the table already subtracted.
6751 MODE is its expected mode (needed if INDEX is constant).
6752 RANGE is the length of the jump table.
6753 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
6755 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
6756 index value is out of range. */
6759 do_tablejump (index, mode, range, table_label, default_label)
6760 rtx index, range, table_label, default_label;
6761 enum machine_mode mode;
6763 register rtx temp, vector;
6765 /* Do an unsigned comparison (in the proper mode) between the index
6766 expression and the value which represents the length of the range.
6767 Since we just finished subtracting the lower bound of the range
6768 from the index expression, this comparison allows us to simultaneously
6769 check that the original index expression value is both greater than
6770 or equal to the minimum value of the range and less than or equal to
6771 the maximum value of the range. */
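/* Added worked example: if the case values run from 3 to 7, INDEX arrives
   here with 3 already subtracted and RANGE is 7 - 3 = 4.  An original value
   of 2 becomes (unsigned) -1 and an original value of 9 becomes 6; both
   exceed 4, so the single unsigned test below (branch to DEFAULT_LABEL when
   RANGE < INDEX) rejects values on either side of the table.  */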
6773 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
6774 emit_jump_insn (gen_bltu (default_label));
6776 /* If index is in range, it must fit in Pmode.
6777 Convert to Pmode so we can index with it. */
6779 index = convert_to_mode (Pmode, index, 1);
6781 /* If flag_force_addr were to affect this address
6782 it could interfere with the tricky assumptions made
6783 about addresses that contain label-refs,
6784 which may be valid only very near the tablejump itself. */
6785 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
6786 GET_MODE_SIZE, because this indicates how large insns are. The other
6787 uses should all be Pmode, because they are addresses. This code
6788 could fail if addresses and insns are not the same size. */
6789 index = memory_address_noforce
6791 gen_rtx (PLUS, Pmode,
6792 gen_rtx (MULT, Pmode, index,
6793 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
6794 gen_rtx (LABEL_REF, Pmode, table_label)));
6795 temp = gen_reg_rtx (CASE_VECTOR_MODE);
6796 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
6797 RTX_UNCHANGING_P (vector) = 1;
6798 convert_move (temp, vector, 0);
6800 emit_jump_insn (gen_tablejump (temp, table_label));
6802 #ifndef CASE_VECTOR_PC_RELATIVE
6803 /* If we are generating PIC code or if the table is PC-relative, the
6804 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
6810 #endif /* HAVE_tablejump */