1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
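/* Illustrative note (not part of the original source): CEIL rounds a
   division up rather than down.  */
#if 0
int example_nwords = CEIL (10, 4);	/* 3 four-byte words hold 10 bytes; 10/4 would give 2.  */
#endif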
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
40 #ifdef STACK_GROWS_DOWNWARD
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
50 #define STACK_PUSH_CODE PRE_INC
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
int cse_not_expected;
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs returned. */
87 static rtx saveregs_value;
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
112 /* MOVE_RATIO is the number of move instructions that is better than a block move. */
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
119 /* A value of around 6 would minimize code size; infinity would minimize execution time. */
121 #define MOVE_RATIO 15
125 /* This array records the insn_code of insns to perform block moves. */
126 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
128 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
130 #ifndef SLOW_UNALIGNED_ACCESS
131 #define SLOW_UNALIGNED_ACCESS 0
134 /* This is run once per compilation to set up which modes can be used
135 directly in memory and to initialize the block move optab. */
141 enum machine_mode mode;
142 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
145 insn = emit_insn (gen_rtx (SET, 0, 0));
146 pat = PATTERN (insn);
148 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
149 mode = (enum machine_mode) ((int) mode + 1))
155 direct_load[(int) mode] = direct_store[(int) mode] = 0;
156 PUT_MODE (mem, mode);
158 /* See if there is some register that can be used in this mode and
159 directly loaded or stored from memory. */
161 if (mode != VOIDmode && mode != BLKmode)
162 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
163 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
166 if (! HARD_REGNO_MODE_OK (regno, mode))
169 reg = gen_rtx (REG, mode, regno);
172 SET_DEST (pat) = reg;
173 if (recog (pat, insn, &num_clobbers) >= 0)
174 direct_load[(int) mode] = 1;
177 SET_DEST (pat) = mem;
178 if (recog (pat, insn, &num_clobbers) >= 0)
179 direct_store[(int) mode] = 1;
182 movstr_optab[(int) mode] = CODE_FOR_nothing;
189 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
193 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
197 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
201 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
205 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
209 /* This is run at the start of compiling a function. */
216 pending_stack_adjust = 0;
217 inhibit_defer_pop = 0;
218 cleanups_this_call = 0;
223 /* Save all variables describing the current status into the structure *P.
224 This is used before starting a nested function. */
230 /* Instead of saving the postincrement queue, empty it. */
233 p->pending_stack_adjust = pending_stack_adjust;
234 p->inhibit_defer_pop = inhibit_defer_pop;
235 p->cleanups_this_call = cleanups_this_call;
236 p->saveregs_value = saveregs_value;
237 p->forced_labels = forced_labels;
239 pending_stack_adjust = 0;
240 inhibit_defer_pop = 0;
241 cleanups_this_call = 0;
246 /* Restore all variables describing the current status from the structure *P.
247 This is used after a nested function. */
250 restore_expr_status (p)
253 pending_stack_adjust = p->pending_stack_adjust;
254 inhibit_defer_pop = p->inhibit_defer_pop;
255 cleanups_this_call = p->cleanups_this_call;
256 saveregs_value = p->saveregs_value;
257 forced_labels = p->forced_labels;
260 /* Manage the queue of increment instructions to be output
261 for POSTINCREMENT_EXPR expressions, etc. */
263 static rtx pending_chain;
265 /* Queue up to increment (or change) VAR later. BODY says how:
266 BODY should be the same thing you would pass to emit_insn
267 to increment right away. It will go to emit_insn later on.
269 The value is a QUEUED expression to be used in place of VAR
270 where you want to guarantee the pre-incrementation value of VAR. */
273 enqueue_insn (var, body)
276 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
277 var, NULL_RTX, NULL_RTX, body, pending_chain);
278 return pending_chain;
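/* Illustrative sketch (not part of the original source): queueing a
   post-increment of an SImode register VAR by 1, using gen_move_insn
   and gen_rtx as they are used elsewhere in this file.  */
#if 0
rtx var = gen_reg_rtx (SImode);
rtx queued
  = enqueue_insn (var, gen_move_insn (var,
				      gen_rtx (PLUS, SImode, var, const1_rtx)));
/* The QUEUED rtx stands for the pre-increment value of VAR until the
   queue is emptied and the increment insn is emitted.  */
#endif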
281 /* Use protect_from_queue to convert a QUEUED expression
282 into something that you can put immediately into an instruction.
283 If the queued incrementation has not happened yet,
284 protect_from_queue returns the variable itself.
285 If the incrementation has happened, protect_from_queue returns a temp
286 that contains a copy of the old value of the variable.
288 Any time an rtx which might possibly be a QUEUED is to be put
289 into an instruction, it must be passed through protect_from_queue first.
290 QUEUED expressions are not meaningful in instructions.
292 Do not pass a value through protect_from_queue and then hold
293 on to it for a while before putting it in an instruction!
294 If the queue is flushed in between, incorrect code will result. */
297 protect_from_queue (x, modify)
301 register RTX_CODE code = GET_CODE (x);
303 #if 0 /* A QUEUED can hang around after the queue is forced out. */
304 /* Shortcut for most common case. */
305 if (pending_chain == 0)
311 /* A special hack for read access to (MEM (QUEUED ...))
312 to facilitate use of autoincrement.
313 Make a copy of the contents of the memory location
314 rather than a copy of the address, but not
315 if the value is of mode BLKmode. */
316 if (code == MEM && GET_MODE (x) != BLKmode
317 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
319 register rtx y = XEXP (x, 0);
320 XEXP (x, 0) = QUEUED_VAR (y);
323 register rtx temp = gen_reg_rtx (GET_MODE (x));
324 emit_insn_before (gen_move_insn (temp, x),
330 /* Otherwise, recursively protect the subexpressions of all
331 the kinds of rtx's that can contain a QUEUED. */
333 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
334 else if (code == PLUS || code == MULT)
336 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
337 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
341 /* If the increment has not happened, use the variable itself. */
342 if (QUEUED_INSN (x) == 0)
343 return QUEUED_VAR (x);
344 /* If the increment has happened and a pre-increment copy exists, use that copy. */
346 if (QUEUED_COPY (x) != 0)
347 return QUEUED_COPY (x);
348 /* The increment has happened but we haven't set up a pre-increment copy.
349 Set one up now, and use it. */
350 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
351 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
353 return QUEUED_COPY (x);
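/* Illustrative sketch (not part of the original source): the discipline
   described above for rtx values X and Y that might be QUEUED.  */
#if 0
x = protect_from_queue (x, 0);		/* read access */
y = protect_from_queue (y, 1);		/* Y is about to be stored into */
emit_insn (gen_move_insn (y, x));	/* safe: no QUEUED remains in the insn */
/* Do not empty the queue between the protect_from_queue calls and the
   emit_insn, or the wrong value could be used.  */
#endif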
356 /* Return nonzero if X contains a QUEUED expression:
357 if it contains anything that will be altered by a queued increment.
358 We handle only combinations of MEM, PLUS, MINUS and MULT operators
359 since memory addresses generally contain only those. */
365 register enum rtx_code code = GET_CODE (x);
371 return queued_subexp_p (XEXP (x, 0));
375 return queued_subexp_p (XEXP (x, 0))
376 || queued_subexp_p (XEXP (x, 1));
381 /* Perform all the pending incrementations. */
387 while (p = pending_chain)
389 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
390 pending_chain = QUEUED_NEXT (p);
401 /* Copy data from FROM to TO, where the machine modes are not the same.
402 Both modes may be integer, or both may be floating.
403 UNSIGNEDP should be nonzero if FROM is an unsigned type.
404 This causes zero-extension instead of sign-extension. */
407 convert_move (to, from, unsignedp)
408 register rtx to, from;
411 enum machine_mode to_mode = GET_MODE (to);
412 enum machine_mode from_mode = GET_MODE (from);
413 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
414 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
418 /* rtx code for making an equivalent value. */
419 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
421 to = protect_from_queue (to, 1);
422 from = protect_from_queue (from, 0);
424 if (to_real != from_real)
427 if (to_mode == from_mode
428 || (from_mode == VOIDmode && CONSTANT_P (from)))
430 emit_move_insn (to, from);
436 #ifdef HAVE_extendsfdf2
437 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
439 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
443 #ifdef HAVE_extendsfxf2
444 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
446 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
450 #ifdef HAVE_extendsftf2
451 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
453 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
457 #ifdef HAVE_extenddfxf2
458 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
460 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
464 #ifdef HAVE_extenddftf2
465 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
467 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
471 #ifdef HAVE_truncdfsf2
472 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
474 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
478 #ifdef HAVE_truncxfsf2
479 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
481 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
485 #ifdef HAVE_trunctfsf2
486 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
488 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
492 #ifdef HAVE_truncxfdf2
493 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
495 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
499 #ifdef HAVE_trunctfdf2
500 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
502 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
514 libcall = extendsfdf2_libfunc;
518 libcall = extendsfxf2_libfunc;
522 libcall = extendsftf2_libfunc;
531 libcall = truncdfsf2_libfunc;
535 libcall = extenddfxf2_libfunc;
539 libcall = extenddftf2_libfunc;
548 libcall = truncxfsf2_libfunc;
552 libcall = truncxfdf2_libfunc;
561 libcall = trunctfsf2_libfunc;
565 libcall = trunctfdf2_libfunc;
571 if (libcall == (rtx) 0)
572 /* This conversion is not implemented yet. */
575 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
576 emit_move_insn (to, hard_libcall_value (to_mode));
580 /* Now both modes are integers. */
582 /* Handle expanding beyond a word. */
583 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
584 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
591 enum machine_mode lowpart_mode;
592 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
594 /* Try converting directly if the insn is supported. */
595 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
598 /* If FROM is a SUBREG, put it into a register. Do this
599 so that we always generate the same set of insns for
600 better cse'ing; if an intermediate assignment occurred,
601 we won't be doing the operation directly on the SUBREG. */
602 if (optimize > 0 && GET_CODE (from) == SUBREG)
603 from = force_reg (from_mode, from);
604 emit_unop_insn (code, to, from, equiv_code);
607 /* Next, try converting via full word. */
608 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
609 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
610 != CODE_FOR_nothing))
612 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
613 emit_unop_insn (code, to,
614 gen_lowpart (word_mode, to), equiv_code);
618 /* No special multiword conversion insn; do it by hand. */
621 /* Get a copy of FROM widened to a word, if necessary. */
622 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
623 lowpart_mode = word_mode;
625 lowpart_mode = from_mode;
627 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
629 lowpart = gen_lowpart (lowpart_mode, to);
630 emit_move_insn (lowpart, lowfrom);
632 /* Compute the value to put in each remaining word. */
634 fill_value = const0_rtx;
639 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
640 && STORE_FLAG_VALUE == -1)
642 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
644 fill_value = gen_reg_rtx (word_mode);
645 emit_insn (gen_slt (fill_value));
651 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
652 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
654 fill_value = convert_to_mode (word_mode, fill_value, 1);
658 /* Fill the remaining words. */
659 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
661 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
662 rtx subword = operand_subword (to, index, 1, to_mode);
667 if (fill_value != subword)
668 emit_move_insn (subword, fill_value);
671 insns = get_insns ();
674 emit_no_conflict_block (insns, to, from, NULL_RTX,
675 gen_rtx (equiv_code, to_mode, from));
679 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
681 convert_move (to, gen_lowpart (word_mode, from), 0);
685 /* Handle pointer conversion */ /* SPEE 900220 */
686 if (to_mode == PSImode)
688 if (from_mode != SImode)
689 from = convert_to_mode (SImode, from, unsignedp);
691 #ifdef HAVE_truncsipsi
694 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
697 #endif /* HAVE_truncsipsi */
701 if (from_mode == PSImode)
703 if (to_mode != SImode)
705 from = convert_to_mode (SImode, from, unsignedp);
710 #ifdef HAVE_extendpsisi
711 if (HAVE_extendpsisi)
713 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
716 #endif /* HAVE_extendpsisi */
721 /* Now follow all the conversions between integers
722 no more than a word long. */
724 /* For truncation, usually we can just refer to FROM in a narrower mode. */
725 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
726 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
727 GET_MODE_BITSIZE (from_mode))
728 && ((GET_CODE (from) == MEM
729 && ! MEM_VOLATILE_P (from)
730 && direct_load[(int) to_mode]
731 && ! mode_dependent_address_p (XEXP (from, 0)))
732 || GET_CODE (from) == REG
733 || GET_CODE (from) == SUBREG))
735 emit_move_insn (to, gen_lowpart (to_mode, from));
739 /* Handle extension. */
740 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
742 /* Convert directly if that works. */
743 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
746 /* If FROM is a SUBREG, put it into a register. Do this
747 so that we always generate the same set of insns for
748 better cse'ing; if an intermediate assignment occurred,
749 we won't be doing the operation directly on the SUBREG. */
750 if (optimize > 0 && GET_CODE (from) == SUBREG)
751 from = force_reg (from_mode, from);
752 emit_unop_insn (code, to, from, equiv_code);
757 enum machine_mode intermediate;
759 /* Search for a mode to convert via. */
760 for (intermediate = from_mode; intermediate != VOIDmode;
761 intermediate = GET_MODE_WIDER_MODE (intermediate))
762 if ((can_extend_p (to_mode, intermediate, unsignedp)
764 && (can_extend_p (intermediate, from_mode, unsignedp)
765 != CODE_FOR_nothing))
767 convert_move (to, convert_to_mode (intermediate, from,
768 unsignedp), unsignedp);
772 /* No suitable intermediate mode. */
777 /* Support special truncate insns for certain modes. */
779 if (from_mode == DImode && to_mode == SImode)
781 #ifdef HAVE_truncdisi2
784 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
788 convert_move (to, force_reg (from_mode, from), unsignedp);
792 if (from_mode == DImode && to_mode == HImode)
794 #ifdef HAVE_truncdihi2
797 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
801 convert_move (to, force_reg (from_mode, from), unsignedp);
805 if (from_mode == DImode && to_mode == QImode)
807 #ifdef HAVE_truncdiqi2
810 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
814 convert_move (to, force_reg (from_mode, from), unsignedp);
818 if (from_mode == SImode && to_mode == HImode)
820 #ifdef HAVE_truncsihi2
823 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
827 convert_move (to, force_reg (from_mode, from), unsignedp);
831 if (from_mode == SImode && to_mode == QImode)
833 #ifdef HAVE_truncsiqi2
836 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
840 convert_move (to, force_reg (from_mode, from), unsignedp);
844 if (from_mode == HImode && to_mode == QImode)
846 #ifdef HAVE_trunchiqi2
849 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
853 convert_move (to, force_reg (from_mode, from), unsignedp);
857 /* Handle truncation of volatile memrefs, and so on;
858 the things that couldn't be truncated directly,
859 and for which there was no special instruction. */
860 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
862 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
863 emit_move_insn (to, temp);
867 /* Mode combination is not recognized. */
871 /* Return an rtx for a value that would result
872 from converting X to mode MODE.
873 Both X and MODE may be floating, or both integer.
874 UNSIGNEDP is nonzero if X is an unsigned value.
875 This can be done by referring to a part of X in place
876 or by copying to a new temporary with conversion.
878 This function *must not* call protect_from_queue
879 except when putting X into an insn (in which case convert_move does it). */
882 convert_to_mode (mode, x, unsignedp)
883 enum machine_mode mode;
889 if (mode == GET_MODE (x))
892 /* There is one case that we must handle specially: If we are converting
893 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
894 we are to interpret the constant as unsigned, gen_lowpart will do
895 the wrong thing if the constant appears negative. What we want to do is
896 make the high-order word of the constant zero, not all ones. */
898 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
899 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
900 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
901 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
903 /* We can do this with a gen_lowpart if both desired and current modes
904 are integer, and this is either a constant integer, a register, or a
905 non-volatile MEM. Except for the constant case, we must be narrowing the operand. */
908 if (GET_CODE (x) == CONST_INT
909 || (GET_MODE_CLASS (mode) == MODE_INT
910 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
911 && (GET_CODE (x) == CONST_DOUBLE
912 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
913 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
914 && direct_load[(int) mode]
915 || GET_CODE (x) == REG)))))
916 return gen_lowpart (mode, x);
918 temp = gen_reg_rtx (mode);
919 convert_move (temp, x, unsignedp);
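/* Illustrative sketch (not part of the original source): widening a
   QImode register to SImode with zero-extension.  */
#if 0
rtx byte_reg = gen_reg_rtx (QImode);
rtx widened = convert_to_mode (SImode, byte_reg, 1);	/* 1 => treat as unsigned */
/* Equivalently, when the destination rtx already exists:  */
rtx dest = gen_reg_rtx (SImode);
convert_move (dest, byte_reg, 1);
#endif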
923 /* Generate several move instructions to copy LEN bytes
924 from block FROM to block TO. (These are MEM rtx's with BLKmode).
925 The caller must pass FROM and TO
926 through protect_from_queue before calling.
927 ALIGN (in bytes) is maximum alignment we can assume. */
929 struct move_by_pieces
938 int explicit_inc_from;
944 static void move_by_pieces_1 ();
945 static int move_by_pieces_ninsns ();
948 move_by_pieces (to, from, len, align)
952 struct move_by_pieces data;
953 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
954 int max_size = MOVE_MAX + 1;
957 data.to_addr = to_addr;
958 data.from_addr = from_addr;
962 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
963 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
965 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
966 || GET_CODE (from_addr) == POST_INC
967 || GET_CODE (from_addr) == POST_DEC);
969 data.explicit_inc_from = 0;
970 data.explicit_inc_to = 0;
972 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
973 if (data.reverse) data.offset = len;
976 /* If copying requires more than two move insns,
977 copy addresses to registers (to make displacements shorter)
978 and use post-increment if available. */
979 if (!(data.autinc_from && data.autinc_to)
980 && move_by_pieces_ninsns (len, align) > 2)
982 #ifdef HAVE_PRE_DECREMENT
983 if (data.reverse && ! data.autinc_from)
985 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
986 data.autinc_from = 1;
987 data.explicit_inc_from = -1;
990 #ifdef HAVE_POST_INCREMENT
991 if (! data.autinc_from)
993 data.from_addr = copy_addr_to_reg (from_addr);
994 data.autinc_from = 1;
995 data.explicit_inc_from = 1;
998 if (!data.autinc_from && CONSTANT_P (from_addr))
999 data.from_addr = copy_addr_to_reg (from_addr);
1000 #ifdef HAVE_PRE_DECREMENT
1001 if (data.reverse && ! data.autinc_to)
1003 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1005 data.explicit_inc_to = -1;
1008 #ifdef HAVE_POST_INCREMENT
1009 if (! data.reverse && ! data.autinc_to)
1011 data.to_addr = copy_addr_to_reg (to_addr);
1013 data.explicit_inc_to = 1;
1016 if (!data.autinc_to && CONSTANT_P (to_addr))
1017 data.to_addr = copy_addr_to_reg (to_addr);
1020 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1021 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1024 /* First move what we can in the largest integer mode, then go to
1025 successively smaller modes. */
1027 while (max_size > 1)
1029 enum machine_mode mode = VOIDmode, tmode;
1030 enum insn_code icode;
1032 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1033 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1034 if (GET_MODE_SIZE (tmode) < max_size)
1037 if (mode == VOIDmode)
1040 icode = mov_optab->handlers[(int) mode].insn_code;
1041 if (icode != CODE_FOR_nothing
1042 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1043 GET_MODE_SIZE (mode)))
1044 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1046 max_size = GET_MODE_SIZE (mode);
1049 /* The code above should have handled everything. */
1054 /* Return number of insns required to move L bytes by pieces.
1055 ALIGN (in bytes) is maximum alignment we can assume. */
1058 move_by_pieces_ninsns (l, align)
1062 register int n_insns = 0;
1063 int max_size = MOVE_MAX + 1;
1065 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1066 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1069 while (max_size > 1)
1071 enum machine_mode mode = VOIDmode, tmode;
1072 enum insn_code icode;
1074 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1075 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1076 if (GET_MODE_SIZE (tmode) < max_size)
1079 if (mode == VOIDmode)
1082 icode = mov_optab->handlers[(int) mode].insn_code;
1083 if (icode != CODE_FOR_nothing
1084 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1085 GET_MODE_SIZE (mode)))
1086 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1088 max_size = GET_MODE_SIZE (mode);
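/* Worked example (added for illustration, not from the original source):
   on a 32-bit target with SImode, HImode and QImode moves available and
   ALIGN == 4, L == 11 bytes costs 11/4 = 2 SImode moves (remainder 3),
   then 3/2 = 1 HImode move (remainder 1), then 1 QImode move:
   4 insns in all.  */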
1094 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1095 with move instructions for mode MODE. GENFUN is the gen_... function
1096 to make a move insn for that mode. DATA has all the other info. */
1099 move_by_pieces_1 (genfun, mode, data)
1101 enum machine_mode mode;
1102 struct move_by_pieces *data;
1104 register int size = GET_MODE_SIZE (mode);
1105 register rtx to1, from1;
1107 while (data->len >= size)
1109 if (data->reverse) data->offset -= size;
1111 to1 = (data->autinc_to
1112 ? gen_rtx (MEM, mode, data->to_addr)
1113 : change_address (data->to, mode,
1114 plus_constant (data->to_addr, data->offset)));
1117 ? gen_rtx (MEM, mode, data->from_addr)
1118 : change_address (data->from, mode,
1119 plus_constant (data->from_addr, data->offset)));
1121 #ifdef HAVE_PRE_DECREMENT
1122 if (data->explicit_inc_to < 0)
1123 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1124 if (data->explicit_inc_from < 0)
1125 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1128 emit_insn ((*genfun) (to1, from1));
1129 #ifdef HAVE_POST_INCREMENT
1130 if (data->explicit_inc_to > 0)
1131 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1132 if (data->explicit_inc_from > 0)
1133 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1136 if (! data->reverse) data->offset += size;
1142 /* Emit code to move a block Y to a block X.
1143 This may be done with string-move instructions,
1144 with multiple scalar move instructions, or with a library call.
1146 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1148 SIZE is an rtx that says how long they are.
1149 ALIGN is the maximum alignment we can assume they have,
1150 measured in bytes. */
1153 emit_block_move (x, y, size, align)
1158 if (GET_MODE (x) != BLKmode)
1161 if (GET_MODE (y) != BLKmode)
1164 x = protect_from_queue (x, 1);
1165 y = protect_from_queue (y, 0);
1166 size = protect_from_queue (size, 0);
1168 if (GET_CODE (x) != MEM)
1170 if (GET_CODE (y) != MEM)
1175 if (GET_CODE (size) == CONST_INT
1176 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1177 move_by_pieces (x, y, INTVAL (size), align);
1180 /* Try the most limited insn first, because there's no point
1181 including more than one in the machine description unless
1182 the more limited one has some advantage. */
1184 rtx opalign = GEN_INT (align);
1185 enum machine_mode mode;
1187 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1188 mode = GET_MODE_WIDER_MODE (mode))
1190 enum insn_code code = movstr_optab[(int) mode];
1192 if (code != CODE_FOR_nothing
1193 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1194 here because if SIZE is less than the mode mask, as it is
1195 returned by the macro, it will definitely be less than the
1196 actual mode mask. */
1197 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1198 && (insn_operand_predicate[(int) code][0] == 0
1199 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1200 && (insn_operand_predicate[(int) code][1] == 0
1201 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1202 && (insn_operand_predicate[(int) code][3] == 0
1203 || (*insn_operand_predicate[(int) code][3]) (opalign,
1207 rtx last = get_last_insn ();
1210 op2 = convert_to_mode (mode, size, 1);
1211 if (insn_operand_predicate[(int) code][2] != 0
1212 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1213 op2 = copy_to_mode_reg (mode, op2);
1215 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1222 delete_insns_since (last);
1226 #ifdef TARGET_MEM_FUNCTIONS
1227 emit_library_call (memcpy_libfunc, 0,
1228 VOIDmode, 3, XEXP (x, 0), Pmode,
1230 convert_to_mode (Pmode, size, 1), Pmode);
1232 emit_library_call (bcopy_libfunc, 0,
1233 VOIDmode, 3, XEXP (y, 0), Pmode,
1235 convert_to_mode (Pmode, size, 1), Pmode);
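/* Illustrative sketch (not part of the original source): copying 16 bytes
   between two BLKmode MEMs whose addresses are in hypothetical registers
   SRC_ADDR and DST_ADDR, assuming 4-byte alignment of both blocks.  */
#if 0
rtx src = gen_rtx (MEM, BLKmode, src_addr);
rtx dst = gen_rtx (MEM, BLKmode, dst_addr);
emit_block_move (dst, src, GEN_INT (16), 4);	/* ALIGN is in bytes */
#endif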
1240 /* Copy all or part of a value X into registers starting at REGNO.
1241 The number of registers to be filled is NREGS. */
1244 move_block_to_reg (regno, x, nregs, mode)
1248 enum machine_mode mode;
1253 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1254 x = validize_mem (force_const_mem (mode, x));
1256 /* See if the machine can do this with a load multiple insn. */
1257 #ifdef HAVE_load_multiple
1258 last = get_last_insn ();
1259 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1267 delete_insns_since (last);
1270 for (i = 0; i < nregs; i++)
1271 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1272 operand_subword_force (x, i, mode));
1275 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1276 The number of registers to be filled is NREGS. */
1279 move_block_from_reg (regno, x, nregs)
1287 /* See if the machine can do this with a store multiple insn. */
1288 #ifdef HAVE_store_multiple
1289 last = get_last_insn ();
1290 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1298 delete_insns_since (last);
1301 for (i = 0; i < nregs; i++)
1303 rtx tem = operand_subword (x, i, 1, BLKmode);
1308 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1312 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1315 use_regs (regno, nregs)
1321 for (i = 0; i < nregs; i++)
1322 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1325 /* Mark the instructions since PREV as a libcall block.
1326 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1335 /* Find the instructions to mark */
1337 insn_first = NEXT_INSN (prev);
1339 insn_first = get_insns ();
1341 insn_last = get_last_insn ();
1343 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1344 REG_NOTES (insn_last));
1346 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1347 REG_NOTES (insn_first));
1350 /* Write zeros through the storage of OBJECT.
1351 If OBJECT has BLKmode, SIZE is its length in bytes. */
1354 clear_storage (object, size)
1358 if (GET_MODE (object) == BLKmode)
1360 #ifdef TARGET_MEM_FUNCTIONS
1361 emit_library_call (memset_libfunc, 0,
1363 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1364 GEN_INT (size), Pmode);
1366 emit_library_call (bzero_libfunc, 0,
1368 XEXP (object, 0), Pmode,
1369 GEN_INT (size), Pmode);
1373 emit_move_insn (object, const0_rtx);
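/* Illustrative sketch (not part of the original source): zeroing a 32-byte
   BLKmode object whose address is in a hypothetical register ADDR.  */
#if 0
clear_storage (gen_rtx (MEM, BLKmode, addr), 32);	/* size in bytes */
#endif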
1376 /* Generate code to copy Y into X.
1377 Both Y and X must have the same mode, except that
1378 Y can be a constant with VOIDmode.
1379 This mode cannot be BLKmode; use emit_block_move for that.
1381 Return the last instruction emitted. */
1384 emit_move_insn (x, y)
1387 enum machine_mode mode = GET_MODE (x);
1388 enum machine_mode submode;
1389 enum mode_class class = GET_MODE_CLASS (mode);
1392 x = protect_from_queue (x, 1);
1393 y = protect_from_queue (y, 0);
1395 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1398 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1399 y = force_const_mem (mode, y);
1401 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
1403 if (GET_CODE (x) == MEM
1404 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1405 && ! push_operand (x, GET_MODE (x)))
1407 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1408 x = change_address (x, VOIDmode, XEXP (x, 0));
1410 if (GET_CODE (y) == MEM
1411 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1413 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1414 y = change_address (y, VOIDmode, XEXP (y, 0));
1416 if (mode == BLKmode)
1419 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1420 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1421 (class == MODE_COMPLEX_INT
1422 ? MODE_INT : MODE_FLOAT),
1425 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1427 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1429 /* Expand complex moves by moving real part and imaginary part, if possible. */
1430 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1431 && submode != BLKmode
1432 && (mov_optab->handlers[(int) submode].insn_code
1433 != CODE_FOR_nothing))
1435 /* Don't split destination if it is a stack push. */
1436 int stack = push_operand (x, GET_MODE (x));
1437 rtx prev = get_last_insn ();
1439 /* Tell flow that the whole of the destination is being set. */
1440 if (GET_CODE (x) == REG)
1441 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1443 /* If this is a stack push, push the highpart first, so it
1444 will be in the argument order.
1446 In that case, change_address is used only to convert
1447 the mode, not to change the address. */
1448 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1449 ((stack ? change_address (x, submode, (rtx) 0)
1450 : gen_highpart (submode, x)),
1451 gen_highpart (submode, y)));
1452 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1453 ((stack ? change_address (x, submode, (rtx) 0)
1454 : gen_lowpart (submode, x)),
1455 gen_lowpart (submode, y)));
1460 /* This will handle any multi-word mode that lacks a move_insn pattern.
1461 However, you will get better code if you define such patterns,
1462 even if they must turn into multiple assembler instructions. */
1463 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1466 rtx prev_insn = get_last_insn ();
1469 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1472 rtx xpart = operand_subword (x, i, 1, mode);
1473 rtx ypart = operand_subword (y, i, 1, mode);
1475 /* If we can't get a part of Y, put Y into memory if it is a
1476 constant. Otherwise, force it into a register. If we still
1477 can't get a part of Y, abort. */
1478 if (ypart == 0 && CONSTANT_P (y))
1480 y = force_const_mem (mode, y);
1481 ypart = operand_subword (y, i, 1, mode);
1483 else if (ypart == 0)
1484 ypart = operand_subword_force (y, i, mode);
1486 if (xpart == 0 || ypart == 0)
1489 last_insn = emit_move_insn (xpart, ypart);
1491 /* Mark these insns as a libcall block. */
1492 group_insns (prev_insn);
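/* Illustrative sketch (not part of the original source): the common
   constant-to-register and register-to-register cases of emit_move_insn.  */
#if 0
rtx r0 = gen_reg_rtx (SImode);
rtx r1 = gen_reg_rtx (SImode);
emit_move_insn (r0, GEN_INT (42));	/* Y may be a VOIDmode constant */
emit_move_insn (r1, r0);		/* both operands in SImode */
#endif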
1500 /* Pushing data onto the stack. */
1502 /* Push a block of length SIZE (perhaps variable)
1503 and return an rtx to address the beginning of the block.
1504 Note that it is not possible for the value returned to be a QUEUED.
1505 The value may be virtual_outgoing_args_rtx.
1507 EXTRA is the number of bytes of padding to push in addition to SIZE.
1508 BELOW nonzero means this padding comes at low addresses;
1509 otherwise, the padding comes at high addresses. */
1512 push_block (size, extra, below)
1517 if (CONSTANT_P (size))
1518 anti_adjust_stack (plus_constant (size, extra));
1519 else if (GET_CODE (size) == REG && extra == 0)
1520 anti_adjust_stack (size);
1523 rtx temp = copy_to_mode_reg (Pmode, size);
1525 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1526 temp, 0, OPTAB_LIB_WIDEN);
1527 anti_adjust_stack (temp);
1530 #ifdef STACK_GROWS_DOWNWARD
1531 temp = virtual_outgoing_args_rtx;
1532 if (extra != 0 && below)
1533 temp = plus_constant (temp, extra);
1535 if (GET_CODE (size) == CONST_INT)
1536 temp = plus_constant (virtual_outgoing_args_rtx,
1537 - INTVAL (size) - (below ? 0 : extra));
1538 else if (extra != 0 && !below)
1539 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1540 negate_rtx (Pmode, plus_constant (size, extra)));
1542 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1543 negate_rtx (Pmode, size));
1546 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1552 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
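/* Illustrative sketch (not part of the original source): reserving 64
   bytes of stack for outgoing arguments and obtaining the address of
   the block just pushed.  */
#if 0
rtx block_addr = push_block (GEN_INT (64), 0, 0);
#endif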
1555 /* Generate code to push X onto the stack, assuming it has mode MODE and
1557 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
1559 SIZE is an rtx for the size of data to be copied (in bytes),
1560 needed only if X is BLKmode.
1562 ALIGN (in bytes) is maximum alignment we can assume.
1564 If PARTIAL is nonzero, then copy that many of the first words
1565 of X into registers starting with REG, and push the rest of X.
1566 The amount of space pushed is decreased by PARTIAL words,
1567 rounded *down* to a multiple of PARM_BOUNDARY.
1568 REG must be a hard register in this case.
1570 EXTRA is the amount in bytes of extra space to leave next to this arg.
1571 This is ignored if an argument block has already been allocated.
1573 On a machine that lacks real push insns, ARGS_ADDR is the address of
1574 the bottom of the argument block for this call. We use indexing off there
1575 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1576 argument block has not been preallocated.
1578 ARGS_SO_FAR is the size of args previously pushed for this call. */
1581 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1582 args_addr, args_so_far)
1584 enum machine_mode mode;
1595 enum direction stack_direction
1596 #ifdef STACK_GROWS_DOWNWARD
1602 /* Decide where to pad the argument: `downward' for below,
1603 `upward' for above, or `none' for don't pad it.
1604 Default is below for small data on big-endian machines; else above. */
1605 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1607 /* Invert direction if stack is post-update. */
1608 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1609 if (where_pad != none)
1610 where_pad = (where_pad == downward ? upward : downward);
1612 xinner = x = protect_from_queue (x, 0);
1614 if (mode == BLKmode)
1616 /* Copy a block into the stack, entirely or partially. */
1619 int used = partial * UNITS_PER_WORD;
1620 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1628 /* USED is now the # of bytes we need not copy to the stack
1629 because registers will take care of them. */
1632 xinner = change_address (xinner, BLKmode,
1633 plus_constant (XEXP (xinner, 0), used));
1635 /* If the partial register-part of the arg counts in its stack size,
1636 skip the part of stack space corresponding to the registers.
1637 Otherwise, start copying to the beginning of the stack space,
1638 by setting SKIP to 0. */
1639 #ifndef REG_PARM_STACK_SPACE
1645 #ifdef PUSH_ROUNDING
1646 /* Do it with several push insns if that doesn't take lots of insns
1647 and if there is no difficulty with push insns that skip bytes
1648 on the stack for alignment purposes. */
1650 && GET_CODE (size) == CONST_INT
1652 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1654 /* Here we avoid the case of a structure whose weak alignment
1655 forces many pushes of a small amount of data,
1656 and such small pushes do rounding that causes trouble. */
1657 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1658 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1659 || PUSH_ROUNDING (align) == align)
1660 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1662 /* Push padding now if padding above and stack grows down,
1663 or if padding below and stack grows up.
1664 But if space already allocated, this has already been done. */
1665 if (extra && args_addr == 0
1666 && where_pad != none && where_pad != stack_direction)
1667 anti_adjust_stack (GEN_INT (extra));
1669 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1670 INTVAL (size) - used, align);
1673 #endif /* PUSH_ROUNDING */
1675 /* Otherwise make space on the stack and copy the data
1676 to the address of that space. */
1678 /* Deduct words put into registers from the size we must copy. */
1681 if (GET_CODE (size) == CONST_INT)
1682 size = GEN_INT (INTVAL (size) - used);
1684 size = expand_binop (GET_MODE (size), sub_optab, size,
1685 GEN_INT (used), NULL_RTX, 0,
1689 /* Get the address of the stack space.
1690 In this case, we do not deal with EXTRA separately.
1691 A single stack adjust will do. */
1694 temp = push_block (size, extra, where_pad == downward);
1697 else if (GET_CODE (args_so_far) == CONST_INT)
1698 temp = memory_address (BLKmode,
1699 plus_constant (args_addr,
1700 skip + INTVAL (args_so_far)));
1702 temp = memory_address (BLKmode,
1703 plus_constant (gen_rtx (PLUS, Pmode,
1704 args_addr, args_so_far),
1707 /* TEMP is the address of the block. Copy the data there. */
1708 if (GET_CODE (size) == CONST_INT
1709 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1712 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1713 INTVAL (size), align);
1716 /* Try the most limited insn first, because there's no point
1717 including more than one in the machine description unless
1718 the more limited one has some advantage. */
1719 #ifdef HAVE_movstrqi
1721 && GET_CODE (size) == CONST_INT
1722 && ((unsigned) INTVAL (size)
1723 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1725 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1726 xinner, size, GEN_INT (align)));
1730 #ifdef HAVE_movstrhi
1732 && GET_CODE (size) == CONST_INT
1733 && ((unsigned) INTVAL (size)
1734 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1736 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1737 xinner, size, GEN_INT (align)));
1741 #ifdef HAVE_movstrsi
1744 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1745 xinner, size, GEN_INT (align)));
1749 #ifdef HAVE_movstrdi
1752 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1753 xinner, size, GEN_INT (align)));
1758 #ifndef ACCUMULATE_OUTGOING_ARGS
1759 /* If the source is referenced relative to the stack pointer,
1760 copy it to another register to stabilize it. We do not need
1761 to do this if we know that we won't be changing sp. */
1763 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1764 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1765 temp = copy_to_reg (temp);
1768 /* Make inhibit_defer_pop nonzero around the library call
1769 to force it to pop the bcopy-arguments right away. */
1771 #ifdef TARGET_MEM_FUNCTIONS
1772 emit_library_call (memcpy_libfunc, 0,
1773 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1776 emit_library_call (bcopy_libfunc, 0,
1777 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1783 else if (partial > 0)
1785 /* Scalar partly in registers. */
1787 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1790 /* # words of start of argument
1791 that we must make space for but need not store. */
1792 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1793 int args_offset = INTVAL (args_so_far);
1796 /* Push padding now if padding above and stack grows down,
1797 or if padding below and stack grows up.
1798 But if space already allocated, this has already been done. */
1799 if (extra && args_addr == 0
1800 && where_pad != none && where_pad != stack_direction)
1801 anti_adjust_stack (GEN_INT (extra));
1803 /* If we make space by pushing it, we might as well push
1804 the real data. Otherwise, we can leave OFFSET nonzero
1805 and leave the space uninitialized. */
1809 /* Now NOT_STACK gets the number of words that we don't need to
1810 allocate on the stack. */
1811 not_stack = partial - offset;
1813 /* If the partial register-part of the arg counts in its stack size,
1814 skip the part of stack space corresponding to the registers.
1815 Otherwise, start copying to the beginning of the stack space,
1816 by setting SKIP to 0. */
1817 #ifndef REG_PARM_STACK_SPACE
1823 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1824 x = validize_mem (force_const_mem (mode, x));
1826 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1827 SUBREGs of such registers are not allowed. */
1828 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1829 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1830 x = copy_to_reg (x);
1832 /* Loop over all the words allocated on the stack for this arg. */
1833 /* We can do it by words, because any scalar bigger than a word
1834 has a size a multiple of a word. */
1835 #ifndef PUSH_ARGS_REVERSED
1836 for (i = not_stack; i < size; i++)
1838 for (i = size - 1; i >= not_stack; i--)
1840 if (i >= not_stack + offset)
1841 emit_push_insn (operand_subword_force (x, i, mode),
1842 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
1844 GEN_INT (args_offset + ((i - not_stack + skip)
1845 * UNITS_PER_WORD)));
1851 /* Push padding now if padding above and stack grows down,
1852 or if padding below and stack grows up.
1853 But if space already allocated, this has already been done. */
1854 if (extra && args_addr == 0
1855 && where_pad != none && where_pad != stack_direction)
1856 anti_adjust_stack (GEN_INT (extra));
1858 #ifdef PUSH_ROUNDING
1860 addr = gen_push_operand ();
1863 if (GET_CODE (args_so_far) == CONST_INT)
1865 = memory_address (mode,
1866 plus_constant (args_addr, INTVAL (args_so_far)));
1868 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
1871 emit_move_insn (gen_rtx (MEM, mode, addr), x);
1875 /* If part should go in registers, copy that part
1876 into the appropriate registers. Do this now, at the end,
1877 since mem-to-mem copies above may do function calls. */
1879 move_block_to_reg (REGNO (reg), x, partial, mode);
1881 if (extra && args_addr == 0 && where_pad == stack_direction)
1882 anti_adjust_stack (GEN_INT (extra));
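/* Illustrative sketch (not part of the original source): pushing a single
   SImode value VAL with no partial-register portion, assuming the machine
   has push insns (so ARGS_ADDR is 0) and no padding is required.  */
#if 0
emit_push_insn (val, SImode, NULL_TREE, NULL_RTX,
		GET_MODE_SIZE (SImode) /* align */, 0 /* partial */,
		NULL_RTX /* reg */, 0 /* extra */,
		NULL_RTX /* args_addr */, const0_rtx /* args_so_far */);
#endif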
1885 /* Output a library call to function FUN (a SYMBOL_REF rtx)
1886 (emitting the queue unless NO_QUEUE is nonzero),
1887 for a value of mode OUTMODE,
1888 with NARGS different arguments, passed as alternating rtx values
1889 and machine_modes to convert them to.
1890 The rtx values should have been passed through protect_from_queue already.
1892 NO_QUEUE will be true if and only if the library call is a `const' call
1893 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1894 to the variable is_const in expand_call.
1896 NO_QUEUE must be true for const calls, because if it isn't, then
1897 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
1898 and will be lost if the libcall sequence is optimized away.
1900 NO_QUEUE must be false for non-const calls, because if it isn't, the
1901 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
1902 optimized. For instance, the instruction scheduler may incorrectly
1903 move memory references across the non-const call. */
1906 emit_library_call (va_alist)
1910 struct args_size args_size;
1911 register int argnum;
1912 enum machine_mode outmode;
1919 CUMULATIVE_ARGS args_so_far;
1920 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1921 struct args_size offset; struct args_size size; };
1923 int old_inhibit_defer_pop = inhibit_defer_pop;
1928 orgfun = fun = va_arg (p, rtx);
1929 no_queue = va_arg (p, int);
1930 outmode = va_arg (p, enum machine_mode);
1931 nargs = va_arg (p, int);
1933 /* Copy all the libcall-arguments out of the varargs data
1934 and into a vector ARGVEC.
1936 Compute how to pass each argument. We only support a very small subset
1937 of the full argument passing conventions to limit complexity here since
1938 library functions shouldn't have many args. */
1940 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1942 INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1944 args_size.constant = 0;
1947 for (count = 0; count < nargs; count++)
1949 rtx val = va_arg (p, rtx);
1950 enum machine_mode mode = va_arg (p, enum machine_mode);
1952 /* We cannot convert the arg value to the mode the library wants here;
1953 must do it earlier where we know the signedness of the arg. */
1955 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1958 /* On some machines, there's no way to pass a float to a library fcn.
1959 Pass it as a double instead. */
1960 #ifdef LIBGCC_NEEDS_DOUBLE
1961 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
1962 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
1965 /* There's no need to call protect_from_queue, because
1966 either emit_move_insn or emit_push_insn will do that. */
1968 /* Make sure it is a reasonable operand for a move or push insn. */
1969 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1970 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
1971 val = force_operand (val, NULL_RTX);
1973 argvec[count].value = val;
1974 argvec[count].mode = mode;
1976 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1977 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
1981 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1982 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
1984 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1985 argvec[count].partial
1986 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
1988 argvec[count].partial = 0;
1991 locate_and_pad_parm (mode, NULL_TREE,
1992 argvec[count].reg && argvec[count].partial == 0,
1993 NULL_TREE, &args_size, &argvec[count].offset,
1994 &argvec[count].size);
1996 if (argvec[count].size.var)
1999 #ifndef REG_PARM_STACK_SPACE
2000 if (argvec[count].partial)
2001 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2004 if (argvec[count].reg == 0 || argvec[count].partial != 0
2005 #ifdef REG_PARM_STACK_SPACE
2009 args_size.constant += argvec[count].size.constant;
2011 #ifdef ACCUMULATE_OUTGOING_ARGS
2012 /* If this arg is actually passed on the stack, it might be
2013 clobbering something we already put there (this library call might
2014 be inside the evaluation of an argument to a function whose call
2015 requires the stack). This will only occur when the library call
2016 has sufficient args to run out of argument registers. Abort in
2017 this case; if this ever occurs, code must be added to save and
2018 restore the arg slot. */
2020 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2024 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2028 /* If this machine requires an external definition for library
2029 functions, write one out. */
2030 assemble_external_libcall (fun);
2032 #ifdef STACK_BOUNDARY
2033 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2034 / STACK_BYTES) * STACK_BYTES);
2037 #ifdef REG_PARM_STACK_SPACE
2038 args_size.constant = MAX (args_size.constant,
2039 REG_PARM_STACK_SPACE ((tree) 0));
2042 #ifdef ACCUMULATE_OUTGOING_ARGS
2043 if (args_size.constant > current_function_outgoing_args_size)
2044 current_function_outgoing_args_size = args_size.constant;
2045 args_size.constant = 0;
2048 #ifndef PUSH_ROUNDING
2049 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2052 #ifdef PUSH_ARGS_REVERSED
2060 /* Push the args that need to be pushed. */
2062 for (count = 0; count < nargs; count++, argnum += inc)
2064 register enum machine_mode mode = argvec[argnum].mode;
2065 register rtx val = argvec[argnum].value;
2066 rtx reg = argvec[argnum].reg;
2067 int partial = argvec[argnum].partial;
2069 if (! (reg != 0 && partial == 0))
2070 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2071 argblock, GEN_INT (argvec[count].offset.constant));
2075 #ifdef PUSH_ARGS_REVERSED
2081 /* Now load any reg parms into their regs. */
2083 for (count = 0; count < nargs; count++, argnum += inc)
2085 register enum machine_mode mode = argvec[argnum].mode;
2086 register rtx val = argvec[argnum].value;
2087 rtx reg = argvec[argnum].reg;
2088 int partial = argvec[argnum].partial;
2090 if (reg != 0 && partial == 0)
2091 emit_move_insn (reg, val);
2095 /* For version 1.37, try deleting this entirely. */
2099 /* Any regs containing parms remain in use through the call. */
2101 for (count = 0; count < nargs; count++)
2102 if (argvec[count].reg != 0)
2103 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2105 use_insns = get_insns ();
2108 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2110 /* Don't allow popping to be deferred, since then
2111 cse'ing of library calls could delete a call and leave the pop. */
2114 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2115 will set inhibit_defer_pop to that value. */
2117 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2118 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2119 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2120 old_inhibit_defer_pop + 1, use_insns, no_queue);
2122 /* Now restore inhibit_defer_pop to its actual original value. */
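/* Illustrative sketch (not part of the original source): the alternating
   value/mode argument convention described above, mirroring the memcpy
   calls made elsewhere in this file.  DST, SRC and NBYTES are hypothetical
   rtx values.  */
#if 0
emit_library_call (memcpy_libfunc, 0, VOIDmode, 3,
		   dst, Pmode, src, Pmode,
		   convert_to_mode (Pmode, nbytes, 1), Pmode);
#endif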
2126 /* Expand an assignment that stores the value of FROM into TO.
2127 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2128 (This may contain a QUEUED rtx.)
2129 Otherwise, the returned value is not meaningful.
2131 SUGGEST_REG is no longer actually used.
2132 It used to mean, copy the value through a register
2133 and return that register, if that is possible.
2134 But now we do this if WANT_VALUE.
2136 If the value stored is a constant, we return the constant. */
2139 expand_assignment (to, from, want_value, suggest_reg)
2144 register rtx to_rtx = 0;
2147 /* Don't crash if the lhs of the assignment was erroneous. */
2149 if (TREE_CODE (to) == ERROR_MARK)
2150 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2152 /* Assignment of a structure component needs special treatment
2153 if the structure component's rtx is not simply a MEM.
2154 Assignment of an array element at a constant index
2155 has the same problem. */
2157 if (TREE_CODE (to) == COMPONENT_REF
2158 || TREE_CODE (to) == BIT_FIELD_REF
2159 || (TREE_CODE (to) == ARRAY_REF
2160 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2161 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2163 enum machine_mode mode1;
2169 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2170 &mode1, &unsignedp, &volatilep);
2172 /* If we are going to use store_bit_field and extract_bit_field,
2173 make sure to_rtx will be safe for multiple use. */
2175 if (mode1 == VOIDmode && want_value)
2176 tem = stabilize_reference (tem);
2178 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2181 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2183 if (GET_CODE (to_rtx) != MEM)
2185 to_rtx = change_address (to_rtx, VOIDmode,
2186 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2187 force_reg (Pmode, offset_rtx)));
2191 if (GET_CODE (to_rtx) == MEM)
2192 MEM_VOLATILE_P (to_rtx) = 1;
2193 #if 0 /* This was turned off because, when a field is volatile
2194 in an object which is not volatile, the object may be in a register,
2195 and then we would abort over here. */
2201 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2203 /* Spurious cast makes HPUX compiler happy. */
2204 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2207 /* Required alignment of containing datum. */
2208 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2209 int_size_in_bytes (TREE_TYPE (tem)));
2210 preserve_temp_slots (result);
2216 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2217 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2220 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2222 /* In case we are returning the contents of an object which overlaps
2223 the place the value is being stored, use a safe function when copying
2224 a value through a pointer into a structure value return block. */
2225 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2226 && current_function_returns_struct
2227 && !current_function_returns_pcc_struct)
2229 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2230 rtx size = expr_size (from);
2232 #ifdef TARGET_MEM_FUNCTIONS
2233 emit_library_call (memcpy_libfunc, 0,
2234 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2235 XEXP (from_rtx, 0), Pmode,
2238 emit_library_call (bcopy_libfunc, 0,
2239 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2240 XEXP (to_rtx, 0), Pmode,
2244 preserve_temp_slots (to_rtx);
2249 /* Compute FROM and store the value in the rtx we got. */
2251 result = store_expr (from, to_rtx, want_value);
2252 preserve_temp_slots (result);
2257 /* Generate code for computing expression EXP,
2258 and storing the value into TARGET.
2259 Returns TARGET or an equivalent value.
2260 TARGET may contain a QUEUED rtx.
2262 If SUGGEST_REG is nonzero, copy the value through a register
2263 and return that register, if that is possible.
2265 If the value stored is a constant, we return the constant. */
2268 store_expr (exp, target, suggest_reg)
2270 register rtx target;
2274 int dont_return_target = 0;
2276 if (TREE_CODE (exp) == COMPOUND_EXPR)
2278 /* Perform first part of compound expression, then assign from second
2280 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2282 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2284 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2286 /* For conditional expression, get safe form of the target. Then
2287 test the condition, doing the appropriate assignment on either
2288 side. This avoids the creation of unnecessary temporaries.
2289 For non-BLKmode, it is more efficient not to do this. */
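/* For example, for something like `s = flag ? s1 : s2' where the type
   is a structure (BLKmode), we emit a test of FLAG and one store_expr
   call into the same target on each arm, instead of building the chosen
   value in a temporary and block-copying it afterwards.
   (Illustrative example.)  */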
2291 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2294 target = protect_from_queue (target, 1);
2297 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2298 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2300 emit_jump_insn (gen_jump (lab2));
2303 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2309 else if (suggest_reg && GET_CODE (target) == MEM
2310 && GET_MODE (target) != BLKmode)
2311 /* If target is in memory and caller wants value in a register instead,
2312 arrange that. Pass TARGET as target for expand_expr so that,
2313 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2314 We know expand_expr will not use the target in that case. */
2316 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2317 GET_MODE (target), 0);
2318 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2319 temp = copy_to_reg (temp);
2320 dont_return_target = 1;
2322 else if (queued_subexp_p (target))
2323 /* If target contains a postincrement, it is not safe
2324 to use as the returned value. It would access the wrong
2325 place by the time the queued increment gets output.
2326 So copy the value through a temporary and use that temp
2329 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2331 /* Expand EXP into a new pseudo. */
2332 temp = gen_reg_rtx (GET_MODE (target));
2333 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2336 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2337 dont_return_target = 1;
2341 temp = expand_expr (exp, target, GET_MODE (target), 0);
2342 /* DO return TARGET if it's a specified hardware register.
2343 expand_return relies on this. */
2344 if (!(target && GET_CODE (target) == REG
2345 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2346 && CONSTANT_P (temp))
2347 dont_return_target = 1;
2350 /* If value was not generated in the target, store it there.
2351 Convert the value to TARGET's type first if necessary. */
2353 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2355 target = protect_from_queue (target, 1);
2356 if (GET_MODE (temp) != GET_MODE (target)
2357 && GET_MODE (temp) != VOIDmode)
2359 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2360 if (dont_return_target)
2362 /* In this case, we will return TEMP,
2363 so make sure it has the proper mode.
2364 But don't forget to store the value into TARGET. */
2365 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2366 emit_move_insn (target, temp);
2369 convert_move (target, temp, unsignedp);
2372 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2374 /* Handle copying a string constant into an array.
2375 The string constant may be shorter than the array.
2376 So copy just the string's actual length, and clear the rest. */
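/* For example, assuming `char buf[8] = "abc"', the string constant
   occupies fewer bytes than BUF; we block-copy just the bytes of the
   string constant and then clear the remaining bytes of BUF, either
   with constant arithmetic or, when the copied size is not a CONST_INT,
   with the runtime computation below.  (Illustrative sizes.)  */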
2379 /* Get the size of the data type of the string,
2380 which is actually the size of the target. */
2381 size = expr_size (exp);
2382 if (GET_CODE (size) == CONST_INT
2383 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2384 emit_block_move (target, temp, size,
2385 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2388 /* Compute the size of the data to copy from the string. */
2390 = fold (build (MIN_EXPR, sizetype,
2391 size_binop (CEIL_DIV_EXPR,
2392 TYPE_SIZE (TREE_TYPE (exp)),
2393 size_int (BITS_PER_UNIT)),
2395 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2396 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2400 /* Copy that much. */
2401 emit_block_move (target, temp, copy_size_rtx,
2402 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2404 /* Figure out how much is left in TARGET
2405 that we have to clear. */
2406 if (GET_CODE (copy_size_rtx) == CONST_INT)
2408 temp = plus_constant (XEXP (target, 0),
2409 TREE_STRING_LENGTH (exp));
2410 size = plus_constant (size,
2411 - TREE_STRING_LENGTH (exp));
2415 enum machine_mode size_mode = Pmode;
2417 temp = force_reg (Pmode, XEXP (target, 0));
2418 temp = expand_binop (size_mode, add_optab, temp,
2419 copy_size_rtx, NULL_RTX, 0,
2422 size = expand_binop (size_mode, sub_optab, size,
2423 copy_size_rtx, NULL_RTX, 0,
2426 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2427 GET_MODE (size), 0, 0);
2428 label = gen_label_rtx ();
2429 emit_jump_insn (gen_blt (label));
2432 if (size != const0_rtx)
2434 #ifdef TARGET_MEM_FUNCTIONS
2435 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2436 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2438 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2439 temp, Pmode, size, Pmode);
2446 else if (GET_MODE (temp) == BLKmode)
2447 emit_block_move (target, temp, expr_size (exp),
2448 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2450 emit_move_insn (target, temp);
2452 if (dont_return_target)
2457 /* Store the value of constructor EXP into the rtx TARGET.
2458 TARGET is either a REG or a MEM. */
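/* For example, a constructor like `{ 1, 2 }' for a structure with three
   fields stores 1 and 2 into their fields with store_field; since the
   constructor has fewer elements than the type has fields, the whole
   object is cleared first (see below), so the remaining field ends up
   zero.  This is a rough sketch; the record, union and array cases
   follow.  */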
2461 store_constructor (exp, target)
2465 tree type = TREE_TYPE (exp);
2467 /* We know our target cannot conflict, since safe_from_p has been called. */
2469 /* Don't try copying piece by piece into a hard register
2470 since that is vulnerable to being clobbered by EXP.
2471 Instead, construct in a pseudo register and then copy it all. */
2472 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2474 rtx temp = gen_reg_rtx (GET_MODE (target));
2475 store_constructor (exp, temp);
2476 emit_move_insn (target, temp);
2481 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2485 /* Inform later passes that the whole union value is dead. */
2486 if (TREE_CODE (type) == UNION_TYPE)
2487 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2489 /* If we are building a static constructor into a register,
2490 set the initial value as zero so we can fold the value into
2492 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2493 emit_move_insn (target, const0_rtx);
2495 /* If the constructor has fewer fields than the structure,
2496 clear the whole structure first. */
2497 else if (list_length (CONSTRUCTOR_ELTS (exp))
2498 != list_length (TYPE_FIELDS (type)))
2499 clear_storage (target, int_size_in_bytes (type));
2501 /* Inform later passes that the old value is dead. */
2502 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2504 /* Store each element of the constructor into
2505 the corresponding field of TARGET. */
2507 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2509 register tree field = TREE_PURPOSE (elt);
2510 register enum machine_mode mode;
2515 /* Just ignore missing fields.
2516 We cleared the whole structure, above,
2517 if any fields are missing. */
2521 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2522 unsignedp = TREE_UNSIGNED (field);
2523 mode = DECL_MODE (field);
2524 if (DECL_BIT_FIELD (field))
2527 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2528 /* ??? This case remains to be written. */
2531 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2533 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2534 /* The alignment of TARGET is
2535 at least what its type requires. */
2537 TYPE_ALIGN (type) / BITS_PER_UNIT,
2538 int_size_in_bytes (type));
2541 else if (TREE_CODE (type) == ARRAY_TYPE)
2545 tree domain = TYPE_DOMAIN (type);
2546 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2547 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2548 tree elttype = TREE_TYPE (type);
2550 /* If the constructor has fewer fields than the structure,
2551 clear the whole structure first. Similarly if this is a
2552 static constructor of a non-BLKmode object. */
2554 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2555 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2556 clear_storage (target, maxelt - minelt + 1);
2558 /* Inform later passes that the old value is dead. */
2559 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2561 /* Store each element of the constructor into
2562 the corresponding element of TARGET, determined
2563 by counting the elements. */
2564 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2566 elt = TREE_CHAIN (elt), i++)
2568 register enum machine_mode mode;
2573 mode = TYPE_MODE (elttype);
2574 bitsize = GET_MODE_BITSIZE (mode);
2575 unsignedp = TREE_UNSIGNED (elttype);
2577 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2579 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2580 /* The alignment of TARGET is
2581 at least what its type requires. */
2583 TYPE_ALIGN (type) / BITS_PER_UNIT,
2584 int_size_in_bytes (type));
2592 /* Store the value of EXP (an expression tree)
2593 into a subfield of TARGET which has mode MODE and occupies
2594 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2595 If MODE is VOIDmode, it means that we are storing into a bit-field.
2597 If VALUE_MODE is VOIDmode, return nothing in particular.
2598 UNSIGNEDP is not used in this case.
2600 Otherwise, return an rtx for the value stored. This rtx
2601 has mode VALUE_MODE if that is convenient to do.
2602 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2604 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2605 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
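/* For example, storing into a 3-bit field that starts 5 bits into its
   containing object would arrive here with BITSIZE == 3, BITPOS == 5
   and MODE == VOIDmode; the store goes through store_bit_field, and if
   a value is wanted, WIDTH_MASK == 7 lets us mask the value we just
   stored instead of re-extracting it from memory.
   (Illustrative numbers only.)  */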
2608 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2609 unsignedp, align, total_size)
2611 int bitsize, bitpos;
2612 enum machine_mode mode;
2614 enum machine_mode value_mode;
2619 HOST_WIDE_INT width_mask = 0;
2621 if (bitsize < HOST_BITS_PER_WIDE_INT)
2622 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2624 /* If we are storing into an unaligned field of an aligned union that is
2625 in a register, we may have the mode of TARGET being an integer mode but
2626 MODE == BLKmode. In that case, get an aligned object whose size and
2627 alignment are the same as TARGET and store TARGET into it (we can avoid
2628 the store if the field being stored is the entire width of TARGET). Then
2629 call ourselves recursively to store the field into a BLKmode version of
2630 that object. Finally, load from the object into TARGET. This is not
2631 very efficient in general, but should only be slightly more expensive
2632 than the otherwise-required unaligned accesses. Perhaps this can be
2633 cleaned up later. */
2636 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2638 rtx object = assign_stack_temp (GET_MODE (target),
2639 GET_MODE_SIZE (GET_MODE (target)), 0);
2640 rtx blk_object = copy_rtx (object);
2642 PUT_MODE (blk_object, BLKmode);
2644 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2645 emit_move_insn (object, target);
2647 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2650 emit_move_insn (target, object);
2655 /* If the structure is in a register or if the component
2656 is a bit field, we cannot use addressing to access it.
2657 Use bit-field techniques or SUBREG to store in it. */
2659 if (mode == VOIDmode
2660 || (mode != BLKmode && ! direct_store[(int) mode])
2661 || GET_CODE (target) == REG
2662 || GET_CODE (target) == SUBREG)
2664 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2665 /* Store the value in the bitfield. */
2666 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2667 if (value_mode != VOIDmode)
2669 /* The caller wants an rtx for the value. */
2670 /* If possible, avoid refetching from the bitfield itself. */
2672 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2673 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2674 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2675 NULL_RTX, value_mode, 0, align,
2682 rtx addr = XEXP (target, 0);
2685 /* If a value is wanted, it must be the lhs;
2686 so make the address stable for multiple use. */
2688 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2689 && ! CONSTANT_ADDRESS_P (addr)
2690 /* A frame-pointer reference is already stable. */
2691 && ! (GET_CODE (addr) == PLUS
2692 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2693 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2694 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2695 addr = copy_to_reg (addr);
2697 /* Now build a reference to just the desired component. */
2699 to_rtx = change_address (target, mode,
2700 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2701 MEM_IN_STRUCT_P (to_rtx) = 1;
2703 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2707 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2708 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2709 ARRAY_REFs at constant positions and find the ultimate containing object,
2712 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2713 bit position, and *PUNSIGNEDP to the signedness of the field.
2714 If the position of the field is variable, we store a tree
2715 giving the variable offset (in units) in *POFFSET.
2716 This offset is in addition to the bit position.
2717 If the position is not variable, we store 0 in *POFFSET.
2719 If any of the extraction expressions is volatile,
2720 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2722 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2723 is a mode that can be used to access the field. In that case, *PBITSIZE
2726 If the field describes a variable-sized object, *PMODE is set to
2727 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
2728 this case, but the address of the object can be found. */
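/* For example, for a reference roughly like `s.in.x[2]', where X is an
   array of 32-bit integers and IN starts at bit 0 of S (assumed layout),
   this would return S as the containing object with *PBITSIZE == 32,
   *PBITPOS == 64, *POFFSET == 0, and *PMODE the mode of a 32-bit int.
   (Illustrative sizes.)  */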
2731 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2736 enum machine_mode *pmode;
2741 enum machine_mode mode = VOIDmode;
2744 if (TREE_CODE (exp) == COMPONENT_REF)
2746 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2747 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2748 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2749 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2751 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2753 size_tree = TREE_OPERAND (exp, 1);
2754 *punsignedp = TREE_UNSIGNED (exp);
2758 mode = TYPE_MODE (TREE_TYPE (exp));
2759 *pbitsize = GET_MODE_BITSIZE (mode);
2760 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2765 if (TREE_CODE (size_tree) != INTEGER_CST)
2766 mode = BLKmode, *pbitsize = -1;
2768 *pbitsize = TREE_INT_CST_LOW (size_tree);
2771 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2772 and find the ultimate containing object. */
2778 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2780 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2781 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2782 : TREE_OPERAND (exp, 2));
2784 if (TREE_CODE (pos) == PLUS_EXPR)
2787 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2789 constant = TREE_OPERAND (pos, 0);
2790 var = TREE_OPERAND (pos, 1);
2792 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2794 constant = TREE_OPERAND (pos, 1);
2795 var = TREE_OPERAND (pos, 0);
2799 *pbitpos += TREE_INT_CST_LOW (constant);
2801 offset = size_binop (PLUS_EXPR, offset,
2802 size_binop (FLOOR_DIV_EXPR, var,
2803 size_int (BITS_PER_UNIT)));
2805 offset = size_binop (FLOOR_DIV_EXPR, var,
2806 size_int (BITS_PER_UNIT));
2808 else if (TREE_CODE (pos) == INTEGER_CST)
2809 *pbitpos += TREE_INT_CST_LOW (pos);
2812 /* Assume here that the offset is a multiple of a unit.
2813 If not, there should be an explicitly added constant. */
2815 offset = size_binop (PLUS_EXPR, offset,
2816 size_binop (FLOOR_DIV_EXPR, pos,
2817 size_int (BITS_PER_UNIT)));
2819 offset = size_binop (FLOOR_DIV_EXPR, pos,
2820 size_int (BITS_PER_UNIT));
2824 else if (TREE_CODE (exp) == ARRAY_REF
2825 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2826 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2828 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2829 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2831 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2832 && ! ((TREE_CODE (exp) == NOP_EXPR
2833 || TREE_CODE (exp) == CONVERT_EXPR)
2834 && (TYPE_MODE (TREE_TYPE (exp))
2835 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2838 /* If any reference in the chain is volatile, the effect is volatile. */
2839 if (TREE_THIS_VOLATILE (exp))
2841 exp = TREE_OPERAND (exp, 0);
2844 /* If this was a bit-field, see if there is a mode that allows direct
2845 access in case EXP is in memory. */
2846 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2848 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2849 if (mode == BLKmode)
2856 /* We aren't finished fixing the callers to really handle nonzero offset. */
2864 /* Given an rtx VALUE that may contain additions and multiplications,
2865 return an equivalent value that just refers to a register or memory.
2866 This is done by generating instructions to perform the arithmetic
2867 and returning a pseudo-register containing the value. */
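/* For example, given a VALUE such as (plus (mult (reg) (const_int 4))
   (const_int 8)), this emits the multiply and the add and returns a
   register (or TARGET) holding the result, so callers can use the value
   where a general address-arithmetic rtx would not be accepted.
   (Illustrative rtx.)  */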
2870 force_operand (value, target)
2873 register optab binoptab = 0;
2874 /* Use a temporary to force order of execution of calls to
2878 /* Use subtarget as the target for operand 0 of a binary operation. */
2879 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2881 if (GET_CODE (value) == PLUS)
2882 binoptab = add_optab;
2883 else if (GET_CODE (value) == MINUS)
2884 binoptab = sub_optab;
2885 else if (GET_CODE (value) == MULT)
2887 op2 = XEXP (value, 1);
2888 if (!CONSTANT_P (op2)
2889 && !(GET_CODE (op2) == REG && op2 != subtarget))
2891 tmp = force_operand (XEXP (value, 0), subtarget);
2892 return expand_mult (GET_MODE (value), tmp,
2893 force_operand (op2, NULL_RTX),
2899 op2 = XEXP (value, 1);
2900 if (!CONSTANT_P (op2)
2901 && !(GET_CODE (op2) == REG && op2 != subtarget))
2903 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2905 binoptab = add_optab;
2906 op2 = negate_rtx (GET_MODE (value), op2);
2909 /* Check for an addition with OP2 a constant integer and our first
2910 operand a PLUS of a virtual register and something else. In that
2911 case, we want to emit the sum of the virtual register and the
2912 constant first and then add the other value. This allows virtual
2913 register instantiation to simply modify the constant rather than
2914 creating another one around this addition. */
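/* For example, (plus (plus (reg virtual-stack-vars) (reg 66))
   (const_int 8)) is rearranged so that we first form
   virtual-stack-vars + 8, which instantiation can later fold into a
   single frame offset, and only then add in the register term.
   (Register numbers here are illustrative.)  */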
2915 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2916 && GET_CODE (XEXP (value, 0)) == PLUS
2917 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2918 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2919 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2921 rtx temp = expand_binop (GET_MODE (value), binoptab,
2922 XEXP (XEXP (value, 0), 0), op2,
2923 subtarget, 0, OPTAB_LIB_WIDEN);
2924 return expand_binop (GET_MODE (value), binoptab, temp,
2925 force_operand (XEXP (XEXP (value, 0), 1), 0),
2926 target, 0, OPTAB_LIB_WIDEN);
2929 tmp = force_operand (XEXP (value, 0), subtarget);
2930 return expand_binop (GET_MODE (value), binoptab, tmp,
2931 force_operand (op2, NULL_RTX),
2932 target, 0, OPTAB_LIB_WIDEN);
2933 /* We give UNSIGNEDP = 0 to expand_binop
2934 because the only operations we are expanding here are signed ones. */
2939 /* Subroutine of expand_expr:
2940 save the non-copied parts (LIST) of an expr (LHS), and return a list
2941 which can restore these values to their previous values,
2942 should something modify their storage. */
2945 save_noncopied_parts (lhs, list)
2952 for (tail = list; tail; tail = TREE_CHAIN (tail))
2953 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2954 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2957 tree part = TREE_VALUE (tail);
2958 tree part_type = TREE_TYPE (part);
2959 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2960 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2961 int_size_in_bytes (part_type), 0);
2962 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2963 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2964 parts = tree_cons (to_be_saved,
2965 build (RTL_EXPR, part_type, NULL_TREE,
2968 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2973 /* Subroutine of expand_expr:
2974 record the non-copied parts (LIST) of an expr (LHS), and return a list
2975 which specifies the initial values of these parts. */
2978 init_noncopied_parts (lhs, list)
2985 for (tail = list; tail; tail = TREE_CHAIN (tail))
2986 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2987 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2990 tree part = TREE_VALUE (tail);
2991 tree part_type = TREE_TYPE (part);
2992 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2993 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2998 /* Subroutine of expand_expr: return nonzero iff there is no way that
2999 EXP can reference X, which is being modified. */
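/* For example, when expanding a CONSTRUCTOR below, safe_from_p decides
   whether the suggested TARGET can be stored into piecemeal: if the
   constructor's elements might read the object occupying TARGET (say
   the initializer mentions the variable being initialized), the value
   is built in a stack temporary instead.  (Illustrative example.)  */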
3002 safe_from_p (x, exp)
3012 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3013 find the underlying pseudo. */
3014 if (GET_CODE (x) == SUBREG)
3017 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3021 /* If X is a location in the outgoing argument area, it is always safe. */
3022 if (GET_CODE (x) == MEM
3023 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3024 || (GET_CODE (XEXP (x, 0)) == PLUS
3025 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3028 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3031 exp_rtl = DECL_RTL (exp);
3038 if (TREE_CODE (exp) == TREE_LIST)
3039 return ((TREE_VALUE (exp) == 0
3040 || safe_from_p (x, TREE_VALUE (exp)))
3041 && (TREE_CHAIN (exp) == 0
3042 || safe_from_p (x, TREE_CHAIN (exp))));
3047 return safe_from_p (x, TREE_OPERAND (exp, 0));
3051 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3052 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3056 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3057 the expression. If it is set, we conflict iff we are that rtx or
3058 both are in memory. Otherwise, we check all operands of the
3059 expression recursively. */
3061 switch (TREE_CODE (exp))
3064 return staticp (TREE_OPERAND (exp, 0));
3067 if (GET_CODE (x) == MEM)
3072 exp_rtl = CALL_EXPR_RTL (exp);
3075 /* Assume that the call will clobber all hard registers and
3077 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3078 || GET_CODE (x) == MEM)
3085 exp_rtl = RTL_EXPR_RTL (exp);
3087 /* We don't know what this can modify. */
3092 case WITH_CLEANUP_EXPR:
3093 exp_rtl = RTL_EXPR_RTL (exp);
3097 exp_rtl = SAVE_EXPR_RTL (exp);
3101 /* The only operand we look at is operand 1. The rest aren't
3102 part of the expression. */
3103 return safe_from_p (x, TREE_OPERAND (exp, 1));
3105 case METHOD_CALL_EXPR:
3106 /* This takes an rtx argument, but shouldn't appear here. */
3110 /* If we have an rtx, we do not need to scan our operands. */
3114 nops = tree_code_length[(int) TREE_CODE (exp)];
3115 for (i = 0; i < nops; i++)
3116 if (TREE_OPERAND (exp, i) != 0
3117 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3121 /* If we have an rtl, find any enclosed object. Then see if we conflict
3125 if (GET_CODE (exp_rtl) == SUBREG)
3127 exp_rtl = SUBREG_REG (exp_rtl);
3128 if (GET_CODE (exp_rtl) == REG
3129 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3133 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3134 are memory and EXP is not readonly. */
3135 return ! (rtx_equal_p (x, exp_rtl)
3136 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3137 && ! TREE_READONLY (exp)));
3140 /* If we reach here, it is safe. */
3144 /* Subroutine of expand_expr: return nonzero iff EXP is an
3145 expression whose type is statically determinable. */
3151 if (TREE_CODE (exp) == PARM_DECL
3152 || TREE_CODE (exp) == VAR_DECL
3153 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3154 || TREE_CODE (exp) == COMPONENT_REF
3155 || TREE_CODE (exp) == ARRAY_REF)
3160 /* expand_expr: generate code for computing expression EXP.
3161 An rtx for the computed value is returned. The value is never null.
3162 In the case of a void EXP, const0_rtx is returned.
3164 The value may be stored in TARGET if TARGET is nonzero.
3165 TARGET is just a suggestion; callers must assume that
3166 the rtx returned may not be the same as TARGET.
3168 If TARGET is CONST0_RTX, it means that the value will be ignored.
3170 If TMODE is not VOIDmode, it suggests generating the
3171 result in mode TMODE. But this is done only when convenient.
3172 Otherwise, TMODE is ignored and the value generated in its natural mode.
3173 TMODE is just a suggestion; callers must assume that
3174 the rtx returned may not have mode TMODE.
3176 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3177 with a constant address even if that address is not normally legitimate.
3178 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3180 If MODIFIER is EXPAND_SUM then when EXP is an addition
3181 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3182 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3183 products as above, or REG or MEM, or constant.
3184 Ordinarily in such cases we would output mul or add instructions
3185 and then return a pseudo reg containing the sum.
3187 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3188 it also marks a label as absolutely required (it can't be dead).
3189 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3190 This is used for outputting expressions used in initializers. */
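/* For example, expanding `&x.a[i]' with EXPAND_SUM may yield an rtx of
   the form (plus (mult (reg for I) (const_int 4)) (plus (reg for X)
   (const_int 8))) without emitting any add or multiply insns, so the
   caller can try to use it directly as a memory address.
   (Offsets and element size here are illustrative.)  */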
3193 expand_expr (exp, target, tmode, modifier)
3196 enum machine_mode tmode;
3197 enum expand_modifier modifier;
3199 register rtx op0, op1, temp;
3200 tree type = TREE_TYPE (exp);
3201 int unsignedp = TREE_UNSIGNED (type);
3202 register enum machine_mode mode = TYPE_MODE (type);
3203 register enum tree_code code = TREE_CODE (exp);
3205 /* Use subtarget as the target for operand 0 of a binary operation. */
3206 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3207 rtx original_target = target;
3208 int ignore = target == const0_rtx;
3211 /* Don't use hard regs as subtargets, because the combiner
3212 can only handle pseudo regs. */
3213 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3215 /* Avoid subtargets inside loops,
3216 since they hide some invariant expressions. */
3217 if (preserve_subexpressions_p ())
3220 if (ignore) target = 0, original_target = 0;
3222 /* If we will do cse, generate all results into pseudo registers
3223 since 1) that allows cse to find more things
3224 and 2) otherwise cse could produce an insn the machine
3227 if (! cse_not_expected && mode != BLKmode && target
3228 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3231 /* Ensure we reference a volatile object even if value is ignored. */
3232 if (ignore && TREE_THIS_VOLATILE (exp)
3233 && mode != VOIDmode && mode != BLKmode)
3235 target = gen_reg_rtx (mode);
3236 temp = expand_expr (exp, target, VOIDmode, modifier);
3238 emit_move_insn (target, temp);
3246 tree function = decl_function_context (exp);
3247 /* Handle using a label in a containing function. */
3248 if (function != current_function_decl && function != 0)
3250 struct function *p = find_function_data (function);
3251 /* Allocate in the memory associated with the function
3252 that the label is in. */
3253 push_obstacks (p->function_obstack,
3254 p->function_maybepermanent_obstack);
3256 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3257 label_rtx (exp), p->forced_labels);
3260 else if (modifier == EXPAND_INITIALIZER)
3261 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3262 label_rtx (exp), forced_labels);
3263 temp = gen_rtx (MEM, FUNCTION_MODE,
3264 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3265 if (function != current_function_decl && function != 0)
3266 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3271 if (DECL_RTL (exp) == 0)
3273 error_with_decl (exp, "prior parameter's size depends on `%s'");
3274 return CONST0_RTX (mode);
3280 if (DECL_RTL (exp) == 0)
3282 /* Ensure variable marked as used
3283 even if it doesn't go through a parser. */
3284 TREE_USED (exp) = 1;
3285 /* Handle variables inherited from containing functions. */
3286 context = decl_function_context (exp);
3288 /* We treat inline_function_decl as an alias for the current function
3289 because that is the inline function whose vars, types, etc.
3290 are being merged into the current function.
3291 See expand_inline_function. */
3292 if (context != 0 && context != current_function_decl
3293 && context != inline_function_decl
3294 /* If var is static, we don't need a static chain to access it. */
3295 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3296 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3300 /* Mark as non-local and addressable. */
3301 DECL_NONLOCAL (exp) = 1;
3302 mark_addressable (exp);
3303 if (GET_CODE (DECL_RTL (exp)) != MEM)
3305 addr = XEXP (DECL_RTL (exp), 0);
3306 if (GET_CODE (addr) == MEM)
3307 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3309 addr = fix_lexical_addr (addr, exp);
3310 return change_address (DECL_RTL (exp), mode, addr);
3313 /* This is the case of an array whose size is to be determined
3314 from its initializer, while the initializer is still being parsed.
3316 if (GET_CODE (DECL_RTL (exp)) == MEM
3317 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3318 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3319 XEXP (DECL_RTL (exp), 0));
3320 if (GET_CODE (DECL_RTL (exp)) == MEM
3321 && modifier != EXPAND_CONST_ADDRESS
3322 && modifier != EXPAND_SUM
3323 && modifier != EXPAND_INITIALIZER)
3325 /* DECL_RTL probably contains a constant address.
3326 On RISC machines where a constant address isn't valid,
3327 make some insns to get that address into a register. */
3328 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3330 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3331 return change_address (DECL_RTL (exp), VOIDmode,
3332 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3334 return DECL_RTL (exp);
3337 return immed_double_const (TREE_INT_CST_LOW (exp),
3338 TREE_INT_CST_HIGH (exp),
3342 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3345 /* If optimized, generate immediate CONST_DOUBLE
3346 which will be turned into memory by reload if necessary.
3348 We used to force a register so that loop.c could see it. But
3349 this does not allow gen_* patterns to perform optimizations with
3350 the constants. It also produces two insns in cases like "x = 1.0;".
3351 On most machines, floating-point constants are not permitted in
3352 many insns, so we'd end up copying it to a register in any case.
3354 Now, we do the copying in expand_binop, if appropriate. */
3355 return immed_real_const (exp);
3359 if (! TREE_CST_RTL (exp))
3360 output_constant_def (exp);
3362 /* TREE_CST_RTL probably contains a constant address.
3363 On RISC machines where a constant address isn't valid,
3364 make some insns to get that address into a register. */
3365 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3366 && modifier != EXPAND_CONST_ADDRESS
3367 && modifier != EXPAND_INITIALIZER
3368 && modifier != EXPAND_SUM
3369 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3370 return change_address (TREE_CST_RTL (exp), VOIDmode,
3371 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3372 return TREE_CST_RTL (exp);
3375 context = decl_function_context (exp);
3376 /* We treat inline_function_decl as an alias for the current function
3377 because that is the inline function whose vars, types, etc.
3378 are being merged into the current function.
3379 See expand_inline_function. */
3380 if (context == current_function_decl || context == inline_function_decl)
3383 /* If this is non-local, handle it. */
3386 temp = SAVE_EXPR_RTL (exp);
3387 if (temp && GET_CODE (temp) == REG)
3389 put_var_into_stack (exp);
3390 temp = SAVE_EXPR_RTL (exp);
3392 if (temp == 0 || GET_CODE (temp) != MEM)
3394 return change_address (temp, mode,
3395 fix_lexical_addr (XEXP (temp, 0), exp));
3397 if (SAVE_EXPR_RTL (exp) == 0)
3399 if (mode == BLKmode)
3401 = assign_stack_temp (mode,
3402 int_size_in_bytes (TREE_TYPE (exp)), 0);
3404 temp = gen_reg_rtx (mode);
3405 SAVE_EXPR_RTL (exp) = temp;
3406 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3407 if (!optimize && GET_CODE (temp) == REG)
3408 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3411 return SAVE_EXPR_RTL (exp);
3414 /* Exit the current loop if the body-expression is true. */
3416 rtx label = gen_label_rtx ();
3417 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3418 expand_exit_loop (NULL_PTR);
3424 expand_start_loop (1);
3425 expand_expr_stmt (TREE_OPERAND (exp, 0));
3432 tree vars = TREE_OPERAND (exp, 0);
3433 int vars_need_expansion = 0;
3435 /* Need to open a binding contour here because
3436 if there are any cleanups they must be contained here. */
3437 expand_start_bindings (0);
3439 /* Mark the corresponding BLOCK for output in its proper place. */
3440 if (TREE_OPERAND (exp, 2) != 0
3441 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3442 insert_block (TREE_OPERAND (exp, 2));
3444 /* If VARS have not yet been expanded, expand them now. */
3447 if (DECL_RTL (vars) == 0)
3449 vars_need_expansion = 1;
3452 expand_decl_init (vars);
3453 vars = TREE_CHAIN (vars);
3456 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3458 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3464 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3466 emit_insns (RTL_EXPR_SEQUENCE (exp));
3467 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3468 return RTL_EXPR_RTL (exp);
3471 /* All elts simple constants => refer to a constant in memory. But
3472 if this is a non-BLKmode mode, let it store a field at a time
3473 since that should make a CONST_INT or CONST_DOUBLE when we
3475 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3477 rtx constructor = output_constant_def (exp);
3478 if (modifier != EXPAND_CONST_ADDRESS
3479 && modifier != EXPAND_INITIALIZER
3480 && modifier != EXPAND_SUM
3481 && !memory_address_p (GET_MODE (constructor),
3482 XEXP (constructor, 0)))
3483 constructor = change_address (constructor, VOIDmode,
3484 XEXP (constructor, 0));
3491 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3492 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3497 if (target == 0 || ! safe_from_p (target, exp))
3499 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3500 target = gen_reg_rtx (mode);
3503 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3505 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3506 target = safe_target;
3509 store_constructor (exp, target);
3515 tree exp1 = TREE_OPERAND (exp, 0);
3518 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3519 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3520 This code has the same general effect as simply doing
3521 expand_expr on the save expr, except that the expression PTR
3522 is computed for use as a memory address. This means different
3523 code, suitable for indexing, may be generated. */
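/* For example, for `*p += 1' the front end may wrap P in a SAVE_EXPR;
   here we expand it with EXPAND_SUM and run the result through
   memory_address, so the saved rtx is already in a form suitable for
   use as an address in both the load and the store.
   (Illustrative example.)  */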
3524 if (TREE_CODE (exp1) == SAVE_EXPR
3525 && SAVE_EXPR_RTL (exp1) == 0
3526 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3527 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3528 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3530 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3531 VOIDmode, EXPAND_SUM);
3532 op0 = memory_address (mode, temp);
3533 op0 = copy_all_regs (op0);
3534 SAVE_EXPR_RTL (exp1) = op0;
3538 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3539 op0 = memory_address (mode, op0);
3542 temp = gen_rtx (MEM, mode, op0);
3543 /* If address was computed by addition,
3544 mark this as an element of an aggregate. */
3545 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3546 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3547 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3548 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3549 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3550 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3551 || (TREE_CODE (exp1) == ADDR_EXPR
3552 && (exp2 = TREE_OPERAND (exp1, 0))
3553 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3554 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3555 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3556 MEM_IN_STRUCT_P (temp) = 1;
3557 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3558 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3559 a location is accessed through a pointer to const does not mean
3560 that the value there can never change. */
3561 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3567 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3568 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3570 /* Nonconstant array index or nonconstant element size.
3571 Generate the tree for *(&array+index) and expand that,
3572 except do it in a language-independent way
3573 and don't complain about non-lvalue arrays.
3574 `mark_addressable' should already have been called
3575 for any array for which this case will be reached. */
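/* For example, `a[i]' with a variable index I is rewritten below as
   roughly *(&a + i * sizeof (element)) using ADDR_EXPR, MULT_EXPR and
   INDIRECT_REF trees, and that tree is then expanded recursively.
   (Illustrative rewrite.)  */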
3577 /* Don't forget the const or volatile flag from the array element. */
3578 tree variant_type = build_type_variant (type,
3579 TREE_READONLY (exp),
3580 TREE_THIS_VOLATILE (exp));
3581 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3582 TREE_OPERAND (exp, 0));
3583 tree index = TREE_OPERAND (exp, 1);
3586 /* Convert the integer argument to a type the same size as a pointer
3587 so the multiply won't overflow spuriously. */
3588 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3589 index = convert (type_for_size (POINTER_SIZE, 0), index);
3591 /* Don't think the address has side effects
3592 just because the array does.
3593 (In some cases the address might have side effects,
3594 and we fail to record that fact here. However, it should not
3595 matter, since expand_expr should not care.) */
3596 TREE_SIDE_EFFECTS (array_adr) = 0;
3598 elt = build1 (INDIRECT_REF, type,
3599 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3601 fold (build (MULT_EXPR,
3602 TYPE_POINTER_TO (variant_type),
3603 index, size_in_bytes (type))))));
3605 /* Volatility, etc., of new expression is same as old expression. */
3606 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3607 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3608 TREE_READONLY (elt) = TREE_READONLY (exp);
3610 return expand_expr (elt, target, tmode, modifier);
3613 /* Fold an expression like: "foo"[2].
3614 This is not done in fold so it won't happen inside &. */
3617 tree arg0 = TREE_OPERAND (exp, 0);
3618 tree arg1 = TREE_OPERAND (exp, 1);
3620 if (TREE_CODE (arg0) == STRING_CST
3621 && TREE_CODE (arg1) == INTEGER_CST
3622 && !TREE_INT_CST_HIGH (arg1)
3623 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3625 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3627 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3628 TREE_TYPE (exp) = integer_type_node;
3629 return expand_expr (exp, target, tmode, modifier);
3631 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3633 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3634 TREE_TYPE (exp) = integer_type_node;
3635 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3640 /* If this is a constant index into a constant array,
3641 just get the value from the array. Handle both the cases when
3642 we have an explicit constructor and when our operand is a variable
3643 that was declared const. */
3645 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3646 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3648 tree index = fold (TREE_OPERAND (exp, 1));
3649 if (TREE_CODE (index) == INTEGER_CST
3650 && TREE_INT_CST_HIGH (index) == 0)
3652 int i = TREE_INT_CST_LOW (index);
3653 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3656 elem = TREE_CHAIN (elem);
3658 return expand_expr (fold (TREE_VALUE (elem)), target,
3663 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3664 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3665 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3666 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3667 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3669 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3672 tree index = fold (TREE_OPERAND (exp, 1));
3673 if (TREE_CODE (index) == INTEGER_CST
3674 && TREE_INT_CST_HIGH (index) == 0)
3676 int i = TREE_INT_CST_LOW (index);
3677 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3679 if (TREE_CODE (init) == CONSTRUCTOR)
3681 tree elem = CONSTRUCTOR_ELTS (init);
3684 elem = TREE_CHAIN (elem);
3686 return expand_expr (fold (TREE_VALUE (elem)), target,
3689 else if (TREE_CODE (init) == STRING_CST
3690 && i < TREE_STRING_LENGTH (init))
3692 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3693 return convert_to_mode (mode, temp, 0);
3697 /* Treat array-ref with constant index as a component-ref. */
3701 /* If the operand is a CONSTRUCTOR, we can just extract the
3702 appropriate field if it is present. */
3703 if (code != ARRAY_REF
3704 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3708 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3709 elt = TREE_CHAIN (elt))
3710 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3711 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3715 enum machine_mode mode1;
3720 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3721 &mode1, &unsignedp, &volatilep);
3723 /* In some cases, we will be offsetting OP0's address by a constant.
3724 So get it as a sum, if possible. If we will be using it
3725 directly in an insn, we validate it. */
3726 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3728 /* If this is a constant, put it into a register if it is a
3729 legitimate constant and memory if it isn't. */
3730 if (CONSTANT_P (op0))
3732 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3733 if (LEGITIMATE_CONSTANT_P (op0))
3734 op0 = force_reg (mode, op0);
3736 op0 = validize_mem (force_const_mem (mode, op0));
3741 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3743 if (GET_CODE (op0) != MEM)
3745 op0 = change_address (op0, VOIDmode,
3746 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3747 force_reg (Pmode, offset_rtx)));
3750 /* Don't forget about volatility even if this is a bitfield. */
3751 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3753 op0 = copy_rtx (op0);
3754 MEM_VOLATILE_P (op0) = 1;
3757 if (mode1 == VOIDmode
3758 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3759 && modifier != EXPAND_CONST_ADDRESS
3760 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3761 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3763 /* In cases where an aligned union has an unaligned object
3764 as a field, we might be extracting a BLKmode value from
3765 an integer-mode (e.g., SImode) object. Handle this case
3766 by doing the extract into an object as wide as the field
3767 (which we know to be the width of a basic mode), then
3768 storing into memory, and changing the mode to BLKmode. */
3769 enum machine_mode ext_mode = mode;
3771 if (ext_mode == BLKmode)
3772 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3774 if (ext_mode == BLKmode)
3777 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3778 unsignedp, target, ext_mode, ext_mode,
3779 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3780 int_size_in_bytes (TREE_TYPE (tem)));
3781 if (mode == BLKmode)
3783 rtx new = assign_stack_temp (ext_mode,
3784 bitsize / BITS_PER_UNIT, 0);
3786 emit_move_insn (new, op0);
3787 op0 = copy_rtx (new);
3788 PUT_MODE (op0, BLKmode);
3794 /* Get a reference to just this component. */
3795 if (modifier == EXPAND_CONST_ADDRESS
3796 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3797 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3798 (bitpos / BITS_PER_UNIT)));
3800 op0 = change_address (op0, mode1,
3801 plus_constant (XEXP (op0, 0),
3802 (bitpos / BITS_PER_UNIT)));
3803 MEM_IN_STRUCT_P (op0) = 1;
3804 MEM_VOLATILE_P (op0) |= volatilep;
3805 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3808 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3809 convert_move (target, op0, unsignedp);
3815 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3816 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3817 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3818 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3819 MEM_IN_STRUCT_P (temp) = 1;
3820 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3821 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3822 a location is accessed through a pointer to const does not mean
3823 that the value there can never change. */
3824 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3829 /* Intended for a reference to a buffer of a file-object in Pascal.
3830 But it's not certain that a special tree code will really be
3831 necessary for these. INDIRECT_REF might work for them. */
3835 /* IN_EXPR: Inlined pascal set IN expression.
3838 rlo = set_low - (set_low%bits_per_word);
3839 the_word = set [ (index - rlo)/bits_per_word ];
3840 bit_index = index % bits_per_word;
3841 bitmask = 1 << bit_index;
3842 return !!(the_word & bitmask); */
3844 preexpand_calls (exp);
3846 tree set = TREE_OPERAND (exp, 0);
3847 tree index = TREE_OPERAND (exp, 1);
3848 tree set_type = TREE_TYPE (set);
3850 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3851 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3857 rtx diff, quo, rem, addr, bit, result;
3858 rtx setval, setaddr;
3859 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
3862 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
3864 /* If domain is empty, answer is no. */
3865 if (tree_int_cst_lt (set_high_bound, set_low_bound))
3868 index_val = expand_expr (index, 0, VOIDmode, 0);
3869 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
3870 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
3871 setval = expand_expr (set, 0, VOIDmode, 0);
3872 setaddr = XEXP (setval, 0);
3874 /* Compare index against bounds, if they are constant. */
3875 if (GET_CODE (index_val) == CONST_INT
3876 && GET_CODE (lo_r) == CONST_INT)
3878 if (INTVAL (index_val) < INTVAL (lo_r))
3882 if (GET_CODE (index_val) == CONST_INT
3883 && GET_CODE (hi_r) == CONST_INT)
3885 if (INTVAL (hi_r) < INTVAL (index_val))
3889 /* If we get here, we have to generate the code for both cases
3890 (in range and out of range). */
3892 op0 = gen_label_rtx ();
3893 op1 = gen_label_rtx ();
3895 if (! (GET_CODE (index_val) == CONST_INT
3896 && GET_CODE (lo_r) == CONST_INT))
3898 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
3899 emit_jump_insn (gen_blt (op1));
3902 if (! (GET_CODE (index_val) == CONST_INT
3903 && GET_CODE (hi_r) == CONST_INT))
3905 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
3906 emit_jump_insn (gen_bgt (op1));
3909 /* Calculate the element number of bit zero in the first word
3911 if (GET_CODE (lo_r) == CONST_INT)
3912 rlow = gen_rtx (CONST_INT, VOIDmode,
3913 INTVAL (lo_r) & ~ (1 << BITS_PER_UNIT));
3915 rlow = expand_binop (index_mode, and_optab,
3916 lo_r, gen_rtx (CONST_INT, VOIDmode,
3917 ~ (1 << BITS_PER_UNIT)),
3918 0, 0, OPTAB_LIB_WIDEN);
3920 diff = expand_binop (index_mode, sub_optab,
3921 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
3923 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
3924 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
3926 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
3927 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
3929 addr = memory_address (byte_mode,
3930 expand_binop (index_mode, add_optab,
3932 /* Extract the bit we want to examine */
3933 bit = expand_shift (RSHIFT_EXPR, byte_mode,
3934 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
3935 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
3936 1, OPTAB_LIB_WIDEN);
3937 emit_move_insn (target, result);
3939 /* Output the code to handle the out-of-range case. */
3942 emit_move_insn (target, const0_rtx);
3947 case WITH_CLEANUP_EXPR:
3948 if (RTL_EXPR_RTL (exp) == 0)
3951 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3953 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3954 /* That's it for this cleanup. */
3955 TREE_OPERAND (exp, 2) = 0;
3957 return RTL_EXPR_RTL (exp);
3960 /* Check for a built-in function. */
3961 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3962 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3963 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3964 return expand_builtin (exp, target, subtarget, tmode, ignore);
3965 /* If this call was expanded already by preexpand_calls,
3966 just return the result we got. */
3967 if (CALL_EXPR_RTL (exp) != 0)
3968 return CALL_EXPR_RTL (exp);
3969 return expand_call (exp, target, ignore);
3971 case NON_LVALUE_EXPR:
3974 case REFERENCE_EXPR:
3975 if (TREE_CODE (type) == VOID_TYPE || ignore)
3977 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3980 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3981 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3982 if (TREE_CODE (type) == UNION_TYPE)
3984 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3987 if (mode == BLKmode)
3989 if (TYPE_SIZE (type) == 0
3990 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3992 target = assign_stack_temp (BLKmode,
3993 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3994 + BITS_PER_UNIT - 1)
3995 / BITS_PER_UNIT, 0);
3998 target = gen_reg_rtx (mode);
4000 if (GET_CODE (target) == MEM)
4001 /* Store data into beginning of memory target. */
4002 store_expr (TREE_OPERAND (exp, 0),
4003 change_address (target, TYPE_MODE (valtype), 0),
4005 else if (GET_CODE (target) == REG)
4006 /* Store this field into a union of the proper type. */
4007 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4008 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4010 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4014 /* Return the entire union. */
4017 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, modifier);
4018 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
4020 if (modifier == EXPAND_INITIALIZER)
4021 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4022 if (flag_force_mem && GET_CODE (op0) == MEM)
4023 op0 = copy_to_reg (op0);
4026 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4028 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4032 /* We come here from MINUS_EXPR when the second operand is a constant. */
4034 this_optab = add_optab;
4036 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4037 something else, make sure we add the register to the constant and
4038 then to the other thing. This case can occur during strength
4039 reduction and doing it this way will produce better code if the
4040 frame pointer or argument pointer is eliminated.
4042 fold-const.c will ensure that the constant is always in the inner
4043 PLUS_EXPR, so the only case we need to do anything about is if
4044 sp, ap, or fp is our second argument, in which case we must swap
4045 the innermost first argument and our second argument. */
4047 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4048 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4049 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4050 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4051 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4052 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4054 tree t = TREE_OPERAND (exp, 1);
4056 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4057 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4060 /* If the result is to be Pmode and we are adding an integer to
4061 something, we might be forming a constant. So try to use
4062 plus_constant. If it produces a sum and we can't accept it,
4063 use force_operand. This allows P = &ARR[const] to generate
4064 efficient code on machines where a SYMBOL_REF is not a valid
4067 If this is an EXPAND_SUM call, always return the sum. */
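/* For example, for `p = &arr[5]' with 4-byte elements (illustrative
   sizes), plus_constant can fold the whole right-hand side into
   (const (plus (symbol_ref arr) (const_int 20))); when the caller asked
   for a bare sum (EXPAND_SUM or EXPAND_INITIALIZER) that rtx is
   returned as is, and otherwise force_operand puts the value where an
   insn can use it.  */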
4068 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4069 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4070 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4073 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4075 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4076 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4077 op1 = force_operand (op1, target);
4081 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4082 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4083 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4086 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4088 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4089 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4090 op0 = force_operand (op0, target);
4094 /* No sense saving up arithmetic to be done
4095 if it's all in the wrong mode to form part of an address.
4096 And force_operand won't know whether to sign-extend or
4098 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4099 || mode != Pmode) goto binop;
4101 preexpand_calls (exp);
4102 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4105 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4106 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4108 /* Make sure any term that's a sum with a constant comes last. */
4109 if (GET_CODE (op0) == PLUS
4110 && CONSTANT_P (XEXP (op0, 1)))
4116 /* If adding to a sum including a constant,
4117 associate it to put the constant outside. */
4118 if (GET_CODE (op1) == PLUS
4119 && CONSTANT_P (XEXP (op1, 1)))
4121 rtx constant_term = const0_rtx;
4123 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4126 /* Ensure that MULT comes first if there is one. */
4127 else if (GET_CODE (op0) == MULT)
4128 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4130 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4132 /* Let's also eliminate constants from op0 if possible. */
4133 op0 = eliminate_constant_term (op0, &constant_term);
4135 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4136 their sum should be a constant. Form it into OP1, since the
4137 result we want will then be OP0 + OP1. */
4139 temp = simplify_binary_operation (PLUS, mode, constant_term,
4144 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4147 /* Put a constant term last and put a multiplication first. */
4148 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4149 temp = op1, op1 = op0, op0 = temp;
4151 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4152 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4155 /* Handle difference of two symbolic constants,
4156 for the sake of an initializer. */
4157 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4158 && really_constant_p (TREE_OPERAND (exp, 0))
4159 && really_constant_p (TREE_OPERAND (exp, 1)))
4161 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4162 VOIDmode, modifier);
4163 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4164 VOIDmode, modifier);
4165 return gen_rtx (MINUS, mode, op0, op1);
4167 /* Convert A - const to A + (-const). */
4168 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4170 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4171 fold (build1 (NEGATE_EXPR, type,
4172 TREE_OPERAND (exp, 1))));
4175 this_optab = sub_optab;
4179 preexpand_calls (exp);
4180 /* If first operand is constant, swap them.
4181 Thus the following special case checks need only
4182 check the second operand. */
4183 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4185 register tree t1 = TREE_OPERAND (exp, 0);
4186 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4187 TREE_OPERAND (exp, 1) = t1;
4190 /* Attempt to return something suitable for generating an
4191 indexed address, for machines that support that. */
4193 if (modifier == EXPAND_SUM && mode == Pmode
4194 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4195 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4197 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4199 /* Apply distributive law if OP0 is x+c. */
4200 if (GET_CODE (op0) == PLUS
4201 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4202 return gen_rtx (PLUS, mode,
4203 gen_rtx (MULT, mode, XEXP (op0, 0),
4204 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4205 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4206 * INTVAL (XEXP (op0, 1))));
4208 if (GET_CODE (op0) != REG)
4209 op0 = force_operand (op0, NULL_RTX);
4210 if (GET_CODE (op0) != REG)
4211 op0 = copy_to_mode_reg (mode, op0);
4213 return gen_rtx (MULT, mode, op0,
4214 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
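/* The distributive-law case above rewrites (x + c) * d, with c and d both
   constant, as x*d + c*d, so the product can still feed an indexed address;
   for instance (x + 3) * 5 becomes x*5 + 15.  A small sketch of the
   arithmetic being relied on, in plain C with an illustrative name:

     static long
     scale_sum (long x, long c, long d)
     {
       return x * d + c * d;
     }

   which yields the same value as (x + c) * d. */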
4217 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4220 /* Check for multiplying things that have been extended
4221 from a narrower type. If this machine supports multiplying
4222 in that narrower type with a result in the desired type,
4223 do it that way, and avoid the explicit type-conversion. */
4224 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4225 && TREE_CODE (type) == INTEGER_TYPE
4226 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4227 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4228 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4229 && int_fits_type_p (TREE_OPERAND (exp, 1),
4230 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4231 /* Don't use a widening multiply if a shift will do. */
4232 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4233 > HOST_BITS_PER_WIDE_INT)
4234 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4236 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4237 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4239 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4240 /* If both operands are extended, they must either both
4241 be zero-extended or both be sign-extended. */
4242 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4244 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4246 enum machine_mode innermode
4247 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4248 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4249 ? umul_widen_optab : smul_widen_optab);
4250 if (mode == GET_MODE_WIDER_MODE (innermode)
4251 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4253 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4254 NULL_RTX, VOIDmode, 0);
4255 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4256 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4259 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4260 NULL_RTX, VOIDmode, 0);
4264 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4265 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4266 return expand_mult (mode, op0, op1, target, unsignedp);
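/* The check above looks for a multiply whose operands were both extended
   from a narrower type; on a machine with a widening multiply (say a
   16x16->32 bit multiply) the product can be formed directly from the
   narrow operands, skipping the explicit conversions.  A small standalone
   illustration of the value being computed, assuming <stdint.h> types and
   an illustrative function name:

     #include <stdint.h>

     static int32_t
     widening_mul (int16_t a, int16_t b)
     {
       return (int32_t) a * (int32_t) b;
     }

   The result is the same as converting both operands first, which is why
   smul_widen_optab or umul_widen_optab can be substituted when the target
   provides such an instruction. */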
4268 case TRUNC_DIV_EXPR:
4269 case FLOOR_DIV_EXPR:
4271 case ROUND_DIV_EXPR:
4272 case EXACT_DIV_EXPR:
4273 preexpand_calls (exp);
4274 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4276 /* Possible optimization: compute the dividend with EXPAND_SUM;
4277 then, if the divisor is constant, we can optimize the case
4278 where some terms of the dividend have coefficients divisible by it. */
4279 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4280 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4281 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4284 this_optab = flodiv_optab;
4287 case TRUNC_MOD_EXPR:
4288 case FLOOR_MOD_EXPR:
4290 case ROUND_MOD_EXPR:
4291 preexpand_calls (exp);
4292 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4294 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4295 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4296 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4298 case FIX_ROUND_EXPR:
4299 case FIX_FLOOR_EXPR:
4301 abort (); /* Not used for C. */
4303 case FIX_TRUNC_EXPR:
4304 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4306 target = gen_reg_rtx (mode);
4307 expand_fix (target, op0, unsignedp);
4311 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4313 target = gen_reg_rtx (mode);
4314 /* expand_float can't figure out what to do if FROM has VOIDmode.
4315 So give it the correct mode. With -O, cse will optimize this. */
4316 if (GET_MODE (op0) == VOIDmode)
4317 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4319 expand_float (target, op0,
4320 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4324 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4325 temp = expand_unop (mode, neg_optab, op0, target, 0);
4331 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4333 /* Unsigned abs is simply the operand. Testing here means we don't
4334 risk generating incorrect code below. */
4335 if (TREE_UNSIGNED (type))
4338 /* First try to do it with a special abs instruction. */
4339 temp = expand_unop (mode, abs_optab, op0, target, 0);
4343 /* If this machine has expensive jumps, we can do integer absolute
4344 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4345 where W is the width of MODE. */
4347 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4349 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4350 size_int (GET_MODE_BITSIZE (mode) - 1),
4353 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4356 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4363 /* If that does not win, use conditional jump and negate. */
4364 target = original_target;
4365 temp = gen_label_rtx ();
4366 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4367 || (GET_CODE (target) == REG
4368 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4369 target = gen_reg_rtx (mode);
4370 emit_move_insn (target, op0);
4371 emit_cmp_insn (target,
4372 expand_expr (convert (type, integer_zero_node),
4373 NULL_RTX, VOIDmode, 0),
4374 GE, NULL_RTX, mode, 0, 0);
4376 emit_jump_insn (gen_bge (temp));
4377 op0 = expand_unop (mode, neg_optab, target, target, 0);
4379 emit_move_insn (target, op0);
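/* Both expansions of absolute value above can be checked against the
   identity they rely on.  With W the width of the mode, the arithmetic
   shift x >> (W-1) is 0 for nonnegative x and all ones (-1) for negative x,
   so ((x >> (W-1)) ^ x) - (x >> (W-1)) equals x or -x respectively.  A
   minimal standalone sketch, assuming 32-bit int, two's complement, and an
   arithmetic right shift of negative values (not guaranteed by ANSI C, and
   undefined for INT_MIN just as negation is):

     static int
     abs_branchfree (int x)
     {
       int sign = x >> 31;
       return (x ^ sign) - sign;
     }

   The fallback path just emitted does the same job with a compare, a
   conditional branch over a negation, and a move. */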
4386 target = original_target;
4387 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4388 || (GET_CODE (target) == REG
4389 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4390 target = gen_reg_rtx (mode);
4391 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4392 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4394 /* First try to do it with a special MIN or MAX instruction.
4395 If that does not win, use a conditional jump to select the proper value. */
4397 this_optab = (TREE_UNSIGNED (type)
4398 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4399 : (code == MIN_EXPR ? smin_optab : smax_optab));
4401 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4407 emit_move_insn (target, op0);
4408 op0 = gen_label_rtx ();
4409 if (code == MAX_EXPR)
4410 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4411 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4412 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4414 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4415 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4416 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4417 if (temp == const0_rtx)
4418 emit_move_insn (target, op1);
4419 else if (temp != const_true_rtx)
4421 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4422 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4425 emit_move_insn (target, op1);
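/* When no min/max instruction exists, the sequence above computes MAX (or
   MIN) with one move, one compare, one conditional branch and a second
   move that is skipped when the first operand already wins.  A plain-C
   sketch of the signed MAX case generated here (max_by_branch is only an
   illustrative name):

     static int
     max_by_branch (int a, int b)
     {
       int result = a;
       if (! (result >= b))
         result = b;
       return result;
     }

   The unsigned variants differ only in using GEU or LEU for the comparison. */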
4430 /* ??? Can optimize when the operand of this is a bitwise operation,
4431 by using a different bitwise operation. */
4433 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4434 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4440 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4441 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4446 /* ??? Can optimize bitwise operations with one arg constant.
4447 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4448 and (a bitwise1 b) bitwise2 b (etc)
4449 but that is probably not worthwhile. */
4451 /* BIT_AND_EXPR is for bitwise anding.
4452 TRUTH_AND_EXPR is for anding two boolean values
4453 when we want in all cases to compute both of them.
4454 In general it is fastest to do TRUTH_AND_EXPR by
4455 computing both operands as actual zero-or-1 values
4456 and then bitwise anding. In cases where there cannot
4457 be any side effects, better code would be made by
4458 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4459 but the question is how to recognize those cases. */
4461 case TRUTH_AND_EXPR:
4463 this_optab = and_optab;
4466 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4469 this_optab = ior_optab;
4473 this_optab = xor_optab;
4480 preexpand_calls (exp);
4481 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4483 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4484 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4487 /* Could determine the answer when only additive constants differ.
4488 Also, the addition of one can be handled by changing the condition. */
4495 preexpand_calls (exp);
4496 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4499 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4500 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4502 && GET_CODE (original_target) == REG
4503 && (GET_MODE (original_target)
4504 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4506 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4507 if (temp != original_target)
4508 temp = copy_to_reg (temp);
4509 op1 = gen_label_rtx ();
4510 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4511 GET_MODE (temp), unsignedp, 0);
4512 emit_jump_insn (gen_beq (op1));
4513 emit_move_insn (temp, const1_rtx);
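/* For "foo != 0" with a matching register target, the code above avoids a
   store-flag instruction: it loads foo and only when the value is nonzero
   overwrites it with 1, so a zero value falls through unchanged.  A minimal
   sketch of the equivalent control flow in plain C (illustrative name):

     static int
     ne_zero (int foo)
     {
       int t = foo;
       if (t != 0)
         t = 1;
       return t;
     }
*/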
4517 /* If no set-flag instruction, must generate a conditional
4518 store into a temporary variable. Drop through
4519 and handle this like && and ||. */
4521 case TRUTH_ANDIF_EXPR:
4522 case TRUTH_ORIF_EXPR:
4523 if (target == 0 || ! safe_from_p (target, exp)
4524 /* Make sure we don't have a hard reg (such as function's return
4525 value) live across basic blocks, if not optimizing. */
4526 || (!optimize && GET_CODE (target) == REG
4527 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4528 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4529 emit_clr_insn (target);
4530 op1 = gen_label_rtx ();
4531 jumpifnot (exp, op1);
4532 emit_0_to_1_insn (target);
4536 case TRUTH_NOT_EXPR:
4537 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4538 /* The parser is careful to generate TRUTH_NOT_EXPR
4539 only with operands that are always zero or one. */
4540 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4541 target, 1, OPTAB_LIB_WIDEN);
4547 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4549 return expand_expr (TREE_OPERAND (exp, 1),
4550 (ignore ? const0_rtx : target),
4555 /* Note that COND_EXPRs whose type is a structure or union
4556 are required to be constructed to contain assignments of
4557 a temporary variable, so that we can evaluate them here
4558 for side effect only. If type is void, we must do likewise. */
4560 /* If an arm of the branch requires a cleanup,
4561 only that cleanup is performed. */
4564 tree binary_op = 0, unary_op = 0;
4565 tree old_cleanups = cleanups_this_call;
4566 cleanups_this_call = 0;
4568 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4569 convert it to our mode, if necessary. */
4570 if (integer_onep (TREE_OPERAND (exp, 1))
4571 && integer_zerop (TREE_OPERAND (exp, 2))
4572 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4574 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4575 if (GET_MODE (op0) == mode)
4578 target = gen_reg_rtx (mode);
4579 convert_move (target, op0, unsignedp);
4583 /* If we are not to produce a result, we have no target. Otherwise,
4584 if a target was specified use it; it will not be used as an
4585 intermediate target unless it is safe. If no target, use a temporary. */
4588 if (mode == VOIDmode || ignore)
4590 else if (original_target
4591 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4592 temp = original_target;
4593 else if (mode == BLKmode)
4595 if (TYPE_SIZE (type) == 0
4596 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4598 temp = assign_stack_temp (BLKmode,
4599 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4600 + BITS_PER_UNIT - 1)
4601 / BITS_PER_UNIT, 0);
4604 temp = gen_reg_rtx (mode);
4606 /* Check for X ? A + B : A. If we have this, we can copy
4607 A to the output and conditionally add B. Similarly for unary
4608 operations. Don't do this if X has side-effects because
4609 those side effects might affect A or B and the "?" operation is
4610 a sequence point in ANSI. (We test for side effects later.) */
4612 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4613 && operand_equal_p (TREE_OPERAND (exp, 2),
4614 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4615 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4616 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4617 && operand_equal_p (TREE_OPERAND (exp, 1),
4618 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4619 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4620 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4621 && operand_equal_p (TREE_OPERAND (exp, 2),
4622 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4623 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4624 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4625 && operand_equal_p (TREE_OPERAND (exp, 1),
4626 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4627 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4629 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4630 operation, do this as A + (X != 0). Similarly for other simple
4631 binary operators. */
4632 if (singleton && binary_op
4633 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4634 && (TREE_CODE (binary_op) == PLUS_EXPR
4635 || TREE_CODE (binary_op) == MINUS_EXPR
4636 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4637 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4638 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4639 && integer_onep (TREE_OPERAND (binary_op, 1))
4640 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4643 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4644 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4645 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4646 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4649 /* If we had X ? A : A + 1, do this as A + (X == 0).
4651 We have to invert the truth value here and then put it
4652 back later if do_store_flag fails. We cannot simply copy
4653 TREE_OPERAND (exp, 0) to another variable and modify that
4654 because invert_truthvalue can modify the tree pointed to by its argument. */
4656 if (singleton == TREE_OPERAND (exp, 1))
4657 TREE_OPERAND (exp, 0)
4658 = invert_truthvalue (TREE_OPERAND (exp, 0));
4660 result = do_store_flag (TREE_OPERAND (exp, 0),
4661 (safe_from_p (temp, singleton)
4663 mode, BRANCH_COST <= 1);
4667 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4668 return expand_binop (mode, boptab, op1, result, temp,
4669 unsignedp, OPTAB_LIB_WIDEN);
4671 else if (singleton == TREE_OPERAND (exp, 1))
4672 TREE_OPERAND (exp, 0)
4673 = invert_truthvalue (TREE_OPERAND (exp, 0));
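/* The store-flag path above turns a conditional expression whose two arms
   differ by a simple operation on the constant 1 into straight-line code:
   "x ? a + 1 : a" becomes "a + (x != 0)", and "x ? a : a + 1" becomes
   "a + (x == 0)" (hence the call to invert_truthvalue).  A small sketch of
   the equivalence in plain C (illustrative name):

     static int
     cond_add_one (int a, int x)
     {
       return a + (x != 0);
     }

   which returns the same value as (x ? a + 1 : a) but needs no branch when
   the target has a store-flag instruction. */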
4677 op0 = gen_label_rtx ();
4679 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4683 /* If the target conflicts with the other operand of the
4684 binary op, we can't use it. Also, we can't use the target
4685 if it is a hard register, because evaluating the condition
4686 might clobber it. */
4688 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4689 || (GET_CODE (temp) == REG
4690 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4691 temp = gen_reg_rtx (mode);
4692 store_expr (singleton, temp, 0);
4695 expand_expr (singleton,
4696 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4697 if (cleanups_this_call)
4699 sorry ("aggregate value in COND_EXPR");
4700 cleanups_this_call = 0;
4702 if (singleton == TREE_OPERAND (exp, 1))
4703 jumpif (TREE_OPERAND (exp, 0), op0);
4705 jumpifnot (TREE_OPERAND (exp, 0), op0);
4707 if (binary_op && temp == 0)
4708 /* Just touch the other operand. */
4709 expand_expr (TREE_OPERAND (binary_op, 1),
4710 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4712 store_expr (build (TREE_CODE (binary_op), type,
4713 make_tree (type, temp),
4714 TREE_OPERAND (binary_op, 1)),
4717 store_expr (build1 (TREE_CODE (unary_op), type,
4718 make_tree (type, temp)),
4723 /* This is now done in jump.c and is better done there because it
4724 produces shorter register lifetimes. */
4726 /* Check whether both possibilities are either constants or variables
4727 in registers (but not the same as the target!). If so, we can
4728 save branches by assigning one, branching, and assigning the other. */
4730 else if (temp && GET_MODE (temp) != BLKmode
4731 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4732 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4733 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4734 && DECL_RTL (TREE_OPERAND (exp, 1))
4735 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4736 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4737 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4738 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4739 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4740 && DECL_RTL (TREE_OPERAND (exp, 2))
4741 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4742 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4744 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4745 temp = gen_reg_rtx (mode);
4746 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4747 jumpifnot (TREE_OPERAND (exp, 0), op0);
4748 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4752 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4753 comparison operator. If we have one of these cases, set the
4754 output to A, branch on A (cse will merge these two references),
4755 then set the output to FOO. */
4757 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4758 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4759 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4760 TREE_OPERAND (exp, 1), 0)
4761 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4762 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4764 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4765 temp = gen_reg_rtx (mode);
4766 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4767 jumpif (TREE_OPERAND (exp, 0), op0);
4768 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4772 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4773 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4774 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4775 TREE_OPERAND (exp, 2), 0)
4776 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4777 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4779 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4780 temp = gen_reg_rtx (mode);
4781 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4782 jumpifnot (TREE_OPERAND (exp, 0), op0);
4783 store_expr (TREE_OPERAND (exp, 1), temp, 0);
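/* The two special cases above handle "A op 0 ? A : FOO" and its mirror
   image: the output is first set to A, the branch tests A op 0 (cse can
   then merge the two references to A), and only the not-taken path stores
   FOO.  Roughly, for "a > 0 ? a : foo" the emitted control flow is the
   plain-C equivalent of (illustrative name):

     static int
     select_a_or_foo (int a, int foo)
     {
       int result = a;
       if (! (a > 0))
         result = foo;
       return result;
     }
*/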
4788 op1 = gen_label_rtx ();
4789 jumpifnot (TREE_OPERAND (exp, 0), op0);
4791 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4793 expand_expr (TREE_OPERAND (exp, 1),
4794 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4795 if (cleanups_this_call)
4797 sorry ("aggregate value in COND_EXPR");
4798 cleanups_this_call = 0;
4802 emit_jump_insn (gen_jump (op1));
4806 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4808 expand_expr (TREE_OPERAND (exp, 2),
4809 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4812 if (cleanups_this_call)
4814 sorry ("aggregate value in COND_EXPR");
4815 cleanups_this_call = 0;
4821 cleanups_this_call = old_cleanups;
4827 /* Something needs to be initialized, but we didn't know
4828 where that thing was when building the tree. For example,
4829 it could be the return value of a function, or a parameter
4830 to a function which is laid down on the stack, or a temporary
4831 variable which must be passed by reference.
4833 We guarantee that the expression will either be constructed
4834 or copied into our original target. */
4836 tree slot = TREE_OPERAND (exp, 0);
4839 if (TREE_CODE (slot) != VAR_DECL)
4844 if (DECL_RTL (slot) != 0)
4846 target = DECL_RTL (slot);
4847 /* If we have already expanded the slot, don't do it again. */
4849 if (TREE_OPERAND (exp, 1) == NULL_TREE)
4854 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4855 /* All temp slots at this level must not conflict. */
4856 preserve_temp_slots (target);
4857 DECL_RTL (slot) = target;
4861 /* I bet this needs to be done, and I bet that it needs to
4862 be above, inside the else clause. The reason is
4863 simple: how else is it going to get cleaned up? (mrs)
4865 The reason it probably did not work before, and was
4866 commented out, is that this was re-expanding already
4867 expanded target_exprs (target == 0 and DECL_RTL (slot)
4868 != 0), also cleaning them up many times as well. :-( */
4870 /* Since SLOT is not known to the called function
4871 to belong to its stack frame, we must build an explicit
4872 cleanup. This case occurs when we must build up a reference
4873 to pass the reference as an argument. In this case,
4874 it is very likely that such a reference need not be
4877 if (TREE_OPERAND (exp, 2) == 0)
4878 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4879 if (TREE_OPERAND (exp, 2))
4880 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4881 cleanups_this_call);
4886 /* This case does occur, when expanding a parameter which
4887 needs to be constructed on the stack. The target
4888 is the actual stack address that we want to initialize.
4889 The function we call will perform the cleanup in this case. */
4891 DECL_RTL (slot) = target;
4894 exp1 = TREE_OPERAND (exp, 1);
4895 /* Mark it as expanded. */
4896 TREE_OPERAND (exp, 1) = NULL_TREE;
4898 return expand_expr (exp1, target, tmode, modifier);
4903 tree lhs = TREE_OPERAND (exp, 0);
4904 tree rhs = TREE_OPERAND (exp, 1);
4905 tree noncopied_parts = 0;
4906 tree lhs_type = TREE_TYPE (lhs);
4908 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4909 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4910 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4911 TYPE_NONCOPIED_PARTS (lhs_type));
4912 while (noncopied_parts != 0)
4914 expand_assignment (TREE_VALUE (noncopied_parts),
4915 TREE_PURPOSE (noncopied_parts), 0, 0);
4916 noncopied_parts = TREE_CHAIN (noncopied_parts);
4923 /* If lhs is complex, expand calls in rhs before computing it.
4924 That's so we don't compute a pointer and save it over a call.
4925 If lhs is simple, compute it first so we can give it as a
4926 target if the rhs is just a call. This avoids an extra temp and copy,
4927 and it prevents a partial subsumption that makes bad code.
4928 Actually we could treat component_ref's of vars like vars. */
4930 tree lhs = TREE_OPERAND (exp, 0);
4931 tree rhs = TREE_OPERAND (exp, 1);
4932 tree noncopied_parts = 0;
4933 tree lhs_type = TREE_TYPE (lhs);
4937 if (TREE_CODE (lhs) != VAR_DECL
4938 && TREE_CODE (lhs) != RESULT_DECL
4939 && TREE_CODE (lhs) != PARM_DECL)
4940 preexpand_calls (exp);
4942 /* Check for |= or &= of a bitfield of size 1 into another bitfield
4943 of size 1. In this case, (unless we need the result of the
4944 assignment) we can do this more efficiently with a
4945 test followed by an assignment, if necessary.
4947 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4948 things change so we do, this code should be enhanced to
4949 support it. */
4950 if (ignore
4951 && TREE_CODE (lhs) == COMPONENT_REF
4952 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4953 || TREE_CODE (rhs) == BIT_AND_EXPR)
4954 && TREE_OPERAND (rhs, 0) == lhs
4955 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4956 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4957 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4959 rtx label = gen_label_rtx ();
4961 do_jump (TREE_OPERAND (rhs, 1),
4962 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4963 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4964 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4965 (TREE_CODE (rhs) == BIT_IOR_EXPR
4967 : integer_zero_node)),
4969 do_pending_stack_adjust ();
4974 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4975 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4976 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4977 TYPE_NONCOPIED_PARTS (lhs_type));
4979 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4980 while (noncopied_parts != 0)
4982 expand_assignment (TREE_PURPOSE (noncopied_parts),
4983 TREE_VALUE (noncopied_parts), 0, 0);
4984 noncopied_parts = TREE_CHAIN (noncopied_parts);
4989 case PREINCREMENT_EXPR:
4990 case PREDECREMENT_EXPR:
4991 return expand_increment (exp, 0);
4993 case POSTINCREMENT_EXPR:
4994 case POSTDECREMENT_EXPR:
4995 /* Faster to treat as pre-increment if result is not used. */
4996 return expand_increment (exp, ! ignore);
4999 /* Are we taking the address of a nested function? */
5000 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5001 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5003 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5004 op0 = force_operand (op0, target);
5008 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5009 (modifier == EXPAND_INITIALIZER
5010 ? modifier : EXPAND_CONST_ADDRESS));
5011 if (GET_CODE (op0) != MEM)
5014 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5015 return XEXP (op0, 0);
5016 op0 = force_operand (XEXP (op0, 0), target);
5018 if (flag_force_addr && GET_CODE (op0) != REG)
5019 return force_reg (Pmode, op0);
5022 case ENTRY_VALUE_EXPR:
5025 /* COMPLEX type for Extended Pascal & Fortran */
5028 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5032 /* Get the rtx of the operands. */
5033 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5034 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5037 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5039 prev = get_last_insn ();
5041 /* Tell flow that the whole of the destination is being set. */
5042 if (GET_CODE (target) == REG)
5043 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5045 /* Move the real (op0) and imaginary (op1) parts to their location. */
5046 emit_move_insn (gen_lowpart (mode, target), op0);
5047 emit_move_insn (gen_highpart (mode, target), op1);
5049 /* Complex construction should appear as a single unit. */
5057 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5058 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5060 target = gen_reg_rtx (mode);
5061 emit_move_insn (target, gen_lowpart (mode, op0));
5067 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5068 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5070 target = gen_reg_rtx (mode);
5071 emit_move_insn (target, gen_highpart (mode, op0));
5077 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5081 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5084 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5086 prev = get_last_insn ();
5088 /* Tell flow that the whole of the destination is being set. */
5089 if (GET_CODE (target) == REG)
5090 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5092 /* Store the realpart and the negated imagpart to target. */
5093 emit_move_insn (gen_lowpart (mode, target), gen_lowpart (mode, op0));
5095 imag_t = gen_highpart (mode, target);
5096 temp = expand_unop (mode, neg_optab,
5097 gen_highpart (mode, op0), imag_t, 0);
5099 emit_move_insn (imag_t, temp);
5101 /* Conjugate should appear as a single unit */
5111 return (*lang_expand_expr) (exp, target, tmode, modifier);
5114 /* Here to do an ordinary binary operator, generating an instruction
5115 from the optab already placed in `this_optab'. */
5117 preexpand_calls (exp);
5118 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5120 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5121 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5123 temp = expand_binop (mode, this_optab, op0, op1, target,
5124 unsignedp, OPTAB_LIB_WIDEN);
5130 /* Return the alignment in bits of EXP, a pointer valued expression.
5131 But don't return more than MAX_ALIGN no matter what.
5132 The alignment returned is, by default, the alignment of the thing that
5133 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5135 Otherwise, look at the expression to see if we can do better, i.e., if the
5136 expression is actually pointing at an object whose alignment is tighter. */
5139 get_pointer_alignment (exp, max_align)
5143 unsigned align, inner;
5145 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5148 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5149 align = MIN (align, max_align);
5153 switch (TREE_CODE (exp))
5157 case NON_LVALUE_EXPR:
5158 exp = TREE_OPERAND (exp, 0);
5159 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5161 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5162 inner = MIN (inner, max_align);
5163 align = MAX (align, inner);
5167 /* If sum of pointer + int, restrict our maximum alignment to that
5168 imposed by the integer. If not, we can't do any better than ALIGN. */
5170 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5173 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5178 exp = TREE_OPERAND (exp, 0);
5182 /* See what we are pointing at and look at its alignment. */
5183 exp = TREE_OPERAND (exp, 0);
5184 if (TREE_CODE (exp) == FUNCTION_DECL)
5185 align = MAX (align, FUNCTION_BOUNDARY);
5186 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5187 align = MAX (align, DECL_ALIGN (exp));
5188 #ifdef CONSTANT_ALIGNMENT
5189 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5190 align = CONSTANT_ALIGNMENT (exp, align);
5192 return MIN (align, max_align);
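/* In the PLUS_EXPR case above, a constant byte offset can only reduce the
   alignment that the base pointer guarantees: the best alignment consistent
   with an offset K is the largest power of two dividing K.  A rough sketch
   of that restriction in plain C, alignments given in bits and the helper
   name purely illustrative:

     static unsigned
     align_after_offset (unsigned base_align_bits, unsigned long offset_bytes)
     {
       unsigned align = base_align_bits;
       while (align > 8 && (offset_bytes * 8) % align != 0)
         align /= 2;
       return align;
     }

   For example an 8-byte-aligned pointer plus 2 is only known to be
   2-byte (16-bit) aligned. */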
5200 /* Return the tree node and offset if a given argument corresponds to
5201 a string constant. */
5204 string_constant (arg, ptr_offset)
5210 if (TREE_CODE (arg) == ADDR_EXPR
5211 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5213 *ptr_offset = integer_zero_node;
5214 return TREE_OPERAND (arg, 0);
5216 else if (TREE_CODE (arg) == PLUS_EXPR)
5218 tree arg0 = TREE_OPERAND (arg, 0);
5219 tree arg1 = TREE_OPERAND (arg, 1);
5224 if (TREE_CODE (arg0) == ADDR_EXPR
5225 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5228 return TREE_OPERAND (arg0, 0);
5230 else if (TREE_CODE (arg1) == ADDR_EXPR
5231 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5234 return TREE_OPERAND (arg1, 0);
5241 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5242 way, because the string could contain a zero byte in the middle.
5243 TREE_STRING_LENGTH is the size of the character array, not the string.
5245 Unfortunately, string_constant can't access the values of const char
5246 arrays with initializers, so neither can we do so here. */
5256 src = string_constant (src, &offset_node);
5259 max = TREE_STRING_LENGTH (src);
5260 ptr = TREE_STRING_POINTER (src);
5261 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5263 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5264 compute the offset to the following null if we don't know where to
5265 start searching for it. */
5267 for (i = 0; i < max; i++)
5270 /* We don't know the starting offset, but we do know that the string
5271 has no internal zero bytes. We can assume that the offset falls
5272 within the bounds of the string; otherwise, the programmer deserves
5273 what he gets. Subtract the offset from the length of the string, and return that. */
5275 /* This would perhaps not be valid if we were dealing with named
5276 arrays in addition to literal string constants. */
5277 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5280 /* We have a known offset into the string. Start searching there for
5281 a null character. */
5282 if (offset_node == 0)
5286 /* Did we get a long long offset? If so, punt. */
5287 if (TREE_INT_CST_HIGH (offset_node) != 0)
5289 offset = TREE_INT_CST_LOW (offset_node);
5291 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
5293 if (offset < 0 || offset > max)
5295 warning ("offset outside bounds of constant string");
5298 /* Use strlen to search for the first zero byte. Since any strings
5299 constructed with build_string will have nulls appended, we win even
5300 if we get handed something like (char[4])"abcd".
5302 Since OFFSET is our starting index into the string, no further
5303 calculation is needed. */
5304 return size_int (strlen (ptr + offset));
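/* c_strlen above lets calls such as strlen ("hello" + 2) be folded at
   compile time: with a known constant offset into a string literal the
   length is just strlen of the literal starting at that offset, and with an
   unknown (but in-bounds) offset into a literal with no embedded nuls it is
   the array length minus the offset.  A tiny host-side illustration of the
   first case, assuming <string.h> and an illustrative function name:

     #include <string.h>

     static unsigned long
     folded_strlen (const char *literal, unsigned long offset)
     {
       return (unsigned long) strlen (literal + offset);
     }

   so folded_strlen ("hello", 2) is 3, which is the constant that the
   expansion of strlen returns in such a case. */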
5307 /* Expand an expression EXP that calls a built-in function,
5308 with result going to TARGET if that's convenient
5309 (and in mode MODE if that's convenient).
5310 SUBTARGET may be used as the target for computing one of EXP's operands.
5311 IGNORE is nonzero if the value is to be ignored. */
5314 expand_builtin (exp, target, subtarget, mode, ignore)
5318 enum machine_mode mode;
5321 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5322 tree arglist = TREE_OPERAND (exp, 1);
5325 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5326 optab builtin_optab;
5328 switch (DECL_FUNCTION_CODE (fndecl))
5333 /* build_function_call changes these into ABS_EXPR. */
5338 case BUILT_IN_FSQRT:
5339 /* If not optimizing, call the library function. */
5344 /* Arg could be wrong type if user redeclared this fcn wrong. */
5345 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5346 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5348 /* Stabilize and compute the argument. */
5349 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5350 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5352 exp = copy_node (exp);
5353 arglist = copy_node (arglist);
5354 TREE_OPERAND (exp, 1) = arglist;
5355 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5357 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5359 /* Make a suitable register to place result in. */
5360 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5365 switch (DECL_FUNCTION_CODE (fndecl))
5368 builtin_optab = sin_optab; break;
5370 builtin_optab = cos_optab; break;
5371 case BUILT_IN_FSQRT:
5372 builtin_optab = sqrt_optab; break;
5377 /* Compute into TARGET.
5378 Set TARGET to wherever the result comes back. */
5379 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5380 builtin_optab, op0, target, 0);
5382 /* If we were unable to expand via the builtin, stop the
5383 sequence (without outputting the insns) and break, causing
5384 a call to the library function. */
5391 /* Check the results by default. But if flag_fast_math is turned on,
5392 then assume sqrt will always be called with valid arguments. */
5394 if (! flag_fast_math)
5396 /* Don't define the builtin FP instructions
5397 if your machine is not IEEE. */
5398 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5401 lab1 = gen_label_rtx ();
5403 /* Test the result; if it is NaN, set errno=EDOM because
5404 the argument was not in the domain. */
5405 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5406 emit_jump_insn (gen_beq (lab1));
5410 #ifdef GEN_ERRNO_RTX
5411 rtx errno_rtx = GEN_ERRNO_RTX;
5414 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5417 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5420 /* We can't set errno=EDOM directly; let the library call do it.
5421 Pop the arguments right away in case the call gets deleted. */
5423 expand_call (exp, target, 0);
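/* The domain check above relies on the IEEE rule that a NaN compares
   unequal to everything, including itself: the EQ test branches over the
   errno code whenever the result is not a NaN.  A small sketch of the same
   test in plain C, assuming IEEE format, <errno.h>, and an illustrative
   function name:

     #include <errno.h>

     static double
     check_domain (double result)
     {
       if (result != result)
         errno = EDOM;
       return result;
     }

   When the target does not let errno be set directly, the expansion instead
   re-issues the library call, which sets errno itself. */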
5430 /* Output the entire sequence. */
5431 insns = get_insns ();
5437 case BUILT_IN_SAVEREGS:
5438 /* Don't do __builtin_saveregs more than once in a function.
5439 Save the result of the first call and reuse it. */
5440 if (saveregs_value != 0)
5441 return saveregs_value;
5443 /* When this function is called, it means that registers must be
5444 saved on entry to this function. So we migrate the
5445 call to the first insn of this function. */
5448 rtx valreg, saved_valreg;
5450 /* Now really call the function. `expand_call' does not call
5451 expand_builtin, so there is no danger of infinite recursion here. */
5454 #ifdef EXPAND_BUILTIN_SAVEREGS
5455 /* Do whatever the machine needs done in this case. */
5456 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5458 /* The register where the function returns its value
5459 is likely to have something else in it, such as an argument.
5460 So preserve that register around the call. */
5461 if (value_mode != VOIDmode)
5463 valreg = hard_libcall_value (value_mode);
5464 saved_valreg = gen_reg_rtx (value_mode);
5465 emit_move_insn (saved_valreg, valreg);
5468 /* Generate the call, putting the value in a pseudo. */
5469 temp = expand_call (exp, target, ignore);
5471 if (value_mode != VOIDmode)
5472 emit_move_insn (valreg, saved_valreg);
5478 saveregs_value = temp;
5480 /* This won't work inside a SEQUENCE--it really has to be
5481 at the start of the function. */
5482 if (in_sequence_p ())
5484 /* Better to do this than to crash. */
5485 error ("`va_start' used within `({...})'");
5489 /* Put the sequence after the NOTE that starts the function. */
5490 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5494 /* __builtin_args_info (N) returns word N of the arg space info
5495 for the current function. The number and meanings of words
5496 are controlled by the definition of CUMULATIVE_ARGS. */
5497 case BUILT_IN_ARGS_INFO:
5499 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5501 int *word_ptr = (int *) &current_function_args_info;
5502 tree type, elts, result;
5504 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5505 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5506 __FILE__, __LINE__);
5510 tree arg = TREE_VALUE (arglist);
5511 if (TREE_CODE (arg) != INTEGER_CST)
5512 error ("argument of __builtin_args_info must be constant");
5515 int wordnum = TREE_INT_CST_LOW (arg);
5517 if (wordnum < 0 || wordnum >= nwords)
5518 error ("argument of __builtin_args_info out of range");
5520 return GEN_INT (word_ptr[wordnum]);
5524 error ("missing argument in __builtin_args_info");
5529 for (i = 0; i < nwords; i++)
5530 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5532 type = build_array_type (integer_type_node,
5533 build_index_type (build_int_2 (nwords, 0)));
5534 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5535 TREE_CONSTANT (result) = 1;
5536 TREE_STATIC (result) = 1;
5537 result = build (INDIRECT_REF, build_pointer_type (type), result);
5538 TREE_CONSTANT (result) = 1;
5539 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5543 /* Return the address of the first anonymous stack arg. */
5544 case BUILT_IN_NEXT_ARG:
5546 tree fntype = TREE_TYPE (current_function_decl);
5547 if (!(TYPE_ARG_TYPES (fntype) != 0
5548 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5549 != void_type_node)))
5551 error ("`va_start' used in function with fixed args");
5556 return expand_binop (Pmode, add_optab,
5557 current_function_internal_arg_pointer,
5558 current_function_arg_offset_rtx,
5559 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5561 case BUILT_IN_CLASSIFY_TYPE:
5564 tree type = TREE_TYPE (TREE_VALUE (arglist));
5565 enum tree_code code = TREE_CODE (type);
5566 if (code == VOID_TYPE)
5567 return GEN_INT (void_type_class);
5568 if (code == INTEGER_TYPE)
5569 return GEN_INT (integer_type_class);
5570 if (code == CHAR_TYPE)
5571 return GEN_INT (char_type_class);
5572 if (code == ENUMERAL_TYPE)
5573 return GEN_INT (enumeral_type_class);
5574 if (code == BOOLEAN_TYPE)
5575 return GEN_INT (boolean_type_class);
5576 if (code == POINTER_TYPE)
5577 return GEN_INT (pointer_type_class);
5578 if (code == REFERENCE_TYPE)
5579 return GEN_INT (reference_type_class);
5580 if (code == OFFSET_TYPE)
5581 return GEN_INT (offset_type_class);
5582 if (code == REAL_TYPE)
5583 return GEN_INT (real_type_class);
5584 if (code == COMPLEX_TYPE)
5585 return GEN_INT (complex_type_class);
5586 if (code == FUNCTION_TYPE)
5587 return GEN_INT (function_type_class);
5588 if (code == METHOD_TYPE)
5589 return GEN_INT (method_type_class);
5590 if (code == RECORD_TYPE)
5591 return GEN_INT (record_type_class);
5592 if (code == UNION_TYPE)
5593 return GEN_INT (union_type_class);
5594 if (code == ARRAY_TYPE)
5595 return GEN_INT (array_type_class);
5596 if (code == STRING_TYPE)
5597 return GEN_INT (string_type_class);
5598 if (code == SET_TYPE)
5599 return GEN_INT (set_type_class);
5600 if (code == FILE_TYPE)
5601 return GEN_INT (file_type_class);
5602 if (code == LANG_TYPE)
5603 return GEN_INT (lang_type_class);
5605 return GEN_INT (no_type_class);
5607 case BUILT_IN_CONSTANT_P:
5611 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5612 ? const1_rtx : const0_rtx);
5614 case BUILT_IN_FRAME_ADDRESS:
5615 /* The argument must be a nonnegative integer constant.
5616 It counts the number of frames to scan up the stack.
5617 The value is the address of that frame. */
5618 case BUILT_IN_RETURN_ADDRESS:
5619 /* The argument must be a nonnegative integer constant.
5620 It counts the number of frames to scan up the stack.
5621 The value is the return address saved in that frame. */
5623 /* Warning about missing arg was already issued. */
5625 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5627 error ("invalid arg to __builtin_return_address");
5630 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5632 error ("invalid arg to __builtin_return_address");
5637 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5638 rtx tem = frame_pointer_rtx;
5641 /* Scan back COUNT frames to the specified frame. */
5642 for (i = 0; i < count; i++)
5644 /* Assume the dynamic chain pointer is in the word that
5645 the frame address points to, unless otherwise specified. */
5646 #ifdef DYNAMIC_CHAIN_ADDRESS
5647 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5649 tem = memory_address (Pmode, tem);
5650 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5653 /* For __builtin_frame_address, return what we've got. */
5654 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5657 /* For __builtin_return_address,
5658 get the return address from that frame. */
5659 #ifdef RETURN_ADDR_RTX
5660 return RETURN_ADDR_RTX (count, tem);
5662 tem = memory_address (Pmode,
5663 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5664 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
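/* __builtin_frame_address (N) and __builtin_return_address (N) are expanded
   above by chasing the dynamic chain N times from the current frame pointer
   and then, for the return address, loading the word one past the frame
   address found (unless the target overrides the layout with
   DYNAMIC_CHAIN_ADDRESS or RETURN_ADDR_RTX).  A rough, target-dependent
   sketch of the walk in plain C; this assumes the default layout and is not
   portable:

     static void *
     walk_frames (void *fp, int count)
     {
       int i;
       for (i = 0; i < count; i++)
         fp = *(void **) fp;
       return fp;
     }

   The return address for that frame would then live one word past the
   value returned, on targets using the default scheme. */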
5668 case BUILT_IN_ALLOCA:
5670 /* Arg could be non-integer if user redeclared this fcn wrong. */
5671 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5673 current_function_calls_alloca = 1;
5674 /* Compute the argument. */
5675 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5677 /* Allocate the desired space. */
5678 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5680 /* Record the new stack level for nonlocal gotos. */
5681 if (nonlocal_goto_handler_slot != 0)
5682 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5686 /* If not optimizing, call the library function. */
5691 /* Arg could be non-integer if user redeclared this fcn wrong. */
5692 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5695 /* Compute the argument. */
5696 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5697 /* Compute ffs, into TARGET if possible.
5698 Set TARGET to wherever the result comes back. */
5699 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5700 ffs_optab, op0, target, 1);
5705 case BUILT_IN_STRLEN:
5706 /* If not optimizing, call the library function. */
5711 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5712 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5716 tree src = TREE_VALUE (arglist);
5717 tree len = c_strlen (src);
5720 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5722 rtx result, src_rtx, char_rtx;
5723 enum machine_mode insn_mode = value_mode, char_mode;
5724 enum insn_code icode;
5726 /* If the length is known, just return it. */
5728 return expand_expr (len, target, mode, 0);
5730 /* If SRC is not a pointer type, don't do this operation inline. */
5734 /* Call a function if we can't compute strlen in the right mode. */
5736 while (insn_mode != VOIDmode)
5738 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5739 if (icode != CODE_FOR_nothing)
5742 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5744 if (insn_mode == VOIDmode)
5747 /* Make a place to write the result of the instruction. */
5750 && GET_CODE (result) == REG
5751 && GET_MODE (result) == insn_mode
5752 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5753 result = gen_reg_rtx (insn_mode);
5755 /* Make sure the operands are acceptable to the predicates. */
5757 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5758 result = gen_reg_rtx (insn_mode);
5760 src_rtx = memory_address (BLKmode,
5761 expand_expr (src, NULL_RTX, Pmode,
5763 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5764 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5766 char_rtx = const0_rtx;
5767 char_mode = insn_operand_mode[(int)icode][2];
5768 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5769 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5771 emit_insn (GEN_FCN (icode) (result,
5772 gen_rtx (MEM, BLKmode, src_rtx),
5773 char_rtx, GEN_INT (align)));
5775 /* Return the value in the proper mode for this function. */
5776 if (GET_MODE (result) == value_mode)
5778 else if (target != 0)
5780 convert_move (target, result, 0);
5784 return convert_to_mode (value_mode, result, 0);
5787 case BUILT_IN_STRCPY:
5788 /* If not optimizing, call the library function. */
5793 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5794 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5795 || TREE_CHAIN (arglist) == 0
5796 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5800 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5805 len = size_binop (PLUS_EXPR, len, integer_one_node);
5807 chainon (arglist, build_tree_list (NULL_TREE, len));
5811 case BUILT_IN_MEMCPY:
5812 /* If not optimizing, call the library function. */
5817 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5818 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5819 || TREE_CHAIN (arglist) == 0
5820 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5821 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5822 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5826 tree dest = TREE_VALUE (arglist);
5827 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5828 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5831 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5833 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5836 /* If either SRC or DEST is not a pointer type, don't do
5837 this operation in-line. */
5838 if (src_align == 0 || dest_align == 0)
5840 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5841 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5845 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5847 /* Copy word part most expediently. */
5848 emit_block_move (gen_rtx (MEM, BLKmode,
5849 memory_address (BLKmode, dest_rtx)),
5850 gen_rtx (MEM, BLKmode,
5851 memory_address (BLKmode,
5852 expand_expr (src, NULL_RTX,
5855 expand_expr (len, NULL_RTX, VOIDmode, 0),
5856 MIN (src_align, dest_align));
5860 /* These comparison functions need an instruction that returns an actual
5861 index. An ordinary compare that just sets the condition codes is not enough. */
5863 #ifdef HAVE_cmpstrsi
5864 case BUILT_IN_STRCMP:
5865 /* If not optimizing, call the library function. */
5870 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5871 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5872 || TREE_CHAIN (arglist) == 0
5873 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5875 else if (!HAVE_cmpstrsi)
5878 tree arg1 = TREE_VALUE (arglist);
5879 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5883 len = c_strlen (arg1);
5885 len = size_binop (PLUS_EXPR, integer_one_node, len);
5886 len2 = c_strlen (arg2);
5888 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5890 /* If we don't have a constant length for the first, use the length
5891 of the second, if we know it. We don't require a constant for
5892 this case; some cost analysis could be done if both are available
5893 but neither is constant. For now, assume they're equally cheap.
5895 If both strings have constant lengths, use the smaller. This
5896 could arise if optimization results in strcpy being called with
5897 two fixed strings, or if the code was machine-generated. We should
5898 add some code to the `memcmp' handler below to deal with such
5899 situations, someday. */
5900 if (!len || TREE_CODE (len) != INTEGER_CST)
5907 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5909 if (tree_int_cst_lt (len2, len))
5913 chainon (arglist, build_tree_list (NULL_TREE, len));
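/* For strcmp the code above supplies cmpstrsi with an explicit length:
   since both strings are nul-terminated, comparing the smaller known
   length plus one byte as raw memory decides the ordering, because the
   terminating nul of the shorter string differs from any further character
   of the longer one.  A host-side illustration, assuming <string.h>, with
   min_len standing for the smaller constant length chosen above:

     #include <string.h>

     static int
     strcmp_via_memcmp (const char *s1, const char *s2, unsigned long min_len)
     {
       return memcmp (s1, s2, min_len + 1);
     }

   The result has the same sign as strcmp (s1, s2) whenever min_len is the
   length of the shorter of the two strings. */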
5917 case BUILT_IN_MEMCMP:
5918 /* If not optimizing, call the library function. */
5923 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5924 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5925 || TREE_CHAIN (arglist) == 0
5926 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5927 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5928 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5930 else if (!HAVE_cmpstrsi)
5933 tree arg1 = TREE_VALUE (arglist);
5934 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5935 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5939 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5941 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5942 enum machine_mode insn_mode
5943 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5945 /* If we don't have POINTER_TYPE, call the function. */
5946 if (arg1_align == 0 || arg2_align == 0)
5948 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5949 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5953 /* Make a place to write the result of the instruction. */
5956 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5957 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5958 result = gen_reg_rtx (insn_mode);
5960 emit_insn (gen_cmpstrsi (result,
5961 gen_rtx (MEM, BLKmode,
5962 expand_expr (arg1, NULL_RTX, Pmode,
5964 gen_rtx (MEM, BLKmode,
5965 expand_expr (arg2, NULL_RTX, Pmode,
5967 expand_expr (len, NULL_RTX, VOIDmode, 0),
5968 GEN_INT (MIN (arg1_align, arg2_align))));
5970 /* Return the value in the proper mode for this function. */
5971 mode = TYPE_MODE (TREE_TYPE (exp));
5972 if (GET_MODE (result) == mode)
5974 else if (target != 0)
5976 convert_move (target, result, 0);
5980 return convert_to_mode (mode, result, 0);
5983 case BUILT_IN_STRCMP:
5984 case BUILT_IN_MEMCMP:
5988 default: /* just do library call, if unknown builtin */
5989 error ("built-in function %s not currently supported",
5990 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5993 /* The switch statement above can drop through to cause the function
5994 to be called normally. */
5996 return expand_call (exp, target, ignore);
5999 /* Expand code for a post- or pre- increment or decrement
6000 and return the RTX for the result.
6001 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6004 expand_increment (exp, post)
6008 register rtx op0, op1;
6009 register rtx temp, value;
6010 register tree incremented = TREE_OPERAND (exp, 0);
6011 optab this_optab = add_optab;
6013 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6014 int op0_is_copy = 0;
6016 /* Stabilize any component ref that might need to be
6017 evaluated more than once below. */
6018 if (TREE_CODE (incremented) == BIT_FIELD_REF
6019 || (TREE_CODE (incremented) == COMPONENT_REF
6020 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6021 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6022 incremented = stabilize_reference (incremented);
6024 /* Compute the operands as RTX.
6025 Note whether OP0 is the actual lvalue or a copy of it:
6026 I believe it is a copy iff it is a register or subreg
6027 and insns were generated in computing it. */
6028 temp = get_last_insn ();
6029 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6030 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6031 && temp != get_last_insn ());
6032 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6034 /* Decide whether incrementing or decrementing. */
6035 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6036 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6037 this_optab = sub_optab;
6039 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6040 then we cannot just increment OP0. We must
6041 therefore contrive to increment the original value.
6042 Then we can return OP0 since it is a copy of the old value. */
6045 /* This is the easiest way to increment the value wherever it is.
6046 Problems with multiple evaluation of INCREMENTED
6047 are prevented because either (1) it is a component_ref,
6048 in which case it was stabilized above, or (2) it is an array_ref
6049 with constant index in an array in a register, which is
6050 safe to reevaluate. */
6051 tree newexp = build ((this_optab == add_optab
6052 ? PLUS_EXPR : MINUS_EXPR),
6055 TREE_OPERAND (exp, 1));
6056 temp = expand_assignment (incremented, newexp, ! post, 0);
6057 return post ? op0 : temp;
6060 /* Convert decrement by a constant into a negative increment. */
6061 if (this_optab == sub_optab
6062 && GET_CODE (op1) == CONST_INT)
6064 op1 = GEN_INT (- INTVAL (op1));
6065 this_optab = add_optab;
6070 /* We have a true reference to the value in OP0.
6071 If there is an insn to add or subtract in this mode, queue it. */
6073 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6074 op0 = stabilize (op0);
6077 icode = (int) this_optab->handlers[(int) mode].insn_code;
6078 if (icode != (int) CODE_FOR_nothing
6079 /* Make sure that OP0 is valid for operands 0 and 1
6080 of the insn we want to queue. */
6081 && (*insn_operand_predicate[icode][0]) (op0, mode)
6082 && (*insn_operand_predicate[icode][1]) (op0, mode))
6084 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6085 op1 = force_reg (mode, op1);
6087 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6091 /* Preincrement, or we can't increment with one simple insn. */
6092 if (post)
6093 /* Save a copy of the value before inc or dec, to return it later. */
6094 temp = value = copy_to_reg (op0);
6095 else
6096 /* Arrange to return the incremented value. */
6097 /* Copy the rtx because expand_binop will protect from the queue,
6098 and the results of that would be invalid for us to return
6099 if our caller does emit_queue before using our result. */
6100 temp = copy_rtx (value = op0);
6102 /* Increment however we can. */
6103 op1 = expand_binop (mode, this_optab, value, op1, op0,
6104 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6105 /* Make sure the value is stored into OP0. */
6107 emit_move_insn (op0, op1);
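/* The distinction carried through expand_increment is only in which value
   is handed back: a preincrement yields the updated value, a postincrement
   yields a copy of the value saved before the addition, and the stored
   object is updated either way.  A small sketch of the two results in
   plain C (illustrative helper; POST as in the function above):

     static int
     increment (int *lvalue, int amount, int post)
     {
       int old = *lvalue;
       *lvalue = old + amount;
       return post ? old : *lvalue;
     }
*/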
6112 /* Expand all function calls contained within EXP, innermost ones first.
6113 But don't look within expressions that have sequence points.
6114 For each CALL_EXPR, record the rtx for its value
6115 in the CALL_EXPR_RTL field. */
6118 preexpand_calls (exp)
6121 register int nops, i;
6122 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6124 if (! do_preexpand_calls)
6127 /* Only expressions and references can contain calls. */
6129 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6132 switch (TREE_CODE (exp))
6135 /* Do nothing if already expanded. */
6136 if (CALL_EXPR_RTL (exp) != 0)
6139 /* Do nothing to built-in functions. */
6140 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6141 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6142 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6143 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6148 case TRUTH_ANDIF_EXPR:
6149 case TRUTH_ORIF_EXPR:
6150 /* If we find one of these, then we can be sure
6151 the adjust will be done for it (since it makes jumps).
6152 Do it now, so that if this is inside an argument
6153 of a function, we don't get the stack adjustment
6154 after some other args have already been pushed. */
6155 do_pending_stack_adjust ();
6160 case WITH_CLEANUP_EXPR:
6164 if (SAVE_EXPR_RTL (exp) != 0)
6168 nops = tree_code_length[(int) TREE_CODE (exp)];
6169 for (i = 0; i < nops; i++)
6170 if (TREE_OPERAND (exp, i) != 0)
6172 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6173 if (type == 'e' || type == '<' || type == '1' || type == '2'
6175 preexpand_calls (TREE_OPERAND (exp, i));
6179 /* At the start of a function, record that we have no previously-pushed
6180 arguments waiting to be popped. */
6183 init_pending_stack_adjust ()
6185 pending_stack_adjust = 0;
6188 /* When exiting from a function, if safe, clear out any pending stack adjust
6189 so the adjustment won't get done. */
6192 clear_pending_stack_adjust ()
6194 #ifdef EXIT_IGNORE_STACK
6195 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6196 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6197 && ! flag_inline_functions)
6198 pending_stack_adjust = 0;
6202 /* Pop any previously-pushed arguments that have not been popped yet. */
6205 do_pending_stack_adjust ()
6207 if (inhibit_defer_pop == 0)
6209 if (pending_stack_adjust != 0)
6210 adjust_stack (GEN_INT (pending_stack_adjust));
6211 pending_stack_adjust = 0;
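/* Editorial illustration of the deferral: on a target where the caller
   pops its own arguments, expanding `f (1); g (2);' lets each call add
   its argument size to pending_stack_adjust instead of emitting its own
   pop; the single adjust_stack above then releases both argument blocks
   at once, the next time this function runs with inhibit_defer_pop
   zero. */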
6215 /* Expand all cleanups up to OLD_CLEANUPS.
6216 Needed here, and also for language-dependent calls. */
6219 expand_cleanups_to (old_cleanups)
6222 while (cleanups_this_call != old_cleanups)
6224 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6225 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6229 /* Expand conditional expressions. */
6231 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6232 LABEL is an rtx of code CODE_LABEL, in this function and all the
6236 jumpifnot (exp, label)
6240 do_jump (exp, label, NULL_RTX);
6243 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6250 do_jump (exp, NULL_RTX, label);
6253 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6254 the result is zero, or IF_TRUE_LABEL if the result is one.
6255 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6256 meaning fall through in that case.
6258 do_jump always does any pending stack adjust except when it does not
6259 actually perform a jump. An example where there is no jump
6260 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6262 This function is responsible for optimizing cases such as
6263 &&, || and comparison operators in EXP. */
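/* Editorial sketch (not part of the original source) of the && case
   described above: for `if (a && b) stmt;' do_jump emits roughly

	(jump to FALSE if a == 0)
	(jump to FALSE if b == 0)
	... code for stmt ...
     FALSE:

   Each operand gets its own conditional jump and no boolean value is
   ever computed into a register; || is handled symmetrically using the
   true label. */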
6266 do_jump (exp, if_false_label, if_true_label)
6268 rtx if_false_label, if_true_label;
6270 register enum tree_code code = TREE_CODE (exp);
6271 /* Some cases need to create a label to jump to
6272 in order to properly fall through.
6273 These cases set DROP_THROUGH_LABEL nonzero. */
6274 rtx drop_through_label = 0;
6288 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6294 /* This is not true with #pragma weak */
6296 /* The address of something can never be zero. */
6298 emit_jump (if_true_label);
6303 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6304 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6305 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6308 /* If we are narrowing the operand, we have to do the compare in the narrower type. */
6310 if ((TYPE_PRECISION (TREE_TYPE (exp))
6311 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6313 case NON_LVALUE_EXPR:
6314 case REFERENCE_EXPR:
6319 /* These cannot change zero->non-zero or vice versa. */
6320 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6324 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
6325 a test and can be longer if the test is eliminated. */
6327 /* Reduce to minus. */
6328 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6329 TREE_OPERAND (exp, 0),
6330 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6331 TREE_OPERAND (exp, 1))));
6332 /* Process as MINUS. */
6336 /* Non-zero iff operands of minus differ. */
6337 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6338 TREE_OPERAND (exp, 0),
6339 TREE_OPERAND (exp, 1)),
6344 /* If we are AND'ing with a small constant, do this comparison in the
6345 smallest type that fits. If the machine doesn't have comparisons
6346 that small, it will be converted back to the wider comparison.
6347 This helps if we are testing the sign bit of a narrower object.
6348 combine can't do this for us because it can't know whether a
6349 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
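/* Hypothetical example (editorial, not in the original code): for
   `if (i & 0x80)' with `i' an int, floor_log2 gives 7, type_for_size
   returns an 8-bit unsigned type, and the conversion below lets the
   test be done as a QImode comparison instead of a full-width one;
   whether that narrower mode really has a compare pattern is checked
   against cmp_optab first. */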
6351 if (! SLOW_BYTE_ACCESS
6352 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6353 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6354 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6355 && (type = type_for_size (i + 1, 1)) != 0
6356 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6357 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6358 != CODE_FOR_nothing))
6360 do_jump (convert (type, exp), if_false_label, if_true_label);
6365 case TRUTH_NOT_EXPR:
6366 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6369 case TRUTH_ANDIF_EXPR:
6370 if (if_false_label == 0)
6371 if_false_label = drop_through_label = gen_label_rtx ();
6372 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6373 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6376 case TRUTH_ORIF_EXPR:
6377 if (if_true_label == 0)
6378 if_true_label = drop_through_label = gen_label_rtx ();
6379 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6380 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6384 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6387 do_pending_stack_adjust ();
6388 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6395 int bitsize, bitpos, unsignedp;
6396 enum machine_mode mode;
6401 /* Get description of this reference. We don't actually care
6402 about the underlying object here. */
6403 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6404 &mode, &unsignedp, &volatilep);
6406 type = type_for_size (bitsize, unsignedp);
6407 if (! SLOW_BYTE_ACCESS
6408 && type != 0 && bitsize >= 0
6409 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6410 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6411 != CODE_FOR_nothing))
6413 do_jump (convert (type, exp), if_false_label, if_true_label);
6420 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6421 if (integer_onep (TREE_OPERAND (exp, 1))
6422 && integer_zerop (TREE_OPERAND (exp, 2)))
6423 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6425 else if (integer_zerop (TREE_OPERAND (exp, 1))
6426 && integer_onep (TREE_OPERAND (exp, 2)))
6427 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6431 register rtx label1 = gen_label_rtx ();
6432 drop_through_label = gen_label_rtx ();
6433 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6434 /* Now the THEN-expression. */
6435 do_jump (TREE_OPERAND (exp, 1),
6436 if_false_label ? if_false_label : drop_through_label,
6437 if_true_label ? if_true_label : drop_through_label);
6438 /* In case the do_jump just above never jumps. */
6439 do_pending_stack_adjust ();
6440 emit_label (label1);
6441 /* Now the ELSE-expression. */
6442 do_jump (TREE_OPERAND (exp, 2),
6443 if_false_label ? if_false_label : drop_through_label,
6444 if_true_label ? if_true_label : drop_through_label);
6449 if (integer_zerop (TREE_OPERAND (exp, 1)))
6450 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6451 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6454 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6455 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6457 comparison = compare (exp, EQ, EQ);
6461 if (integer_zerop (TREE_OPERAND (exp, 1)))
6462 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6463 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6466 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6467 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6469 comparison = compare (exp, NE, NE);
6473 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6475 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6476 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6478 comparison = compare (exp, LT, LTU);
6482 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6484 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6485 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6487 comparison = compare (exp, LE, LEU);
6491 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6493 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6494 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6496 comparison = compare (exp, GT, GTU);
6500 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6502 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6503 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6505 comparison = compare (exp, GE, GEU);
6510 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6512 /* This is not needed any more and causes poor code since it causes
6513 comparisons and tests from non-SI objects to have different code patterns. */
6515 /* Copy to register to avoid generating bad insns by cse
6516 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6517 if (!cse_not_expected && GET_CODE (temp) == MEM)
6518 temp = copy_to_reg (temp);
6520 do_pending_stack_adjust ();
6521 if (GET_CODE (temp) == CONST_INT)
6522 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6523 else if (GET_CODE (temp) == LABEL_REF)
6524 comparison = const_true_rtx;
6525 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6526 && !can_compare_p (GET_MODE (temp)))
6527 /* Note swapping the labels gives us not-equal. */
6528 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6529 else if (GET_MODE (temp) != VOIDmode)
6530 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6531 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6532 GET_MODE (temp), NULL_RTX, 0);
6537 /* Do any postincrements in the expression that was tested. */
6540 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6541 straight into a conditional jump instruction as the jump condition.
6542 Otherwise, all the work has been done already. */
6544 if (comparison == const_true_rtx)
6547 emit_jump (if_true_label);
6549 else if (comparison == const0_rtx)
6552 emit_jump (if_false_label);
6554 else if (comparison)
6555 do_jump_for_compare (comparison, if_false_label, if_true_label);
6559 if (drop_through_label)
6561 /* If do_jump produces code that might be jumped around,
6562 do any stack adjusts from that code, before the place
6563 where control merges in. */
6564 do_pending_stack_adjust ();
6565 emit_label (drop_through_label);
6569 /* Given a comparison expression EXP for values too wide to be compared
6570 with one insn, test the comparison and jump to the appropriate label.
6571 The code of EXP is ignored; we always test GT if SWAP is 0,
6572 and LT if SWAP is 1. */
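/* Editorial sketch: on a target with 32-bit words, a signed GT
   comparison of two 64-bit values A and B is performed below roughly as

	if (A.high >  B.high)  goto if_true_label;
	if (A.high != B.high)  goto if_false_label;
	if (A.low  >  B.low)   goto if_true_label;   (unsigned compare)
	goto if_false_label;

   Only the high-order word, compared first, keeps the signedness of the
   type; every later word is compared unsigned. */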
6575 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6578 rtx if_false_label, if_true_label;
6580 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6581 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6582 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6583 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6584 rtx drop_through_label = 0;
6585 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6588 if (! if_true_label || ! if_false_label)
6589 drop_through_label = gen_label_rtx ();
6590 if (! if_true_label)
6591 if_true_label = drop_through_label;
6592 if (! if_false_label)
6593 if_false_label = drop_through_label;
6595 /* Compare a word at a time, high order first. */
6596 for (i = 0; i < nwords; i++)
6599 rtx op0_word, op1_word;
6601 if (WORDS_BIG_ENDIAN)
6603 op0_word = operand_subword_force (op0, i, mode);
6604 op1_word = operand_subword_force (op1, i, mode);
6608 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6609 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6612 /* All but the high-order word must be compared as unsigned. */
6613 comp = compare_from_rtx (op0_word, op1_word,
6614 (unsignedp || i > 0) ? GTU : GT,
6615 unsignedp, word_mode, NULL_RTX, 0);
6616 if (comp == const_true_rtx)
6617 emit_jump (if_true_label);
6618 else if (comp != const0_rtx)
6619 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6621 /* Consider lower words only if these are equal. */
6622 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6624 if (comp == const_true_rtx)
6625 emit_jump (if_false_label);
6626 else if (comp != const0_rtx)
6627 do_jump_for_compare (comp, NULL_RTX, if_false_label);
6631 emit_jump (if_false_label);
6632 if (drop_through_label)
6633 emit_label (drop_through_label);
6636 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6637 with one insn, test the comparison and jump to the appropriate label. */
6640 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6642 rtx if_false_label, if_true_label;
6644 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6645 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6646 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6647 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6649 rtx drop_through_label = 0;
6651 if (! if_false_label)
6652 drop_through_label = if_false_label = gen_label_rtx ();
6654 for (i = 0; i < nwords; i++)
6656 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6657 operand_subword_force (op1, i, mode),
6658 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6659 word_mode, NULL_RTX, 0);
6660 if (comp == const_true_rtx)
6661 emit_jump (if_false_label);
6662 else if (comp != const0_rtx)
6663 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6667 emit_jump (if_true_label);
6668 if (drop_through_label)
6669 emit_label (drop_through_label);
6672 /* Jump according to whether OP0 is 0.
6673 We assume that OP0 has an integer mode that is too wide
6674 for the available compare insns. */
6677 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6679 rtx if_false_label, if_true_label;
6681 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6683 rtx drop_through_label = 0;
6685 if (! if_false_label)
6686 drop_through_label = if_false_label = gen_label_rtx ();
6688 for (i = 0; i < nwords; i++)
6690 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6692 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
6693 if (comp == const_true_rtx)
6694 emit_jump (if_false_label);
6695 else if (comp != const0_rtx)
6696 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6700 emit_jump (if_true_label);
6701 if (drop_through_label)
6702 emit_label (drop_through_label);
6705 /* Given a comparison expression in rtl form, output conditional branches to
6706 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6709 do_jump_for_compare (comparison, if_false_label, if_true_label)
6710 rtx comparison, if_false_label, if_true_label;
6714 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6715 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6720 emit_jump (if_false_label);
6722 else if (if_false_label)
6725 rtx prev = PREV_INSN (get_last_insn ());
6728 /* Output the branch with the opposite condition. Then try to invert
6729 what is generated. If more than one insn is a branch, or if the
6730 branch is not the last insn written, abort. If we can't invert
6731 the branch, make a true label, redirect this jump to that,
6732 emit a jump to the false label and define the true label. */
6734 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6735 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6739 /* Here we get the insn before what was just emitted.
6740 On some machines, emitting the branch can discard
6741 the previous compare insn and emit a replacement. */
6743 /* If there's only one preceding insn... */
6744 insn = get_insns ();
6746 insn = NEXT_INSN (prev);
6748 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6749 if (GET_CODE (insn) == JUMP_INSN)
6756 if (branch != get_last_insn ())
6759 if (! invert_jump (branch, if_false_label))
6761 if_true_label = gen_label_rtx ();
6762 redirect_jump (branch, if_true_label);
6763 emit_jump (if_false_label);
6764 emit_label (if_true_label);
6769 /* Generate code for a comparison expression EXP
6770 (including code to compute the values to be compared)
6771 and set (CC0) according to the result.
6772 SIGNED_CODE should be the rtx operation for this comparison for
6773 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6775 We force a stack adjustment unless there are currently
6776 things pushed on the stack that aren't yet used. */
6779 compare (exp, signed_code, unsigned_code)
6781 enum rtx_code signed_code, unsigned_code;
6784 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6786 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6787 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6788 register enum machine_mode mode = TYPE_MODE (type);
6789 int unsignedp = TREE_UNSIGNED (type);
6790 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6792 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6794 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6795 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6798 /* Like compare but expects the values to compare as two rtx's.
6799 The decision as to signed or unsigned comparison must be made by the caller.
6801 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.
6804 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6805 size of MODE should be used. */
6808 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6809 register rtx op0, op1;
6812 enum machine_mode mode;
6816 /* If one operand is constant, make it the second one. */
6818 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6823 code = swap_condition (code);
6828 op0 = force_not_mem (op0);
6829 op1 = force_not_mem (op1);
6832 do_pending_stack_adjust ();
6834 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6835 return simplify_relational_operation (code, mode, op0, op1);
6838 /* There's no need to do this now that combine.c can eliminate lots of
6839 sign extensions. This can be less efficient in certain cases on other machines. */
6842 /* If this is a signed equality comparison, we can do it as an
6843 unsigned comparison since zero-extension is cheaper than sign
6844 extension and comparisons with zero are done as unsigned. This is
6845 the case even on machines that can do fast sign extension, since
6846 zero-extension is easier to combine with other operations than
6847 sign-extension is. If we are comparing against a constant, we must
6848 convert it to what it would look like unsigned. */
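/* Editorial example: comparing a QImode value for equality with the
   signed constant -1 becomes, after the masking below, a comparison
   against 0xff, i.e. against what -1 looks like when zero-extended;
   the comparison itself can then be done unsigned. */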
6849 if ((code == EQ || code == NE) && ! unsignedp
6850 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6852 if (GET_CODE (op1) == CONST_INT
6853 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6854 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6859 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6861 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6864 /* Generate code to calculate EXP using a store-flag instruction
6865 and return an rtx for the result. EXP is either a comparison
6866 or a TRUTH_NOT_EXPR whose operand is a comparison.
6868 If TARGET is nonzero, store the result there if convenient.
6870 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
6873 Return zero if there is no suitable set-flag instruction
6874 available on this machine.
6876 Once expand_expr has been called on the arguments of the comparison,
6877 we are committed to doing the store flag, since it is not safe to
6878 re-evaluate the expression. We emit the store-flag insn by calling
6879 emit_store_flag, but only expand the arguments if we have a reason
6880 to believe that emit_store_flag will be successful. If we think that
6881 it will, but it isn't, we have to simulate the store-flag with a
6882 set/jump/set sequence. */
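/* Editorial sketch of the set/jump/set fallback mentioned above (the
   actual code is near the end of this function):

	(set TARGET 1)                  ; or 0 if the result is inverted
	(compare OP0 OP1)
	(branch to LAB if the condition holds)
	(set TARGET 0)                  ; or 1 if inverted
     LAB:

   so TARGET holds the truth value even when no store-flag pattern
   exists for this comparison. */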
6885 do_store_flag (exp, target, mode, only_cheap)
6888 enum machine_mode mode;
6892 tree arg0, arg1, type;
6894 enum machine_mode operand_mode;
6898 enum insn_code icode;
6899 rtx subtarget = target;
6900 rtx result, label, pattern, jump_pat;
6902 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6903 result at the end. We can't simply invert the test since it would
6904 have already been inverted if it were valid. This case occurs for
6905 some floating-point comparisons. */
6907 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6908 invert = 1, exp = TREE_OPERAND (exp, 0);
6910 arg0 = TREE_OPERAND (exp, 0);
6911 arg1 = TREE_OPERAND (exp, 1);
6912 type = TREE_TYPE (arg0);
6913 operand_mode = TYPE_MODE (type);
6914 unsignedp = TREE_UNSIGNED (type);
6916 /* We won't bother with BLKmode store-flag operations because it would mean
6917 passing a lot of information to emit_store_flag. */
6918 if (operand_mode == BLKmode)
6924 /* Get the rtx comparison code to use. We know that EXP is a comparison
6925 operation of some type. Some comparisons against 1 and -1 can be
6926 converted to comparisons with zero. Do so here so that the tests
6927 below will be aware that we have a comparison with zero. These
6928 tests will not catch constants in the first operand, but constants
6929 are rarely passed as the first operand. */
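/* Editorial examples of the conversions done below: a signed `x < 1'
   is rewritten as `x <= 0' and a signed `x > -1' as `x >= 0', so the
   tests further down only have to recognize comparisons against
   zero. */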
6931 switch (TREE_CODE (exp))
6940 if (integer_onep (arg1))
6941 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6943 code = unsignedp ? LTU : LT;
6946 if (integer_all_onesp (arg1))
6947 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6949 code = unsignedp ? LEU : LE;
6952 if (integer_all_onesp (arg1))
6953 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6955 code = unsignedp ? GTU : GT;
6958 if (integer_onep (arg1))
6959 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6961 code = unsignedp ? GEU : GE;
6967 /* Put a constant second. */
6968 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6970 tem = arg0; arg0 = arg1; arg1 = tem;
6971 code = swap_condition (code);
6974 /* If this is an equality or inequality test of a single bit, we can
6975 do this by shifting the bit being tested to the low-order bit and
6976 masking the result with the constant 1. If the condition was EQ,
6977 we xor it with 1. This does not require an scc insn and is faster
6978 than an scc insn even if we have it. */
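/* Hypothetical example (editorial): `(x & 8) != 0' is handled here as
   `(x >> 3) & 1', and the EQ form additionally xors the result with 1;
   no scc instruction is involved. */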
6980 if ((code == NE || code == EQ)
6981 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6982 && integer_pow2p (TREE_OPERAND (arg0, 1))
6983 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
6985 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6986 NULL_RTX, VOIDmode, 0)));
6988 if (subtarget == 0 || GET_CODE (subtarget) != REG
6989 || GET_MODE (subtarget) != operand_mode
6990 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6993 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6996 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6997 size_int (bitnum), target, 1);
6999 if (GET_MODE (op0) != mode)
7000 op0 = convert_to_mode (mode, op0, 1);
7002 if (bitnum != TYPE_PRECISION (type) - 1)
7003 op0 = expand_and (op0, const1_rtx, target);
7005 if ((code == EQ && ! invert) || (code == NE && invert))
7006 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7012 /* Now see if we are likely to be able to do this. Return if not. */
7013 if (! can_compare_p (operand_mode))
7015 icode = setcc_gen_code[(int) code];
7016 if (icode == CODE_FOR_nothing
7017 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7019 /* We can only do this if it is one of the special cases that
7020 can be handled without an scc insn. */
7021 if ((code == LT && integer_zerop (arg1))
7022 || (! only_cheap && code == GE && integer_zerop (arg1)))
7024 else if (BRANCH_COST >= 0
7025 && ! only_cheap && (code == NE || code == EQ)
7026 && TREE_CODE (type) != REAL_TYPE
7027 && ((abs_optab->handlers[(int) operand_mode].insn_code
7028 != CODE_FOR_nothing)
7029 || (ffs_optab->handlers[(int) operand_mode].insn_code
7030 != CODE_FOR_nothing)))
7036 preexpand_calls (exp);
7037 if (subtarget == 0 || GET_CODE (subtarget) != REG
7038 || GET_MODE (subtarget) != operand_mode
7039 || ! safe_from_p (subtarget, arg1))
7042 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7043 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7046 target = gen_reg_rtx (mode);
7048 result = emit_store_flag (target, code, op0, op1, operand_mode,
7054 result = expand_binop (mode, xor_optab, result, const1_rtx,
7055 result, 0, OPTAB_LIB_WIDEN);
7059 /* If this failed, we have to do this with set/compare/jump/set code. */
7060 if (target == 0 || GET_CODE (target) != REG
7061 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7062 target = gen_reg_rtx (GET_MODE (target));
7064 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7065 result = compare_from_rtx (op0, op1, code, unsignedp,
7066 operand_mode, NULL_RTX, 0);
7067 if (GET_CODE (result) == CONST_INT)
7068 return (((result == const0_rtx && ! invert)
7069 || (result != const0_rtx && invert))
7070 ? const0_rtx : const1_rtx);
7072 label = gen_label_rtx ();
7073 if (bcc_gen_fctn[(int) code] == 0)
7076 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7077 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7083 /* Generate a tablejump instruction (used for switch statements). */
7085 #ifdef HAVE_tablejump
7087 /* INDEX is the value being switched on, with the lowest value
7088 in the table already subtracted.
7089 MODE is its expected mode (needed if INDEX is constant).
7090 RANGE is the length of the jump table.
7091 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7093 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7094 index value is out of range. */
7097 do_tablejump (index, mode, range, table_label, default_label)
7098 rtx index, range, table_label, default_label;
7099 enum machine_mode mode;
7101 register rtx temp, vector;
7103 /* Do an unsigned comparison (in the proper mode) between the index
7104 expression and the value which represents the length of the range.
7105 Since we just finished subtracting the lower bound of the range
7106 from the index expression, this comparison allows us to simultaneously
7107 check that the original index expression value is both greater than
7108 or equal to the minimum value of the range and less than or equal to
7109 the maximum value of the range. */
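/* Editorial restatement of the trick above in plain C, assuming
   low <= high and INDEX already biased by -low:

	(unsigned) (i - low) <= (unsigned) (high - low)

   is equivalent to `i >= low && i <= high', so the single unsigned
   comparison of INDEX against RANGE below replaces both bound
   checks. */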
7111 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
7112 emit_jump_insn (gen_bltu (default_label));
7114 /* If index is in range, it must fit in Pmode.
7115 Convert to Pmode so we can index with it. */
7117 index = convert_to_mode (Pmode, index, 1);
7119 /* If flag_force_addr were to affect this address
7120 it could interfere with the tricky assumptions made
7121 about addresses that contain label-refs,
7122 which may be valid only very near the tablejump itself. */
7123 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7124 GET_MODE_SIZE, because this indicates how large insns are. The other
7125 uses should all be Pmode, because they are addresses. This code
7126 could fail if addresses and insns are not the same size. */
7127 index = memory_address_noforce
7129 gen_rtx (PLUS, Pmode,
7130 gen_rtx (MULT, Pmode, index,
7131 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7132 gen_rtx (LABEL_REF, Pmode, table_label)));
7133 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7134 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7135 RTX_UNCHANGING_P (vector) = 1;
7136 convert_move (temp, vector, 0);
7138 emit_jump_insn (gen_tablejump (temp, table_label));
7140 #ifndef CASE_VECTOR_PC_RELATIVE
7141 /* If we are generating PIC code or if the table is PC-relative, the
7142 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7148 #endif /* HAVE_tablejump */