1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
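/* For example, CEIL (11, 4) is 3: eleven bytes occupy three four-byte
   units, the last only partly filled. */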
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
40 #ifdef STACK_GROWS_DOWNWARD
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
50 #define STACK_PUSH_CODE PRE_INC
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs returned. */
87 static rtx saveregs_value;
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
112 /* MOVE_RATIO is the number of move instructions that is better than a block move. */
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
119 /* A value of around 6 would minimize code size; infinity would minimize execution time. */
121 #define MOVE_RATIO 15
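/* Thus a block copy for which move_by_pieces_ninsns reports fewer than
   MOVE_RATIO move insns is expanded inline by move_by_pieces; larger
   copies fall back to a movstr pattern or to a library call
   (see emit_block_move below). */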
125 /* This array records the insn_code of insns to perform block moves. */
126 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
128 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
130 #ifndef SLOW_UNALIGNED_ACCESS
131 #define SLOW_UNALIGNED_ACCESS 0
134 /* This is run once per compilation to set up which modes can be used
135 directly in memory and to initialize the block move optab. */
141 enum machine_mode mode;
142 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
145 insn = emit_insn (gen_rtx (SET, 0, 0));
146 pat = PATTERN (insn);
148 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
149 mode = (enum machine_mode) ((int) mode + 1))
155 direct_load[(int) mode] = direct_store[(int) mode] = 0;
156 PUT_MODE (mem, mode);
158 /* See if there is some register that can be used in this mode and
159 directly loaded or stored from memory. */
161 if (mode != VOIDmode && mode != BLKmode)
162 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
163 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
166 if (! HARD_REGNO_MODE_OK (regno, mode))
169 reg = gen_rtx (REG, mode, regno);
172 SET_DEST (pat) = reg;
173 if (recog (pat, insn, &num_clobbers) >= 0)
174 direct_load[(int) mode] = 1;
177 SET_DEST (pat) = mem;
178 if (recog (pat, insn, &num_clobbers) >= 0)
179 direct_store[(int) mode] = 1;
182 movstr_optab[(int) mode] = CODE_FOR_nothing;
189 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
193 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
197 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
201 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
205 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
209 /* This is run at the start of compiling a function. */
216 pending_stack_adjust = 0;
217 inhibit_defer_pop = 0;
218 cleanups_this_call = 0;
223 /* Save all variables describing the current status into the structure *P.
224 This is used before starting a nested function. */
230 /* Instead of saving the postincrement queue, empty it. */
233 p->pending_stack_adjust = pending_stack_adjust;
234 p->inhibit_defer_pop = inhibit_defer_pop;
235 p->cleanups_this_call = cleanups_this_call;
236 p->saveregs_value = saveregs_value;
237 p->forced_labels = forced_labels;
239 pending_stack_adjust = 0;
240 inhibit_defer_pop = 0;
241 cleanups_this_call = 0;
246 /* Restore all variables describing the current status from the structure *P.
247 This is used after a nested function. */
250 restore_expr_status (p)
253 pending_stack_adjust = p->pending_stack_adjust;
254 inhibit_defer_pop = p->inhibit_defer_pop;
255 cleanups_this_call = p->cleanups_this_call;
256 saveregs_value = p->saveregs_value;
257 forced_labels = p->forced_labels;
260 /* Manage the queue of increment instructions to be output
261 for POSTINCREMENT_EXPR expressions, etc. */
263 static rtx pending_chain;
265 /* Queue up to increment (or change) VAR later. BODY says how:
266 BODY should be the same thing you would pass to emit_insn
267 to increment right away. It will go to emit_insn later on.
269 The value is a QUEUED expression to be used in place of VAR
270 where you want to guarantee the pre-incrementation value of VAR. */
273 enqueue_insn (var, body)
276 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
277 var, NULL_RTX, NULL_RTX, body, pending_chain);
278 return pending_chain;
281 /* Use protect_from_queue to convert a QUEUED expression
282 into something that you can put immediately into an instruction.
283 If the queued incrementation has not happened yet,
284 protect_from_queue returns the variable itself.
285 If the incrementation has happened, protect_from_queue returns a temp
286 that contains a copy of the old value of the variable.
288 Any time an rtx which might possibly be a QUEUED is to be put
289 into an instruction, it must be passed through protect_from_queue first.
290 QUEUED expressions are not meaningful in instructions.
292 Do not pass a value through protect_from_queue and then hold
293 on to it for a while before putting it in an instruction!
294 If the queue is flushed in between, incorrect code will result. */
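/* For illustration (hypothetical operands): code about to build an insn
   that reads OP0 and stores into OP1 would first do

       op0 = protect_from_queue (op0, 0);
       op1 = protect_from_queue (op1, 1);

   so that any QUEUED left over from a postincrement is replaced by
   something the insn can actually use. */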
297 protect_from_queue (x, modify)
301 register RTX_CODE code = GET_CODE (x);
303 #if 0 /* A QUEUED can hang around after the queue is forced out. */
304 /* Shortcut for most common case. */
305 if (pending_chain == 0)
311 /* A special hack for read access to (MEM (QUEUED ...))
312 to facilitate use of autoincrement.
313 Make a copy of the contents of the memory location
314 rather than a copy of the address, but not
315 if the value is of mode BLKmode. */
316 if (code == MEM && GET_MODE (x) != BLKmode
317 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
319 register rtx y = XEXP (x, 0);
320 XEXP (x, 0) = QUEUED_VAR (y);
323 register rtx temp = gen_reg_rtx (GET_MODE (x));
324 emit_insn_before (gen_move_insn (temp, x),
330 /* Otherwise, recursively protect the subexpressions of all
331 the kinds of rtx's that can contain a QUEUED. */
333 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
334 else if (code == PLUS || code == MULT)
336 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
337 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
341 /* If the increment has not happened, use the variable itself. */
342 if (QUEUED_INSN (x) == 0)
343 return QUEUED_VAR (x);
344 /* If the increment has happened and a pre-increment copy exists, use that copy. */
346 if (QUEUED_COPY (x) != 0)
347 return QUEUED_COPY (x);
348 /* The increment has happened but we haven't set up a pre-increment copy.
349 Set one up now, and use it. */
350 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
351 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
353 return QUEUED_COPY (x);
356 /* Return nonzero if X contains a QUEUED expression:
357 if it contains anything that will be altered by a queued increment.
358 We handle only combinations of MEM, PLUS, MINUS and MULT operators
359 since memory addresses generally contain only those. */
365 register enum rtx_code code = GET_CODE (x);
371 return queued_subexp_p (XEXP (x, 0));
375 return queued_subexp_p (XEXP (x, 0))
376 || queued_subexp_p (XEXP (x, 1));
381 /* Perform all the pending incrementations. */
387 while (p = pending_chain)
389 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
390 pending_chain = QUEUED_NEXT (p);
401 /* Copy data from FROM to TO, where the machine modes are not the same.
402 Both modes may be integer, or both may be floating.
403 UNSIGNEDP should be nonzero if FROM is an unsigned type.
404 This causes zero-extension instead of sign-extension. */
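/* For example (hypothetical operands), widening a QImode value into an
   SImode register with zero-extension would be requested as

       convert_move (si_reg, qi_reg, 1);

   a zero UNSIGNEDP would request sign-extension instead. */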
407 convert_move (to, from, unsignedp)
408 register rtx to, from;
411 enum machine_mode to_mode = GET_MODE (to);
412 enum machine_mode from_mode = GET_MODE (from);
413 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
414 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
418 /* rtx code for making an equivalent value. */
419 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
421 to = protect_from_queue (to, 1);
422 from = protect_from_queue (from, 0);
424 if (to_real != from_real)
427 /* If FROM is a SUBREG that indicates that we have already done at least
428 the required extension, strip it. We don't handle such SUBREGs as TO here. */
431 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
432 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
433 >= GET_MODE_SIZE (to_mode))
434 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
435 from = gen_lowpart (to_mode, from), from_mode = to_mode;
437 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
440 if (to_mode == from_mode
441 || (from_mode == VOIDmode && CONSTANT_P (from)))
443 emit_move_insn (to, from);
449 #ifdef HAVE_extendsfdf2
450 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
452 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
456 #ifdef HAVE_extendsfxf2
457 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
459 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
463 #ifdef HAVE_extendsftf2
464 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
466 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
470 #ifdef HAVE_extenddfxf2
471 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
473 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
477 #ifdef HAVE_extenddftf2
478 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
480 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
484 #ifdef HAVE_truncdfsf2
485 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
487 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
491 #ifdef HAVE_truncxfsf2
492 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
494 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
498 #ifdef HAVE_trunctfsf2
499 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
501 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
505 #ifdef HAVE_truncxfdf2
506 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
508 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
512 #ifdef HAVE_trunctfdf2
513 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
515 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
527 libcall = extendsfdf2_libfunc;
531 libcall = extendsfxf2_libfunc;
535 libcall = extendsftf2_libfunc;
544 libcall = truncdfsf2_libfunc;
548 libcall = extenddfxf2_libfunc;
552 libcall = extenddftf2_libfunc;
561 libcall = truncxfsf2_libfunc;
565 libcall = truncxfdf2_libfunc;
574 libcall = trunctfsf2_libfunc;
578 libcall = trunctfdf2_libfunc;
584 if (libcall == (rtx) 0)
585 /* This conversion is not implemented yet. */
588 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
589 emit_move_insn (to, hard_libcall_value (to_mode));
593 /* Now both modes are integers. */
595 /* Handle expanding beyond a word. */
596 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
597 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
604 enum machine_mode lowpart_mode;
605 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
607 /* Try converting directly if the insn is supported. */
608 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
611 /* If FROM is a SUBREG, put it into a register. Do this
612 so that we always generate the same set of insns for
613 better cse'ing; if an intermediate assignment occurred,
614 we won't be doing the operation directly on the SUBREG. */
615 if (optimize > 0 && GET_CODE (from) == SUBREG)
616 from = force_reg (from_mode, from);
617 emit_unop_insn (code, to, from, equiv_code);
620 /* Next, try converting via full word. */
621 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
622 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
623 != CODE_FOR_nothing))
625 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
626 emit_unop_insn (code, to,
627 gen_lowpart (word_mode, to), equiv_code);
631 /* No special multiword conversion insn; do it by hand. */
634 /* Get a copy of FROM widened to a word, if necessary. */
635 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
636 lowpart_mode = word_mode;
638 lowpart_mode = from_mode;
640 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
642 lowpart = gen_lowpart (lowpart_mode, to);
643 emit_move_insn (lowpart, lowfrom);
645 /* Compute the value to put in each remaining word. */
647 fill_value = const0_rtx;
652 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
653 && STORE_FLAG_VALUE == -1)
655 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
657 fill_value = gen_reg_rtx (word_mode);
658 emit_insn (gen_slt (fill_value));
664 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
665 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
667 fill_value = convert_to_mode (word_mode, fill_value, 1);
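/* The arithmetic shift above replicates the sign bit of LOWFROM across
   an entire word, so FILL_VALUE is all zeros or all ones according to
   the sign of FROM. */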
671 /* Fill the remaining words. */
672 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
674 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
675 rtx subword = operand_subword (to, index, 1, to_mode);
680 if (fill_value != subword)
681 emit_move_insn (subword, fill_value);
684 insns = get_insns ();
687 emit_no_conflict_block (insns, to, from, NULL_RTX,
688 gen_rtx (equiv_code, to_mode, from));
692 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
694 convert_move (to, gen_lowpart (word_mode, from), 0);
698 /* Handle pointer conversion */ /* SPEE 900220 */
699 if (to_mode == PSImode)
701 if (from_mode != SImode)
702 from = convert_to_mode (SImode, from, unsignedp);
704 #ifdef HAVE_truncsipsi
707 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
710 #endif /* HAVE_truncsipsi */
714 if (from_mode == PSImode)
716 if (to_mode != SImode)
718 from = convert_to_mode (SImode, from, unsignedp);
723 #ifdef HAVE_extendpsisi
724 if (HAVE_extendpsisi)
726 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
729 #endif /* HAVE_extendpsisi */
734 /* Now follow all the conversions between integers
735 no more than a word long. */
737 /* For truncation, usually we can just refer to FROM in a narrower mode. */
738 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
739 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
740 GET_MODE_BITSIZE (from_mode))
741 && ((GET_CODE (from) == MEM
742 && ! MEM_VOLATILE_P (from)
743 && direct_load[(int) to_mode]
744 && ! mode_dependent_address_p (XEXP (from, 0)))
745 || GET_CODE (from) == REG
746 || GET_CODE (from) == SUBREG))
748 emit_move_insn (to, gen_lowpart (to_mode, from));
752 /* Handle extension. */
753 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
755 /* Convert directly if that works. */
756 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
759 /* If FROM is a SUBREG, put it into a register. Do this
760 so that we always generate the same set of insns for
761 better cse'ing; if an intermediate assignment occurred,
762 we won't be doing the operation directly on the SUBREG. */
763 if (optimize > 0 && GET_CODE (from) == SUBREG)
764 from = force_reg (from_mode, from);
765 emit_unop_insn (code, to, from, equiv_code);
770 enum machine_mode intermediate;
772 /* Search for a mode to convert via. */
773 for (intermediate = from_mode; intermediate != VOIDmode;
774 intermediate = GET_MODE_WIDER_MODE (intermediate))
775 if ((can_extend_p (to_mode, intermediate, unsignedp)
777 && (can_extend_p (intermediate, from_mode, unsignedp)
778 != CODE_FOR_nothing))
780 convert_move (to, convert_to_mode (intermediate, from,
781 unsignedp), unsignedp);
785 /* No suitable intermediate mode. */
790 /* Support special truncate insns for certain modes. */
792 if (from_mode == DImode && to_mode == SImode)
794 #ifdef HAVE_truncdisi2
797 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
801 convert_move (to, force_reg (from_mode, from), unsignedp);
805 if (from_mode == DImode && to_mode == HImode)
807 #ifdef HAVE_truncdihi2
810 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
814 convert_move (to, force_reg (from_mode, from), unsignedp);
818 if (from_mode == DImode && to_mode == QImode)
820 #ifdef HAVE_truncdiqi2
823 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
827 convert_move (to, force_reg (from_mode, from), unsignedp);
831 if (from_mode == SImode && to_mode == HImode)
833 #ifdef HAVE_truncsihi2
836 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
840 convert_move (to, force_reg (from_mode, from), unsignedp);
844 if (from_mode == SImode && to_mode == QImode)
846 #ifdef HAVE_truncsiqi2
849 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
853 convert_move (to, force_reg (from_mode, from), unsignedp);
857 if (from_mode == HImode && to_mode == QImode)
859 #ifdef HAVE_trunchiqi2
862 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
866 convert_move (to, force_reg (from_mode, from), unsignedp);
870 /* Handle truncation of volatile memrefs, and so on;
871 the things that couldn't be truncated directly,
872 and for which there was no special instruction. */
873 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
875 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
876 emit_move_insn (to, temp);
880 /* Mode combination is not recognized. */
884 /* Return an rtx for a value that would result
885 from converting X to mode MODE.
886 Both X and MODE may be floating, or both integer.
887 UNSIGNEDP is nonzero if X is an unsigned value.
888 This can be done by referring to a part of X in place
889 or by copying to a new temporary with conversion.
891 This function *must not* call protect_from_queue
892 except when putting X into an insn (in which case convert_move does it). */
895 convert_to_mode (mode, x, unsignedp)
896 enum machine_mode mode;
902 /* If FROM is a SUBREG that indicates that we have already done at least
903 the required extension, strip it. */
905 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
906 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
907 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
908 x = gen_lowpart (mode, x);
910 if (mode == GET_MODE (x))
913 /* There is one case that we must handle specially: If we are converting
914 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
915 we are to interpret the constant as unsigned, gen_lowpart will do
916 the wrong thing if the constant appears negative. What we want to do is
917 make the high-order word of the constant zero, not all ones. */
919 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
920 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
921 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
922 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
924 /* We can do this with a gen_lowpart if both desired and current modes
925 are integer, and this is either a constant integer, a register, or a
926 non-volatile MEM. Except for the constant case, we must be narrowing the operand. */
929 if (GET_CODE (x) == CONST_INT
930 || (GET_MODE_CLASS (mode) == MODE_INT
931 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
932 && (GET_CODE (x) == CONST_DOUBLE
933 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
934 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
935 && direct_load[(int) mode]
936 || GET_CODE (x) == REG)))))
937 return gen_lowpart (mode, x);
939 temp = gen_reg_rtx (mode);
940 convert_move (temp, x, unsignedp);
944 /* Generate several move instructions to copy LEN bytes
945 from block FROM to block TO. (These are MEM rtx's with BLKmode).
946 The caller must pass FROM and TO
947 through protect_from_queue before calling.
948 ALIGN (in bytes) is maximum alignment we can assume. */
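/* For instance (hypothetical operands), copying 11 bytes between two
   word-aligned BLKmode MEMs with

       move_by_pieces (to_mem, from_mem, 11, UNITS_PER_WORD);

   would, on a typical 32-bit target, emit two SImode moves, one HImode
   move and one QImode move. */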
950 struct move_by_pieces
959 int explicit_inc_from;
965 static void move_by_pieces_1 ();
966 static int move_by_pieces_ninsns ();
969 move_by_pieces (to, from, len, align)
973 struct move_by_pieces data;
974 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
975 int max_size = MOVE_MAX + 1;
978 data.to_addr = to_addr;
979 data.from_addr = from_addr;
983 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
984 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
986 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
987 || GET_CODE (from_addr) == POST_INC
988 || GET_CODE (from_addr) == POST_DEC);
990 data.explicit_inc_from = 0;
991 data.explicit_inc_to = 0;
993 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
994 if (data.reverse) data.offset = len;
997 /* If copying requires more than two move insns,
998 copy addresses to registers (to make displacements shorter)
999 and use post-increment if available. */
1000 if (!(data.autinc_from && data.autinc_to)
1001 && move_by_pieces_ninsns (len, align) > 2)
1003 #ifdef HAVE_PRE_DECREMENT
1004 if (data.reverse && ! data.autinc_from)
1006 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1007 data.autinc_from = 1;
1008 data.explicit_inc_from = -1;
1011 #ifdef HAVE_POST_INCREMENT
1012 if (! data.autinc_from)
1014 data.from_addr = copy_addr_to_reg (from_addr);
1015 data.autinc_from = 1;
1016 data.explicit_inc_from = 1;
1019 if (!data.autinc_from && CONSTANT_P (from_addr))
1020 data.from_addr = copy_addr_to_reg (from_addr);
1021 #ifdef HAVE_PRE_DECREMENT
1022 if (data.reverse && ! data.autinc_to)
1024 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1026 data.explicit_inc_to = -1;
1029 #ifdef HAVE_POST_INCREMENT
1030 if (! data.reverse && ! data.autinc_to)
1032 data.to_addr = copy_addr_to_reg (to_addr);
1034 data.explicit_inc_to = 1;
1037 if (!data.autinc_to && CONSTANT_P (to_addr))
1038 data.to_addr = copy_addr_to_reg (to_addr);
1041 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1042 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1045 /* First move what we can in the largest integer mode, then go to
1046 successively smaller modes. */
1048 while (max_size > 1)
1050 enum machine_mode mode = VOIDmode, tmode;
1051 enum insn_code icode;
1053 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1054 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1055 if (GET_MODE_SIZE (tmode) < max_size)
1058 if (mode == VOIDmode)
1061 icode = mov_optab->handlers[(int) mode].insn_code;
1062 if (icode != CODE_FOR_nothing
1063 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1064 GET_MODE_SIZE (mode)))
1065 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1067 max_size = GET_MODE_SIZE (mode);
1070 /* The code above should have handled everything. */
1075 /* Return number of insns required to move L bytes by pieces.
1076 ALIGN (in bytes) is maximum alignment we can assume. */
1079 move_by_pieces_ninsns (l, align)
1083 register int n_insns = 0;
1084 int max_size = MOVE_MAX + 1;
1086 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1087 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1090 while (max_size > 1)
1092 enum machine_mode mode = VOIDmode, tmode;
1093 enum insn_code icode;
1095 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1096 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1097 if (GET_MODE_SIZE (tmode) < max_size)
1100 if (mode == VOIDmode)
1103 icode = mov_optab->handlers[(int) mode].insn_code;
1104 if (icode != CODE_FOR_nothing
1105 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1106 GET_MODE_SIZE (mode)))
1107 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1109 max_size = GET_MODE_SIZE (mode);
1115 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1116 with move instructions for mode MODE. GENFUN is the gen_... function
1117 to make a move insn for that mode. DATA has all the other info. */
1120 move_by_pieces_1 (genfun, mode, data)
1122 enum machine_mode mode;
1123 struct move_by_pieces *data;
1125 register int size = GET_MODE_SIZE (mode);
1126 register rtx to1, from1;
1128 while (data->len >= size)
1130 if (data->reverse) data->offset -= size;
1132 to1 = (data->autinc_to
1133 ? gen_rtx (MEM, mode, data->to_addr)
1134 : change_address (data->to, mode,
1135 plus_constant (data->to_addr, data->offset)));
1138 ? gen_rtx (MEM, mode, data->from_addr)
1139 : change_address (data->from, mode,
1140 plus_constant (data->from_addr, data->offset)));
1142 #ifdef HAVE_PRE_DECREMENT
1143 if (data->explicit_inc_to < 0)
1144 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1145 if (data->explicit_inc_from < 0)
1146 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1149 emit_insn ((*genfun) (to1, from1));
1150 #ifdef HAVE_POST_INCREMENT
1151 if (data->explicit_inc_to > 0)
1152 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1153 if (data->explicit_inc_from > 0)
1154 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1157 if (! data->reverse) data->offset += size;
1163 /* Emit code to move a block Y to a block X.
1164 This may be done with string-move instructions,
1165 with multiple scalar move instructions, or with a library call.
1167 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1169 SIZE is an rtx that says how long they are.
1170 ALIGN is the maximum alignment we can assume they have,
1171 measured in bytes. */
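/* For example (hypothetical operands), a 24-byte structure copy with
   word alignment could be emitted as

       emit_block_move (dest_mem, src_mem, GEN_INT (24), UNITS_PER_WORD);

   leaving this routine to choose among move_by_pieces, a movstr
   pattern, and a library call. */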
1174 emit_block_move (x, y, size, align)
1179 if (GET_MODE (x) != BLKmode)
1182 if (GET_MODE (y) != BLKmode)
1185 x = protect_from_queue (x, 1);
1186 y = protect_from_queue (y, 0);
1187 size = protect_from_queue (size, 0);
1189 if (GET_CODE (x) != MEM)
1191 if (GET_CODE (y) != MEM)
1196 if (GET_CODE (size) == CONST_INT
1197 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1198 move_by_pieces (x, y, INTVAL (size), align);
1201 /* Try the most limited insn first, because there's no point
1202 including more than one in the machine description unless
1203 the more limited one has some advantage. */
1205 rtx opalign = GEN_INT (align);
1206 enum machine_mode mode;
1208 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1209 mode = GET_MODE_WIDER_MODE (mode))
1211 enum insn_code code = movstr_optab[(int) mode];
1213 if (code != CODE_FOR_nothing
1214 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1215 here because if SIZE is less than the mode mask, as it is
1216 returned by the macro, it will definitely be less than the
1217 actual mode mask. */
1218 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1219 && (insn_operand_predicate[(int) code][0] == 0
1220 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1221 && (insn_operand_predicate[(int) code][1] == 0
1222 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1223 && (insn_operand_predicate[(int) code][3] == 0
1224 || (*insn_operand_predicate[(int) code][3]) (opalign,
1228 rtx last = get_last_insn ();
1231 op2 = convert_to_mode (mode, size, 1);
1232 if (insn_operand_predicate[(int) code][2] != 0
1233 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1234 op2 = copy_to_mode_reg (mode, op2);
1236 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1243 delete_insns_since (last);
1247 #ifdef TARGET_MEM_FUNCTIONS
1248 emit_library_call (memcpy_libfunc, 0,
1249 VOIDmode, 3, XEXP (x, 0), Pmode,
1251 convert_to_mode (Pmode, size, 1), Pmode);
1253 emit_library_call (bcopy_libfunc, 0,
1254 VOIDmode, 3, XEXP (y, 0), Pmode,
1256 convert_to_mode (Pmode, size, 1), Pmode);
1261 /* Copy all or part of a value X into registers starting at REGNO.
1262 The number of registers to be filled is NREGS. */
1265 move_block_to_reg (regno, x, nregs, mode)
1269 enum machine_mode mode;
1274 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1275 x = validize_mem (force_const_mem (mode, x));
1277 /* See if the machine can do this with a load multiple insn. */
1278 #ifdef HAVE_load_multiple
1279 last = get_last_insn ();
1280 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1288 delete_insns_since (last);
1291 for (i = 0; i < nregs; i++)
1292 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1293 operand_subword_force (x, i, mode));
1296 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1297 The number of registers to be filled is NREGS. */
1300 move_block_from_reg (regno, x, nregs)
1308 /* See if the machine can do this with a store multiple insn. */
1309 #ifdef HAVE_store_multiple
1310 last = get_last_insn ();
1311 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1319 delete_insns_since (last);
1322 for (i = 0; i < nregs; i++)
1324 rtx tem = operand_subword (x, i, 1, BLKmode);
1329 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1333 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1336 use_regs (regno, nregs)
1342 for (i = 0; i < nregs; i++)
1343 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1346 /* Mark the instructions since PREV as a libcall block.
1347 Add a REG_LIBCALL note to the first insn after PREV and a REG_RETVAL note to the most recent insn. */
1356 /* Find the instructions to mark */
1358 insn_first = NEXT_INSN (prev);
1360 insn_first = get_insns ();
1362 insn_last = get_last_insn ();
1364 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1365 REG_NOTES (insn_last));
1367 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1368 REG_NOTES (insn_first));
1371 /* Write zeros through the storage of OBJECT.
1372 If OBJECT has BLKmode, SIZE is its length in bytes. */
1375 clear_storage (object, size)
1379 if (GET_MODE (object) == BLKmode)
1381 #ifdef TARGET_MEM_FUNCTIONS
1382 emit_library_call (memset_libfunc, 0,
1384 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1385 GEN_INT (size), Pmode);
1387 emit_library_call (bzero_libfunc, 0,
1389 XEXP (object, 0), Pmode,
1390 GEN_INT (size), Pmode);
1394 emit_move_insn (object, const0_rtx);
1397 /* Generate code to copy Y into X.
1398 Both Y and X must have the same mode, except that
1399 Y can be a constant with VOIDmode.
1400 This mode cannot be BLKmode; use emit_block_move for that.
1402 Return the last instruction emitted. */
1405 emit_move_insn (x, y)
1408 enum machine_mode mode = GET_MODE (x);
1409 enum machine_mode submode;
1410 enum mode_class class = GET_MODE_CLASS (mode);
1413 x = protect_from_queue (x, 1);
1414 y = protect_from_queue (y, 0);
1416 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1419 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1420 y = force_const_mem (mode, y);
1422 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
1424 if (GET_CODE (x) == MEM
1425 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1426 && ! push_operand (x, GET_MODE (x)))
1428 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1429 x = change_address (x, VOIDmode, XEXP (x, 0));
1431 if (GET_CODE (y) == MEM
1432 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1434 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1435 y = change_address (y, VOIDmode, XEXP (y, 0));
1437 if (mode == BLKmode)
1440 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1441 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1442 (class == MODE_COMPLEX_INT
1443 ? MODE_INT : MODE_FLOAT),
1446 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1448 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1450 /* Expand complex moves by moving real part and imag part, if possible. */
1451 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1452 && submode != BLKmode
1453 && (mov_optab->handlers[(int) submode].insn_code
1454 != CODE_FOR_nothing))
1456 /* Don't split destination if it is a stack push. */
1457 int stack = push_operand (x, GET_MODE (x));
1458 rtx prev = get_last_insn ();
1460 /* Tell flow that the whole of the destination is being set. */
1461 if (GET_CODE (x) == REG)
1462 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1464 /* If this is a stack, push the highpart first, so it
1465 will be in the argument order.
1467 In that case, change_address is used only to convert
1468 the mode, not to change the address. */
1469 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1470 ((stack ? change_address (x, submode, (rtx) 0)
1471 : gen_highpart (submode, x)),
1472 gen_highpart (submode, y)));
1473 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1474 ((stack ? change_address (x, submode, (rtx) 0)
1475 : gen_lowpart (submode, x)),
1476 gen_lowpart (submode, y)));
1481 /* This will handle any multi-word mode that lacks a move_insn pattern.
1482 However, you will get better code if you define such patterns,
1483 even if they must turn into multiple assembler instructions. */
1484 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1487 rtx prev_insn = get_last_insn ();
1490 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1493 rtx xpart = operand_subword (x, i, 1, mode);
1494 rtx ypart = operand_subword (y, i, 1, mode);
1496 /* If we can't get a part of Y, put Y into memory if it is a
1497 constant. Otherwise, force it into a register. If we still
1498 can't get a part of Y, abort. */
1499 if (ypart == 0 && CONSTANT_P (y))
1501 y = force_const_mem (mode, y);
1502 ypart = operand_subword (y, i, 1, mode);
1504 else if (ypart == 0)
1505 ypart = operand_subword_force (y, i, mode);
1507 if (xpart == 0 || ypart == 0)
1510 last_insn = emit_move_insn (xpart, ypart);
1512 /* Mark these insns as a libcall block. */
1513 group_insns (prev_insn);
1521 /* Pushing data onto the stack. */
1523 /* Push a block of length SIZE (perhaps variable)
1524 and return an rtx to address the beginning of the block.
1525 Note that it is not possible for the value returned to be a QUEUED.
1526 The value may be virtual_outgoing_args_rtx.
1528 EXTRA is the number of bytes of padding to push in addition to SIZE.
1529 BELOW nonzero means this padding comes at low addresses;
1530 otherwise, the padding comes at high addresses. */
1533 push_block (size, extra, below)
1538 if (CONSTANT_P (size))
1539 anti_adjust_stack (plus_constant (size, extra));
1540 else if (GET_CODE (size) == REG && extra == 0)
1541 anti_adjust_stack (size);
1544 rtx temp = copy_to_mode_reg (Pmode, size);
1546 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1547 temp, 0, OPTAB_LIB_WIDEN);
1548 anti_adjust_stack (temp);
1551 #ifdef STACK_GROWS_DOWNWARD
1552 temp = virtual_outgoing_args_rtx;
1553 if (extra != 0 && below)
1554 temp = plus_constant (temp, extra);
1556 if (GET_CODE (size) == CONST_INT)
1557 temp = plus_constant (virtual_outgoing_args_rtx,
1558 - INTVAL (size) - (below ? 0 : extra));
1559 else if (extra != 0 && !below)
1560 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1561 negate_rtx (Pmode, plus_constant (size, extra)));
1563 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1564 negate_rtx (Pmode, size));
1567 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1573 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1576 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
1578 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
1580 SIZE is an rtx for the size of data to be copied (in bytes),
1581 needed only if X is BLKmode.
1583 ALIGN (in bytes) is maximum alignment we can assume.
1585 If PARTIAL is nonzero, then copy that many of the first words
1586 of X into registers starting with REG, and push the rest of X.
1587 The amount of space pushed is decreased by PARTIAL words,
1588 rounded *down* to a multiple of PARM_BOUNDARY.
1589 REG must be a hard register in this case.
1591 EXTRA is the amount in bytes of extra space to leave next to this arg.
1592 This is ignored if an argument block has already been allocated.
1594 On a machine that lacks real push insns, ARGS_ADDR is the address of
1595 the bottom of the argument block for this call. We use indexing off there
1596 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1597 argument block has not been preallocated.
1599 ARGS_SO_FAR is the size of args previously pushed for this call. */
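/* For example (hypothetical operands), pushing a single word with no
   partial-register part and no padding looks like

       emit_push_insn (word_rtx, word_mode, NULL_TREE, NULL_RTX, align,
                       0, NULL_RTX, 0, args_addr, args_so_far);

   which is essentially what the partial-scalar loop below does for each
   word it pushes. */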
1602 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1603 args_addr, args_so_far)
1605 enum machine_mode mode;
1616 enum direction stack_direction
1617 #ifdef STACK_GROWS_DOWNWARD
1623 /* Decide where to pad the argument: `downward' for below,
1624 `upward' for above, or `none' for don't pad it.
1625 Default is below for small data on big-endian machines; else above. */
1626 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1628 /* Invert direction if stack is post-update. */
1629 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1630 if (where_pad != none)
1631 where_pad = (where_pad == downward ? upward : downward);
1633 xinner = x = protect_from_queue (x, 0);
1635 if (mode == BLKmode)
1637 /* Copy a block into the stack, entirely or partially. */
1640 int used = partial * UNITS_PER_WORD;
1641 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1649 /* USED is now the # of bytes we need not copy to the stack
1650 because registers will take care of them. */
1653 xinner = change_address (xinner, BLKmode,
1654 plus_constant (XEXP (xinner, 0), used));
1656 /* If the partial register-part of the arg counts in its stack size,
1657 skip the part of stack space corresponding to the registers.
1658 Otherwise, start copying to the beginning of the stack space,
1659 by setting SKIP to 0. */
1660 #ifndef REG_PARM_STACK_SPACE
1666 #ifdef PUSH_ROUNDING
1667 /* Do it with several push insns if that doesn't take lots of insns
1668 and if there is no difficulty with push insns that skip bytes
1669 on the stack for alignment purposes. */
1671 && GET_CODE (size) == CONST_INT
1673 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1675 /* Here we avoid the case of a structure whose weak alignment
1676 forces many pushes of a small amount of data,
1677 and such small pushes do rounding that causes trouble. */
1678 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1679 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1680 || PUSH_ROUNDING (align) == align)
1681 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1683 /* Push padding now if padding above and stack grows down,
1684 or if padding below and stack grows up.
1685 But if space already allocated, this has already been done. */
1686 if (extra && args_addr == 0
1687 && where_pad != none && where_pad != stack_direction)
1688 anti_adjust_stack (GEN_INT (extra));
1690 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1691 INTVAL (size) - used, align);
1694 #endif /* PUSH_ROUNDING */
1696 /* Otherwise make space on the stack and copy the data
1697 to the address of that space. */
1699 /* Deduct words put into registers from the size we must copy. */
1702 if (GET_CODE (size) == CONST_INT)
1703 size = GEN_INT (INTVAL (size) - used);
1705 size = expand_binop (GET_MODE (size), sub_optab, size,
1706 GEN_INT (used), NULL_RTX, 0,
1710 /* Get the address of the stack space.
1711 In this case, we do not deal with EXTRA separately.
1712 A single stack adjust will do. */
1715 temp = push_block (size, extra, where_pad == downward);
1718 else if (GET_CODE (args_so_far) == CONST_INT)
1719 temp = memory_address (BLKmode,
1720 plus_constant (args_addr,
1721 skip + INTVAL (args_so_far)));
1723 temp = memory_address (BLKmode,
1724 plus_constant (gen_rtx (PLUS, Pmode,
1725 args_addr, args_so_far),
1728 /* TEMP is the address of the block. Copy the data there. */
1729 if (GET_CODE (size) == CONST_INT
1730 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1733 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1734 INTVAL (size), align);
1737 /* Try the most limited insn first, because there's no point
1738 including more than one in the machine description unless
1739 the more limited one has some advantage. */
1740 #ifdef HAVE_movstrqi
1742 && GET_CODE (size) == CONST_INT
1743 && ((unsigned) INTVAL (size)
1744 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1746 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1747 xinner, size, GEN_INT (align)));
1751 #ifdef HAVE_movstrhi
1753 && GET_CODE (size) == CONST_INT
1754 && ((unsigned) INTVAL (size)
1755 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1757 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1758 xinner, size, GEN_INT (align)));
1762 #ifdef HAVE_movstrsi
1765 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1766 xinner, size, GEN_INT (align)));
1770 #ifdef HAVE_movstrdi
1773 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1774 xinner, size, GEN_INT (align)));
1779 #ifndef ACCUMULATE_OUTGOING_ARGS
1780 /* If the source is referenced relative to the stack pointer,
1781 copy it to another register to stabilize it. We do not need
1782 to do this if we know that we won't be changing sp. */
1784 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1785 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1786 temp = copy_to_reg (temp);
1789 /* Make inhibit_defer_pop nonzero around the library call
1790 to force it to pop the bcopy-arguments right away. */
1792 #ifdef TARGET_MEM_FUNCTIONS
1793 emit_library_call (memcpy_libfunc, 0,
1794 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1797 emit_library_call (bcopy_libfunc, 0,
1798 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1804 else if (partial > 0)
1806 /* Scalar partly in registers. */
1808 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1811 /* # words of start of argument
1812 that we must make space for but need not store. */
1813 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1814 int args_offset = INTVAL (args_so_far);
1817 /* Push padding now if padding above and stack grows down,
1818 or if padding below and stack grows up.
1819 But if space already allocated, this has already been done. */
1820 if (extra && args_addr == 0
1821 && where_pad != none && where_pad != stack_direction)
1822 anti_adjust_stack (GEN_INT (extra));
1824 /* If we make space by pushing it, we might as well push
1825 the real data. Otherwise, we can leave OFFSET nonzero
1826 and leave the space uninitialized. */
1830 /* Now NOT_STACK gets the number of words that we don't need to
1831 allocate on the stack. */
1832 not_stack = partial - offset;
1834 /* If the partial register-part of the arg counts in its stack size,
1835 skip the part of stack space corresponding to the registers.
1836 Otherwise, start copying to the beginning of the stack space,
1837 by setting SKIP to 0. */
1838 #ifndef REG_PARM_STACK_SPACE
1844 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1845 x = validize_mem (force_const_mem (mode, x));
1847 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1848 SUBREGs of such registers are not allowed. */
1849 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1850 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1851 x = copy_to_reg (x);
1853 /* Loop over all the words allocated on the stack for this arg. */
1854 /* We can do it by words, because any scalar bigger than a word
1855 has a size a multiple of a word. */
1856 #ifndef PUSH_ARGS_REVERSED
1857 for (i = not_stack; i < size; i++)
1859 for (i = size - 1; i >= not_stack; i--)
1861 if (i >= not_stack + offset)
1862 emit_push_insn (operand_subword_force (x, i, mode),
1863 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
1865 GEN_INT (args_offset + ((i - not_stack + skip)
1866 * UNITS_PER_WORD)));
1872 /* Push padding now if padding above and stack grows down,
1873 or if padding below and stack grows up.
1874 But if space already allocated, this has already been done. */
1875 if (extra && args_addr == 0
1876 && where_pad != none && where_pad != stack_direction)
1877 anti_adjust_stack (GEN_INT (extra));
1879 #ifdef PUSH_ROUNDING
1881 addr = gen_push_operand ();
1884 if (GET_CODE (args_so_far) == CONST_INT)
1886 = memory_address (mode,
1887 plus_constant (args_addr, INTVAL (args_so_far)));
1889 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
1892 emit_move_insn (gen_rtx (MEM, mode, addr), x);
1896 /* If part should go in registers, copy that part
1897 into the appropriate registers. Do this now, at the end,
1898 since mem-to-mem copies above may do function calls. */
1900 move_block_to_reg (REGNO (reg), x, partial, mode);
1902 if (extra && args_addr == 0 && where_pad == stack_direction)
1903 anti_adjust_stack (GEN_INT (extra));
1906 /* Output a library call to function FUN (a SYMBOL_REF rtx)
1907 (emitting the queue unless NO_QUEUE is nonzero),
1908 for a value of mode OUTMODE,
1909 with NARGS different arguments, passed as alternating rtx values
1910 and machine_modes to convert them to.
1911 The rtx values should have been passed through protect_from_queue already.
1913 NO_QUEUE will be true if and only if the library call is a `const' call
1914 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1915 to the variable is_const in expand_call.
1917 NO_QUEUE must be true for const calls, because if it isn't, then
1918 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
1919 and will be lost if the libcall sequence is optimized away.
1921 NO_QUEUE must be false for non-const calls, because if it isn't, the
1922 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
1923 optimized. For instance, the instruction scheduler may incorrectly
1924 move memory references across the non-const call. */
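/* A typical call (hypothetical operands) passes the operands as
   alternating value/mode pairs, for instance

       emit_library_call (memcpy_libfunc, 0, VOIDmode, 3,
                          dst_addr, Pmode, src_addr, Pmode,
                          size_rtx, Pmode);

   exactly as the block-move and block-clear code above does. */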
1927 emit_library_call (va_alist)
1931 struct args_size args_size;
1932 register int argnum;
1933 enum machine_mode outmode;
1940 CUMULATIVE_ARGS args_so_far;
1941 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1942 struct args_size offset; struct args_size size; };
1944 int old_inhibit_defer_pop = inhibit_defer_pop;
1949 orgfun = fun = va_arg (p, rtx);
1950 no_queue = va_arg (p, int);
1951 outmode = va_arg (p, enum machine_mode);
1952 nargs = va_arg (p, int);
1954 /* Copy all the libcall-arguments out of the varargs data
1955 and into a vector ARGVEC.
1957 Compute how to pass each argument. We only support a very small subset
1958 of the full argument passing conventions to limit complexity here since
1959 library functions shouldn't have many args. */
1961 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1963 INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1965 args_size.constant = 0;
1968 for (count = 0; count < nargs; count++)
1970 rtx val = va_arg (p, rtx);
1971 enum machine_mode mode = va_arg (p, enum machine_mode);
1973 /* We cannot convert the arg value to the mode the library wants here;
1974 must do it earlier where we know the signedness of the arg. */
1976 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1979 /* On some machines, there's no way to pass a float to a library fcn.
1980 Pass it as a double instead. */
1981 #ifdef LIBGCC_NEEDS_DOUBLE
1982 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
1983 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
1986 /* There's no need to call protect_from_queue, because
1987 either emit_move_insn or emit_push_insn will do that. */
1989 /* Make sure it is a reasonable operand for a move or push insn. */
1990 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1991 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
1992 val = force_operand (val, NULL_RTX);
1994 argvec[count].value = val;
1995 argvec[count].mode = mode;
1997 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1998 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2002 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2003 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2005 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2006 argvec[count].partial
2007 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2009 argvec[count].partial = 0;
2012 locate_and_pad_parm (mode, NULL_TREE,
2013 argvec[count].reg && argvec[count].partial == 0,
2014 NULL_TREE, &args_size, &argvec[count].offset,
2015 &argvec[count].size);
2017 if (argvec[count].size.var)
2020 #ifndef REG_PARM_STACK_SPACE
2021 if (argvec[count].partial)
2022 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2025 if (argvec[count].reg == 0 || argvec[count].partial != 0
2026 #ifdef REG_PARM_STACK_SPACE
2030 args_size.constant += argvec[count].size.constant;
2032 #ifdef ACCUMULATE_OUTGOING_ARGS
2033 /* If this arg is actually passed on the stack, it might be
2034 clobbering something we already put there (this library call might
2035 be inside the evaluation of an argument to a function whose call
2036 requires the stack). This will only occur when the library call
2037 has sufficient args to run out of argument registers. Abort in
2038 this case; if this ever occurs, code must be added to save and
2039 restore the arg slot. */
2041 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2045 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2049 /* If this machine requires an external definition for library
2050 functions, write one out. */
2051 assemble_external_libcall (fun);
2053 #ifdef STACK_BOUNDARY
2054 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2055 / STACK_BYTES) * STACK_BYTES);
2058 #ifdef REG_PARM_STACK_SPACE
2059 args_size.constant = MAX (args_size.constant,
2060 REG_PARM_STACK_SPACE ((tree) 0));
2063 #ifdef ACCUMULATE_OUTGOING_ARGS
2064 if (args_size.constant > current_function_outgoing_args_size)
2065 current_function_outgoing_args_size = args_size.constant;
2066 args_size.constant = 0;
2069 #ifndef PUSH_ROUNDING
2070 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2073 #ifdef PUSH_ARGS_REVERSED
2081 /* Push the args that need to be pushed. */
2083 for (count = 0; count < nargs; count++, argnum += inc)
2085 register enum machine_mode mode = argvec[argnum].mode;
2086 register rtx val = argvec[argnum].value;
2087 rtx reg = argvec[argnum].reg;
2088 int partial = argvec[argnum].partial;
2090 if (! (reg != 0 && partial == 0))
2091 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2092 argblock, GEN_INT (argvec[argnum].offset.constant));
2096 #ifdef PUSH_ARGS_REVERSED
2102 /* Now load any reg parms into their regs. */
2104 for (count = 0; count < nargs; count++, argnum += inc)
2106 register enum machine_mode mode = argvec[argnum].mode;
2107 register rtx val = argvec[argnum].value;
2108 rtx reg = argvec[argnum].reg;
2109 int partial = argvec[argnum].partial;
2111 if (reg != 0 && partial == 0)
2112 emit_move_insn (reg, val);
2116 /* For version 1.37, try deleting this entirely. */
2120 /* Any regs containing parms remain in use through the call. */
2122 for (count = 0; count < nargs; count++)
2123 if (argvec[count].reg != 0)
2124 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2126 use_insns = get_insns ();
2129 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2131 /* Don't allow popping to be deferred, since then
2132 cse'ing of library calls could delete a call and leave the pop. */
2135 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2136 will set inhibit_defer_pop to that value. */
2138 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2139 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2140 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2141 old_inhibit_defer_pop + 1, use_insns, no_queue);
2143 /* Now restore inhibit_defer_pop to its actual original value. */
2147 /* Expand an assignment that stores the value of FROM into TO.
2148 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2149 (This may contain a QUEUED rtx.)
2150 Otherwise, the returned value is not meaningful.
2152 SUGGEST_REG is no longer actually used.
2153 It used to mean, copy the value through a register
2154 and return that register, if that is possible.
2155 But now we do this if WANT_VALUE.
2157 If the value stored is a constant, we return the constant. */
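/* For example (hypothetical operands), expanding the statement `a = b'
   when the value of the assignment is not needed is simply

       expand_assignment (lhs_tree, rhs_tree, 0, 0);

   with WANT_VALUE and SUGGEST_REG both zero. */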
2160 expand_assignment (to, from, want_value, suggest_reg)
2165 register rtx to_rtx = 0;
2168 /* Don't crash if the lhs of the assignment was erroneous. */
2170 if (TREE_CODE (to) == ERROR_MARK)
2171 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2173 /* Assignment of a structure component needs special treatment
2174 if the structure component's rtx is not simply a MEM.
2175 Assignment of an array element at a constant index
2176 has the same problem. */
2178 if (TREE_CODE (to) == COMPONENT_REF
2179 || TREE_CODE (to) == BIT_FIELD_REF
2180 || (TREE_CODE (to) == ARRAY_REF
2181 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2182 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2184 enum machine_mode mode1;
2190 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2191 &mode1, &unsignedp, &volatilep);
2193 /* If we are going to use store_bit_field and extract_bit_field,
2194 make sure to_rtx will be safe for multiple use. */
2196 if (mode1 == VOIDmode && want_value)
2197 tem = stabilize_reference (tem);
2199 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2202 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2204 if (GET_CODE (to_rtx) != MEM)
2206 to_rtx = change_address (to_rtx, VOIDmode,
2207 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2208 force_reg (Pmode, offset_rtx)));
2212 if (GET_CODE (to_rtx) == MEM)
2213 MEM_VOLATILE_P (to_rtx) = 1;
2214 #if 0 /* This was turned off because, when a field is volatile
2215 in an object which is not volatile, the object may be in a register,
2216 and then we would abort over here. */
2222 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2224 /* Spurious cast makes HPUX compiler happy. */
2225 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2228 /* Required alignment of containing datum. */
2229 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2230 int_size_in_bytes (TREE_TYPE (tem)));
2231 preserve_temp_slots (result);
2237 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2238 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2241 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2243 /* In case we are returning the contents of an object which overlaps
2244 the place the value is being stored, use a safe function when copying
2245 a value through a pointer into a structure value return block. */
2246 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2247 && current_function_returns_struct
2248 && !current_function_returns_pcc_struct)
2250 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2251 rtx size = expr_size (from);
2253 #ifdef TARGET_MEM_FUNCTIONS
2254 emit_library_call (memcpy_libfunc, 0,
2255 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2256 XEXP (from_rtx, 0), Pmode,
2259 emit_library_call (bcopy_libfunc, 0,
2260 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2261 XEXP (to_rtx, 0), Pmode,
2265 preserve_temp_slots (to_rtx);
2270 /* Compute FROM and store the value in the rtx we got. */
2272 result = store_expr (from, to_rtx, want_value);
2273 preserve_temp_slots (result);
2278 /* Generate code for computing expression EXP,
2279 and storing the value into TARGET.
2280 Returns TARGET or an equivalent value.
2281 TARGET may contain a QUEUED rtx.
2283 If SUGGEST_REG is nonzero, copy the value through a register
2284 and return that register, if that is possible.
2286 If the value stored is a constant, we return the constant. */
2289 store_expr (exp, target, suggest_reg)
2291 register rtx target;
2295 int dont_return_target = 0;
2297 if (TREE_CODE (exp) == COMPOUND_EXPR)
2299 /* Perform first part of compound expression, then assign from second part.  */
2301 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2303 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2305 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2307 /* For conditional expression, get safe form of the target. Then
2308 test the condition, doing the appropriate assignment on either
2309 side. This avoids the creation of unnecessary temporaries.
2310 For non-BLKmode, it is more efficient not to do this. */
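#if 0 /* Illustrative sketch, not part of the original source: a BLKmode
         conditional store.  Assigning each arm of the COND_EXPR directly
         into TARGET avoids materializing the whole structure in a
         temporary first.  */
struct blk { int w[8]; };
void
example_cond_store (struct blk *d, struct blk *a, struct blk *b, int cond)
{
  *d = cond ? *a : *b;
}
#endif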
2312 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2315 target = protect_from_queue (target, 1);
2318 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2319 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2321 emit_jump_insn (gen_jump (lab2));
2324 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2330 else if (suggest_reg && GET_CODE (target) == MEM
2331 && GET_MODE (target) != BLKmode)
2332 /* If target is in memory and caller wants value in a register instead,
2333 arrange that. Pass TARGET as target for expand_expr so that,
2334 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2335 We know expand_expr will not use the target in that case. */
2337 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2338 GET_MODE (target), 0);
2339 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2340 temp = copy_to_reg (temp);
2341 dont_return_target = 1;
2343 else if (queued_subexp_p (target))
2344 /* If target contains a postincrement, it is not safe
2345 to use as the returned value. It would access the wrong
2346 place by the time the queued increment gets output.
2347 So copy the value through a temporary and use that temp as the result.  */
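#if 0 /* Illustrative sketch, not part of the original source: a target whose
         address holds a queued postincrement, as in

	   z = (*p++ = y);

	 Returning the MEM for *p++ as the value would read the wrong word
	 once the queued increment of P is finally output, hence the copy
	 through a temporary below.  */
#endif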
2350 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2352 /* Expand EXP into a new pseudo. */
2353 temp = gen_reg_rtx (GET_MODE (target));
2354 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2357 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2358 dont_return_target = 1;
2360 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2361 /* If this is a scalar in a register that is stored in a wider mode
2362 than the declared mode, compute the result into its declared mode
2363 and then convert to the wider mode. Our value is the computed
2366 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2367 convert_move (SUBREG_REG (target), temp,
2368 SUBREG_PROMOTED_UNSIGNED_P (target));
2373 temp = expand_expr (exp, target, GET_MODE (target), 0);
2374 /* DO return TARGET if it's a specified hardware register.
2375 expand_return relies on this. */
2376 if (!(target && GET_CODE (target) == REG
2377 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2378 && CONSTANT_P (temp))
2379 dont_return_target = 1;
2382 /* If value was not generated in the target, store it there.
2383 Convert the value to TARGET's type first if necessary.  */
2385 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2387 target = protect_from_queue (target, 1);
2388 if (GET_MODE (temp) != GET_MODE (target)
2389 && GET_MODE (temp) != VOIDmode)
2391 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2392 if (dont_return_target)
2394 /* In this case, we will return TEMP,
2395 so make sure it has the proper mode.
2396 But don't forget to store the value into TARGET. */
2397 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2398 emit_move_insn (target, temp);
2401 convert_move (target, temp, unsignedp);
2404 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2406 /* Handle copying a string constant into an array.
2407 The string constant may be shorter than the array.
2408 So copy just the string's actual length, and clear the rest. */
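#if 0 /* Illustrative sketch, not part of the original source: initializing
         a larger array from a shorter string constant.  */
char buf[8] = "hi";		/* copy 3 bytes ("hi\0"), then clear the other 5 */
#endif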
2411 /* Get the size of the data type of the string,
2412 which is actually the size of the target. */
2413 size = expr_size (exp);
2414 if (GET_CODE (size) == CONST_INT
2415 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2416 emit_block_move (target, temp, size,
2417 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2420 /* Compute the size of the data to copy from the string. */
2422 = fold (build (MIN_EXPR, sizetype,
2423 size_binop (CEIL_DIV_EXPR,
2424 TYPE_SIZE (TREE_TYPE (exp)),
2425 size_int (BITS_PER_UNIT)),
2427 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2428 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2432 /* Copy that much. */
2433 emit_block_move (target, temp, copy_size_rtx,
2434 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2436 /* Figure out how much is left in TARGET
2437 that we have to clear. */
2438 if (GET_CODE (copy_size_rtx) == CONST_INT)
2440 temp = plus_constant (XEXP (target, 0),
2441 TREE_STRING_LENGTH (exp));
2442 size = plus_constant (size,
2443 - TREE_STRING_LENGTH (exp));
2447 enum machine_mode size_mode = Pmode;
2449 temp = force_reg (Pmode, XEXP (target, 0));
2450 temp = expand_binop (size_mode, add_optab, temp,
2451 copy_size_rtx, NULL_RTX, 0,
2454 size = expand_binop (size_mode, sub_optab, size,
2455 copy_size_rtx, NULL_RTX, 0,
2458 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2459 GET_MODE (size), 0, 0);
2460 label = gen_label_rtx ();
2461 emit_jump_insn (gen_blt (label));
2464 if (size != const0_rtx)
2466 #ifdef TARGET_MEM_FUNCTIONS
2467 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2468 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2470 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2471 temp, Pmode, size, Pmode);
2478 else if (GET_MODE (temp) == BLKmode)
2479 emit_block_move (target, temp, expr_size (exp),
2480 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2482 emit_move_insn (target, temp);
2484 if (dont_return_target)
2489 /* Store the value of constructor EXP into the rtx TARGET.
2490 TARGET is either a REG or a MEM. */
2493 store_constructor (exp, target)
2497 tree type = TREE_TYPE (exp);
2499 /* We know our target cannot conflict, since safe_from_p has been called. */
2501 /* Don't try copying piece by piece into a hard register
2502 since that is vulnerable to being clobbered by EXP.
2503 Instead, construct in a pseudo register and then copy it all. */
2504 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2506 rtx temp = gen_reg_rtx (GET_MODE (target));
2507 store_constructor (exp, temp);
2508 emit_move_insn (target, temp);
2513 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2517 /* Inform later passes that the whole union value is dead. */
2518 if (TREE_CODE (type) == UNION_TYPE)
2519 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2521 /* If we are building a static constructor into a register,
2522 set the initial value as zero so we can fold the value into a constant.  */
2524 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2525 emit_move_insn (target, const0_rtx);
2527 /* If the constructor has fewer fields than the structure,
2528 clear the whole structure first. */
2529 else if (list_length (CONSTRUCTOR_ELTS (exp))
2530 != list_length (TYPE_FIELDS (type)))
2531 clear_storage (target, int_size_in_bytes (type));
2533 /* Inform later passes that the old value is dead. */
2534 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2536 /* Store each element of the constructor into
2537 the corresponding field of TARGET. */
2539 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2541 register tree field = TREE_PURPOSE (elt);
2542 register enum machine_mode mode;
2547 /* Just ignore missing fields.
2548 We cleared the whole structure, above,
2549 if any fields are missing. */
2553 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2554 unsignedp = TREE_UNSIGNED (field);
2555 mode = DECL_MODE (field);
2556 if (DECL_BIT_FIELD (field))
2559 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2560 /* ??? This case remains to be written. */
2563 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2565 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2566 /* The alignment of TARGET is
2567 at least what its type requires. */
2569 TYPE_ALIGN (type) / BITS_PER_UNIT,
2570 int_size_in_bytes (type));
2573 else if (TREE_CODE (type) == ARRAY_TYPE)
2577 tree domain = TYPE_DOMAIN (type);
2578 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2579 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2580 tree elttype = TREE_TYPE (type);
2582 /* If the constructor has fewer fields than the structure,
2583 clear the whole structure first.  Similarly if this is a
2584 static constructor of a non-BLKmode object. */
2586 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2587 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2588 clear_storage (target, maxelt - minelt + 1);
2590 /* Inform later passes that the old value is dead. */
2591 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2593 /* Store each element of the constructor into
2594 the corresponding element of TARGET, determined
2595 by counting the elements. */
2596 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2598 elt = TREE_CHAIN (elt), i++)
2600 register enum machine_mode mode;
2605 mode = TYPE_MODE (elttype);
2606 bitsize = GET_MODE_BITSIZE (mode);
2607 unsignedp = TREE_UNSIGNED (elttype);
2609 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2611 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2612 /* The alignment of TARGET is
2613 at least what its type requires. */
2615 TYPE_ALIGN (type) / BITS_PER_UNIT,
2616 int_size_in_bytes (type));
2624 /* Store the value of EXP (an expression tree)
2625 into a subfield of TARGET which has mode MODE and occupies
2626 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2627 If MODE is VOIDmode, it means that we are storing into a bit-field.
2629 If VALUE_MODE is VOIDmode, return nothing in particular.
2630 UNSIGNEDP is not used in this case.
2632 Otherwise, return an rtx for the value stored. This rtx
2633 has mode VALUE_MODE if that is convenient to do.
2634 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2636 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2637 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2640 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2641 unsignedp, align, total_size)
2643 int bitsize, bitpos;
2644 enum machine_mode mode;
2646 enum machine_mode value_mode;
2651 HOST_WIDE_INT width_mask = 0;
2653 if (bitsize < HOST_BITS_PER_WIDE_INT)
2654 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2656 /* If we are storing into an unaligned field of an aligned union that is
2657 in a register, we may have the mode of TARGET being an integer mode but
2658 MODE == BLKmode. In that case, get an aligned object whose size and
2659 alignment are the same as TARGET and store TARGET into it (we can avoid
2660 the store if the field being stored is the entire width of TARGET). Then
2661 call ourselves recursively to store the field into a BLKmode version of
2662 that object. Finally, load from the object into TARGET. This is not
2663 very efficient in general, but should only be slightly more expensive
2664 than the otherwise-required unaligned accesses. Perhaps this can be
2665 cleaned up later. */
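#if 0 /* Illustrative sketch, not part of the original source: storing a
         BLKmode field of a union that is itself held in an integer
         register.  The union below fits in SImode, but member S is
         BLKmode, so the store detours through a stack temporary as the
         code that follows describes.  */
struct three { char a, b, c; };
union u { int i; struct three s; };
int
example_union_store (struct three v)
{
  union u x;
  x.s = v;
  return x.i;
}
#endif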
2668 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2670 rtx object = assign_stack_temp (GET_MODE (target),
2671 GET_MODE_SIZE (GET_MODE (target)), 0);
2672 rtx blk_object = copy_rtx (object);
2674 PUT_MODE (blk_object, BLKmode);
2676 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2677 emit_move_insn (object, target);
2679 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2682 emit_move_insn (target, object);
2687 /* If the structure is in a register or if the component
2688 is a bit field, we cannot use addressing to access it.
2689 Use bit-field techniques or SUBREG to store in it. */
2691 if (mode == VOIDmode
2692 || (mode != BLKmode && ! direct_store[(int) mode])
2693 || GET_CODE (target) == REG
2694 || GET_CODE (target) == SUBREG)
2696 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2697 /* Store the value in the bitfield. */
2698 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2699 if (value_mode != VOIDmode)
2701 /* The caller wants an rtx for the value. */
2702 /* If possible, avoid refetching from the bitfield itself. */
2704 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2707 enum machine_mode tmode;
2709 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2710 tmode = GET_MODE (temp);
2711 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2712 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2713 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2715 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2716 NULL_RTX, value_mode, 0, align,
2723 rtx addr = XEXP (target, 0);
2726 /* If a value is wanted, it must be the lhs;
2727 so make the address stable for multiple use. */
2729 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2730 && ! CONSTANT_ADDRESS_P (addr)
2731 /* A frame-pointer reference is already stable. */
2732 && ! (GET_CODE (addr) == PLUS
2733 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2734 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2735 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2736 addr = copy_to_reg (addr);
2738 /* Now build a reference to just the desired component. */
2740 to_rtx = change_address (target, mode,
2741 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2742 MEM_IN_STRUCT_P (to_rtx) = 1;
2744 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2748 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2749 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2750 ARRAY_REFs at constant positions and find the ultimate containing object,
2753 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2754 bit position, and *PUNSIGNEDP to the signedness of the field.
2755 If the position of the field is variable, we store a tree
2756 giving the variable offset (in units) in *POFFSET.
2757 This offset is in addition to the bit position.
2758 If the position is not variable, we store 0 in *POFFSET.
2760 If any of the extraction expressions is volatile,
2761 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2763 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2764 is a mode that can be used to access the field. In that case, *PBITSIZE
2767 If the field describes a variable-sized object, *PMODE is set to
2768 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2769 this case, but the address of the object can be found. */
2772 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2777 enum machine_mode *pmode;
2782 enum machine_mode mode = VOIDmode;
2785 if (TREE_CODE (exp) == COMPONENT_REF)
2787 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2788 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2789 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2790 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2792 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2794 size_tree = TREE_OPERAND (exp, 1);
2795 *punsignedp = TREE_UNSIGNED (exp);
2799 mode = TYPE_MODE (TREE_TYPE (exp));
2800 *pbitsize = GET_MODE_BITSIZE (mode);
2801 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2806 if (TREE_CODE (size_tree) != INTEGER_CST)
2807 mode = BLKmode, *pbitsize = -1;
2809 *pbitsize = TREE_INT_CST_LOW (size_tree);
2812 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2813 and find the ultimate containing object. */
2819 if (TREE_CODE (exp) == INDIRECT_REF && flag_volatile)
2822 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2824 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2825 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2826 : TREE_OPERAND (exp, 2));
2828 if (TREE_CODE (pos) == PLUS_EXPR)
2831 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2833 constant = TREE_OPERAND (pos, 0);
2834 var = TREE_OPERAND (pos, 1);
2836 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2838 constant = TREE_OPERAND (pos, 1);
2839 var = TREE_OPERAND (pos, 0);
2843 *pbitpos += TREE_INT_CST_LOW (constant);
2845 offset = size_binop (PLUS_EXPR, offset,
2846 size_binop (FLOOR_DIV_EXPR, var,
2847 size_int (BITS_PER_UNIT)));
2849 offset = size_binop (FLOOR_DIV_EXPR, var,
2850 size_int (BITS_PER_UNIT));
2852 else if (TREE_CODE (pos) == INTEGER_CST)
2853 *pbitpos += TREE_INT_CST_LOW (pos);
2856 /* Assume here that the offset is a multiple of a unit.
2857 If not, there should be an explicitly added constant. */
2859 offset = size_binop (PLUS_EXPR, offset,
2860 size_binop (FLOOR_DIV_EXPR, pos,
2861 size_int (BITS_PER_UNIT)));
2863 offset = size_binop (FLOOR_DIV_EXPR, pos,
2864 size_int (BITS_PER_UNIT));
2868 else if (TREE_CODE (exp) == ARRAY_REF
2869 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2870 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2872 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2873 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2875 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2876 && ! ((TREE_CODE (exp) == NOP_EXPR
2877 || TREE_CODE (exp) == CONVERT_EXPR)
2878 && (TYPE_MODE (TREE_TYPE (exp))
2879 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2882 /* If any reference in the chain is volatile, the effect is volatile. */
2883 if (TREE_THIS_VOLATILE (exp))
2885 exp = TREE_OPERAND (exp, 0);
2888 /* If this was a bit-field, see if there is a mode that allows direct
2889 access in case EXP is in memory. */
2890 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2892 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2893 if (mode == BLKmode)
2900 /* We aren't finished fixing the callers to really handle nonzero offset. */
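#if 0 /* Illustrative sketch, not part of the original source: a worked
         example of get_inner_reference above, assuming 32-bit ints,
         8-bit units and the usual struct layout.  */
struct rec { int pad; unsigned f : 5; };
struct rec a[4];
/* For the reference  a[2].f  the routine returns the VAR_DECL for `a'
   and sets *PBITSIZE = 5, *PBITPOS = 2*64 + 32 = 160, *POFFSET = 0,
   *PUNSIGNEDP = 1, and *PMODE = VOIDmode since F is a bit-field.  */
#endif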
2908 /* Given an rtx VALUE that may contain additions and multiplications,
2909 return an equivalent value that just refers to a register or memory.
2910 This is done by generating instructions to perform the arithmetic
2911 and returning a pseudo-register containing the value. */
2914 force_operand (value, target)
2917 register optab binoptab = 0;
2918 /* Use a temporary to force order of execution of calls to
2922 /* Use subtarget as the target for operand 0 of a binary operation. */
2923 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2925 if (GET_CODE (value) == PLUS)
2926 binoptab = add_optab;
2927 else if (GET_CODE (value) == MINUS)
2928 binoptab = sub_optab;
2929 else if (GET_CODE (value) == MULT)
2931 op2 = XEXP (value, 1);
2932 if (!CONSTANT_P (op2)
2933 && !(GET_CODE (op2) == REG && op2 != subtarget))
2935 tmp = force_operand (XEXP (value, 0), subtarget);
2936 return expand_mult (GET_MODE (value), tmp,
2937 force_operand (op2, NULL_RTX),
2943 op2 = XEXP (value, 1);
2944 if (!CONSTANT_P (op2)
2945 && !(GET_CODE (op2) == REG && op2 != subtarget))
2947 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2949 binoptab = add_optab;
2950 op2 = negate_rtx (GET_MODE (value), op2);
2953 /* Check for an addition with OP2 a constant integer and our first
2954 operand a PLUS of a virtual register and something else. In that
2955 case, we want to emit the sum of the virtual register and the
2956 constant first and then add the other value. This allows virtual
2957 register instantiation to simply modify the constant rather than
2958 creating another one around this addition. */
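#if 0 /* Illustrative sketch, not part of the original source: with

	   VALUE = (plus (plus (reg virtual-stack-vars) (reg 70))
		    (const_int 4))

	 the code below first forms  virtual-stack-vars + 4, so that
	 instantiating the virtual register merely adjusts that constant,
	 and only then adds (reg 70) to produce the final sum.  */
#endif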
2959 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2960 && GET_CODE (XEXP (value, 0)) == PLUS
2961 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2962 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2963 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2965 rtx temp = expand_binop (GET_MODE (value), binoptab,
2966 XEXP (XEXP (value, 0), 0), op2,
2967 subtarget, 0, OPTAB_LIB_WIDEN);
2968 return expand_binop (GET_MODE (value), binoptab, temp,
2969 force_operand (XEXP (XEXP (value, 0), 1), 0),
2970 target, 0, OPTAB_LIB_WIDEN);
2973 tmp = force_operand (XEXP (value, 0), subtarget);
2974 return expand_binop (GET_MODE (value), binoptab, tmp,
2975 force_operand (op2, NULL_RTX),
2976 target, 0, OPTAB_LIB_WIDEN);
2977 /* We give UNSIGNEDP = 0 to expand_binop
2978 because the only operations we are expanding here are signed ones. */
2983 /* Subroutine of expand_expr:
2984 save the non-copied parts (LIST) of an expr (LHS), and return a list
2985 which can restore these values to their previous values,
2986 should something modify their storage. */
2989 save_noncopied_parts (lhs, list)
2996 for (tail = list; tail; tail = TREE_CHAIN (tail))
2997 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2998 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3001 tree part = TREE_VALUE (tail);
3002 tree part_type = TREE_TYPE (part);
3003 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3004 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3005 int_size_in_bytes (part_type), 0);
3006 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3007 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3008 parts = tree_cons (to_be_saved,
3009 build (RTL_EXPR, part_type, NULL_TREE,
3012 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3017 /* Subroutine of expand_expr:
3018 record the non-copied parts (LIST) of an expr (LHS), and return a list
3019 which specifies the initial values of these parts. */
3022 init_noncopied_parts (lhs, list)
3029 for (tail = list; tail; tail = TREE_CHAIN (tail))
3030 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3031 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3034 tree part = TREE_VALUE (tail);
3035 tree part_type = TREE_TYPE (part);
3036 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3037 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3042 /* Subroutine of expand_expr: return nonzero iff there is no way that
3043 EXP can reference X, which is being modified. */
3046 safe_from_p (x, exp)
3056 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3057 find the underlying pseudo. */
3058 if (GET_CODE (x) == SUBREG)
3061 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3065 /* If X is a location in the outgoing argument area, it is always safe. */
3066 if (GET_CODE (x) == MEM
3067 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3068 || (GET_CODE (XEXP (x, 0)) == PLUS
3069 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3072 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3075 exp_rtl = DECL_RTL (exp);
3082 if (TREE_CODE (exp) == TREE_LIST)
3083 return ((TREE_VALUE (exp) == 0
3084 || safe_from_p (x, TREE_VALUE (exp)))
3085 && (TREE_CHAIN (exp) == 0
3086 || safe_from_p (x, TREE_CHAIN (exp))));
3091 return safe_from_p (x, TREE_OPERAND (exp, 0));
3095 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3096 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3100 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3101 the expression. If it is set, we conflict iff we are that rtx or
3102 both are in memory. Otherwise, we check all operands of the
3103 expression recursively. */
3105 switch (TREE_CODE (exp))
3108 return staticp (TREE_OPERAND (exp, 0));
3111 if (GET_CODE (x) == MEM)
3116 exp_rtl = CALL_EXPR_RTL (exp);
3119 /* Assume that the call will clobber all hard registers and all of memory.  */
3121 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3122 || GET_CODE (x) == MEM)
3129 exp_rtl = RTL_EXPR_RTL (exp);
3131 /* We don't know what this can modify. */
3136 case WITH_CLEANUP_EXPR:
3137 exp_rtl = RTL_EXPR_RTL (exp);
3141 exp_rtl = SAVE_EXPR_RTL (exp);
3145 /* The only operand we look at is operand 1. The rest aren't
3146 part of the expression. */
3147 return safe_from_p (x, TREE_OPERAND (exp, 1));
3149 case METHOD_CALL_EXPR:
3150 /* This takes an rtx argument, but shouldn't appear here. */
3154 /* If we have an rtx, we do not need to scan our operands. */
3158 nops = tree_code_length[(int) TREE_CODE (exp)];
3159 for (i = 0; i < nops; i++)
3160 if (TREE_OPERAND (exp, i) != 0
3161 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3165 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
3169 if (GET_CODE (exp_rtl) == SUBREG)
3171 exp_rtl = SUBREG_REG (exp_rtl);
3172 if (GET_CODE (exp_rtl) == REG
3173 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3177 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3178 are memory and EXP is not readonly. */
3179 return ! (rtx_equal_p (x, exp_rtl)
3180 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3181 && ! TREE_READONLY (exp)));
3184 /* If we reach here, it is safe. */
3188 /* Subroutine of expand_expr: return nonzero iff EXP is an
3189 expression whose type is statically determinable. */
3195 if (TREE_CODE (exp) == PARM_DECL
3196 || TREE_CODE (exp) == VAR_DECL
3197 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3198 || TREE_CODE (exp) == COMPONENT_REF
3199 || TREE_CODE (exp) == ARRAY_REF)
3204 /* expand_expr: generate code for computing expression EXP.
3205 An rtx for the computed value is returned. The value is never null.
3206 In the case of a void EXP, const0_rtx is returned.
3208 The value may be stored in TARGET if TARGET is nonzero.
3209 TARGET is just a suggestion; callers must assume that
3210 the rtx returned may not be the same as TARGET.
3212 If TARGET is CONST0_RTX, it means that the value will be ignored.
3214 If TMODE is not VOIDmode, it suggests generating the
3215 result in mode TMODE. But this is done only when convenient.
3216 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3217 TMODE is just a suggestion; callers must assume that
3218 the rtx returned may not have mode TMODE.
3220 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3221 with a constant address even if that address is not normally legitimate.
3222 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3224 If MODIFIER is EXPAND_SUM then when EXP is an addition
3225 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3226 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3227 products as above, or REG or MEM, or constant.
3228 Ordinarily in such cases we would output mul or add instructions
3229 and then return a pseudo reg containing the sum.
3231 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3232 it also marks a label as absolutely required (it can't be dead).
3233 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3234 This is used for outputting expressions used in initializers. */
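#if 0 /* Illustrative sketch, not part of the original source: under
         EXPAND_SUM, expanding the Pmode address computation  p + i*4
         may simply return the bare rtx

	   (plus (mult (reg i') (const_int 4)) (reg p'))

	 for the caller to fold into a memory address, instead of
	 emitting mult and add insns and returning a pseudo register
	 holding the sum.  */
#endif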
3237 expand_expr (exp, target, tmode, modifier)
3240 enum machine_mode tmode;
3241 enum expand_modifier modifier;
3243 register rtx op0, op1, temp;
3244 tree type = TREE_TYPE (exp);
3245 int unsignedp = TREE_UNSIGNED (type);
3246 register enum machine_mode mode = TYPE_MODE (type);
3247 register enum tree_code code = TREE_CODE (exp);
3249 /* Use subtarget as the target for operand 0 of a binary operation. */
3250 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3251 rtx original_target = target;
3252 int ignore = target == const0_rtx;
3255 /* Don't use hard regs as subtargets, because the combiner
3256 can only handle pseudo regs. */
3257 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3259 /* Avoid subtargets inside loops,
3260 since they hide some invariant expressions. */
3261 if (preserve_subexpressions_p ())
3264 if (ignore) target = 0, original_target = 0;
3266 /* If we will do cse, generate all results into pseudo registers
3267 since 1) that allows cse to find more things
3268 and 2) otherwise cse could produce an insn the machine cannot support.  */
3271 if (! cse_not_expected && mode != BLKmode && target
3272 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3275 /* Ensure we reference a volatile object even if value is ignored. */
3276 if (ignore && TREE_THIS_VOLATILE (exp)
3277 && mode != VOIDmode && mode != BLKmode)
3279 target = gen_reg_rtx (mode);
3280 temp = expand_expr (exp, target, VOIDmode, modifier);
3282 emit_move_insn (target, temp);
3290 tree function = decl_function_context (exp);
3291 /* Handle using a label in a containing function. */
3292 if (function != current_function_decl && function != 0)
3294 struct function *p = find_function_data (function);
3295 /* Allocate in the memory associated with the function
3296 that the label is in. */
3297 push_obstacks (p->function_obstack,
3298 p->function_maybepermanent_obstack);
3300 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3301 label_rtx (exp), p->forced_labels);
3304 else if (modifier == EXPAND_INITIALIZER)
3305 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3306 label_rtx (exp), forced_labels);
3307 temp = gen_rtx (MEM, FUNCTION_MODE,
3308 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3309 if (function != current_function_decl && function != 0)
3310 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3315 if (DECL_RTL (exp) == 0)
3317 error_with_decl (exp, "prior parameter's size depends on `%s'");
3318 return CONST0_RTX (mode);
3324 if (DECL_RTL (exp) == 0)
3326 /* Ensure variable marked as used
3327 even if it doesn't go through a parser. */
3328 TREE_USED (exp) = 1;
3329 /* Handle variables inherited from containing functions. */
3330 context = decl_function_context (exp);
3332 /* We treat inline_function_decl as an alias for the current function
3333 because that is the inline function whose vars, types, etc.
3334 are being merged into the current function.
3335 See expand_inline_function. */
3336 if (context != 0 && context != current_function_decl
3337 && context != inline_function_decl
3338 /* If var is static, we don't need a static chain to access it. */
3339 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3340 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3344 /* Mark as non-local and addressable. */
3345 DECL_NONLOCAL (exp) = 1;
3346 mark_addressable (exp);
3347 if (GET_CODE (DECL_RTL (exp)) != MEM)
3349 addr = XEXP (DECL_RTL (exp), 0);
3350 if (GET_CODE (addr) == MEM)
3351 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3353 addr = fix_lexical_addr (addr, exp);
3354 return change_address (DECL_RTL (exp), mode, addr);
3357 /* This is the case of an array whose size is to be determined
3358 from its initializer, while the initializer is still being parsed.
3360 if (GET_CODE (DECL_RTL (exp)) == MEM
3361 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3362 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3363 XEXP (DECL_RTL (exp), 0));
3364 if (GET_CODE (DECL_RTL (exp)) == MEM
3365 && modifier != EXPAND_CONST_ADDRESS
3366 && modifier != EXPAND_SUM
3367 && modifier != EXPAND_INITIALIZER)
3369 /* DECL_RTL probably contains a constant address.
3370 On RISC machines where a constant address isn't valid,
3371 make some insns to get that address into a register. */
3372 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3374 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3375 return change_address (DECL_RTL (exp), VOIDmode,
3376 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3379 /* If the mode of DECL_RTL does not match that of the decl, it
3380 must be a promoted value. We return a SUBREG of the wanted mode,
3381 but mark it so that we know that it was already extended. */
3383 if (GET_CODE (DECL_RTL (exp)) == REG
3384 && GET_MODE (DECL_RTL (exp)) != mode)
3386 enum machine_mode decl_mode = DECL_MODE (exp);
3388 /* Get the signedness used for this variable. Ensure we get the
3389 same mode we got when the variable was declared. */
3391 PROMOTE_MODE (decl_mode, unsignedp, type);
3393 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3396 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3397 SUBREG_PROMOTED_VAR_P (temp) = 1;
3398 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3402 return DECL_RTL (exp);
3405 return immed_double_const (TREE_INT_CST_LOW (exp),
3406 TREE_INT_CST_HIGH (exp),
3410 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3413 /* If optimized, generate immediate CONST_DOUBLE
3414 which will be turned into memory by reload if necessary.
3416 We used to force a register so that loop.c could see it. But
3417 this does not allow gen_* patterns to perform optimizations with
3418 the constants. It also produces two insns in cases like "x = 1.0;".
3419 On most machines, floating-point constants are not permitted in
3420 many insns, so we'd end up copying it to a register in any case.
3422 Now, we do the copying in expand_binop, if appropriate. */
3423 return immed_real_const (exp);
3427 if (! TREE_CST_RTL (exp))
3428 output_constant_def (exp);
3430 /* TREE_CST_RTL probably contains a constant address.
3431 On RISC machines where a constant address isn't valid,
3432 make some insns to get that address into a register. */
3433 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3434 && modifier != EXPAND_CONST_ADDRESS
3435 && modifier != EXPAND_INITIALIZER
3436 && modifier != EXPAND_SUM
3437 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3438 return change_address (TREE_CST_RTL (exp), VOIDmode,
3439 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3440 return TREE_CST_RTL (exp);
3443 context = decl_function_context (exp);
3444 /* We treat inline_function_decl as an alias for the current function
3445 because that is the inline function whose vars, types, etc.
3446 are being merged into the current function.
3447 See expand_inline_function. */
3448 if (context == current_function_decl || context == inline_function_decl)
3451 /* If this is non-local, handle it. */
3454 temp = SAVE_EXPR_RTL (exp);
3455 if (temp && GET_CODE (temp) == REG)
3457 put_var_into_stack (exp);
3458 temp = SAVE_EXPR_RTL (exp);
3460 if (temp == 0 || GET_CODE (temp) != MEM)
3462 return change_address (temp, mode,
3463 fix_lexical_addr (XEXP (temp, 0), exp));
3465 if (SAVE_EXPR_RTL (exp) == 0)
3467 if (mode == BLKmode)
3469 = assign_stack_temp (mode,
3470 int_size_in_bytes (TREE_TYPE (exp)), 0);
3473 enum machine_mode var_mode = mode;
3475 if (TREE_CODE (type) == INTEGER_TYPE
3476 || TREE_CODE (type) == ENUMERAL_TYPE
3477 || TREE_CODE (type) == BOOLEAN_TYPE
3478 || TREE_CODE (type) == CHAR_TYPE
3479 || TREE_CODE (type) == REAL_TYPE
3480 || TREE_CODE (type) == POINTER_TYPE
3481 || TREE_CODE (type) == OFFSET_TYPE)
3483 PROMOTE_MODE (var_mode, unsignedp, type);
3486 temp = gen_reg_rtx (var_mode);
3489 SAVE_EXPR_RTL (exp) = temp;
3490 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3491 if (!optimize && GET_CODE (temp) == REG)
3492 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3496 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3497 must be a promoted value. We return a SUBREG of the wanted mode,
3498 but mark it so that we know that it was already extended. Note
3499 that `unsignedp' was modified above in this case. */
3501 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3502 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3504 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3505 SUBREG_PROMOTED_VAR_P (temp) = 1;
3506 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3510 return SAVE_EXPR_RTL (exp);
3513 /* Exit the current loop if the body-expression is true. */
3515 rtx label = gen_label_rtx ();
3516 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3517 expand_exit_loop (NULL_PTR);
3523 expand_start_loop (1);
3524 expand_expr_stmt (TREE_OPERAND (exp, 0));
3531 tree vars = TREE_OPERAND (exp, 0);
3532 int vars_need_expansion = 0;
3534 /* Need to open a binding contour here because
3535 if there are any cleanups they must be contained here. */
3536 expand_start_bindings (0);
3538 /* Mark the corresponding BLOCK for output in its proper place. */
3539 if (TREE_OPERAND (exp, 2) != 0
3540 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3541 insert_block (TREE_OPERAND (exp, 2));
3543 /* If VARS have not yet been expanded, expand them now. */
3546 if (DECL_RTL (vars) == 0)
3548 vars_need_expansion = 1;
3551 expand_decl_init (vars);
3552 vars = TREE_CHAIN (vars);
3555 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3557 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3563 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3565 emit_insns (RTL_EXPR_SEQUENCE (exp));
3566 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3567 return RTL_EXPR_RTL (exp);
3570 /* All elts simple constants => refer to a constant in memory. But
3571 if this is a non-BLKmode mode, let it store a field at a time
3572 since that should make a CONST_INT or CONST_DOUBLE when we
3574 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3576 rtx constructor = output_constant_def (exp);
3577 if (modifier != EXPAND_CONST_ADDRESS
3578 && modifier != EXPAND_INITIALIZER
3579 && modifier != EXPAND_SUM
3580 && !memory_address_p (GET_MODE (constructor),
3581 XEXP (constructor, 0)))
3582 constructor = change_address (constructor, VOIDmode,
3583 XEXP (constructor, 0));
3590 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3591 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3596 if (target == 0 || ! safe_from_p (target, exp))
3598 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3599 target = gen_reg_rtx (mode);
3602 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3604 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3605 target = safe_target;
3608 store_constructor (exp, target);
3614 tree exp1 = TREE_OPERAND (exp, 0);
3617 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3618 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3619 This code has the same general effect as simply doing
3620 expand_expr on the save expr, except that the expression PTR
3621 is computed for use as a memory address. This means different
3622 code, suitable for indexing, may be generated. */
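#if 0 /* Illustrative sketch, not part of the original source: the construct
         handled here arises from

	   *ptr += anything;

	 where PTR is wrapped in a SAVE_EXPR.  Expanding the saved pointer
	 with EXPAND_SUM yields an address in a form suitable for indexing,
	 reused for both the read and the write of *PTR.  */
#endif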
3623 if (TREE_CODE (exp1) == SAVE_EXPR
3624 && SAVE_EXPR_RTL (exp1) == 0
3625 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3626 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3627 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3629 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3630 VOIDmode, EXPAND_SUM);
3631 op0 = memory_address (mode, temp);
3632 op0 = copy_all_regs (op0);
3633 SAVE_EXPR_RTL (exp1) = op0;
3637 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3638 op0 = memory_address (mode, op0);
3641 temp = gen_rtx (MEM, mode, op0);
3642 /* If address was computed by addition,
3643 mark this as an element of an aggregate. */
3644 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3645 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3646 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3647 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3648 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3649 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3650 || (TREE_CODE (exp1) == ADDR_EXPR
3651 && (exp2 = TREE_OPERAND (exp1, 0))
3652 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3653 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3654 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3655 MEM_IN_STRUCT_P (temp) = 1;
3656 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3657 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3658 a location is accessed through a pointer to const does not mean
3659 that the value there can never change. */
3660 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3666 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3667 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3669 /* Nonconstant array index or nonconstant element size.
3670 Generate the tree for *(&array+index) and expand that,
3671 except do it in a language-independent way
3672 and don't complain about non-lvalue arrays.
3673 `mark_addressable' should already have been called
3674 for any array for which this case will be reached. */
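#if 0 /* Illustrative sketch, not part of the original source: with a
         non-constant index, the reference

	   a[i]			(int a[N]; int i;)

	 is expanded as if it had been written

	   *(int *) ((char *) &a + i * sizeof (int))

	 using the language-independent tree built below.  */
#endif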
3676 /* Don't forget the const or volatile flag from the array element. */
3677 tree variant_type = build_type_variant (type,
3678 TREE_READONLY (exp),
3679 TREE_THIS_VOLATILE (exp));
3680 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3681 TREE_OPERAND (exp, 0));
3682 tree index = TREE_OPERAND (exp, 1);
3685 /* Convert the integer argument to a type the same size as a pointer
3686 so the multiply won't overflow spuriously. */
3687 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3688 index = convert (type_for_size (POINTER_SIZE, 0), index);
3690 /* Don't think the address has side effects
3691 just because the array does.
3692 (In some cases the address might have side effects,
3693 and we fail to record that fact here. However, it should not
3694 matter, since expand_expr should not care.) */
3695 TREE_SIDE_EFFECTS (array_adr) = 0;
3697 elt = build1 (INDIRECT_REF, type,
3698 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3700 fold (build (MULT_EXPR,
3701 TYPE_POINTER_TO (variant_type),
3702 index, size_in_bytes (type))))));
3704 /* Volatility, etc., of new expression is same as old expression. */
3705 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3706 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3707 TREE_READONLY (elt) = TREE_READONLY (exp);
3709 return expand_expr (elt, target, tmode, modifier);
3712 /* Fold an expression like: "foo"[2].
3713 This is not done in fold so it won't happen inside &. */
3716 tree arg0 = TREE_OPERAND (exp, 0);
3717 tree arg1 = TREE_OPERAND (exp, 1);
3719 if (TREE_CODE (arg0) == STRING_CST
3720 && TREE_CODE (arg1) == INTEGER_CST
3721 && !TREE_INT_CST_HIGH (arg1)
3722 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3724 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3726 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3727 TREE_TYPE (exp) = integer_type_node;
3728 return expand_expr (exp, target, tmode, modifier);
3730 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3732 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3733 TREE_TYPE (exp) = integer_type_node;
3734 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3739 /* If this is a constant index into a constant array,
3740 just get the value from the array. Handle both the cases when
3741 we have an explicit constructor and when our operand is a variable
3742 that was declared const. */
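#if 0 /* Illustrative sketch, not part of the original source: a constant
         index into a constant array is folded to the element itself.  */
static const char msg[] = "abc";
int
example_const_index ()
{
  return msg[1];		/* expands directly to the constant 'b' */
}
#endif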
3744 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3745 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3747 tree index = fold (TREE_OPERAND (exp, 1));
3748 if (TREE_CODE (index) == INTEGER_CST
3749 && TREE_INT_CST_HIGH (index) == 0)
3751 int i = TREE_INT_CST_LOW (index);
3752 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3755 elem = TREE_CHAIN (elem);
3757 return expand_expr (fold (TREE_VALUE (elem)), target,
3762 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3763 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3764 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3765 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3766 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3768 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3771 tree index = fold (TREE_OPERAND (exp, 1));
3772 if (TREE_CODE (index) == INTEGER_CST
3773 && TREE_INT_CST_HIGH (index) == 0)
3775 int i = TREE_INT_CST_LOW (index);
3776 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3778 if (TREE_CODE (init) == CONSTRUCTOR)
3780 tree elem = CONSTRUCTOR_ELTS (init);
3783 elem = TREE_CHAIN (elem);
3785 return expand_expr (fold (TREE_VALUE (elem)), target,
3788 else if (TREE_CODE (init) == STRING_CST
3789 && i < TREE_STRING_LENGTH (init))
3791 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3792 return convert_to_mode (mode, temp, 0);
3796 /* Treat array-ref with constant index as a component-ref. */
3800 /* If the operand is a CONSTRUCTOR, we can just extract the
3801 appropriate field if it is present. */
3802 if (code != ARRAY_REF
3803 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3807 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3808 elt = TREE_CHAIN (elt))
3809 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3810 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3814 enum machine_mode mode1;
3819 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3820 &mode1, &unsignedp, &volatilep);
3822 /* In some cases, we will be offsetting OP0's address by a constant.
3823 So get it as a sum, if possible. If we will be using it
3824 directly in an insn, we validate it. */
3825 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3827 /* If this is a constant, put it into a register if it is a
3828 legitimate constant and memory if it isn't. */
3829 if (CONSTANT_P (op0))
3831 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3832 if (LEGITIMATE_CONSTANT_P (op0))
3833 op0 = force_reg (mode, op0);
3835 op0 = validize_mem (force_const_mem (mode, op0));
3840 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3842 if (GET_CODE (op0) != MEM)
3844 op0 = change_address (op0, VOIDmode,
3845 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3846 force_reg (Pmode, offset_rtx)));
3849 /* Don't forget about volatility even if this is a bitfield. */
3850 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3852 op0 = copy_rtx (op0);
3853 MEM_VOLATILE_P (op0) = 1;
3856 if (mode1 == VOIDmode
3857 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3858 && modifier != EXPAND_CONST_ADDRESS
3859 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3860 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3862 /* In cases where an aligned union has an unaligned object
3863 as a field, we might be extracting a BLKmode value from
3864 an integer-mode (e.g., SImode) object. Handle this case
3865 by doing the extract into an object as wide as the field
3866 (which we know to be the width of a basic mode), then
3867 storing into memory, and changing the mode to BLKmode. */
3868 enum machine_mode ext_mode = mode;
3870 if (ext_mode == BLKmode)
3871 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3873 if (ext_mode == BLKmode)
3876 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3877 unsignedp, target, ext_mode, ext_mode,
3878 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3879 int_size_in_bytes (TREE_TYPE (tem)));
3880 if (mode == BLKmode)
3882 rtx new = assign_stack_temp (ext_mode,
3883 bitsize / BITS_PER_UNIT, 0);
3885 emit_move_insn (new, op0);
3886 op0 = copy_rtx (new);
3887 PUT_MODE (op0, BLKmode);
3893 /* Get a reference to just this component. */
3894 if (modifier == EXPAND_CONST_ADDRESS
3895 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3896 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3897 (bitpos / BITS_PER_UNIT)));
3899 op0 = change_address (op0, mode1,
3900 plus_constant (XEXP (op0, 0),
3901 (bitpos / BITS_PER_UNIT)));
3902 MEM_IN_STRUCT_P (op0) = 1;
3903 MEM_VOLATILE_P (op0) |= volatilep;
3904 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3907 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3908 convert_move (target, op0, unsignedp);
3914 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3915 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3916 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3917 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3918 MEM_IN_STRUCT_P (temp) = 1;
3919 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3920 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3921 a location is accessed through a pointer to const does not mean
3922 that the value there can never change. */
3923 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3928 /* Intended for a reference to a buffer of a file-object in Pascal.
3929 But it's not certain that a special tree code will really be
3930 necessary for these. INDIRECT_REF might work for them. */
3934 /* IN_EXPR: Inlined pascal set IN expression.
3937 rlo = set_low - (set_low%bits_per_word);
3938 the_word = set [ (index - rlo)/bits_per_word ];
3939 bit_index = index % bits_per_word;
3940 bitmask = 1 << bit_index;
3941 return !!(the_word & bitmask); */
3943 preexpand_calls (exp);
3945 tree set = TREE_OPERAND (exp, 0);
3946 tree index = TREE_OPERAND (exp, 1);
3947 tree set_type = TREE_TYPE (set);
3949 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3950 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3956 rtx diff, quo, rem, addr, bit, result;
3957 rtx setval, setaddr;
3958 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
3961 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
3963 /* If domain is empty, answer is no. */
3964 if (tree_int_cst_lt (set_high_bound, set_low_bound))
3967 index_val = expand_expr (index, 0, VOIDmode, 0);
3968 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
3969 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
3970 setval = expand_expr (set, 0, VOIDmode, 0);
3971 setaddr = XEXP (setval, 0);
3973 /* Compare index against bounds, if they are constant. */
3974 if (GET_CODE (index_val) == CONST_INT
3975 && GET_CODE (lo_r) == CONST_INT)
3977 if (INTVAL (index_val) < INTVAL (lo_r))
3981 if (GET_CODE (index_val) == CONST_INT
3982 && GET_CODE (hi_r) == CONST_INT)
3984 if (INTVAL (hi_r) < INTVAL (index_val))
3988 /* If we get here, we have to generate the code for both cases
3989 (in range and out of range). */
3991 op0 = gen_label_rtx ();
3992 op1 = gen_label_rtx ();
3994 if (! (GET_CODE (index_val) == CONST_INT
3995 && GET_CODE (lo_r) == CONST_INT))
3997 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
3998 emit_jump_insn (gen_blt (op1));
4001 if (! (GET_CODE (index_val) == CONST_INT
4002 && GET_CODE (hi_r) == CONST_INT))
4004 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
4005 emit_jump_insn (gen_bgt (op1));
4008 /* Calculate the element number of bit zero in the first word of the set.  */
4010 if (GET_CODE (lo_r) == CONST_INT)
4011 rlow = gen_rtx (CONST_INT, VOIDmode,
4012 INTVAL (lo_r) & ~ (1 << BITS_PER_UNIT));
4014 rlow = expand_binop (index_mode, and_optab,
4015 lo_r, gen_rtx (CONST_INT, VOIDmode,
4016 ~ (1 << BITS_PER_UNIT)),
4017 0, 0, OPTAB_LIB_WIDEN);
4019 diff = expand_binop (index_mode, sub_optab,
4020 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4022 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4023 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4025 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4026 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4028 addr = memory_address (byte_mode,
4029 expand_binop (index_mode, add_optab,
4031 /* Extract the bit we want to examine */
4032 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4033 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4034 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4035 1, OPTAB_LIB_WIDEN);
4036 emit_move_insn (target, result);
4038 /* Output the code to handle the out-of-range case. */
4041 emit_move_insn (target, const0_rtx);
4046 case WITH_CLEANUP_EXPR:
4047 if (RTL_EXPR_RTL (exp) == 0)
4050 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4052 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4053 /* That's it for this cleanup. */
4054 TREE_OPERAND (exp, 2) = 0;
4056 return RTL_EXPR_RTL (exp);
4059 /* Check for a built-in function. */
4060 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4061 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4062 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4063 return expand_builtin (exp, target, subtarget, tmode, ignore);
4064 /* If this call was expanded already by preexpand_calls,
4065 just return the result we got. */
4066 if (CALL_EXPR_RTL (exp) != 0)
4067 return CALL_EXPR_RTL (exp);
4068 return expand_call (exp, target, ignore);
4070 case NON_LVALUE_EXPR:
4073 case REFERENCE_EXPR:
4074 if (TREE_CODE (type) == VOID_TYPE || ignore)
4076 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4079 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4080 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4081 if (TREE_CODE (type) == UNION_TYPE)
4083 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4086 if (mode == BLKmode)
4088 if (TYPE_SIZE (type) == 0
4089 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4091 target = assign_stack_temp (BLKmode,
4092 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4093 + BITS_PER_UNIT - 1)
4094 / BITS_PER_UNIT, 0);
4097 target = gen_reg_rtx (mode);
4099 if (GET_CODE (target) == MEM)
4100 /* Store data into beginning of memory target. */
4101 store_expr (TREE_OPERAND (exp, 0),
4102 change_address (target, TYPE_MODE (valtype), 0), 0);
4104 else if (GET_CODE (target) == REG)
4105 /* Store this field into a union of the proper type. */
4106 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4107 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4109 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4113 /* Return the entire union. */
4116 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4117 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
4119 if (modifier == EXPAND_INITIALIZER)
4120 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4121 if (flag_force_mem && GET_CODE (op0) == MEM)
4122 op0 = copy_to_reg (op0);
4125 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4127 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4131 /* We come here from MINUS_EXPR when the second operand is a constant. */
4133 this_optab = add_optab;
4135 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4136 something else, make sure we add the register to the constant and
4137 then to the other thing. This case can occur during strength
4138 reduction and doing it this way will produce better code if the
4139 frame pointer or argument pointer is eliminated.
4141 fold-const.c will ensure that the constant is always in the inner
4142 PLUS_EXPR, so the only case we need to do anything about is if
4143 sp, ap, or fp is our second argument, in which case we must swap
4144 the innermost first argument and our second argument. */
4146 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4147 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4148 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4149 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4150 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4151 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4153 tree t = TREE_OPERAND (exp, 1);
4155 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4156 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4159 /* If the result is to be Pmode and we are adding an integer to
4160 something, we might be forming a constant. So try to use
4161 plus_constant. If it produces a sum and we can't accept it,
4162 use force_operand. This allows P = &ARR[const] to generate
4163 efficient code on machines where a SYMBOL_REF is not a valid
4166 If this is an EXPAND_SUM call, always return the sum. */
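#if 0 /* Illustrative sketch, not part of the original source: for an
         initializer such as

	   static int arr[10];
	   static int *p = &arr[3];

	 (with 4-byte ints) the addition of the constant 12 to the address
	 of ARR is folded by plus_constant into a single constant rtx of
	 the form  (const (plus (symbol_ref arr) (const_int 12))),
	 rather than emitting an add insn.  */
#endif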
4167 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4168 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4169 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4172 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4174 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4175 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4176 op1 = force_operand (op1, target);
4180 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4181 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4182 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4185 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4187 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4188 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4189 op0 = force_operand (op0, target);
4193 /* No sense saving up arithmetic to be done
4194 if it's all in the wrong mode to form part of an address.
4195 And force_operand won't know whether to sign-extend or zero-extend.  */
4197 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4198 || mode != Pmode) goto binop;
4200 preexpand_calls (exp);
4201 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4204 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4205 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4207 /* Make sure any term that's a sum with a constant comes last. */
4208 if (GET_CODE (op0) == PLUS
4209 && CONSTANT_P (XEXP (op0, 1)))
4215 /* If adding to a sum including a constant,
4216 associate it to put the constant outside. */
4217 if (GET_CODE (op1) == PLUS
4218 && CONSTANT_P (XEXP (op1, 1)))
4220 rtx constant_term = const0_rtx;
4222 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4225 /* Ensure that MULT comes first if there is one. */
4226 else if (GET_CODE (op0) == MULT)
4227 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4229 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4231 /* Let's also eliminate constants from op0 if possible. */
4232 op0 = eliminate_constant_term (op0, &constant_term);
4234 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4235 their sum should be a constant. Form it into OP1, since the
4236 result we want will then be OP0 + OP1. */
4238 temp = simplify_binary_operation (PLUS, mode, constant_term,
4243 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4246 /* Put a constant term last and put a multiplication first. */
4247 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4248 temp = op1, op1 = op0, op0 = temp;
4250 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4251 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4254 /* Handle difference of two symbolic constants,
4255 for the sake of an initializer. */
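/* The kind of construct this handles: a difference of two link-time
   constant addresses appearing in an initializer.  It is not portable C,
   but it must be emitted as a relocatable MINUS rather than computed at
   run time.  */
#if 0 /* Example only.  */
static char a[8], b[8];
static long dist = (char *) b - (char *) a;	/* initializer, not run-time code */
#endif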
4256 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4257 && really_constant_p (TREE_OPERAND (exp, 0))
4258 && really_constant_p (TREE_OPERAND (exp, 1)))
4260 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4261 VOIDmode, modifier);
4262 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4263 VOIDmode, modifier);
4264 return gen_rtx (MINUS, mode, op0, op1);
4266 /* Convert A - const to A + (-const). */
4267 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4269 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4270 fold (build1 (NEGATE_EXPR, type,
4271 TREE_OPERAND (exp, 1))));
4274 this_optab = sub_optab;
4278 preexpand_calls (exp);
4279 /* If first operand is constant, swap them.
4280 Thus the following special case checks need only
4281 check the second operand. */
4282 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4284 register tree t1 = TREE_OPERAND (exp, 0);
4285 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4286 TREE_OPERAND (exp, 1) = t1;
4289 /* Attempt to return something suitable for generating an
4290 indexed address, for machines that support that. */
4292 if (modifier == EXPAND_SUM && mode == Pmode
4293 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4294 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4296 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4298 /* Apply distributive law if OP0 is x+c. */
4299 if (GET_CODE (op0) == PLUS
4300 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4301 return gen_rtx (PLUS, mode,
4302 gen_rtx (MULT, mode, XEXP (op0, 0),
4303 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4304 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4305 * INTVAL (XEXP (op0, 1))));
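/* Source-level sketch of the distributive rewrite above: when an address
   is being formed (EXPAND_SUM with Pmode) and the index is a sum ending in
   a constant, the constant part is pulled out so it can be folded into the
   address.  */
#if 0 /* Example only.  */
int
index_example (p, i)
     int *p, i;
{
  /* Assuming 4-byte ints, the address of p[i + 4] is formed roughly as
     p + i*4 + 16; that is, (i + 4)*4 is distributed into i*4 plus the
     constant 16, which can live in the address itself.  */
  return p[i + 4];
}
#endif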
4307 if (GET_CODE (op0) != REG)
4308 op0 = force_operand (op0, NULL_RTX);
4309 if (GET_CODE (op0) != REG)
4310 op0 = copy_to_mode_reg (mode, op0);
4312 return gen_rtx (MULT, mode, op0,
4313 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4316 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4319 /* Check for multiplying things that have been extended
4320 from a narrower type. If this machine supports multiplying
4321 in that narrower type with a result in the desired type,
4322 do it that way, and avoid the explicit type-conversion. */
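/* A typical source pattern that satisfies the test below, assuming `int'
   is narrower than `long long' and the machine has a widening multiply
   (e.g. a mulsidi3 pattern):  */
#if 0 /* Example only.  */
long long
wide_product (a, b)
     int a, b;
{
  /* Both operands are extensions from the narrower type, so one widening
     multiply can replace the two conversions plus a full-width multiply.  */
  return (long long) a * (long long) b;
}
#endif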
4323 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4324 && TREE_CODE (type) == INTEGER_TYPE
4325 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4326 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4327 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4328 && int_fits_type_p (TREE_OPERAND (exp, 1),
4329 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4330 /* Don't use a widening multiply if a shift will do. */
4331 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4332 > HOST_BITS_PER_WIDE_INT)
4333 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4335 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4336 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4338 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4339 /* If both operands are extended, they must either both
4340 be zero-extended or both be sign-extended. */
4341 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4343 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4345 enum machine_mode innermode
4346 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4347 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4348 ? umul_widen_optab : smul_widen_optab);
4349 if (mode == GET_MODE_WIDER_MODE (innermode)
4350 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4352 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4353 NULL_RTX, VOIDmode, 0);
4354 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4355 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4358 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4359 NULL_RTX, VOIDmode, 0);
4363 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4364 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4365 return expand_mult (mode, op0, op1, target, unsignedp);
4367 case TRUNC_DIV_EXPR:
4368 case FLOOR_DIV_EXPR:
4370 case ROUND_DIV_EXPR:
4371 case EXACT_DIV_EXPR:
4372 preexpand_calls (exp);
4373 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4375 /* Possible optimization: compute the dividend with EXPAND_SUM;
4376 then, if the divisor is constant, we can optimize the case
4377 where some terms of the dividend have coefficients divisible by it.  */
4378 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4379 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4380 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4383 this_optab = flodiv_optab;
4386 case TRUNC_MOD_EXPR:
4387 case FLOOR_MOD_EXPR:
4389 case ROUND_MOD_EXPR:
4390 preexpand_calls (exp);
4391 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4393 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4394 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4395 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4397 case FIX_ROUND_EXPR:
4398 case FIX_FLOOR_EXPR:
4400 abort (); /* Not used for C. */
4402 case FIX_TRUNC_EXPR:
4403 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4405 target = gen_reg_rtx (mode);
4406 expand_fix (target, op0, unsignedp);
4410 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4412 target = gen_reg_rtx (mode);
4413 /* expand_float can't figure out what to do if FROM has VOIDmode.
4414 So give it the correct mode. With -O, cse will optimize this. */
4415 if (GET_MODE (op0) == VOIDmode)
4416 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4418 expand_float (target, op0,
4419 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4423 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4424 temp = expand_unop (mode, neg_optab, op0, target, 0);
4430 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4432 /* Handle complex values specially. */
4434 enum machine_mode opmode
4435 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4437 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4438 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4439 return expand_complex_abs (opmode, op0, target, unsignedp);
4442 /* Unsigned abs is simply the operand. Testing here means we don't
4443 risk generating incorrect code below. */
4444 if (TREE_UNSIGNED (type))
4447 /* First try to do it with a special abs instruction. */
4448 temp = expand_unop (mode, abs_optab, op0, target, 0);
4452 /* If this machine has expensive jumps, we can do integer absolute
4453 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4454 where W is the width of MODE. */
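/* Source-level equivalent of that shift/xor/subtract sequence, for a
   32-bit `int' (W == 32); like the expansion, it relies on `>>' being an
   arithmetic shift for signed operands.  */
#if 0 /* Example only.  */
int
abs_example (x)
     int x;
{
  int extended = x >> 31;	/* all ones if x is negative, else zero */
  return (x ^ extended) - extended;
}
#endif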
4456 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4458 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4459 size_int (GET_MODE_BITSIZE (mode) - 1),
4462 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4465 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4472 /* If that does not win, use conditional jump and negate. */
4473 target = original_target;
4474 temp = gen_label_rtx ();
4475 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4476 || (GET_CODE (target) == REG
4477 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4478 target = gen_reg_rtx (mode);
4479 emit_move_insn (target, op0);
4480 emit_cmp_insn (target,
4481 expand_expr (convert (type, integer_zero_node),
4482 NULL_RTX, VOIDmode, 0),
4483 GE, NULL_RTX, mode, 0, 0);
4485 emit_jump_insn (gen_bge (temp));
4486 op0 = expand_unop (mode, neg_optab, target, target, 0);
4488 emit_move_insn (target, op0);
4495 target = original_target;
4496 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4497 || (GET_CODE (target) == REG
4498 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4499 target = gen_reg_rtx (mode);
4500 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4501 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4503 /* First try to do it with a special MIN or MAX instruction.
4504 If that does not win, use a conditional jump to select the proper value.  */
4506 this_optab = (TREE_UNSIGNED (type)
4507 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4508 : (code == MIN_EXPR ? smin_optab : smax_optab));
4510 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4516 emit_move_insn (target, op0);
4517 op0 = gen_label_rtx ();
4518 if (code == MAX_EXPR)
4519 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4520 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4521 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4523 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4524 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4525 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4526 if (temp == const0_rtx)
4527 emit_move_insn (target, op1);
4528 else if (temp != const_true_rtx)
4530 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4531 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4534 emit_move_insn (target, op1);
4539 /* ??? Can optimize when the operand of this is a bitwise operation,
4540 by using a different bitwise operation. */
4542 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4543 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4549 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4550 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4555 /* ??? Can optimize bitwise operations with one arg constant.
4556 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4557 and (a bitwise1 b) bitwise2 b (etc)
4558 but that is probably not worth while. */
4560 /* BIT_AND_EXPR is for bitwise anding.
4561 TRUTH_AND_EXPR is for anding two boolean values
4562 when we want in all cases to compute both of them.
4563 In general it is fastest to do TRUTH_AND_EXPR by
4564 computing both operands as actual zero-or-1 values
4565 and then bitwise anding. In cases where there cannot
4566 be any side effects, better code would be made by
4567 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4568 but the question is how to recognize those cases. */
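/* The distinction described above, in source terms: when the operands have
   no side effects, the short-circuit form and the compute-both-then-and
   form give the same value, and the latter is what TRUTH_AND_EXPR turns
   into.  */
#if 0 /* Example only.  */
int
truth_and_example (a, b)
     int a, b;
{
  int short_circuit = (a != 0) && (b != 0);	/* TRUTH_ANDIF_EXPR: may skip b */
  int both_computed = (a != 0) & (b != 0);	/* TRUTH_AND_EXPR: both as 0-or-1, then anded */
  return short_circuit == both_computed;	/* always 1 here; a and b have no side effects */
}
#endif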
4570 case TRUTH_AND_EXPR:
4572 this_optab = and_optab;
4575 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4578 this_optab = ior_optab;
4582 this_optab = xor_optab;
4589 preexpand_calls (exp);
4590 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4592 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4593 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4596 /* Could determine the answer when only additive constants differ.
4597 Also, the addition of one can be handled by changing the condition. */
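/* What those remarks suggest, in source terms (neither rewrite is
   performed here):  */
#if 0 /* Example only.  */
int
compare_examples (x, y)
     int x, y;
{
  int a = (x + 2) == (x + 5);	/* only the additive constants differ: always 0 */
  int b = (x + 1) <= y;		/* could be treated as x < y, dropping the addition */
  return a + b;
}
#endif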
4604 preexpand_calls (exp);
4605 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4608 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4609 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4611 && GET_CODE (original_target) == REG
4612 && (GET_MODE (original_target)
4613 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4615 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4616 if (temp != original_target)
4617 temp = copy_to_reg (temp);
4618 op1 = gen_label_rtx ();
4619 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4620 GET_MODE (temp), unsignedp, 0);
4621 emit_jump_insn (gen_beq (op1));
4622 emit_move_insn (temp, const1_rtx);
4626 /* If no set-flag instruction, must generate a conditional
4627 store into a temporary variable. Drop through
4628 and handle this like && and ||. */
4630 case TRUTH_ANDIF_EXPR:
4631 case TRUTH_ORIF_EXPR:
4632 if (target == 0 || ! safe_from_p (target, exp)
4633 /* Make sure we don't have a hard reg (such as function's return
4634 value) live across basic blocks, if not optimizing. */
4635 || (!optimize && GET_CODE (target) == REG
4636 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4637 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4638 emit_clr_insn (target);
4639 op1 = gen_label_rtx ();
4640 jumpifnot (exp, op1);
4641 emit_0_to_1_insn (target);
4645 case TRUTH_NOT_EXPR:
4646 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4647 /* The parser is careful to generate TRUTH_NOT_EXPR
4648 only with operands that are always zero or one. */
4649 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4650 target, 1, OPTAB_LIB_WIDEN);
4656 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4658 return expand_expr (TREE_OPERAND (exp, 1),
4659 (ignore ? const0_rtx : target),
4664 /* Note that COND_EXPRs whose type is a structure or union
4665 are required to be constructed to contain assignments of
4666 a temporary variable, so that we can evaluate them here
4667 for side effect only. If type is void, we must do likewise. */
4669 /* If an arm of the branch requires a cleanup,
4670 only that cleanup is performed. */
4673 tree binary_op = 0, unary_op = 0;
4674 tree old_cleanups = cleanups_this_call;
4675 cleanups_this_call = 0;
4677 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4678 convert it to our mode, if necessary. */
4679 if (integer_onep (TREE_OPERAND (exp, 1))
4680 && integer_zerop (TREE_OPERAND (exp, 2))
4681 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4683 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4684 if (GET_MODE (op0) == mode)
4687 target = gen_reg_rtx (mode);
4688 convert_move (target, op0, unsignedp);
4692 /* If we are not to produce a result, we have no target. Otherwise,
4693 if a target was specified use it; it will not be used as an
4694 intermediate target unless it is safe.  If no target, use a temporary.  */
4697 if (mode == VOIDmode || ignore)
4699 else if (original_target
4700 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4701 temp = original_target;
4702 else if (mode == BLKmode)
4704 if (TYPE_SIZE (type) == 0
4705 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4707 temp = assign_stack_temp (BLKmode,
4708 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4709 + BITS_PER_UNIT - 1)
4710 / BITS_PER_UNIT, 0);
4713 temp = gen_reg_rtx (mode);
4715 /* Check for X ? A + B : A. If we have this, we can copy
4716 A to the output and conditionally add B. Similarly for unary
4717 operations. Don't do this if X has side-effects because
4718 those side effects might affect A or B and the "?" operation is
4719 a sequence point in ANSI. (We test for side effects later.) */
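/* The shape of the source pattern looked for below: the two arms share the
   operand A, so A can be stored unconditionally and B added (or the unary
   operation applied) only when the condition holds.  */
#if 0 /* Example only.  */
int
cond_add_example (x, a, b)
     int x, a, b;
{
  /* Handled roughly as "r = a; if (x) r += b;" rather than by evaluating
     both arms and selecting one.  */
  return x ? a + b : a;
}
#endif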
4721 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4722 && operand_equal_p (TREE_OPERAND (exp, 2),
4723 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4724 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4725 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4726 && operand_equal_p (TREE_OPERAND (exp, 1),
4727 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4728 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4729 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4730 && operand_equal_p (TREE_OPERAND (exp, 2),
4731 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4732 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4733 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4734 && operand_equal_p (TREE_OPERAND (exp, 1),
4735 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4736 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4738 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4739 operation, do this as A + (X != 0). Similarly for other simple
4740 binary operators. */
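/* When the other operand is the constant 1 and X is a comparison, the
   conditional add degenerates further into adding the store-flag result:  */
#if 0 /* Example only.  */
int
storeflag_add_example (x, a)
     int x, a;
{
  /* "x > 0 ? a + 1 : a" becomes "a + (x > 0)" when the machine can
     deliver the comparison result as 0 or 1 directly.  */
  return x > 0 ? a + 1 : a;
}
#endif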
4741 if (singleton && binary_op
4742 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4743 && (TREE_CODE (binary_op) == PLUS_EXPR
4744 || TREE_CODE (binary_op) == MINUS_EXPR
4745 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4746 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4747 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4748 && integer_onep (TREE_OPERAND (binary_op, 1))
4749 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4752 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4753 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4754 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4755 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4758 /* If we had X ? A : A + 1, do this as A + (X == 0).
4760 We have to invert the truth value here and then put it
4761 back later if do_store_flag fails. We cannot simply copy
4762 TREE_OPERAND (exp, 0) to another variable and modify that
4763 because invert_truthvalue can modify the tree pointed to by its argument.  */
4765 if (singleton == TREE_OPERAND (exp, 1))
4766 TREE_OPERAND (exp, 0)
4767 = invert_truthvalue (TREE_OPERAND (exp, 0));
4769 result = do_store_flag (TREE_OPERAND (exp, 0),
4770 (safe_from_p (temp, singleton)
4772 mode, BRANCH_COST <= 1);
4776 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4777 return expand_binop (mode, boptab, op1, result, temp,
4778 unsignedp, OPTAB_LIB_WIDEN);
4780 else if (singleton == TREE_OPERAND (exp, 1))
4781 TREE_OPERAND (exp, 0)
4782 = invert_truthvalue (TREE_OPERAND (exp, 0));
4786 op0 = gen_label_rtx ();
4788 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4792 /* If the target conflicts with the other operand of the
4793 binary op, we can't use it. Also, we can't use the target
4794 if it is a hard register, because evaluating the condition
4795 might clobber it. */
4797 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4798 || (GET_CODE (temp) == REG
4799 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4800 temp = gen_reg_rtx (mode);
4801 store_expr (singleton, temp, 0);
4804 expand_expr (singleton,
4805 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4806 if (cleanups_this_call)
4808 sorry ("aggregate value in COND_EXPR");
4809 cleanups_this_call = 0;
4811 if (singleton == TREE_OPERAND (exp, 1))
4812 jumpif (TREE_OPERAND (exp, 0), op0);
4814 jumpifnot (TREE_OPERAND (exp, 0), op0);
4816 if (binary_op && temp == 0)
4817 /* Just touch the other operand. */
4818 expand_expr (TREE_OPERAND (binary_op, 1),
4819 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4821 store_expr (build (TREE_CODE (binary_op), type,
4822 make_tree (type, temp),
4823 TREE_OPERAND (binary_op, 1)),
4826 store_expr (build1 (TREE_CODE (unary_op), type,
4827 make_tree (type, temp)),
4832 /* This is now done in jump.c and is better done there because it
4833 produces shorter register lifetimes. */
4835 /* Check whether both arms are either constants or variables
4836 in registers (but not the same as the target!).  If so, we can
4837 save branches by assigning one, branching, and assigning the other.  */
4839 else if (temp && GET_MODE (temp) != BLKmode
4840 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4841 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4842 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4843 && DECL_RTL (TREE_OPERAND (exp, 1))
4844 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4845 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4846 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4847 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4848 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4849 && DECL_RTL (TREE_OPERAND (exp, 2))
4850 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4851 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4853 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4854 temp = gen_reg_rtx (mode);
4855 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4856 jumpifnot (TREE_OPERAND (exp, 0), op0);
4857 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4861 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4862 comparison operator. If we have one of these cases, set the
4863 output to A, branch on A (cse will merge these two references),
4864 then set the output to FOO. */
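/* Shape of the pattern matched here: the same variable is compared against
   zero and is also one of the arms, so it is stored first and the other arm
   is stored only on the branch that needs it.  */
#if 0 /* Example only.  */
int
reuse_arm_example (a, foo)
     int a, foo;
{
  /* Handled roughly as "r = a; if (!(a > 0)) r = foo;", letting cse merge
     the two references to A.  */
  return a > 0 ? a : foo;
}
#endif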
4866 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4867 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4868 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4869 TREE_OPERAND (exp, 1), 0)
4870 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4871 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4873 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4874 temp = gen_reg_rtx (mode);
4875 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4876 jumpif (TREE_OPERAND (exp, 0), op0);
4877 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4881 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4882 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4883 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4884 TREE_OPERAND (exp, 2), 0)
4885 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4886 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4888 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4889 temp = gen_reg_rtx (mode);
4890 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4891 jumpifnot (TREE_OPERAND (exp, 0), op0);
4892 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4897 op1 = gen_label_rtx ();
4898 jumpifnot (TREE_OPERAND (exp, 0), op0);
4900 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4902 expand_expr (TREE_OPERAND (exp, 1),
4903 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4904 if (cleanups_this_call)
4906 sorry ("aggregate value in COND_EXPR");
4907 cleanups_this_call = 0;
4911 emit_jump_insn (gen_jump (op1));
4915 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4917 expand_expr (TREE_OPERAND (exp, 2),
4918 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4921 if (cleanups_this_call)
4923 sorry ("aggregate value in COND_EXPR");
4924 cleanups_this_call = 0;
4930 cleanups_this_call = old_cleanups;
4936 /* Something needs to be initialized, but we didn't know
4937 where that thing was when building the tree. For example,
4938 it could be the return value of a function, or a parameter
4939 to a function which lays down in the stack, or a temporary
4940 variable which must be passed by reference.
4942 We guarantee that the expression will either be constructed
4943 or copied into our original target. */
4945 tree slot = TREE_OPERAND (exp, 0);
4948 if (TREE_CODE (slot) != VAR_DECL)
4953 if (DECL_RTL (slot) != 0)
4955 target = DECL_RTL (slot);
4956 /* If we have already expanded the slot, don't do it again.  */
4958 if (TREE_OPERAND (exp, 1) == NULL_TREE)
4963 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4964 /* All temp slots at this level must not conflict. */
4965 preserve_temp_slots (target);
4966 DECL_RTL (slot) = target;
4970 /* I bet this needs to be done, and I bet that it needs to
4971 be above, inside the else clause. The reason is
4972 simple, how else is it going to get cleaned up? (mrs)
4974 The reason it probably did not work before, and was
4975 commented out, is that this was re-expanding already
4976 expanded target_exprs (target == 0 and DECL_RTL (slot)
4977 != 0), also cleaning them up many times as well.  :-( */
4979 /* Since SLOT is not known to the called function
4980 to belong to its stack frame, we must build an explicit
4981 cleanup. This case occurs when we must build up a reference
4982 to pass the reference as an argument. In this case,
4983 it is very likely that such a reference need not be built here.  */
4986 if (TREE_OPERAND (exp, 2) == 0)
4987 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4988 if (TREE_OPERAND (exp, 2))
4989 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4990 cleanups_this_call);
4995 /* This case does occur when expanding a parameter which
4996 needs to be constructed on the stack. The target
4997 is the actual stack address that we want to initialize.
4998 The function we call will perform the cleanup in this case. */
5000 DECL_RTL (slot) = target;
5003 exp1 = TREE_OPERAND (exp, 1);
5004 /* Mark it as expanded. */
5005 TREE_OPERAND (exp, 1) = NULL_TREE;
5007 return expand_expr (exp1, target, tmode, modifier);
5012 tree lhs = TREE_OPERAND (exp, 0);
5013 tree rhs = TREE_OPERAND (exp, 1);
5014 tree noncopied_parts = 0;
5015 tree lhs_type = TREE_TYPE (lhs);
5017 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5018 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5019 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5020 TYPE_NONCOPIED_PARTS (lhs_type));
5021 while (noncopied_parts != 0)
5023 expand_assignment (TREE_VALUE (noncopied_parts),
5024 TREE_PURPOSE (noncopied_parts), 0, 0);
5025 noncopied_parts = TREE_CHAIN (noncopied_parts);
5032 /* If lhs is complex, expand calls in rhs before computing it.
5033 That's so we don't compute a pointer and save it over a call.
5034 If lhs is simple, compute it first so we can give it as a
5035 target if the rhs is just a call. This avoids an extra temp and copy
5036 and prevents a partial subsumption that makes bad code.
5037 Actually we could treat component_ref's of vars like vars. */
5039 tree lhs = TREE_OPERAND (exp, 0);
5040 tree rhs = TREE_OPERAND (exp, 1);
5041 tree noncopied_parts = 0;
5042 tree lhs_type = TREE_TYPE (lhs);
5046 if (TREE_CODE (lhs) != VAR_DECL
5047 && TREE_CODE (lhs) != RESULT_DECL
5048 && TREE_CODE (lhs) != PARM_DECL)
5049 preexpand_calls (exp);
5051 /* Check for |= or &= of a bitfield of size one into another bitfield
5052 of size 1. In this case, (unless we need the result of the
5053 assignment) we can do this more efficiently with a
5054 test followed by an assignment, if necessary.
5056 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5057 things change so we do, this code should be enhanced to handle it.  */
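/* The situation described above, in source form: both fields are a single
   bit wide, so the OR-assignment can become a test of one bit and a
   conditional store of the constant 1 into the other.  */
#if 0 /* Example only.  */
struct two_bits { unsigned a : 1, b : 1; };

void
bitfield_or_example (p)
     struct two_bits *p;
{
  /* Expanded roughly as "if (p->b) p->a = 1;" instead of reading,
     OR-ing, and rewriting the A bitfield.  */
  p->a |= p->b;
}
#endif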
5060 && TREE_CODE (lhs) == COMPONENT_REF
5061 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5062 || TREE_CODE (rhs) == BIT_AND_EXPR)
5063 && TREE_OPERAND (rhs, 0) == lhs
5064 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5065 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5066 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5068 rtx label = gen_label_rtx ();
5070 do_jump (TREE_OPERAND (rhs, 1),
5071 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5072 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5073 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5074 (TREE_CODE (rhs) == BIT_IOR_EXPR
5076 : integer_zero_node)),
5078 do_pending_stack_adjust ();
5083 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5084 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5085 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5086 TYPE_NONCOPIED_PARTS (lhs_type));
5088 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5089 while (noncopied_parts != 0)
5091 expand_assignment (TREE_PURPOSE (noncopied_parts),
5092 TREE_VALUE (noncopied_parts), 0, 0);
5093 noncopied_parts = TREE_CHAIN (noncopied_parts);
5098 case PREINCREMENT_EXPR:
5099 case PREDECREMENT_EXPR:
5100 return expand_increment (exp, 0);
5102 case POSTINCREMENT_EXPR:
5103 case POSTDECREMENT_EXPR:
5104 /* Faster to treat as pre-increment if result is not used. */
5105 return expand_increment (exp, ! ignore);
5108 /* Are we taking the address of a nested function? */
5109 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5110 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5112 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5113 op0 = force_operand (op0, target);
5117 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5118 (modifier == EXPAND_INITIALIZER
5119 ? modifier : EXPAND_CONST_ADDRESS));
5120 if (GET_CODE (op0) != MEM)
5123 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5124 return XEXP (op0, 0);
5125 op0 = force_operand (XEXP (op0, 0), target);
5127 if (flag_force_addr && GET_CODE (op0) != REG)
5128 return force_reg (Pmode, op0);
5131 case ENTRY_VALUE_EXPR:
5134 /* COMPLEX type for Extended Pascal & Fortran */
5137 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5141 /* Get the rtx code of the operands. */
5142 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5143 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5146 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5148 prev = get_last_insn ();
5150 /* Tell flow that the whole of the destination is being set. */
5151 if (GET_CODE (target) == REG)
5152 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5154 /* Move the real (op0) and imaginary (op1) parts to their location. */
5155 emit_move_insn (gen_realpart (mode, target), op0);
5156 emit_move_insn (gen_imagpart (mode, target), op1);
5158 /* Complex construction should appear as a single unit. */
5165 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5166 return gen_realpart (mode, op0);
5169 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5170 return gen_imagpart (mode, op0);
5174 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5178 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5181 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5183 prev = get_last_insn ();
5185 /* Tell flow that the whole of the destination is being set. */
5186 if (GET_CODE (target) == REG)
5187 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5189 /* Store the realpart and the negated imagpart to target. */
5190 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5192 imag_t = gen_imagpart (mode, target);
5193 temp = expand_unop (mode, neg_optab,
5194 gen_imagpart (mode, op0), imag_t, 0);
5196 emit_move_insn (imag_t, temp);
5198 /* Conjugate should appear as a single unit */
5208 return (*lang_expand_expr) (exp, target, tmode, modifier);
5211 /* Here to do an ordinary binary operator, generating an instruction
5212 from the optab already placed in `this_optab'. */
5214 preexpand_calls (exp);
5215 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5217 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5218 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5220 temp = expand_binop (mode, this_optab, op0, op1, target,
5221 unsignedp, OPTAB_LIB_WIDEN);
5227 /* Return the alignment in bits of EXP, a pointer valued expression.
5228 But don't return more than MAX_ALIGN no matter what.
5229 The alignment returned is, by default, the alignment of the thing that
5230 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5232 Otherwise, look at the expression to see if we can do better, i.e., if the
5233 expression is actually pointing at an object whose alignment is tighter. */
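/* Typical use, as in the builtin expanders later in this file: the result
   is in bits, so callers divide by BITS_PER_UNIT when they need bytes.  */
#if 0 /* Example only.  */
  int src_align
    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
#endif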
5236 get_pointer_alignment (exp, max_align)
5240 unsigned align, inner;
5242 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5245 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5246 align = MIN (align, max_align);
5250 switch (TREE_CODE (exp))
5254 case NON_LVALUE_EXPR:
5255 exp = TREE_OPERAND (exp, 0);
5256 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5258 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5259 inner = MIN (inner, max_align);
5260 align = MAX (align, inner);
5264 /* If sum of pointer + int, restrict our maximum alignment to that
5265 imposed by the integer.  If not, we can't do any better than ALIGN.  */
5267 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5270 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5275 exp = TREE_OPERAND (exp, 0);
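/* Numeric illustration of that restriction, assuming the usual
   BITS_PER_UNIT of 8: adding the constant 2 to a pointer limits the
   alignment we may claim for the sum to 16 bits, the largest power of two
   dividing 2 * 8, no matter how strictly aligned the pointer itself is.  */
#if 0 /* Example only.  */
static double d;			/* presumably word-aligned or better */
static char *p = (char *) &d + 2;	/* alignment of p provable only to 16 bits */
#endif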
5279 /* See what we are pointing at and look at its alignment. */
5280 exp = TREE_OPERAND (exp, 0);
5281 if (TREE_CODE (exp) == FUNCTION_DECL)
5282 align = MAX (align, FUNCTION_BOUNDARY);
5283 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5284 align = MAX (align, DECL_ALIGN (exp));
5285 #ifdef CONSTANT_ALIGNMENT
5286 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5287 align = CONSTANT_ALIGNMENT (exp, align);
5289 return MIN (align, max_align);
5297 /* Return the tree node and offset if a given argument corresponds to
5298 a string constant. */
5301 string_constant (arg, ptr_offset)
5307 if (TREE_CODE (arg) == ADDR_EXPR
5308 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5310 *ptr_offset = integer_zero_node;
5311 return TREE_OPERAND (arg, 0);
5313 else if (TREE_CODE (arg) == PLUS_EXPR)
5315 tree arg0 = TREE_OPERAND (arg, 0);
5316 tree arg1 = TREE_OPERAND (arg, 1);
5321 if (TREE_CODE (arg0) == ADDR_EXPR
5322 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5325 return TREE_OPERAND (arg0, 0);
5327 else if (TREE_CODE (arg1) == ADDR_EXPR
5328 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5331 return TREE_OPERAND (arg1, 0);
5338 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5339 way, because it could contain a zero byte in the middle.
5340 TREE_STRING_LENGTH is the size of the character array, not the string.
5342 Unfortunately, string_constant can't access the values of const char
5343 arrays with initializers, so neither can we do so here. */
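/* Source-level illustration of the distinction drawn above:  */
#if 0 /* Example only.  */
static char s[] = "foo\0bar";	/* the character array holds 8 bytes,
				   but the C string length (strlen) is 3 */
#endif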
5353 src = string_constant (src, &offset_node);
5356 max = TREE_STRING_LENGTH (src);
5357 ptr = TREE_STRING_POINTER (src);
5358 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5360 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5361 compute the offset to the following null if we don't know where to
5362 start searching for it. */
5364 for (i = 0; i < max; i++)
5367 /* We don't know the starting offset, but we do know that the string
5368 has no internal zero bytes. We can assume that the offset falls
5369 within the bounds of the string; otherwise, the programmer deserves
5370 what he gets.  Subtract the offset from the length of the string, and return that.  */
5372 /* This would perhaps not be valid if we were dealing with named
5373 arrays in addition to literal string constants. */
5374 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5377 /* We have a known offset into the string. Start searching there for
5378 a null character. */
5379 if (offset_node == 0)
5383 /* Did we get a long long offset? If so, punt. */
5384 if (TREE_INT_CST_HIGH (offset_node) != 0)
5386 offset = TREE_INT_CST_LOW (offset_node);
5388 /* If the offset is known to be out of bounds, warn, and call strlen at runtime.  */
5390 if (offset < 0 || offset > max)
5392 warning ("offset outside bounds of constant string");
5395 /* Use strlen to search for the first zero byte. Since any strings
5396 constructed with build_string will have nulls appended, we win even
5397 if we get handed something like (char[4])"abcd".
5399 Since OFFSET is our starting index into the string, no further
5400 calculation is needed. */
5401 return size_int (strlen (ptr + offset));
5404 /* Expand an expression EXP that calls a built-in function,
5405 with result going to TARGET if that's convenient
5406 (and in mode MODE if that's convenient).
5407 SUBTARGET may be used as the target for computing one of EXP's operands.
5408 IGNORE is nonzero if the value is to be ignored. */
5411 expand_builtin (exp, target, subtarget, mode, ignore)
5415 enum machine_mode mode;
5418 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5419 tree arglist = TREE_OPERAND (exp, 1);
5422 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5423 optab builtin_optab;
5425 switch (DECL_FUNCTION_CODE (fndecl))
5430 /* build_function_call changes these into ABS_EXPR. */
5435 case BUILT_IN_FSQRT:
5436 /* If not optimizing, call the library function. */
5441 /* Arg could be wrong type if user redeclared this fcn wrong. */
5442 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5443 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5445 /* Stabilize and compute the argument. */
5446 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5447 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5449 exp = copy_node (exp);
5450 arglist = copy_node (arglist);
5451 TREE_OPERAND (exp, 1) = arglist;
5452 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5454 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5456 /* Make a suitable register to place result in. */
5457 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5462 switch (DECL_FUNCTION_CODE (fndecl))
5465 builtin_optab = sin_optab; break;
5467 builtin_optab = cos_optab; break;
5468 case BUILT_IN_FSQRT:
5469 builtin_optab = sqrt_optab; break;
5474 /* Compute into TARGET.
5475 Set TARGET to wherever the result comes back. */
5476 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5477 builtin_optab, op0, target, 0);
5479 /* If we were unable to expand via the builtin, stop the
5480 sequence (without outputting the insns) and break, causing
5481 a call to the library function.  */
5488 /* Check the results by default. But if flag_fast_math is turned on,
5489 then assume sqrt will always be called with valid arguments. */
5491 if (! flag_fast_math)
5493 /* Don't define the builtin FP instructions
5494 if your machine is not IEEE. */
5495 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5498 lab1 = gen_label_rtx ();
5500 /* Test the result; if it is NaN, set errno=EDOM because
5501 the argument was not in the domain. */
5502 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5503 emit_jump_insn (gen_beq (lab1));
5507 #ifdef GEN_ERRNO_RTX
5508 rtx errno_rtx = GEN_ERRNO_RTX;
5511 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5514 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5517 /* We can't set errno=EDOM directly; let the library call do it.
5518 Pop the arguments right away in case the call gets deleted. */
5520 expand_call (exp, target, 0);
5527 /* Output the entire sequence. */
5528 insns = get_insns ();
5534 case BUILT_IN_SAVEREGS:
5535 /* Don't do __builtin_saveregs more than once in a function.
5536 Save the result of the first call and reuse it. */
5537 if (saveregs_value != 0)
5538 return saveregs_value;
5540 /* When this function is called, it means that registers must be
5541 saved on entry to this function. So we migrate the
5542 call to the first insn of this function. */
5545 rtx valreg, saved_valreg;
5547 /* Now really call the function. `expand_call' does not call
5548 expand_builtin, so there is no danger of infinite recursion here. */
5551 #ifdef EXPAND_BUILTIN_SAVEREGS
5552 /* Do whatever the machine needs done in this case. */
5553 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5555 /* The register where the function returns its value
5556 is likely to have something else in it, such as an argument.
5557 So preserve that register around the call. */
5558 if (value_mode != VOIDmode)
5560 valreg = hard_libcall_value (value_mode);
5561 saved_valreg = gen_reg_rtx (value_mode);
5562 emit_move_insn (saved_valreg, valreg);
5565 /* Generate the call, putting the value in a pseudo. */
5566 temp = expand_call (exp, target, ignore);
5568 if (value_mode != VOIDmode)
5569 emit_move_insn (valreg, saved_valreg);
5575 saveregs_value = temp;
5577 /* This won't work inside a SEQUENCE--it really has to be
5578 at the start of the function. */
5579 if (in_sequence_p ())
5581 /* Better to do this than to crash. */
5582 error ("`va_start' used within `({...})'");
5586 /* Put the sequence after the NOTE that starts the function. */
5587 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5591 /* __builtin_args_info (N) returns word N of the arg space info
5592 for the current function. The number and meanings of words
5593 are controlled by the definition of CUMULATIVE_ARGS.  */
5594 case BUILT_IN_ARGS_INFO:
5596 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5598 int *word_ptr = (int *) &current_function_args_info;
5599 tree type, elts, result;
5601 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5602 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5603 __FILE__, __LINE__);
5607 tree arg = TREE_VALUE (arglist);
5608 if (TREE_CODE (arg) != INTEGER_CST)
5609 error ("argument of __builtin_args_info must be constant");
5612 int wordnum = TREE_INT_CST_LOW (arg);
5614 if (wordnum < 0 || wordnum >= nwords)
5615 error ("argument of __builtin_args_info out of range");
5617 return GEN_INT (word_ptr[wordnum]);
5621 error ("missing argument in __builtin_args_info");
5626 for (i = 0; i < nwords; i++)
5627 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5629 type = build_array_type (integer_type_node,
5630 build_index_type (build_int_2 (nwords, 0)));
5631 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5632 TREE_CONSTANT (result) = 1;
5633 TREE_STATIC (result) = 1;
5634 result = build (INDIRECT_REF, build_pointer_type (type), result);
5635 TREE_CONSTANT (result) = 1;
5636 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5640 /* Return the address of the first anonymous stack arg. */
5641 case BUILT_IN_NEXT_ARG:
5643 tree fntype = TREE_TYPE (current_function_decl);
5644 if (!(TYPE_ARG_TYPES (fntype) != 0
5645 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5646 != void_type_node)))
5648 error ("`va_start' used in function with fixed args");
5653 return expand_binop (Pmode, add_optab,
5654 current_function_internal_arg_pointer,
5655 current_function_arg_offset_rtx,
5656 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5658 case BUILT_IN_CLASSIFY_TYPE:
5661 tree type = TREE_TYPE (TREE_VALUE (arglist));
5662 enum tree_code code = TREE_CODE (type);
5663 if (code == VOID_TYPE)
5664 return GEN_INT (void_type_class);
5665 if (code == INTEGER_TYPE)
5666 return GEN_INT (integer_type_class);
5667 if (code == CHAR_TYPE)
5668 return GEN_INT (char_type_class);
5669 if (code == ENUMERAL_TYPE)
5670 return GEN_INT (enumeral_type_class);
5671 if (code == BOOLEAN_TYPE)
5672 return GEN_INT (boolean_type_class);
5673 if (code == POINTER_TYPE)
5674 return GEN_INT (pointer_type_class);
5675 if (code == REFERENCE_TYPE)
5676 return GEN_INT (reference_type_class);
5677 if (code == OFFSET_TYPE)
5678 return GEN_INT (offset_type_class);
5679 if (code == REAL_TYPE)
5680 return GEN_INT (real_type_class);
5681 if (code == COMPLEX_TYPE)
5682 return GEN_INT (complex_type_class);
5683 if (code == FUNCTION_TYPE)
5684 return GEN_INT (function_type_class);
5685 if (code == METHOD_TYPE)
5686 return GEN_INT (method_type_class);
5687 if (code == RECORD_TYPE)
5688 return GEN_INT (record_type_class);
5689 if (code == UNION_TYPE)
5690 return GEN_INT (union_type_class);
5691 if (code == ARRAY_TYPE)
5692 return GEN_INT (array_type_class);
5693 if (code == STRING_TYPE)
5694 return GEN_INT (string_type_class);
5695 if (code == SET_TYPE)
5696 return GEN_INT (set_type_class);
5697 if (code == FILE_TYPE)
5698 return GEN_INT (file_type_class);
5699 if (code == LANG_TYPE)
5700 return GEN_INT (lang_type_class);
5702 return GEN_INT (no_type_class);
5704 case BUILT_IN_CONSTANT_P:
5708 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5709 ? const1_rtx : const0_rtx);
5711 case BUILT_IN_FRAME_ADDRESS:
5712 /* The argument must be a nonnegative integer constant.
5713 It counts the number of frames to scan up the stack.
5714 The value is the address of that frame. */
5715 case BUILT_IN_RETURN_ADDRESS:
5716 /* The argument must be a nonnegative integer constant.
5717 It counts the number of frames to scan up the stack.
5718 The value is the return address saved in that frame. */
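/* Typical calls, following the argument rules stated above: the argument is
   a nonnegative integer constant giving how many frames to walk up.  */
#if 0 /* Example only.  */
void
frame_example ()
{
  void *own_return_addr = __builtin_return_address (0);
  void *callers_frame = __builtin_frame_address (1);
}
#endif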
5720 /* Warning about missing arg was already issued. */
5722 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5724 error ("invalid arg to __builtin_return_address");
5727 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5729 error ("invalid arg to __builtin_return_address");
5734 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5735 rtx tem = frame_pointer_rtx;
5738 /* Scan back COUNT frames to the specified frame. */
5739 for (i = 0; i < count; i++)
5741 /* Assume the dynamic chain pointer is in the word that
5742 the frame address points to, unless otherwise specified. */
5743 #ifdef DYNAMIC_CHAIN_ADDRESS
5744 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5746 tem = memory_address (Pmode, tem);
5747 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5750 /* For __builtin_frame_address, return what we've got. */
5751 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5754 /* For __builtin_return_address,
5755 get the return address from that frame.  */
5756 #ifdef RETURN_ADDR_RTX
5757 return RETURN_ADDR_RTX (count, tem);
5759 tem = memory_address (Pmode,
5760 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5761 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5765 case BUILT_IN_ALLOCA:
5767 /* Arg could be non-integer if user redeclared this fcn wrong. */
5768 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5770 current_function_calls_alloca = 1;
5771 /* Compute the argument. */
5772 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5774 /* Allocate the desired space. */
5775 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5777 /* Record the new stack level for nonlocal gotos. */
5778 if (nonlocal_goto_handler_slot != 0)
5779 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5783 /* If not optimizing, call the library function. */
5788 /* Arg could be non-integer if user redeclared this fcn wrong. */
5789 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5792 /* Compute the argument. */
5793 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5794 /* Compute ffs, into TARGET if possible.
5795 Set TARGET to wherever the result comes back. */
5796 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5797 ffs_optab, op0, target, 1);
5802 case BUILT_IN_STRLEN:
5803 /* If not optimizing, call the library function. */
5808 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5809 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5813 tree src = TREE_VALUE (arglist);
5814 tree len = c_strlen (src);
5817 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5819 rtx result, src_rtx, char_rtx;
5820 enum machine_mode insn_mode = value_mode, char_mode;
5821 enum insn_code icode;
5823 /* If the length is known, just return it. */
5825 return expand_expr (len, target, mode, 0);
5827 /* If SRC is not a pointer type, don't do this operation inline. */
5831 /* Call a function if we can't compute strlen in the right mode. */
5833 while (insn_mode != VOIDmode)
5835 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5836 if (icode != CODE_FOR_nothing)
5839 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5841 if (insn_mode == VOIDmode)
5844 /* Make a place to write the result of the instruction. */
5847 && GET_CODE (result) == REG
5848 && GET_MODE (result) == insn_mode
5849 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5850 result = gen_reg_rtx (insn_mode);
5852 /* Make sure the operands are acceptable to the predicates. */
5854 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5855 result = gen_reg_rtx (insn_mode);
5857 src_rtx = memory_address (BLKmode,
5858 expand_expr (src, NULL_RTX, Pmode,
5860 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5861 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5863 char_rtx = const0_rtx;
5864 char_mode = insn_operand_mode[(int)icode][2];
5865 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5866 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5868 emit_insn (GEN_FCN (icode) (result,
5869 gen_rtx (MEM, BLKmode, src_rtx),
5870 char_rtx, GEN_INT (align)));
5872 /* Return the value in the proper mode for this function. */
5873 if (GET_MODE (result) == value_mode)
5875 else if (target != 0)
5877 convert_move (target, result, 0);
5881 return convert_to_mode (value_mode, result, 0);
5884 case BUILT_IN_STRCPY:
5885 /* If not optimizing, call the library function. */
5890 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5891 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5892 || TREE_CHAIN (arglist) == 0
5893 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5897 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5902 len = size_binop (PLUS_EXPR, len, integer_one_node);
5904 chainon (arglist, build_tree_list (NULL_TREE, len));
5908 case BUILT_IN_MEMCPY:
5909 /* If not optimizing, call the library function. */
5914 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5915 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5916 || TREE_CHAIN (arglist) == 0
5917 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5918 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5919 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5923 tree dest = TREE_VALUE (arglist);
5924 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5925 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5928 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5930 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5933 /* If either SRC or DEST is not a pointer type, don't do
5934 this operation in-line. */
5935 if (src_align == 0 || dest_align == 0)
5937 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5938 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5942 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5944 /* Copy word part most expediently. */
5945 emit_block_move (gen_rtx (MEM, BLKmode,
5946 memory_address (BLKmode, dest_rtx)),
5947 gen_rtx (MEM, BLKmode,
5948 memory_address (BLKmode,
5949 expand_expr (src, NULL_RTX,
5952 expand_expr (len, NULL_RTX, VOIDmode, 0),
5953 MIN (src_align, dest_align));
5957 /* These comparison functions need an instruction that returns an actual
5958 index.  An ordinary compare that just sets the condition codes is not enough.  */
5960 #ifdef HAVE_cmpstrsi
5961 case BUILT_IN_STRCMP:
5962 /* If not optimizing, call the library function. */
5967 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5968 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5969 || TREE_CHAIN (arglist) == 0
5970 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5972 else if (!HAVE_cmpstrsi)
5975 tree arg1 = TREE_VALUE (arglist);
5976 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5980 len = c_strlen (arg1);
5982 len = size_binop (PLUS_EXPR, integer_one_node, len);
5983 len2 = c_strlen (arg2);
5985 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5987 /* If we don't have a constant length for the first, use the length
5988 of the second, if we know it. We don't require a constant for
5989 this case; some cost analysis could be done if both are available
5990 but neither is constant. For now, assume they're equally cheap.
5992 If both strings have constant lengths, use the smaller. This
5993 could arise if optimization results in strcmp being called with
5994 two fixed strings, or if the code was machine-generated. We should
5995 add some code to the `memcmp' handler below to deal with such
5996 situations, someday. */
5997 if (!len || TREE_CODE (len) != INTEGER_CST)
6004 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6006 if (tree_int_cst_lt (len2, len))
6010 chainon (arglist, build_tree_list (NULL_TREE, len));
6014 case BUILT_IN_MEMCMP:
6015 /* If not optimizing, call the library function. */
6020 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6021 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6022 || TREE_CHAIN (arglist) == 0
6023 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6024 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6025 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6027 else if (!HAVE_cmpstrsi)
6030 tree arg1 = TREE_VALUE (arglist);
6031 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6032 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6036 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6038 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6039 enum machine_mode insn_mode
6040 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6042 /* If we don't have POINTER_TYPE, call the function. */
6043 if (arg1_align == 0 || arg2_align == 0)
6045 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6046 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6050 /* Make a place to write the result of the instruction. */
6053 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6054 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6055 result = gen_reg_rtx (insn_mode);
6057 emit_insn (gen_cmpstrsi (result,
6058 gen_rtx (MEM, BLKmode,
6059 expand_expr (arg1, NULL_RTX, Pmode,
6061 gen_rtx (MEM, BLKmode,
6062 expand_expr (arg2, NULL_RTX, Pmode,
6064 expand_expr (len, NULL_RTX, VOIDmode, 0),
6065 GEN_INT (MIN (arg1_align, arg2_align))));
6067 /* Return the value in the proper mode for this function. */
6068 mode = TYPE_MODE (TREE_TYPE (exp));
6069 if (GET_MODE (result) == mode)
6071 else if (target != 0)
6073 convert_move (target, result, 0);
6077 return convert_to_mode (mode, result, 0);
6080 case BUILT_IN_STRCMP:
6081 case BUILT_IN_MEMCMP:
6085 default: /* just do library call, if unknown builtin */
6086 error ("built-in function %s not currently supported",
6087 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6090 /* The switch statement above can drop through to cause the function
6091 to be called normally. */
6093 return expand_call (exp, target, ignore);
6096 /* Expand code for a post- or pre- increment or decrement
6097 and return the RTX for the result.
6098 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6101 expand_increment (exp, post)
6105 register rtx op0, op1;
6106 register rtx temp, value;
6107 register tree incremented = TREE_OPERAND (exp, 0);
6108 optab this_optab = add_optab;
6110 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6111 int op0_is_copy = 0;
6113 /* Stabilize any component ref that might need to be
6114 evaluated more than once below. */
6115 if (TREE_CODE (incremented) == BIT_FIELD_REF
6116 || (TREE_CODE (incremented) == COMPONENT_REF
6117 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6118 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6119 incremented = stabilize_reference (incremented);
6121 /* Compute the operands as RTX.
6122 Note whether OP0 is the actual lvalue or a copy of it:
6123 I believe it is a copy iff it is a register or subreg
6124 and insns were generated in computing it. */
6126 temp = get_last_insn ();
6127 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6129 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6130 in place but instead must do sign- or zero-extension during assignment,
6131 so we copy it into a new register and let the code below use it as a copy.
6134 Note that we can safely modify this SUBREG since it is known not to be
6135 shared (it was made by the expand_expr call above). */
6137 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6138 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6140 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6141 && temp != get_last_insn ());
6142 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6144 /* Decide whether incrementing or decrementing. */
6145 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6146 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6147 this_optab = sub_optab;
6149 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6150 then we cannot just increment OP0. We must
6151 therefore contrive to increment the original value.
6152 Then we can return OP0 since it is a copy of the old value. */
6155 /* This is the easiest way to increment the value wherever it is.
6156 Problems with multiple evaluation of INCREMENTED
6157 are prevented because either (1) it is a component_ref,
6158 in which case it was stabilized above, or (2) it is an array_ref
6159 with constant index in an array in a register, which is
6160 safe to reevaluate. */
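/* Illustrative example (editorial addition): one case where OP0 is only a
   copy is a bit-field increment such as

       struct s { int f : 3; } *p;
       p->f++;

   The extracted field value in OP0 cannot be incremented in place, so the
   increment is rebuilt below as the assignment p->f = p->f + 1.  The names
   s, f and p are hypothetical.  */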
6161 tree newexp = build ((this_optab == add_optab
6162 ? PLUS_EXPR : MINUS_EXPR),
6165 TREE_OPERAND (exp, 1));
6166 temp = expand_assignment (incremented, newexp, ! post, 0);
6167 return post ? op0 : temp;
6170 /* Convert decrement by a constant into a negative increment. */
6171 if (this_optab == sub_optab
6172 && GET_CODE (op1) == CONST_INT)
6174 op1 = GEN_INT (- INTVAL (op1));
6175 this_optab = add_optab;
6180 /* We have a true reference to the value in OP0.
6181 If there is an insn to add or subtract in this mode, queue it. */
6183 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6184 op0 = stabilize (op0);
6187 icode = (int) this_optab->handlers[(int) mode].insn_code;
6188 if (icode != (int) CODE_FOR_nothing
6189 /* Make sure that OP0 is valid for operands 0 and 1
6190 of the insn we want to queue. */
6191 && (*insn_operand_predicate[icode][0]) (op0, mode)
6192 && (*insn_operand_predicate[icode][1]) (op0, mode))
6194 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6195 op1 = force_reg (mode, op1);
6197 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6201 /* Preincrement, or we can't increment with one simple insn. */
6203 /* Save a copy of the value before inc or dec, to return it later. */
6204 temp = value = copy_to_reg (op0);
6206 /* Arrange to return the incremented value. */
6207 /* Copy the rtx because expand_binop will protect from the queue,
6208 and the results of that would be invalid for us to return
6209 if our caller does emit_queue before using our result. */
6210 temp = copy_rtx (value = op0);
6212 /* Increment however we can. */
6213 op1 = expand_binop (mode, this_optab, value, op1, op0,
6214 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6215 /* Make sure the value is stored into OP0. */
6217 emit_move_insn (op0, op1);
6222 /* Expand all function calls contained within EXP, innermost ones first.
6223 But don't look within expressions that have sequence points.
6224 For each CALL_EXPR, record the rtx for its value
6225 in the CALL_EXPR_RTL field. */
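/* Illustrative example (editorial addition): for a hypothetical expression

       f (g (a), h (b))

   the inner calls g (a) and h (b) are expanded first and their result rtx
   recorded in CALL_EXPR_RTL, so that neither call is expanded in the middle
   of pushing the arguments of f.  */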
6228 preexpand_calls (exp)
6231 register int nops, i;
6232 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6234 if (! do_preexpand_calls)
6237 /* Only expressions and references can contain calls. */
6239 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6242 switch (TREE_CODE (exp))
6245 /* Do nothing if already expanded. */
6246 if (CALL_EXPR_RTL (exp) != 0)
6249 /* Do nothing to built-in functions. */
6250 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6251 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6252 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6253 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6258 case TRUTH_ANDIF_EXPR:
6259 case TRUTH_ORIF_EXPR:
6260 /* If we find one of these, then we can be sure
6261 the adjust will be done for it (since it makes jumps).
6262 Do it now, so that if this is inside an argument
6263 of a function, we don't get the stack adjustment
6264 after some other args have already been pushed. */
6265 do_pending_stack_adjust ();
6270 case WITH_CLEANUP_EXPR:
6274 if (SAVE_EXPR_RTL (exp) != 0)
6278 nops = tree_code_length[(int) TREE_CODE (exp)];
6279 for (i = 0; i < nops; i++)
6280 if (TREE_OPERAND (exp, i) != 0)
6282 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6283 if (type == 'e' || type == '<' || type == '1' || type == '2'
6285 preexpand_calls (TREE_OPERAND (exp, i));
6289 /* At the start of a function, record that we have no previously-pushed
6290 arguments waiting to be popped. */
6293 init_pending_stack_adjust ()
6295 pending_stack_adjust = 0;
6298 /* When exiting from function, if safe, clear out any pending stack adjust
6299 so the adjustment won't get done. */
6302 clear_pending_stack_adjust ()
6304 #ifdef EXIT_IGNORE_STACK
6305 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6306 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6307 && ! flag_inline_functions)
6308 pending_stack_adjust = 0;
6312 /* Pop any previously-pushed arguments that have not been popped yet. */
6315 do_pending_stack_adjust ()
6317 if (inhibit_defer_pop == 0)
6319 if (pending_stack_adjust != 0)
6320 adjust_stack (GEN_INT (pending_stack_adjust));
6321 pending_stack_adjust = 0;
6325 /* Expand all cleanups up to OLD_CLEANUPS.
6326 Needed here, and also for language-dependent calls. */
6329 expand_cleanups_to (old_cleanups)
6332 while (cleanups_this_call != old_cleanups)
6334 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6335 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6339 /* Expand conditional expressions. */
6341 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6342 LABEL is an rtx of code CODE_LABEL, in this function and all the
6346 jumpifnot (exp, label)
6350 do_jump (exp, label, NULL_RTX);
6353 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6360 do_jump (exp, NULL_RTX, label);
6363 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6364 the result is zero, or IF_TRUE_LABEL if the result is one.
6365 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6366 meaning fall through in that case.
6368 do_jump always does any pending stack adjust except when it does not
6369 actually perform a jump. An example where there is no jump
6370 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6372 This function is responsible for optimizing cases such as
6373 &&, || and comparison operators in EXP. */
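/* Illustrative sketch (editorial addition): for a hypothetical statement

       if (a && b) stmt;

   do_jump sees the TRUTH_ANDIF_EXPR and emits roughly

       if (a == 0) goto false_label;
       if (b == 0) goto false_label;
       stmt;
     false_label: ;

   so b is never evaluated when a is zero.  The label name is invented
   for the example.  */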
6376 do_jump (exp, if_false_label, if_true_label)
6378 rtx if_false_label, if_true_label;
6380 register enum tree_code code = TREE_CODE (exp);
6381 /* Some cases need to create a label to jump to
6382 in order to properly fall through.
6383 These cases set DROP_THROUGH_LABEL nonzero. */
6384 rtx drop_through_label = 0;
6398 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6404 /* This is not true with #pragma weak */
6406 /* The address of something can never be zero. */
6408 emit_jump (if_true_label);
6413 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6414 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6415 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6418 /* If we are narrowing the operand, we have to do the compare in the narrower mode.  */
6420 if ((TYPE_PRECISION (TREE_TYPE (exp))
6421 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6423 case NON_LVALUE_EXPR:
6424 case REFERENCE_EXPR:
6429 /* These cannot change zero->non-zero or vice versa. */
6430 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6434 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
6435 a test and can be longer if the test is eliminated. */
6437 /* Reduce to minus. */
6438 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6439 TREE_OPERAND (exp, 0),
6440 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6441 TREE_OPERAND (exp, 1))));
6442 /* Process as MINUS. */
6446 /* Non-zero iff operands of minus differ. */
6447 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6448 TREE_OPERAND (exp, 0),
6449 TREE_OPERAND (exp, 1)),
6454 /* If we are AND'ing with a small constant, do this comparison in the
6455 smallest type that fits. If the machine doesn't have comparisons
6456 that small, it will be converted back to the wider comparison.
6457 This helps if we are testing the sign bit of a narrower object.
6458 combine can't do this for us because it can't know whether a
6459 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
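/* Illustrative example (editorial addition): for a hypothetical test

       int x;  ...  if (x & 0x80) ...

   the constant fits in 8 bits, so the jump can be done on the QImode value
   (unsigned char) (x & 0x80) compared against zero, provided the target has
   a QImode compare insn.  */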
6461 if (! SLOW_BYTE_ACCESS
6462 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6463 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6464 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6465 && (type = type_for_size (i + 1, 1)) != 0
6466 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6467 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6468 != CODE_FOR_nothing))
6470 do_jump (convert (type, exp), if_false_label, if_true_label);
6475 case TRUTH_NOT_EXPR:
6476 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6479 case TRUTH_ANDIF_EXPR:
6480 if (if_false_label == 0)
6481 if_false_label = drop_through_label = gen_label_rtx ();
6482 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6483 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6486 case TRUTH_ORIF_EXPR:
6487 if (if_true_label == 0)
6488 if_true_label = drop_through_label = gen_label_rtx ();
6489 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6490 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6494 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6497 do_pending_stack_adjust ();
6498 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6505 int bitsize, bitpos, unsignedp;
6506 enum machine_mode mode;
6511 /* Get description of this reference. We don't actually care
6512 about the underlying object here. */
6513 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6514 &mode, &unsignedp, &volatilep);
6516 type = type_for_size (bitsize, unsignedp);
6517 if (! SLOW_BYTE_ACCESS
6518 && type != 0 && bitsize >= 0
6519 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6520 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6521 != CODE_FOR_nothing))
6523 do_jump (convert (type, exp), if_false_label, if_true_label);
6530 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6531 if (integer_onep (TREE_OPERAND (exp, 1))
6532 && integer_zerop (TREE_OPERAND (exp, 2)))
6533 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6535 else if (integer_zerop (TREE_OPERAND (exp, 1))
6536 && integer_onep (TREE_OPERAND (exp, 2)))
6537 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6541 register rtx label1 = gen_label_rtx ();
6542 drop_through_label = gen_label_rtx ();
6543 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6544 /* Now the THEN-expression. */
6545 do_jump (TREE_OPERAND (exp, 1),
6546 if_false_label ? if_false_label : drop_through_label,
6547 if_true_label ? if_true_label : drop_through_label);
6548 /* In case the do_jump just above never jumps. */
6549 do_pending_stack_adjust ();
6550 emit_label (label1);
6551 /* Now the ELSE-expression. */
6552 do_jump (TREE_OPERAND (exp, 2),
6553 if_false_label ? if_false_label : drop_through_label,
6554 if_true_label ? if_true_label : drop_through_label);
6559 if (integer_zerop (TREE_OPERAND (exp, 1)))
6560 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6561 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6564 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6565 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6567 comparison = compare (exp, EQ, EQ);
6571 if (integer_zerop (TREE_OPERAND (exp, 1)))
6572 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6573 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6576 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6577 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6579 comparison = compare (exp, NE, NE);
6583 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6585 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6586 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6588 comparison = compare (exp, LT, LTU);
6592 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6594 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6595 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6597 comparison = compare (exp, LE, LEU);
6601 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6603 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6604 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6606 comparison = compare (exp, GT, GTU);
6610 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6612 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6613 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6615 comparison = compare (exp, GE, GEU);
6620 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6622 /* This is not needed any more and causes poor code since it causes
6623 comparisons and tests from non-SI objects to have different code
6625 /* Copy to register to avoid generating bad insns by cse
6626 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6627 if (!cse_not_expected && GET_CODE (temp) == MEM)
6628 temp = copy_to_reg (temp);
6630 do_pending_stack_adjust ();
6631 if (GET_CODE (temp) == CONST_INT)
6632 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6633 else if (GET_CODE (temp) == LABEL_REF)
6634 comparison = const_true_rtx;
6635 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6636 && !can_compare_p (GET_MODE (temp)))
6637 /* Note swapping the labels gives us not-equal. */
6638 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6639 else if (GET_MODE (temp) != VOIDmode)
6640 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6641 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6642 GET_MODE (temp), NULL_RTX, 0);
6647 /* Do any postincrements in the expression that was tested. */
6650 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6651 straight into a conditional jump instruction as the jump condition.
6652 Otherwise, all the work has been done already. */
6654 if (comparison == const_true_rtx)
6657 emit_jump (if_true_label);
6659 else if (comparison == const0_rtx)
6662 emit_jump (if_false_label);
6664 else if (comparison)
6665 do_jump_for_compare (comparison, if_false_label, if_true_label);
6669 if (drop_through_label)
6671 /* If do_jump produces code that might be jumped around,
6672 do any stack adjusts from that code, before the place
6673 where control merges in. */
6674 do_pending_stack_adjust ();
6675 emit_label (drop_through_label);
6679 /* Given a comparison expression EXP for values too wide to be compared
6680 with one insn, test the comparison and jump to the appropriate label.
6681 The code of EXP is ignored; we always test GT if SWAP is 0,
6682 and LT if SWAP is 1. */
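/* Illustrative sketch (editorial addition): on a target with 32-bit words,
   a signed DImode test a > b is emitted word by word, roughly

       if (a.high > b.high) goto if_true_label;     (signed compare)
       if (a.high != b.high) goto if_false_label;
       if (a.low > b.low) goto if_true_label;       (unsigned compare)
       goto if_false_label;

   The .high and .low notation is invented for the example.  */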
6685 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6688 rtx if_false_label, if_true_label;
6690 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6691 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6692 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6693 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6694 rtx drop_through_label = 0;
6695 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6698 if (! if_true_label || ! if_false_label)
6699 drop_through_label = gen_label_rtx ();
6700 if (! if_true_label)
6701 if_true_label = drop_through_label;
6702 if (! if_false_label)
6703 if_false_label = drop_through_label;
6705 /* Compare a word at a time, high order first. */
6706 for (i = 0; i < nwords; i++)
6709 rtx op0_word, op1_word;
6711 if (WORDS_BIG_ENDIAN)
6713 op0_word = operand_subword_force (op0, i, mode);
6714 op1_word = operand_subword_force (op1, i, mode);
6718 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6719 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6722 /* All but high-order word must be compared as unsigned. */
6723 comp = compare_from_rtx (op0_word, op1_word,
6724 (unsignedp || i > 0) ? GTU : GT,
6725 unsignedp, word_mode, NULL_RTX, 0);
6726 if (comp == const_true_rtx)
6727 emit_jump (if_true_label);
6728 else if (comp != const0_rtx)
6729 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6731 /* Consider lower words only if these are equal. */
6732 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6734 if (comp == const_true_rtx)
6735 emit_jump (if_false_label);
6736 else if (comp != const0_rtx)
6737 do_jump_for_compare (comp, NULL_RTX, if_false_label);
6741 emit_jump (if_false_label);
6742 if (drop_through_label)
6743 emit_label (drop_through_label);
6746 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6747 with one insn, test the comparison and jump to the appropriate label. */
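/* Illustrative sketch (editorial addition): for a DImode test a == b on a
   target with 32-bit words, the loop below amounts to

       if (a.word0 != b.word0) goto if_false_label;
       if (a.word1 != b.word1) goto if_false_label;
       goto if_true_label;

   with .word0 and .word1 invented for the example.  */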
6750 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6752 rtx if_false_label, if_true_label;
6754 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6755 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6756 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6757 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6759 rtx drop_through_label = 0;
6761 if (! if_false_label)
6762 drop_through_label = if_false_label = gen_label_rtx ();
6764 for (i = 0; i < nwords; i++)
6766 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6767 operand_subword_force (op1, i, mode),
6768 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6769 word_mode, NULL_RTX, 0);
6770 if (comp == const_true_rtx)
6771 emit_jump (if_false_label);
6772 else if (comp != const0_rtx)
6773 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6777 emit_jump (if_true_label);
6778 if (drop_through_label)
6779 emit_label (drop_through_label);
6782 /* Jump according to whether OP0 is 0.
6783 We assume that OP0 has an integer mode that is too wide
6784 for the available compare insns. */
6787 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6789 rtx if_false_label, if_true_label;
6791 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6793 rtx drop_through_label = 0;
6795 if (! if_false_label)
6796 drop_through_label = if_false_label = gen_label_rtx ();
6798 for (i = 0; i < nwords; i++)
6800 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6802 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
6803 if (comp == const_true_rtx)
6804 emit_jump (if_false_label);
6805 else if (comp != const0_rtx)
6806 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6810 emit_jump (if_true_label);
6811 if (drop_through_label)
6812 emit_label (drop_through_label);
6815 /* Given a comparison expression in rtl form, output conditional branches to
6816 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6819 do_jump_for_compare (comparison, if_false_label, if_true_label)
6820 rtx comparison, if_false_label, if_true_label;
6824 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6825 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6830 emit_jump (if_false_label);
6832 else if (if_false_label)
6835 rtx prev = PREV_INSN (get_last_insn ());
6838 /* Output the branch with the opposite condition. Then try to invert
6839 what is generated. If more than one insn is a branch, or if the
6840 branch is not the last insn written, abort. If we can't invert
6841 the branch, make a true label, redirect this jump to that,
6842 emit a jump to the false label and define the true label. */
6844 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6845 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6849 /* Here we get the insn before what was just emitted.
6850 On some machines, emitting the branch can discard
6851 the previous compare insn and emit a replacement. */
6853 /* If there's only one preceding insn... */
6854 insn = get_insns ();
6856 insn = NEXT_INSN (prev);
6858 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6859 if (GET_CODE (insn) == JUMP_INSN)
6866 if (branch != get_last_insn ())
6869 if (! invert_jump (branch, if_false_label))
6871 if_true_label = gen_label_rtx ();
6872 redirect_jump (branch, if_true_label);
6873 emit_jump (if_false_label);
6874 emit_label (if_true_label);
6879 /* Generate code for a comparison expression EXP
6880 (including code to compute the values to be compared)
6881 and set (CC0) according to the result.
6882 SIGNED_CODE should be the rtx operation for this comparison for
6883 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6885 We force a stack adjustment unless there are currently
6886 things pushed on the stack that aren't yet used. */
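/* Illustrative note (editorial addition): callers pass both rtx codes and
   compare picks one from the type of the operands.  For a hypothetical test
   a < b, compare (exp, LT, LTU) yields an LTU comparison when a and b are
   unsigned and an LT comparison when they are signed.  */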
6889 compare (exp, signed_code, unsigned_code)
6891 enum rtx_code signed_code, unsigned_code;
6894 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6896 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6897 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6898 register enum machine_mode mode = TYPE_MODE (type);
6899 int unsignedp = TREE_UNSIGNED (type);
6900 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6902 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6904 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6905 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6908 /* Like compare but expects the values to compare as two rtx's.
6909 The decision as to signed or unsigned comparison must be made by the caller.
6911 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6914 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6915 size of MODE should be used. */
6918 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6919 register rtx op0, op1;
6922 enum machine_mode mode;
6926 /* If one operand is constant, make it the second one. */
6928 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6933 code = swap_condition (code);
6938 op0 = force_not_mem (op0);
6939 op1 = force_not_mem (op1);
6942 do_pending_stack_adjust ();
6944 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6945 return simplify_relational_operation (code, mode, op0, op1);
6948 /* There's no need to do this now that combine.c can eliminate lots of
6949 sign extensions. This can be less efficient in certain cases on other machines. */
6952 /* If this is a signed equality comparison, we can do it as an
6953 unsigned comparison since zero-extension is cheaper than sign
6954 extension and comparisons with zero are done as unsigned. This is
6955 the case even on machines that can do fast sign extension, since
6956 zero-extension is easier to combine with other operations than
6957 sign-extension is. If we are comparing against a constant, we must
6958 convert it to what it would look like unsigned. */
6959 if ((code == EQ || code == NE) && ! unsignedp
6960 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6962 if (GET_CODE (op1) == CONST_INT
6963 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6964 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6969 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6971 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6974 /* Generate code to calculate EXP using a store-flag instruction
6975 and return an rtx for the result. EXP is either a comparison
6976 or a TRUTH_NOT_EXPR whose operand is a comparison.
6978 If TARGET is nonzero, store the result there if convenient.
6980 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6983 Return zero if there is no suitable set-flag instruction
6984 available on this machine.
6986 Once expand_expr has been called on the arguments of the comparison,
6987 we are committed to doing the store flag, since it is not safe to
6988 re-evaluate the expression. We emit the store-flag insn by calling
6989 emit_store_flag, but only expand the arguments if we have a reason
6990 to believe that emit_store_flag will be successful. If we think that
6991 it will, but it isn't, we have to simulate the store-flag with a
6992 set/jump/set sequence. */
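/* Illustrative sketch (editorial addition): for a hypothetical statement

       flag = (a < b);

   do_store_flag tries to emit a single store-flag (scc) insn that leaves
   0 or 1 in the target.  When emit_store_flag fails after the operands have
   already been expanded, the fallback emitted below is essentially

       flag = 1;
       if (a < b) goto label;
       flag = 0;
     label: ;

   with the two constants interchanged when INVERT is set.  */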
6995 do_store_flag (exp, target, mode, only_cheap)
6998 enum machine_mode mode;
7002 tree arg0, arg1, type;
7004 enum machine_mode operand_mode;
7008 enum insn_code icode;
7009 rtx subtarget = target;
7010 rtx result, label, pattern, jump_pat;
7012 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7013 result at the end. We can't simply invert the test since it would
7014 have already been inverted if it were valid. This case occurs for
7015 some floating-point comparisons. */
7017 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7018 invert = 1, exp = TREE_OPERAND (exp, 0);
7020 arg0 = TREE_OPERAND (exp, 0);
7021 arg1 = TREE_OPERAND (exp, 1);
7022 type = TREE_TYPE (arg0);
7023 operand_mode = TYPE_MODE (type);
7024 unsignedp = TREE_UNSIGNED (type);
7026 /* We won't bother with BLKmode store-flag operations because it would mean
7027 passing a lot of information to emit_store_flag. */
7028 if (operand_mode == BLKmode)
7034 /* Get the rtx comparison code to use. We know that EXP is a comparison
7035 operation of some type. Some comparisons against 1 and -1 can be
7036 converted to comparisons with zero. Do so here so that the tests
7037 below will be aware that we have a comparison with zero. These
7038 tests will not catch constants in the first operand, but constants
7039 are rarely passed as the first operand. */
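/* Illustrative example (editorial addition): for signed operands the
   rewrites below give

       x < 1     becomes   x <= 0
       x > -1    becomes   x >= 0

   so the constant-zero special cases later in this function also apply to
   these comparisons.  The variable x is hypothetical.  */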
7041 switch (TREE_CODE (exp))
7050 if (integer_onep (arg1))
7051 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7053 code = unsignedp ? LTU : LT;
7056 if (integer_all_onesp (arg1))
7057 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7059 code = unsignedp ? LEU : LE;
7062 if (integer_all_onesp (arg1))
7063 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7065 code = unsignedp ? GTU : GT;
7068 if (integer_onep (arg1))
7069 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7071 code = unsignedp ? GEU : GE;
7077 /* Put a constant second. */
7078 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7080 tem = arg0; arg0 = arg1; arg1 = tem;
7081 code = swap_condition (code);
7084 /* If this is an equality or inequality test of a single bit, we can
7085 do this by shifting the bit being tested to the low-order bit and
7086 masking the result with the constant 1. If the condition was EQ,
7087 we xor it with 1. This does not require an scc insn and is faster
7088 than an scc insn even if we have it. */
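/* Illustrative example (editorial addition): for a hypothetical statement

       flag = ((x & 4) != 0);

   bit 2 is shifted to the low-order position and masked:

       flag = (x >> 2) & 1;

   For the EQ form, (x & 4) == 0, the result is additionally xor'ed with 1.  */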
7090 if ((code == NE || code == EQ)
7091 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7092 && integer_pow2p (TREE_OPERAND (arg0, 1))
7093 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7095 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7096 NULL_RTX, VOIDmode, 0)));
7098 if (subtarget == 0 || GET_CODE (subtarget) != REG
7099 || GET_MODE (subtarget) != operand_mode
7100 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7103 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7106 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7107 size_int (bitnum), target, 1);
7109 if (GET_MODE (op0) != mode)
7110 op0 = convert_to_mode (mode, op0, 1);
7112 if (bitnum != TYPE_PRECISION (type) - 1)
7113 op0 = expand_and (op0, const1_rtx, target);
7115 if ((code == EQ && ! invert) || (code == NE && invert))
7116 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7122 /* Now see if we are likely to be able to do this. Return if not. */
7123 if (! can_compare_p (operand_mode))
7125 icode = setcc_gen_code[(int) code];
7126 if (icode == CODE_FOR_nothing
7127 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7129 /* We can only do this if it is one of the special cases that
7130 can be handled without an scc insn. */
7131 if ((code == LT && integer_zerop (arg1))
7132 || (! only_cheap && code == GE && integer_zerop (arg1)))
7134 else if (BRANCH_COST >= 0
7135 && ! only_cheap && (code == NE || code == EQ)
7136 && TREE_CODE (type) != REAL_TYPE
7137 && ((abs_optab->handlers[(int) operand_mode].insn_code
7138 != CODE_FOR_nothing)
7139 || (ffs_optab->handlers[(int) operand_mode].insn_code
7140 != CODE_FOR_nothing)))
7146 preexpand_calls (exp);
7147 if (subtarget == 0 || GET_CODE (subtarget) != REG
7148 || GET_MODE (subtarget) != operand_mode
7149 || ! safe_from_p (subtarget, arg1))
7152 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7153 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7156 target = gen_reg_rtx (mode);
7158 result = emit_store_flag (target, code, op0, op1, operand_mode,
7164 result = expand_binop (mode, xor_optab, result, const1_rtx,
7165 result, 0, OPTAB_LIB_WIDEN);
7169 /* If this failed, we have to do this with set/compare/jump/set code. */
7170 if (target == 0 || GET_CODE (target) != REG
7171 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7172 target = gen_reg_rtx (GET_MODE (target));
7174 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7175 result = compare_from_rtx (op0, op1, code, unsignedp,
7176 operand_mode, NULL_RTX, 0);
7177 if (GET_CODE (result) == CONST_INT)
7178 return (((result == const0_rtx && ! invert)
7179 || (result != const0_rtx && invert))
7180 ? const0_rtx : const1_rtx);
7182 label = gen_label_rtx ();
7183 if (bcc_gen_fctn[(int) code] == 0)
7186 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7187 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7193 /* Generate a tablejump instruction (used for switch statements). */
7195 #ifdef HAVE_tablejump
7197 /* INDEX is the value being switched on, with the lowest value
7198 in the table already subtracted.
7199 MODE is its expected mode (needed if INDEX is constant).
7200 RANGE is the length of the jump table.
7201 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7203 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7204 index value is out of range. */
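/* Illustrative example (editorial addition): for a hypothetical

       switch (i) { case 3: ...  case 10: ... }

   the caller has already emitted INDEX = i - 3, passes RANGE = 7, and has
   built the dispatch table labelled by TABLE_LABEL; do_tablejump only emits
   the bounds check and the indirect jump through that table.  */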
7207 do_tablejump (index, mode, range, table_label, default_label)
7208 rtx index, range, table_label, default_label;
7209 enum machine_mode mode;
7211 register rtx temp, vector;
7213 /* Do an unsigned comparison (in the proper mode) between the index
7214 expression and the value which represents the length of the range.
7215 Since we just finished subtracting the lower bound of the range
7216 from the index expression, this comparison allows us to simultaneously
7217 check that the original index expression value is both greater than
7218 or equal to the minimum value of the range and less than or equal to
7219 the maximum value of the range. */
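/* Worked example (editorial addition): with hypothetical case values
   3 .. 10 the caller passes INDEX = i - 3 and RANGE = 7.  If i is 2, the
   subtraction wraps around to a very large unsigned value, so the single
   unsigned test "RANGE < INDEX" branches to DEFAULT_LABEL for values below
   the lower bound as well as for values above the upper bound.  */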
7221 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
7222 emit_jump_insn (gen_bltu (default_label));
7224 /* If index is in range, it must fit in Pmode.
7225 Convert to Pmode so we can index with it. */
7227 index = convert_to_mode (Pmode, index, 1);
7229 /* If flag_force_addr were to affect this address
7230 it could interfere with the tricky assumptions made
7231 about addresses that contain label-refs,
7232 which may be valid only very near the tablejump itself. */
7233 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7234 GET_MODE_SIZE, because this indicates how large insns are. The other
7235 uses should all be Pmode, because they are addresses. This code
7236 could fail if addresses and insns are not the same size. */
7237 index = memory_address_noforce
7239 gen_rtx (PLUS, Pmode,
7240 gen_rtx (MULT, Pmode, index,
7241 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7242 gen_rtx (LABEL_REF, Pmode, table_label)));
7243 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7244 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7245 RTX_UNCHANGING_P (vector) = 1;
7246 convert_move (temp, vector, 0);
7248 emit_jump_insn (gen_tablejump (temp, table_label));
7250 #ifndef CASE_VECTOR_PC_RELATIVE
7251 /* If we are generating PIC code or if the table is PC-relative, the
7252 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7258 #endif /* HAVE_tablejump */