/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
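/* For instance (illustrative): CEIL (10, 4) is (10 + 4 - 1) / 4 == 3,
   the number of 4-byte units needed to hold 10 bytes, where plain
   integer division would truncate to 2.  */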
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
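/* For instance (illustrative): with STACK_BOUNDARY == 64 and
   BITS_PER_UNIT == 8, STACK_BYTES is 8, and argument-block sizes are
   rounded up below to multiples of 8 bytes.  */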
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;
/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;
/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;
/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);

      /* Find a register that can be used in this mode, if any.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (HARD_REGNO_MODE_OK (regno, mode))
          break;

      if (regno == FIRST_PSEUDO_REGISTER)
        continue;

      reg = gen_rtx (REG, mode, regno);

      SET_SRC (pat) = mem;
      SET_DEST (pat) = reg;
      direct_load[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;

      SET_SRC (pat) = reg;
      SET_DEST (pat) = mem;
      direct_store[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
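/* Usage sketch (illustrative, not from the original sources): expanding
   `a = b++' queues the increment so that the old value of B remains
   available:

     rtx q = enqueue_insn (b_rtx, gen_add2_insn (b_rtx, const1_rtx));
     ...emit insns that read protect_from_queue (q, 0)...
     emit_queue ();

   Only after emit_queue does the queued add reach the insn stream.
   B_RTX and Q are hypothetical names.  */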
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif
      /* No insn for this conversion; pick a library routine.  */
      libcall = (rtx) 0;
      if (from_mode == SFmode && to_mode == DFmode)
        libcall = extendsfdf2_libfunc;
      else if (from_mode == SFmode && to_mode == XFmode)
        libcall = extendsfxf2_libfunc;
      else if (from_mode == SFmode && to_mode == TFmode)
        libcall = extendsftf2_libfunc;
      else if (from_mode == DFmode && to_mode == SFmode)
        libcall = truncdfsf2_libfunc;
      else if (from_mode == DFmode && to_mode == XFmode)
        libcall = extenddfxf2_libfunc;
      else if (from_mode == DFmode && to_mode == TFmode)
        libcall = extenddftf2_libfunc;
      else if (from_mode == XFmode && to_mode == SFmode)
        libcall = truncxfsf2_libfunc;
      else if (from_mode == XFmode && to_mode == DFmode)
        libcall = truncxfdf2_libfunc;
      else if (from_mode == TFmode && to_mode == SFmode)
        libcall = trunctfsf2_libfunc;
      else if (from_mode == TFmode && to_mode == DFmode)
        libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, from));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && direct_load[(int) to_mode]
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
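/* Usage sketch (illustrative, not from the original sources): to widen a
   QImode value VAL to SImode with zero-extension, a caller may write

     rtx wide = convert_to_mode (SImode, val, 1);

   or allocate the destination itself and use convert_move directly:

     rtx wide = gen_reg_rtx (SImode);
     convert_move (wide, val, 1);

   Passing 0 for UNSIGNEDP requests sign-extension instead.  */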
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
               ? gen_rtx (MEM, mode, data->from_addr)
               : change_address (data->from, mode,
                                 plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
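/* Worked example (illustrative): on a machine with MOVE_MAX == 4,
   copying LEN == 7 bytes at ALIGN == 4 uses one SImode move (4 bytes),
   then one HImode move (2 bytes), then one QImode move (1 byte);
   move_by_pieces_ninsns accordingly returns 3 for that case.  */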
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */
void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
      if (HAVE_movstrqi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
        {
          rtx insn = gen_movstrqi (x, y, size, GEN_INT (align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrhi
      if (HAVE_movstrhi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
        {
          rtx insn = gen_movstrhi (x, y, size, GEN_INT (align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrsi
      if (HAVE_movstrsi)
        {
          rtx insn = gen_movstrsi (x, y, size, GEN_INT (align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrdi
      if (HAVE_movstrdi)
        {
          rtx insn = gen_movstrdi (x, y, size, GEN_INT (align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 1,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
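/* Usage sketch (illustrative, not from the original sources): copying a
   16-byte BLKmode object SRC into DST, both MEM rtx's, with known 4-byte
   alignment:

     emit_block_move (dst, src, GEN_INT (16), 4);

   Whether this becomes individual moves, a movstr insn, or a library
   call depends on MOVE_RATIO and the machine description.  */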
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 1,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 1,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
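/* Usage sketch (illustrative): clearing a 24-byte BLKmode object OBJ
   emits a memset/bzero library call, while a non-BLKmode OBJ gets a
   single move of const0_rtx:

     clear_storage (obj, 24);  */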
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      register int i;

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }
      return last_insn;
    }
  else
    abort ();
}
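/* Usage sketch (illustrative, not from the original sources):

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, gen_rtx (MEM, SImode, addr));

   copies one SImode word from memory into a pseudo.  For a multi-word
   mode with no move pattern, the loop above emits one such move per
   word instead.  ADDR is a hypothetical address rtx.  */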
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);
  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr, args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }

          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
          if (HAVE_movstrqi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
            {
              emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrhi
          if (HAVE_movstrhi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
            {
              emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrsi
          if (HAVE_movstrsi)
            {
              emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrdi
          if (HAVE_movstrdi)
            {
              emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 1,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             size, Pmode);
#else
          emit_library_call (bcopy_libfunc, 1,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             size, Pmode);
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;
      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr
            = memory_address (mode,
                              plus_constant (args_addr, INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.  */
void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
               struct args_size offset; struct args_size size; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue = 0;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);

  args_size.constant = 0;
  args_size.var = 0;
  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
        val = convert_to_mode (DFmode, val), mode = DFmode;
#endif

      /* There's no need to call protect_from_queue, because
         either emit_move_insn or emit_push_insn will do that.  */

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, NULL_RTX);

      argvec[count].value = val;
      argvec[count].mode = mode;

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
        abort ();
#endif

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
        abort ();
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
        = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, NULL_TREE,
                           argvec[count].reg && argvec[count].partial == 0,
                           NULL_TREE, &args_size, &argvec[count].offset,
                           &argvec[count].size);

      if (argvec[count].size.var)
        abort ();

#ifndef REG_PARM_STACK_SPACE
      if (argvec[count].partial)
        argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
#endif

      if (argvec[count].reg == 0 || argvec[count].partial != 0
#ifdef REG_PARM_STACK_SPACE
          || 1
#endif
          )
        args_size.constant += argvec[count].size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
      /* If this arg is actually passed on the stack, it might be
         clobbering something we already put there (this library call might
         be inside the evaluation of an argument to a function whose call
         requires the stack).  This will only occur when the library call
         has sufficient args to run out of argument registers.  Abort in
         this case; if this ever occurs, code must be added to save and
         restore the arg slot.  */

      if (argvec[count].reg == 0 || argvec[count].partial != 0)
        abort ();
#endif

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
    }
  va_end (p);
  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

#ifdef STACK_BOUNDARY
  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
                         / STACK_BYTES) * STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
  args_size.constant = MAX (args_size.constant,
                            REG_PARM_STACK_SPACE ((tree) 0));
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;
  args_size.constant = 0;
#endif

#ifndef PUSH_ROUNDING
  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
#endif
#ifdef PUSH_ARGS_REVERSED
  inc = -1;
  argnum = nargs - 1;
#else
  inc = 1;
  argnum = 0;
#endif

  /* Push the args that need to be pushed.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (! (reg != 0 && partial == 0))
        emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
                        argblock, GEN_INT (argvec[count].offset.constant));
      NO_DEFER_POP;
    }

#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
#else
  argnum = 0;
#endif

  /* Now load any reg parms into their regs.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);
      NO_DEFER_POP;
    }
  /* For version 1.37, try deleting this entirely.  */
  if (! no_queue)
    emit_queue ();

  /* Any regs containing parms remain in use through the call.  */
  start_sequence ();
  for (count = 0; count < nargs; count++)
    if (argvec[count].reg != 0)
      emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));

  use_insns = get_insns ();
  end_sequence ();

  fun = prepare_call_address (fun, NULL_TREE, &use_insns);

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */

  emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
               old_inhibit_defer_pop + 1, use_insns, no_queue);

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
}
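/* Call pattern (as used elsewhere in this file): the memcpy call in
   emit_block_move passes the libcall rtx, a no-queue flag, the return
   mode, the argument count, and then value/mode pairs:

     emit_library_call (memcpy_libfunc, 1,
                        VOIDmode, 3, XEXP (x, 0), Pmode,
                        XEXP (y, 0), Pmode,
                        convert_to_mode (Pmode, size, 1), Pmode);  */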
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */
rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, NULL_RTX, VOIDmode, 0);

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
          && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

          if (GET_CODE (to_rtx) != MEM)
            abort ();
          to_rtx = change_address (to_rtx, VOIDmode,
                                   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
                                            force_reg (Pmode, offset_rtx)));
        }
      if (volatilep)
        {
          if (GET_CODE (to_rtx) == MEM)
            MEM_VOLATILE_P (to_rtx) = 1;
#if 0  /* This was turned off because, when a field is volatile
          in an object which is not volatile, the object may be in a register,
          and then we would abort over here.  */
          else
            abort ();
#endif
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast makes HPUX compiler happy.  */
                             ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
                             : VOIDmode),
                            unsignedp,
                            /* Required alignment of containing datum.  */
                            TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
                            int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();

      return result;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
      rtx size = expr_size (from);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 1,
                         VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                         XEXP (to_rtx, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  return result;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   Returns TARGET or an equivalent value.
   TARGET may contain a QUEUED rtx.

   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.

   If the value stored is a constant, we return the constant.  */
rtx
store_expr (exp, target, suggest_reg)
     register tree exp;
     register rtx target;
     int suggest_reg;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;
      return target;
    }
  else if (suggest_reg && GET_CODE (target) == MEM
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, SUGGEST_REG will be nonzero for it.
       We know expand_expr will not use the target in that case.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
                          GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, it is not safe
       to use as the returned value.  It would access the wrong
       place by the time the queued increment gets output.
       So copy the value through a temporary and use that temp
       as the result.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target), 0);
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
      dont_return_target = 1;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* DO return TARGET if it's a specified hardware register.
         expand_return relies on this.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && CONSTANT_P (temp))
        dont_return_target = 1;
    }
2237 /* If value was not generated in the target, store it there.
2238 Convert the value to TARGET's type first if nec. */
2240 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2242 target = protect_from_queue (target, 1);
2243 if (GET_MODE (temp) != GET_MODE (target)
2244 && GET_MODE (temp) != VOIDmode)
2246 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2247 if (dont_return_target)
2249 /* In this case, we will return TEMP,
2250 so make sure it has the proper mode.
2251 But don't forget to store the value into TARGET. */
2252 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2253 emit_move_insn (target, temp);
2256 convert_move (target, temp, unsignedp);
2259 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2261 /* Handle copying a string constant into an array.
2262 The string constant may be shorter than the array.
2263 So copy just the string's actual length, and clear the rest. */
2266 /* Get the size of the data type of the string,
2267 which is actually the size of the target. */
2268 size = expr_size (exp);
2269 if (GET_CODE (size) == CONST_INT
2270 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2271 emit_block_move (target, temp, size,
2272 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2275 /* Compute the size of the data to copy from the string. */
2277 = fold (build (MIN_EXPR, sizetype,
2278 size_binop (CEIL_DIV_EXPR,
2279 TYPE_SIZE (TREE_TYPE (exp)),
2280 size_int (BITS_PER_UNIT)),
2282 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2283 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2287 /* Copy that much. */
2288 emit_block_move (target, temp, copy_size_rtx,
2289 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2291 /* Figure out how much is left in TARGET
2292 that we have to clear. */
2293 if (GET_CODE (copy_size_rtx) == CONST_INT)
2295 temp = plus_constant (XEXP (target, 0),
2296 TREE_STRING_LENGTH (exp));
2297 size = plus_constant (size,
2298 - TREE_STRING_LENGTH (exp));
2302 enum machine_mode size_mode = Pmode;
2304 temp = force_reg (Pmode, XEXP (target, 0));
2305 temp = expand_binop (size_mode, add_optab, temp,
2306 copy_size_rtx, NULL_RTX, 0,
2309 size = expand_binop (size_mode, sub_optab, size,
2310 copy_size_rtx, NULL_RTX, 0,
2313 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2314 GET_MODE (size), 0, 0);
2315 label = gen_label_rtx ();
2316 emit_jump_insn (gen_blt (label));
2319 if (size != const0_rtx)
2321 #ifdef TARGET_MEM_FUNCTIONS
2322 emit_library_call (memset_libfunc, 1, VOIDmode, 3,
2323 temp, Pmode, const0_rtx, Pmode, size, Pmode);
#else
2325 emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
2326 temp, Pmode, size, Pmode);
#endif
2333 else if (GET_MODE (temp) == BLKmode)
2334 emit_block_move (target, temp, expr_size (exp),
2335 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2337 emit_move_insn (target, temp);
2339 if (dont_return_target)
2344 /* Store the value of constructor EXP into the rtx TARGET.
2345 TARGET is either a REG or a MEM. */
2348 static void store_constructor (exp, target)
2352 tree type = TREE_TYPE (exp);
2354 /* We know our target cannot conflict, since safe_from_p has been called. */
2356 /* Don't try copying piece by piece into a hard register
2357 since that is vulnerable to being clobbered by EXP.
2358 Instead, construct in a pseudo register and then copy it all. */
2359 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2361 rtx temp = gen_reg_rtx (GET_MODE (target));
2362 store_constructor (exp, temp);
2363 emit_move_insn (target, temp);
2368 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2372 /* Inform later passes that the whole union value is dead. */
2373 if (TREE_CODE (type) == UNION_TYPE)
2374 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2376 /* If we are building a static constructor into a register,
2377 set the initial value as zero so we can fold the value into a constant. */
2379 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2380 emit_move_insn (target, const0_rtx);
2382 /* If the constructor has fewer fields than the structure,
2383 clear the whole structure first. */
2384 else if (list_length (CONSTRUCTOR_ELTS (exp))
2385 != list_length (TYPE_FIELDS (type)))
2386 clear_storage (target, int_size_in_bytes (type));
2388 /* Inform later passes that the old value is dead. */
2389 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2391 /* Store each element of the constructor into
2392 the corresponding field of TARGET. */
2394 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2396 register tree field = TREE_PURPOSE (elt);
2397 register enum machine_mode mode;
2402 /* Just ignore missing fields.
2403 We cleared the whole structure, above,
2404 if any fields are missing. */
2408 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2409 unsignedp = TREE_UNSIGNED (field);
2410 mode = DECL_MODE (field);
2411 if (DECL_BIT_FIELD (field))
2414 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2415 /* ??? This case remains to be written. */ abort ();
2418 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2420 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2421 /* The alignment of TARGET is
2422 at least what its type requires. */
2424 TYPE_ALIGN (type) / BITS_PER_UNIT,
2425 int_size_in_bytes (type));
2428 else if (TREE_CODE (type) == ARRAY_TYPE)
2432 tree domain = TYPE_DOMAIN (type);
2433 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2434 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2435 tree elttype = TREE_TYPE (type);
2437 /* If the constructor has fewer fields than the structure,
2438 clear the whole structure first. Similarly if this is a
2439 static constructor of a non-BLKmode object. */
2441 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2442 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2443 clear_storage (target, maxelt - minelt + 1);
2445 /* Inform later passes that the old value is dead. */
2446 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2448 /* Store each element of the constructor into
2449 the corresponding element of TARGET, determined
2450 by counting the elements. */
2451 for (elt = CONSTRUCTOR_ELTS (exp), i = 0; elt;
2453 elt = TREE_CHAIN (elt), i++)
2455 register enum machine_mode mode;
2460 mode = TYPE_MODE (elttype);
2461 bitsize = GET_MODE_BITSIZE (mode);
2462 unsignedp = TREE_UNSIGNED (elttype);
2464 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2466 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2467 /* The alignment of TARGET is
2468 at least what its type requires. */
2470 TYPE_ALIGN (type) / BITS_PER_UNIT,
2471 int_size_in_bytes (type));
2479 /* Store the value of EXP (an expression tree)
2480 into a subfield of TARGET which has mode MODE and occupies
2481 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2482 If MODE is VOIDmode, it means that we are storing into a bit-field.
2484 If VALUE_MODE is VOIDmode, return nothing in particular.
2485 UNSIGNEDP is not used in this case.
2487 Otherwise, return an rtx for the value stored. This rtx
2488 has mode VALUE_MODE if that is convenient to do.
2489 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2491 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2492 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
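/* Hypothetical usage sketch: to store EXP into the 3-bit field F of
   struct s { int f : 3; },  a caller would pass roughly
   store_field (target, 3, 0, VOIDmode, exp, SImode, 0, align, 4);
   MODE is VOIDmode because F is a bit-field.  The argument values here
   are illustrative, not taken from an actual call site.  */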
2495 static rtx store_field (target, bitsize, bitpos, mode, exp, value_mode,
2496 unsignedp, align, total_size)
2498 int bitsize, bitpos;
2499 enum machine_mode mode;
2501 enum machine_mode value_mode;
2506 HOST_WIDE_INT width_mask = 0;
2508 if (bitsize < HOST_BITS_PER_WIDE_INT)
2509 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2511 /* If we are storing into an unaligned field of an aligned union that is
2512 in a register, we may have the mode of TARGET being an integer mode but
2513 MODE == BLKmode. In that case, get an aligned object whose size and
2514 alignment are the same as TARGET and store TARGET into it (we can avoid
2515 the store if the field being stored is the entire width of TARGET). Then
2516 call ourselves recursively to store the field into a BLKmode version of
2517 that object. Finally, load from the object into TARGET. This is not
2518 very efficient in general, but should only be slightly more expensive
2519 than the otherwise-required unaligned accesses. Perhaps this can be
2520 cleaned up later. */
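/* For instance (illustration only): a union held in an SImode register
   with a 3-byte BLKmode member.  We spill the register to an SImode
   stack slot, recurse to store the member into a BLKmode view of that
   slot, and finally load the slot back into the register.  */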
2523 if (mode == BLKmode && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2525 rtx object = assign_stack_temp (GET_MODE (target),
2526 GET_MODE_SIZE (GET_MODE (target)), 0);
2527 rtx blk_object = copy_rtx (object);
2529 PUT_MODE (blk_object, BLKmode);
2531 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2532 emit_move_insn (object, target);
2534 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2537 emit_move_insn (target, object);
2542 /* If the structure is in a register or if the component
2543 is a bit field, we cannot use addressing to access it.
2544 Use bit-field techniques or SUBREG to store in it. */
2546 if (mode == VOIDmode
2547 || (mode != BLKmode && ! direct_store[(int) mode])
2548 || GET_CODE (target) == REG
2549 || GET_CODE (target) == SUBREG)
2551 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2552 /* Store the value in the bitfield. */
2553 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2554 if (value_mode != VOIDmode)
2556 /* The caller wants an rtx for the value. */
2557 /* If possible, avoid refetching from the bitfield itself. */
2559 if (width_mask != 0 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2560 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2561 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2562 NULL_RTX, value_mode, 0, align,
2569 rtx addr = XEXP (target, 0);
2572 /* If a value is wanted, it must be the lhs;
2573 so make the address stable for multiple use. */
2575 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2576 && ! CONSTANT_ADDRESS_P (addr)
2577 /* A frame-pointer reference is already stable. */
2578 && ! (GET_CODE (addr) == PLUS
2579 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2580 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2581 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2582 addr = copy_to_reg (addr);
2584 /* Now build a reference to just the desired component. */
2586 to_rtx = change_address (target, mode,
2587 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2588 MEM_IN_STRUCT_P (to_rtx) = 1;
2590 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2594 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2595 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2596 ARRAY_REFs at constant positions and find the ultimate containing object, which we return.
2599 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2600 bit position, and *PUNSIGNEDP to the signedness of the field.
2601 If the position of the field is variable, we store a tree
2602 giving the variable offset (in units) in *POFFSET.
2603 This offset is in addition to the bit position.
2604 If the position is not variable, we store 0 in *POFFSET.
2606 If any of the extraction expressions is volatile,
2607 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2609 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2610 is a mode that can be used to access the field. In that case, *PBITSIZE
2613 If the field describes a variable-sized object, *PMODE is set to
2614 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2615 this case, but the address of the object can be found. */
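/* Example of the decomposition (illustrative): for  s.a[2].b  with all
   positions constant, this returns the innermost object S, accumulates
   the bit positions of .a, [2] and .b into *PBITPOS, and stores 0 in
   *POFFSET.  Were the array index variable, its contribution would be
   returned in *POFFSET instead.  */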
2618 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2623 enum machine_mode *pmode;
2628 enum machine_mode mode = VOIDmode;
2631 if (TREE_CODE (exp) == COMPONENT_REF)
2633 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2634 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2635 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2636 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2638 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2640 size_tree = TREE_OPERAND (exp, 1);
2641 *punsignedp = TREE_UNSIGNED (exp);
2645 mode = TYPE_MODE (TREE_TYPE (exp));
2646 *pbitsize = GET_MODE_BITSIZE (mode);
2647 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2652 if (TREE_CODE (size_tree) != INTEGER_CST)
2653 mode = BLKmode, *pbitsize = -1;
2655 else *pbitsize = TREE_INT_CST_LOW (size_tree);
2658 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2659 and find the ultimate containing object. */
2665 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2667 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2668 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2669 : TREE_OPERAND (exp, 2));
2671 if (TREE_CODE (pos) == PLUS_EXPR)
2674 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2676 constant = TREE_OPERAND (pos, 0);
2677 var = TREE_OPERAND (pos, 1);
2679 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2681 constant = TREE_OPERAND (pos, 1);
2682 var = TREE_OPERAND (pos, 0);
2686 *pbitpos += TREE_INT_CST_LOW (constant);
2688 if (offset) offset = size_binop (PLUS_EXPR, offset,
2689 size_binop (FLOOR_DIV_EXPR, var,
2690 size_int (BITS_PER_UNIT)));
2692 else offset = size_binop (FLOOR_DIV_EXPR, var,
2693 size_int (BITS_PER_UNIT));
2695 else if (TREE_CODE (pos) == INTEGER_CST)
2696 *pbitpos += TREE_INT_CST_LOW (pos);
2699 /* Assume here that the offset is a multiple of a unit.
2700 If not, there should be an explicitly added constant. */
2702 if (offset) offset = size_binop (PLUS_EXPR, offset,
2703 size_binop (FLOOR_DIV_EXPR, pos,
2704 size_int (BITS_PER_UNIT)));
2706 else offset = size_binop (FLOOR_DIV_EXPR, pos,
2707 size_int (BITS_PER_UNIT));
2711 else if (TREE_CODE (exp) == ARRAY_REF
2712 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2713 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2715 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2716 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2718 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2719 && ! ((TREE_CODE (exp) == NOP_EXPR
2720 || TREE_CODE (exp) == CONVERT_EXPR)
2721 && (TYPE_MODE (TREE_TYPE (exp))
2722 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2725 /* If any reference in the chain is volatile, the effect is volatile. */
2726 if (TREE_THIS_VOLATILE (exp))
2728 exp = TREE_OPERAND (exp, 0);
2731 /* If this was a bit-field, see if there is a mode that allows direct
2732 access in case EXP is in memory. */
2733 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2735 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2736 if (mode == BLKmode)
2743 /* We aren't finished fixing the callers to really handle nonzero offset. */
2751 /* Given an rtx VALUE that may contain additions and multiplications,
2752 return an equivalent value that just refers to a register or memory.
2753 This is done by generating instructions to perform the arithmetic
2754 and returning a pseudo-register containing the value. */
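/* A small example (not from the original code): given
   (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61))
   force_operand emits the multiply and the add, returning a fresh
   pseudo register that holds the sum and is valid in any operand.  */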
2757 rtx force_operand (value, target)
2760 register optab binoptab = 0;
2761 /* Use a temporary to force order of execution of calls to `force_operand'. */ rtx tmp; register rtx op2;
2765 /* Use subtarget as the target for operand 0 of a binary operation. */
2766 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2768 if (GET_CODE (value) == PLUS)
2769 binoptab = add_optab;
2770 else if (GET_CODE (value) == MINUS)
2771 binoptab = sub_optab;
2772 else if (GET_CODE (value) == MULT)
2774 op2 = XEXP (value, 1);
2775 if (!CONSTANT_P (op2)
2776 && !(GET_CODE (op2) == REG && op2 != subtarget))
2778 tmp = force_operand (XEXP (value, 0), subtarget);
2779 return expand_mult (GET_MODE (value), tmp,
2780 force_operand (op2, NULL_RTX),
2786 op2 = XEXP (value, 1);
2787 if (!CONSTANT_P (op2)
2788 && !(GET_CODE (op2) == REG && op2 != subtarget))
2790 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2792 binoptab = add_optab;
2793 op2 = negate_rtx (GET_MODE (value), op2);
2796 /* Check for an addition with OP2 a constant integer and our first
2797 operand a PLUS of a virtual register and something else. In that
2798 case, we want to emit the sum of the virtual register and the
2799 constant first and then add the other value. This allows virtual
2800 register instantiation to simply modify the constant rather than
2801 creating another one around this addition. */
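/* Illustration: for
   (plus (plus (reg virtual-stack-vars) (reg 65)) (const_int 4))
   we first form virtual-stack-vars + 4, which instantiation can later
   rewrite as a plain frame-pointer offset, and only then add (reg 65).  */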
2802 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2803 && GET_CODE (XEXP (value, 0)) == PLUS
2804 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2805 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2806 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2808 rtx temp = expand_binop (GET_MODE (value), binoptab,
2809 XEXP (XEXP (value, 0), 0), op2,
2810 subtarget, 0, OPTAB_LIB_WIDEN);
2811 return expand_binop (GET_MODE (value), binoptab, temp,
2812 force_operand (XEXP (XEXP (value, 0), 1), 0),
2813 target, 0, OPTAB_LIB_WIDEN);
2816 tmp = force_operand (XEXP (value, 0), subtarget);
2817 return expand_binop (GET_MODE (value), binoptab, tmp,
2818 force_operand (op2, NULL_RTX),
2819 target, 0, OPTAB_LIB_WIDEN);
2820 /* We give UNSIGNEDP = 0 to expand_binop
2821 because the only operations we are expanding here are signed ones. */
2826 /* Subroutine of expand_expr:
2827 save the non-copied parts (LIST) of an expr (LHS), and return a list
2828 which can restore these values to their previous values,
2829 should something modify their storage. */
2832 save_noncopied_parts (lhs, list)
2839 for (tail = list; tail; tail = TREE_CHAIN (tail))
2840 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2841 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2844 tree part = TREE_VALUE (tail);
2845 tree part_type = TREE_TYPE (part);
2846 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2847 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2848 int_size_in_bytes (part_type), 0);
2849 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2850 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2851 parts = tree_cons (to_be_saved,
2852 build (RTL_EXPR, part_type, NULL_TREE,
2855 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2860 /* Subroutine of expand_expr:
2861 record the non-copied parts (LIST) of an expr (LHS), and return a list
2862 which specifies the initial values of these parts. */
2865 init_noncopied_parts (lhs, list)
2872 for (tail = list; tail; tail = TREE_CHAIN (tail))
2873 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2874 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2877 tree part = TREE_VALUE (tail);
2878 tree part_type = TREE_TYPE (part);
2879 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2880 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2885 /* Subroutine of expand_expr: return nonzero iff there is no way that
2886 EXP can reference X, which is being modified. */
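/* Example of the hazard (illustrative): when expanding  x = x + foo (),
   a memory slot X is not safe from the CALL_EXPR, since the call may
   store anywhere in memory; safe_from_p returns 0 and the caller must
   compute the right-hand side into a fresh temporary first.  */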
2889 static int safe_from_p (x, exp)
2899 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
2900 find the underlying pseudo. */
2901 if (GET_CODE (x) == SUBREG)
2904 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2908 /* If X is a location in the outgoing argument area, it is always safe. */
2909 if (GET_CODE (x) == MEM
2910 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2911 || (GET_CODE (XEXP (x, 0)) == PLUS
2912 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2915 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2918 exp_rtl = DECL_RTL (exp);
2925 if (TREE_CODE (exp) == TREE_LIST)
2926 return ((TREE_VALUE (exp) == 0
2927 || safe_from_p (x, TREE_VALUE (exp)))
2928 && (TREE_CHAIN (exp) == 0
2929 || safe_from_p (x, TREE_CHAIN (exp))));
2934 return safe_from_p (x, TREE_OPERAND (exp, 0));
2938 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2939 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2943 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2944 the expression. If it is set, we conflict iff we are that rtx or
2945 both are in memory. Otherwise, we check all operands of the
2946 expression recursively. */
2948 switch (TREE_CODE (exp))
2951 return staticp (TREE_OPERAND (exp, 0));
2954 if (GET_CODE (x) == MEM)
2959 exp_rtl = CALL_EXPR_RTL (exp);
2962 /* Assume that the call will clobber all hard registers and all of memory. */
2964 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2965 || GET_CODE (x) == MEM)
2972 exp_rtl = RTL_EXPR_RTL (exp);
2974 /* We don't know what this can modify. */
2979 case WITH_CLEANUP_EXPR:
2980 exp_rtl = RTL_EXPR_RTL (exp);
2984 exp_rtl = SAVE_EXPR_RTL (exp);
2988 /* The only operand we look at is operand 1. The rest aren't
2989 part of the expression. */
2990 return safe_from_p (x, TREE_OPERAND (exp, 1));
2992 case METHOD_CALL_EXPR:
2993 /* This takes an rtx argument, but shouldn't appear here. */
2997 /* If we have an rtx, we do not need to scan our operands. */
3001 nops = tree_code_length[(int) TREE_CODE (exp)];
3002 for (i = 0; i < nops; i++)
3003 if (TREE_OPERAND (exp, i) != 0
3004 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3008 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
3012 if (GET_CODE (exp_rtl) == SUBREG)
3014 exp_rtl = SUBREG_REG (exp_rtl);
3015 if (GET_CODE (exp_rtl) == REG
3016 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3020 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3021 are memory and EXP is not readonly. */
3022 return ! (rtx_equal_p (x, exp_rtl)
3023 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3024 && ! TREE_READONLY (exp)));
3027 /* If we reach here, it is safe. */
3031 /* Subroutine of expand_expr: return nonzero iff EXP is an
3032 expression whose type is statically determinable. */
3038 if (TREE_CODE (exp) == PARM_DECL
3039 || TREE_CODE (exp) == VAR_DECL
3040 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3041 || TREE_CODE (exp) == COMPONENT_REF
3042 || TREE_CODE (exp) == ARRAY_REF)
3047 /* expand_expr: generate code for computing expression EXP.
3048 An rtx for the computed value is returned. The value is never null.
3049 In the case of a void EXP, const0_rtx is returned.
3051 The value may be stored in TARGET if TARGET is nonzero.
3052 TARGET is just a suggestion; callers must assume that
3053 the rtx returned may not be the same as TARGET.
3055 If TARGET is CONST0_RTX, it means that the value will be ignored.
3057 If TMODE is not VOIDmode, it suggests generating the
3058 result in mode TMODE. But this is done only when convenient.
3059 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3060 TMODE is just a suggestion; callers must assume that
3061 the rtx returned may not have mode TMODE.
3063 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3064 with a constant address even if that address is not normally legitimate.
3065 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3067 If MODIFIER is EXPAND_SUM then when EXP is an addition
3068 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3069 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3070 products as above, or REG or MEM, or constant.
3071 Ordinarily in such cases we would output mul or add instructions
3072 and then return a pseudo reg containing the sum.
3074 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3075 it also marks a label as absolutely required (it can't be dead).
3076 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3077 This is used for outputting expressions used in initializers. */
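/* For example (illustration only): with MODIFIER == EXPAND_SUM,
   expanding the address  a + i*4  may return the unreduced form
   (plus (mult (reg 66) (const_int 4)) (symbol_ref "a"))
   which the caller can pass to memory_address to exploit indexed
   addressing, rather than a pseudo holding the finished sum.  */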
3080 rtx expand_expr (exp, target, tmode, modifier)
3083 enum machine_mode tmode;
3084 enum expand_modifier modifier;
3086 register rtx op0, op1, temp;
3087 tree type = TREE_TYPE (exp);
3088 int unsignedp = TREE_UNSIGNED (type);
3089 register enum machine_mode mode = TYPE_MODE (type);
3090 register enum tree_code code = TREE_CODE (exp);
3092 /* Use subtarget as the target for operand 0 of a binary operation. */
3093 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3094 rtx original_target = target;
3095 int ignore = target == const0_rtx;
3098 /* Don't use hard regs as subtargets, because the combiner
3099 can only handle pseudo regs. */
3100 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER) subtarget = 0;
3102 /* Avoid subtargets inside loops,
3103 since they hide some invariant expressions. */
3104 if (preserve_subexpressions_p ()) subtarget = 0;
3107 if (ignore) target = 0, original_target = 0;
3109 /* If will do cse, generate all results into pseudo registers
3110 since 1) that allows cse to find more things
3111 and 2) otherwise cse could produce an insn the machine cannot support. */
3114 if (! cse_not_expected && mode != BLKmode && target
3115 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)) target = gen_reg_rtx (mode);
3118 /* Ensure we reference a volatile object even if value is ignored. */
3119 if (ignore && TREE_THIS_VOLATILE (exp)
3120 && mode != VOIDmode && mode != BLKmode)
3122 target = gen_reg_rtx (mode);
3123 temp = expand_expr (exp, target, VOIDmode, modifier);
3125 emit_move_insn (target, temp);
3133 tree function = decl_function_context (exp);
3134 /* Handle using a label in a containing function. */
3135 if (function != current_function_decl && function != 0)
3137 struct function *p = find_function_data (function);
3138 /* Allocate in the memory associated with the function
3139 that the label is in. */
3140 push_obstacks (p->function_obstack,
3141 p->function_maybepermanent_obstack);
3143 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3144 label_rtx (exp), p->forced_labels);
3147 else if (modifier == EXPAND_INITIALIZER)
3148 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3149 label_rtx (exp), forced_labels);
3150 temp = gen_rtx (MEM, FUNCTION_MODE,
3151 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3152 if (function != current_function_decl && function != 0)
3153 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3158 if (DECL_RTL (exp) == 0)
3160 error_with_decl (exp, "prior parameter's size depends on `%s'");
3161 return CONST0_RTX (mode);
3167 if (DECL_RTL (exp) == 0)
3169 /* Ensure the variable is marked as used
3170 even if it doesn't go through a parser. */
3171 TREE_USED (exp) = 1;
3172 /* Handle variables inherited from containing functions. */
3173 context = decl_function_context (exp);
3175 /* We treat inline_function_decl as an alias for the current function
3176 because that is the inline function whose vars, types, etc.
3177 are being merged into the current function.
3178 See expand_inline_function. */
3179 if (context != 0 && context != current_function_decl
3180 && context != inline_function_decl
3181 /* If var is static, we don't need a static chain to access it. */
3182 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3183 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3187 /* Mark as non-local and addressable. */
3188 DECL_NONLOCAL (exp) = 1;
3189 mark_addressable (exp);
3190 if (GET_CODE (DECL_RTL (exp)) != MEM)
3192 addr = XEXP (DECL_RTL (exp), 0);
3193 if (GET_CODE (addr) == MEM)
3194 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3196 addr = fix_lexical_addr (addr, exp);
3197 return change_address (DECL_RTL (exp), mode, addr);
3200 /* This is the case of an array whose size is to be determined
3201 from its initializer, while the initializer is still being parsed.
3203 if (GET_CODE (DECL_RTL (exp)) == MEM
3204 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3205 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3206 XEXP (DECL_RTL (exp), 0));
3207 if (GET_CODE (DECL_RTL (exp)) == MEM
3208 && modifier != EXPAND_CONST_ADDRESS
3209 && modifier != EXPAND_SUM
3210 && modifier != EXPAND_INITIALIZER)
3212 /* DECL_RTL probably contains a constant address.
3213 On RISC machines where a constant address isn't valid,
3214 make some insns to get that address into a register. */
3215 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3217 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3218 return change_address (DECL_RTL (exp), VOIDmode,
3219 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3221 return DECL_RTL (exp);
3224 return immed_double_const (TREE_INT_CST_LOW (exp),
3225 TREE_INT_CST_HIGH (exp), mode);
3229 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3232 /* If optimized, generate immediate CONST_DOUBLE
3233 which will be turned into memory by reload if necessary.
3235 We used to force a register so that loop.c could see it. But
3236 this does not allow gen_* patterns to perform optimizations with
3237 the constants. It also produces two insns in cases like "x = 1.0;".
3238 On most machines, floating-point constants are not permitted in
3239 many insns, so we'd end up copying it to a register in any case.
3241 Now, we do the copying in expand_binop, if appropriate. */
3242 return immed_real_const (exp);
3246 if (! TREE_CST_RTL (exp))
3247 output_constant_def (exp);
3249 /* TREE_CST_RTL probably contains a constant address.
3250 On RISC machines where a constant address isn't valid,
3251 make some insns to get that address into a register. */
3252 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3253 && modifier != EXPAND_CONST_ADDRESS
3254 && modifier != EXPAND_INITIALIZER
3255 && modifier != EXPAND_SUM
3256 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3257 return change_address (TREE_CST_RTL (exp), VOIDmode,
3258 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3259 return TREE_CST_RTL (exp);
3262 context = decl_function_context (exp);
3263 /* We treat inline_function_decl as an alias for the current function
3264 because that is the inline function whose vars, types, etc.
3265 are being merged into the current function.
3266 See expand_inline_function. */
3267 if (context == current_function_decl || context == inline_function_decl)
3270 /* If this is non-local, handle it. */
3273 temp = SAVE_EXPR_RTL (exp);
3274 if (temp && GET_CODE (temp) == REG)
3276 put_var_into_stack (exp);
3277 temp = SAVE_EXPR_RTL (exp);
3279 if (temp == 0 || GET_CODE (temp) != MEM)
3281 return change_address (temp, mode,
3282 fix_lexical_addr (XEXP (temp, 0), exp));
3284 if (SAVE_EXPR_RTL (exp) == 0)
3286 if (mode == BLKmode)
3288 temp = assign_stack_temp (mode,
3289 int_size_in_bytes (TREE_TYPE (exp)), 0);
3291 else temp = gen_reg_rtx (mode);
3292 SAVE_EXPR_RTL (exp) = temp;
3293 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3294 if (!optimize && GET_CODE (temp) == REG)
3295 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3298 return SAVE_EXPR_RTL (exp);
3301 /* Exit the current loop if the body-expression is true. */
3303 rtx label = gen_label_rtx ();
3304 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3305 expand_exit_loop (NULL_PTR);
3311 expand_start_loop (1);
3312 expand_expr_stmt (TREE_OPERAND (exp, 0));
3319 tree vars = TREE_OPERAND (exp, 0);
3320 int vars_need_expansion = 0;
3322 /* Need to open a binding contour here because
3323 if there are any cleanups they must be contained here. */
3324 expand_start_bindings (0);
3326 /* Mark the corresponding BLOCK for output. */
3327 if (TREE_OPERAND (exp, 2) != 0)
3328 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3330 /* If VARS have not yet been expanded, expand them now. */
3333 if (DECL_RTL (vars) == 0)
3335 vars_need_expansion = 1;
3338 expand_decl_init (vars);
3339 vars = TREE_CHAIN (vars);
3342 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3344 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3350 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx) abort ();
3352 emit_insns (RTL_EXPR_SEQUENCE (exp));
3353 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3354 return RTL_EXPR_RTL (exp);
3357 /* All elts simple constants => refer to a constant in memory. But
3358 if this is a non-BLKmode mode, let it store a field at a time
3359 since that should make a CONST_INT or CONST_DOUBLE when we fold. */
3361 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3363 rtx constructor = output_constant_def (exp);
3364 if (modifier != EXPAND_CONST_ADDRESS
3365 && modifier != EXPAND_INITIALIZER
3366 && modifier != EXPAND_SUM
3367 && !memory_address_p (GET_MODE (constructor),
3368 XEXP (constructor, 0)))
3369 constructor = change_address (constructor, VOIDmode,
3370 XEXP (constructor, 0));
3377 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3378 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3383 if (target == 0 || ! safe_from_p (target, exp))
3385 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3386 target = gen_reg_rtx (mode);
3389 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3391 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3392 target = safe_target;
3395 store_constructor (exp, target);
3401 tree exp1 = TREE_OPERAND (exp, 0);
3404 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3405 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3406 This code has the same general effect as simply doing
3407 expand_expr on the save expr, except that the expression PTR
3408 is computed for use as a memory address. This means different
3409 code, suitable for indexing, may be generated. */
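/* Illustration: in  *p += 1  the front end wraps P in a SAVE_EXPR.
   Expanding that operand here with EXPAND_SUM yields an address form
   suited for indexing, and the result is saved in SAVE_EXPR_RTL so the
   second reference to P reuses the very same address.  */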
3410 if (TREE_CODE (exp1) == SAVE_EXPR
3411 && SAVE_EXPR_RTL (exp1) == 0
3412 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3413 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3414 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3416 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3417 VOIDmode, EXPAND_SUM);
3418 op0 = memory_address (mode, temp);
3419 op0 = copy_all_regs (op0);
3420 SAVE_EXPR_RTL (exp1) = op0;
3424 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3425 op0 = memory_address (mode, op0);
3428 temp = gen_rtx (MEM, mode, op0);
3429 /* If address was computed by addition,
3430 mark this as an element of an aggregate. */
3431 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3432 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3433 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3434 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3435 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3436 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3437 || (TREE_CODE (exp1) == ADDR_EXPR
3438 && (exp2 = TREE_OPERAND (exp1, 0))
3439 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3440 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3441 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3442 MEM_IN_STRUCT_P (temp) = 1;
3443 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3444 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3445 a location is accessed through a pointer to const does not mean
3446 that the value there can never change. */
3447 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3453 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3454 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3456 /* Nonconstant array index or nonconstant element size.
3457 Generate the tree for *(&array+index) and expand that,
3458 except do it in a language-independent way
3459 and don't complain about non-lvalue arrays.
3460 `mark_addressable' should already have been called
3461 for any array for which this case will be reached. */
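/* For example (illustrative): for  a[i]  with a variable index I we
   build the equivalent tree  *(&a + i * sizeof (a[0]))  and expand
   that, so the multiply and add go through the ordinary address
   arithmetic of expand_expr.  */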
3463 /* Don't forget the const or volatile flag from the array element. */
3464 tree variant_type = build_type_variant (type,
3465 TREE_READONLY (exp),
3466 TREE_THIS_VOLATILE (exp));
3467 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3468 TREE_OPERAND (exp, 0));
3469 tree index = TREE_OPERAND (exp, 1);
3472 /* Convert the integer argument to a type the same size as a pointer
3473 so the multiply won't overflow spuriously. */
3474 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3475 index = convert (type_for_size (POINTER_SIZE, 0), index);
3477 /* Don't think the address has side effects
3478 just because the array does.
3479 (In some cases the address might have side effects,
3480 and we fail to record that fact here. However, it should not
3481 matter, since expand_expr should not care.) */
3482 TREE_SIDE_EFFECTS (array_adr) = 0;
3484 elt = build1 (INDIRECT_REF, type,
3485 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3487 fold (build (MULT_EXPR,
3488 TYPE_POINTER_TO (variant_type),
3489 index, size_in_bytes (type))))));
3491 /* Volatility, etc., of new expression is same as old expression. */
3492 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3493 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3494 TREE_READONLY (elt) = TREE_READONLY (exp);
3496 return expand_expr (elt, target, tmode, modifier);
3499 /* Fold an expression like: "foo"[2].
3500 This is not done in fold so it won't happen inside &. */
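/* Illustration: "foo"[2] is the character 'o', so the expression is
   rebuilt as the integer constant 111 and expanded as such.  fold
   deliberately leaves this alone so that &"foo"[2] keeps denoting an
   address rather than a value.  */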
3503 tree arg0 = TREE_OPERAND (exp, 0);
3504 tree arg1 = TREE_OPERAND (exp, 1);
3506 if (TREE_CODE (arg0) == STRING_CST
3507 && TREE_CODE (arg1) == INTEGER_CST
3508 && !TREE_INT_CST_HIGH (arg1)
3509 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3511 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3513 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3514 TREE_TYPE (exp) = integer_type_node;
3515 return expand_expr (exp, target, tmode, modifier);
3517 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3519 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3520 TREE_TYPE (exp) = integer_type_node;
3521 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3526 /* If this is a constant index into a constant array,
3527 just get the value from the array. Handle both the cases when
3528 we have an explicit constructor and when our operand is a variable
3529 that was declared const. */
3531 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3532 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3534 tree index = fold (TREE_OPERAND (exp, 1));
3535 if (TREE_CODE (index) == INTEGER_CST
3536 && TREE_INT_CST_HIGH (index) == 0)
3538 int i = TREE_INT_CST_LOW (index);
3539 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3542 elem = TREE_CHAIN (elem);
3544 return expand_expr (fold (TREE_VALUE (elem)), target,
3549 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3550 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3551 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3552 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3553 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3555 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3558 tree index = fold (TREE_OPERAND (exp, 1));
3559 if (TREE_CODE (index) == INTEGER_CST
3560 && TREE_INT_CST_HIGH (index) == 0)
3562 int i = TREE_INT_CST_LOW (index);
3563 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3565 if (TREE_CODE (init) == CONSTRUCTOR)
3567 tree elem = CONSTRUCTOR_ELTS (init);
3570 elem = TREE_CHAIN (elem);
3572 return expand_expr (fold (TREE_VALUE (elem)), target,
3575 else if (TREE_CODE (init) == STRING_CST
3576 && i < TREE_STRING_LENGTH (init))
3578 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3579 return convert_to_mode (mode, temp, 0);
3583 /* Treat array-ref with constant index as a component-ref. */
3587 /* If the operand is a CONSTRUCTOR, we can just extract the
3588 appropriate field if it is present. */
3589 if (code != ARRAY_REF
3590 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3594 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3595 elt = TREE_CHAIN (elt))
3596 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3597 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3601 enum machine_mode mode1;
3606 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3607 &mode1, &unsignedp, &volatilep);
3609 /* In some cases, we will be offsetting OP0's address by a constant.
3610 So get it as a sum, if possible. If we will be using it
3611 directly in an insn, we validate it. */
3612 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3614 /* If this is a constant, put it into a register if it is a
3615 legitimate constant and into memory if it isn't. */
3616 if (CONSTANT_P (op0))
3618 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3619 if (LEGITIMATE_CONSTANT_P (op0))
3620 op0 = force_reg (mode, op0);
3622 else op0 = validize_mem (force_const_mem (mode, op0));
3627 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3629 if (GET_CODE (op0) != MEM)
3631 op0 = change_address (op0, VOIDmode,
3632 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3633 force_reg (Pmode, offset_rtx)));
3636 /* Don't forget about volatility even if this is a bitfield. */
3637 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3639 op0 = copy_rtx (op0);
3640 MEM_VOLATILE_P (op0) = 1;
3643 if (mode1 == VOIDmode
3644 || (mode1 != BLKmode && ! direct_load[(int) mode1])
3645 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3647 /* In cases where an aligned union has an unaligned object
3648 as a field, we might be extracting a BLKmode value from
3649 an integer-mode (e.g., SImode) object. Handle this case
3650 by doing the extract into an object as wide as the field
3651 (which we know to be the width of a basic mode), then
3652 storing into memory, and changing the mode to BLKmode. */
3653 enum machine_mode ext_mode = mode;
3655 if (ext_mode == BLKmode)
3656 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3658 if (ext_mode == BLKmode)
3661 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3662 unsignedp, target, ext_mode, ext_mode,
3663 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3664 int_size_in_bytes (TREE_TYPE (tem)));
3665 if (mode == BLKmode)
3667 rtx new = assign_stack_temp (ext_mode,
3668 bitsize / BITS_PER_UNIT, 0);
3670 emit_move_insn (new, op0);
3671 op0 = copy_rtx (new);
3672 PUT_MODE (op0, BLKmode);
3678 /* Get a reference to just this component. */
3679 if (modifier == EXPAND_CONST_ADDRESS
3680 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3681 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3682 (bitpos / BITS_PER_UNIT)));
3684 op0 = change_address (op0, mode1,
3685 plus_constant (XEXP (op0, 0),
3686 (bitpos / BITS_PER_UNIT)));
3687 MEM_IN_STRUCT_P (op0) = 1;
3688 MEM_VOLATILE_P (op0) |= volatilep;
3689 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3692 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3693 convert_move (target, op0, unsignedp);
3699 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3700 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3701 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3702 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3703 MEM_IN_STRUCT_P (temp) = 1;
3704 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3705 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3706 a location is accessed through a pointer to const does not mean
3707 that the value there can never change. */
3708 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3713 /* Intended for a reference to a buffer of a file-object in Pascal.
3714 But it's not certain that a special tree code will really be
3715 necessary for these. INDIRECT_REF might work for them. */
3719 case WITH_CLEANUP_EXPR:
3720 if (RTL_EXPR_RTL (exp) == 0)
3723 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3725 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3726 /* That's it for this cleanup. */
3727 TREE_OPERAND (exp, 2) = 0;
3729 return RTL_EXPR_RTL (exp);
3732 /* Check for a built-in function. */
3733 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3734 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3735 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3736 return expand_builtin (exp, target, subtarget, tmode, ignore);
3737 /* If this call was expanded already by preexpand_calls,
3738 just return the result we got. */
3739 if (CALL_EXPR_RTL (exp) != 0)
3740 return CALL_EXPR_RTL (exp);
3741 return expand_call (exp, target, ignore);
3743 case NON_LVALUE_EXPR:
3746 case REFERENCE_EXPR:
3747 if (TREE_CODE (type) == VOID_TYPE || ignore)
3749 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3752 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3753 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3754 if (TREE_CODE (type) == UNION_TYPE)
3756 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3759 if (mode == BLKmode)
3761 if (TYPE_SIZE (type) == 0
3762 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3764 target = assign_stack_temp (BLKmode,
3765 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3766 + BITS_PER_UNIT - 1)
3767 / BITS_PER_UNIT, 0);
3770 target = gen_reg_rtx (mode);
3772 if (GET_CODE (target) == MEM)
3773 /* Store data into beginning of memory target. */
3774 store_expr (TREE_OPERAND (exp, 0),
3775 change_address (target, TYPE_MODE (valtype), 0),
3777 else if (GET_CODE (target) == REG)
3778 /* Store this field into a union of the proper type. */
3779 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3780 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3782 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3786 /* Return the entire union. */
3789 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, modifier);
3790 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3792 if (modifier == EXPAND_INITIALIZER)
3793 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
3794 if (flag_force_mem && GET_CODE (op0) == MEM)
3795 op0 = copy_to_reg (op0);
3798 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3800 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3804 /* We come here from MINUS_EXPR when the second operand is a constant. */
3806 this_optab = add_optab;
3808 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3809 something else, make sure we add the register to the constant and
3810 then to the other thing. This case can occur during strength
3811 reduction and doing it this way will produce better code if the
3812 frame pointer or argument pointer is eliminated.
3814 fold-const.c will ensure that the constant is always in the inner
3815 PLUS_EXPR, so the only case we need to do anything about is if
3816 sp, ap, or fp is our second argument, in which case we must swap
3817 the innermost first argument and our second argument. */
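/* Sketch of the swap (illustrative): given  (x + 4) + fp,  where FP is
   the frame-pointer RTL_EXPR, we exchange X and FP to get  (fp + 4) + x,
   keeping the constant next to the register that elimination will
   rewrite.  */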
3819 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3820 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3821 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3822 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3823 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3824 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3826 tree t = TREE_OPERAND (exp, 1);
3828 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3829 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3832 /* If the result is to be Pmode and we are adding an integer to
3833 something, we might be forming a constant. So try to use
3834 plus_constant. If it produces a sum and we can't accept it,
3835 use force_operand. This allows P = &ARR[const] to generate
3836 efficient code on machines where a SYMBOL_REF is not a valid index.
3839 If this is an EXPAND_SUM call, always return the sum. */
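/* Example (not from the original code): for  p = &arr[3]  with 4-byte
   elements, plus_constant can fold the address to
   (plus (symbol_ref "arr") (const_int 12)),
   and only if that is not a legitimate address do we fall back to
   force_operand to compute it into a register.  */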
3840 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3841 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3842 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3845 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3847 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3848 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3849 op1 = force_operand (op1, target);
3853 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3854 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3855 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3858 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3860 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3861 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3862 op0 = force_operand (op0, target);
3866 /* No sense saving up arithmetic to be done
3867 if it's all in the wrong mode to form part of an address.
3868 And force_operand won't know whether to sign-extend or zero-extend. */
3870 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3871 || mode != Pmode) goto binop;
3873 preexpand_calls (exp);
3874 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3877 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3878 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
3880 /* Make sure any term that's a sum with a constant comes last. */
3881 if (GET_CODE (op0) == PLUS
3882 && CONSTANT_P (XEXP (op0, 1)))
3888 /* If adding to a sum including a constant,
3889 associate it to put the constant outside. */
3890 if (GET_CODE (op1) == PLUS
3891 && CONSTANT_P (XEXP (op1, 1)))
3893 rtx constant_term = const0_rtx;
3895 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3898 /* Ensure that MULT comes first if there is one. */
3899 else if (GET_CODE (op0) == MULT)
3900 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3902 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3904 /* Let's also eliminate constants from op0 if possible. */
3905 op0 = eliminate_constant_term (op0, &constant_term);
3907 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3908 their sum should be a constant. Form it into OP1, since the
3909 result we want will then be OP0 + OP1. */
3911 temp = simplify_binary_operation (PLUS, mode, constant_term,
3916 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3919 /* Put a constant term last and put a multiplication first. */
3920 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3921 temp = op1, op1 = op0, op0 = temp;
3923 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3924 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3927 /* Handle difference of two symbolic constants,
3928 for the sake of an initializer. */
3929 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3930 && really_constant_p (TREE_OPERAND (exp, 0))
3931 && really_constant_p (TREE_OPERAND (exp, 1)))
3933 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
3934 VOIDmode, modifier);
3935 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
3936 VOIDmode, modifier);
3937 return gen_rtx (MINUS, mode, op0, op1);
3939 /* Convert A - const to A + (-const). */
3940 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3942 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3943 fold (build1 (NEGATE_EXPR, type,
3944 TREE_OPERAND (exp, 1))));
3947 this_optab = sub_optab;
3951 preexpand_calls (exp);
3952 /* If first operand is constant, swap them.
3953 Thus the following special case checks need only
3954 check the second operand. */
3955 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3957 register tree t1 = TREE_OPERAND (exp, 0);
3958 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3959 TREE_OPERAND (exp, 1) = t1;
3962 /* Attempt to return something suitable for generating an
3963 indexed address, for machines that support that. */
3965 if (modifier == EXPAND_SUM && mode == Pmode
3966 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3967 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3969 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3971 /* Apply distributive law if OP0 is x+c. */
3972 if (GET_CODE (op0) == PLUS
3973 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
3974 return gen_rtx (PLUS, mode,
3975 gen_rtx (MULT, mode, XEXP (op0, 0),
3976 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
3977 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3978 * INTVAL (XEXP (op0, 1))));
3980 if (GET_CODE (op0) != REG)
3981 op0 = force_operand (op0, NULL_RTX);
3982 if (GET_CODE (op0) != REG)
3983 op0 = copy_to_mode_reg (mode, op0);
3985 return gen_rtx (MULT, mode, op0,
3986 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
3989 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3992 /* Check for multiplying things that have been extended
3993 from a narrower type. If this machine supports multiplying
3994 in that narrower type with a result in the desired type,
3995 do it that way, and avoid the explicit type-conversion. */
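/* Illustration, assuming a machine with a widening-multiply pattern
   (say HImode x HImode -> SImode): for  (int) s1 * (int) s2  with
   short S1 and S2, we multiply the narrow operands directly and obtain
   the SImode product, skipping both explicit extensions.  */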
3996 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
3997 && TREE_CODE (type) == INTEGER_TYPE
3998 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3999 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4000 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4001 && int_fits_type_p (TREE_OPERAND (exp, 1),
4002 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4003 /* Don't use a widening multiply if a shift will do. */
4004 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4005 > HOST_BITS_PER_WIDE_INT)
4006 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4008 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4009 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4011 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4012 /* If both operands are extended, they must either both
4013 be zero-extended or both be sign-extended. */
4014 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4016 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4018 enum machine_mode innermode
4019 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4020 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4021 ? umul_widen_optab : smul_widen_optab);
4022 if (mode == GET_MODE_WIDER_MODE (innermode)
4023 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4025 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4026 NULL_RTX, VOIDmode, 0);
4027 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4028 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4031 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4032 NULL_RTX, VOIDmode, 0);
4036 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4037 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4038 return expand_mult (mode, op0, op1, target, unsignedp);
4040 case TRUNC_DIV_EXPR:
4041 case FLOOR_DIV_EXPR:
4043 case ROUND_DIV_EXPR:
4044 case EXACT_DIV_EXPR:
4045 preexpand_calls (exp);
4046 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4048 /* Possible optimization: compute the dividend with EXPAND_SUM
4049 then, if the divisor is constant, we can optimize the case
4050 where some terms of the dividend have coefficients divisible by it. */
4051 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4052 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4053 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4056 this_optab = flodiv_optab;
4059 case TRUNC_MOD_EXPR:
4060 case FLOOR_MOD_EXPR:
4062 case ROUND_MOD_EXPR:
4063 preexpand_calls (exp);
4064 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4066 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4067 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4068 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4070 case FIX_ROUND_EXPR:
4071 case FIX_FLOOR_EXPR:
4073 abort (); /* Not used for C. */
4075 case FIX_TRUNC_EXPR:
4076 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4078 if (target == 0) target = gen_reg_rtx (mode);
4079 expand_fix (target, op0, unsignedp);
4083 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4085 if (target == 0) target = gen_reg_rtx (mode);
4086 /* expand_float can't figure out what to do if FROM has VOIDmode.
4087 So give it the correct mode. With -O, cse will optimize this. */
4088 if (GET_MODE (op0) == VOIDmode)
4089 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4091 expand_float (target, op0,
4092 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4096 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4097 temp = expand_unop (mode, neg_optab, op0, target, 0);
4103 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4105 /* Unsigned abs is simply the operand. Testing here means we don't
4106 risk generating incorrect code below. */
4107 if (TREE_UNSIGNED (type))
4110 /* First try to do it with a special abs instruction. */
4111 temp = expand_unop (mode, abs_optab, op0, target, 0);
4115 /* If this machine has expensive jumps, we can do integer absolute
4116 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4117 where W is the width of MODE. */
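/* Worked example for W == 32: if x == -5, then
   t = x >> 31 == -1,  (t ^ x) == 4,  and  4 - t == 5.
   For x >= 0, t == 0 and the expression reduces to x itself.  */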
4119 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4121 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4122 size_int (GET_MODE_BITSIZE (mode) - 1),
4125 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4128 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4135 /* If that does not win, use conditional jump and negate. */
4136 target = original_target;
4137 temp = gen_label_rtx ();
4138 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4139 || (GET_CODE (target) == REG
4140 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4141 target = gen_reg_rtx (mode);
4142 emit_move_insn (target, op0);
4143 emit_cmp_insn (target,
4144 expand_expr (convert (type, integer_zero_node),
4145 NULL_RTX, VOIDmode, 0),
4146 GE, NULL_RTX, mode, 0, 0);
4148 emit_jump_insn (gen_bge (temp));
4149 op0 = expand_unop (mode, neg_optab, target, target, 0);
4151 emit_move_insn (target, op0);
4158 target = original_target;
4159 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4160 || (GET_CODE (target) == REG
4161 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4162 target = gen_reg_rtx (mode);
4163 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4164 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4166 /* First try to do it with a special MIN or MAX instruction.
4167 If that does not win, use a conditional jump to select the proper value. */
4169 this_optab = (TREE_UNSIGNED (type)
4170 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4171 : (code == MIN_EXPR ? smin_optab : smax_optab));
4173 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4179 emit_move_insn (target, op0);
4180 op0 = gen_label_rtx ();
4181 if (code == MAX_EXPR)
4182 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4183 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4184 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4186 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4187 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4188 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4189 if (temp == const0_rtx)
4190 emit_move_insn (target, op1);
4191 else if (temp != const_true_rtx)
4193 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4194 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4197 emit_move_insn (target, op1);
4202 /* ??? Can optimize when the operand of this is a bitwise operation,
4203 by using a different bitwise operation. */
4205 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4206 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4212 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4213 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4218 /* ??? Can optimize bitwise operations with one arg constant.
4219 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4220 and (a bitwise1 b) bitwise2 b (etc)
4221 but that is probably not worth while. */
4223 /* BIT_AND_EXPR is for bitwise anding.
4224 TRUTH_AND_EXPR is for anding two boolean values
4225 when we want in all cases to compute both of them.
4226 In general it is fastest to do TRUTH_AND_EXPR by
4227 computing both operands as actual zero-or-1 values
4228 and then bitwise anding. In cases where there cannot
4229 be any side effects, better code would be made by
4230 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4231 but the question is how to recognize those cases. */
case BIT_AND_EXPR:
4233 case TRUTH_AND_EXPR:
4235 this_optab = and_optab;
goto binop;
4238 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
case BIT_IOR_EXPR:
case TRUTH_OR_EXPR:
4241 this_optab = ior_optab;
goto binop;
case BIT_XOR_EXPR:
4245 this_optab = xor_optab;
goto binop;
4252 preexpand_calls (exp);
4253 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
subtarget = 0;
4255 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4256 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target, unsignedp);
4259 /* Could determine the answer when only additive constants differ.
4260 Also, the addition of one can be handled by changing the condition. */
4267 preexpand_calls (exp);
4268 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
if (temp != 0)
return temp;
4271 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4272 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4274 && GET_CODE (original_target) == REG
4275 && (GET_MODE (original_target)
4276 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4278 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4279 if (temp != original_target)
4280 temp = copy_to_reg (temp);
4281 op1 = gen_label_rtx ();
4282 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4283 GET_MODE (temp), unsignedp, 0);
4284 emit_jump_insn (gen_beq (op1));
4285 emit_move_insn (temp, const1_rtx);
4289 /* If no set-flag instruction, must generate a conditional
4290 store into a temporary variable. Drop through
4291 and handle this like && and ||. */
4293 case TRUTH_ANDIF_EXPR:
4294 case TRUTH_ORIF_EXPR:
4295 if (target == 0 || ! safe_from_p (target, exp)
4296 /* Make sure we don't have a hard reg (such as function's return
4297 value) live across basic blocks, if not optimizing. */
4298 || (!optimize && GET_CODE (target) == REG
4299 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4300 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4301 emit_clr_insn (target);
4302 op1 = gen_label_rtx ();
4303 jumpifnot (exp, op1);
4304 emit_0_to_1_insn (target);
4308 case TRUTH_NOT_EXPR:
4309 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4310 /* The parser is careful to generate TRUTH_NOT_EXPR
4311 only with operands that are always zero or one. */
4312 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4313 target, 1, OPTAB_LIB_WIDEN);
4319 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4321 return expand_expr (TREE_OPERAND (exp, 1),
4322 (ignore ? const0_rtx : target), VOIDmode, 0);
case COND_EXPR:
4327 /* Note that COND_EXPRs whose type is a structure or union
4328 are required to be constructed to contain assignments of
4329 a temporary variable, so that we can evaluate them here
4330 for side effect only. If type is void, we must do likewise. */
4332 /* If an arm of the branch requires a cleanup,
4333 only that cleanup is performed. */
4336 tree binary_op = 0, unary_op = 0;
4337 tree old_cleanups = cleanups_this_call;
4338 cleanups_this_call = 0;
4340 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4341 convert it to our mode, if necessary. */
4342 if (integer_onep (TREE_OPERAND (exp, 1))
4343 && integer_zerop (TREE_OPERAND (exp, 2))
4344 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4346 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4347 if (GET_MODE (op0) == mode)
return op0;
if (target == 0)
4350 target = gen_reg_rtx (mode);
4351 convert_move (target, op0, unsignedp);
return target;
4355 /* If we are not to produce a result, we have no target. Otherwise,
4356 if a target was specified use it; it will not be used as an
4357 intermediate target unless it is safe. If no target, use a temporary. */
4360 if (mode == VOIDmode || ignore)
temp = 0;
4362 else if (original_target
4363 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4364 temp = original_target;
4365 else if (mode == BLKmode)
4367 if (TYPE_SIZE (type) == 0
4368 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
abort ();
4370 temp = assign_stack_temp (BLKmode,
4371 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4372 + BITS_PER_UNIT - 1)
4373 / BITS_PER_UNIT, 0);
4376 temp = gen_reg_rtx (mode);
4378 /* Check for X ? A + B : A. If we have this, we can copy
4379 A to the output and conditionally add B. Similarly for unary
4380 operations. Don't do this if X has side-effects because
4381 those side effects might affect A or B and the "?" operation is
4382 a sequence point in ANSI. (We test for side effects later.) */
4384 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4385 && operand_equal_p (TREE_OPERAND (exp, 2),
4386 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4387 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4388 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4389 && operand_equal_p (TREE_OPERAND (exp, 1),
4390 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4391 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4392 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4393 && operand_equal_p (TREE_OPERAND (exp, 2),
4394 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4395 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4396 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4397 && operand_equal_p (TREE_OPERAND (exp, 1),
4398 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4399 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4401 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4402 operation, do this as A + (X != 0). Similarly for other simple
4403 binary operators. */
4404 if (singleton && binary_op
4405 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4406 && (TREE_CODE (binary_op) == PLUS_EXPR
4407 || TREE_CODE (binary_op) == MINUS_EXPR
4408 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4409 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4410 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4411 && integer_onep (TREE_OPERAND (binary_op, 1))
4412 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4415 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4416 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4417 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4418 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4421 /* If we had X ? A : A + 1, do this as A + (X == 0).
4423 We have to invert the truth value here and then put it
4424 back later if do_store_flag fails. We cannot simply copy
4425 TREE_OPERAND (exp, 0) to another variable and modify that
4426 because invert_truthvalue can modify the tree pointed to by its argument. */
4428 if (singleton == TREE_OPERAND (exp, 1))
4429 TREE_OPERAND (exp, 0)
4430 = invert_truthvalue (TREE_OPERAND (exp, 0));
4432 result = do_store_flag (TREE_OPERAND (exp, 0),
4433 (safe_from_p (temp, singleton)
? temp : NULL_RTX),
4435 mode, BRANCH_COST <= 1);
if (result)
{
4439 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4440 return expand_binop (mode, boptab, op1, result, temp,
4441 unsignedp, OPTAB_LIB_WIDEN);
}
4443 else if (singleton == TREE_OPERAND (exp, 1))
4444 TREE_OPERAND (exp, 0)
4445 = invert_truthvalue (TREE_OPERAND (exp, 0));
4449 op0 = gen_label_rtx ();
4451 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4455 /* If the target conflicts with the other operand of the
4456 binary op, we can't use it. Also, we can't use the target
4457 if it is a hard register, because evaluating the condition
4458 might clobber it. */
if ((binary_op
4460 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4461 || (GET_CODE (temp) == REG
4462 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4463 temp = gen_reg_rtx (mode);
4464 store_expr (singleton, temp, 0);
4467 expand_expr (singleton,
4468 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4469 if (cleanups_this_call)
4471 sorry ("aggregate value in COND_EXPR");
4472 cleanups_this_call = 0;
4474 if (singleton == TREE_OPERAND (exp, 1))
4475 jumpif (TREE_OPERAND (exp, 0), op0);
4477 jumpifnot (TREE_OPERAND (exp, 0), op0);
4479 if (binary_op && temp == 0)
4480 /* Just touch the other operand. */
4481 expand_expr (TREE_OPERAND (binary_op, 1),
4482 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4484 store_expr (build (TREE_CODE (binary_op), type,
4485 make_tree (type, temp),
4486 TREE_OPERAND (binary_op, 1)),
4489 store_expr (build1 (TREE_CODE (unary_op), type,
4490 make_tree (type, temp)),
4495 /* This is now done in jump.c and is better done there because it
4496 produces shorter register lifetimes. */
4498 /* Check for both possibilities, either constants or variables
4499 in registers (but not the same as the target!). If so, we can
4500 save branches by assigning one, branching, and assigning the other. */
4502 else if (temp && GET_MODE (temp) != BLKmode
4503 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4504 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4505 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4506 && DECL_RTL (TREE_OPERAND (exp, 1))
4507 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4508 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4509 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4510 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4511 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4512 && DECL_RTL (TREE_OPERAND (exp, 2))
4513 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4514 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4516 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4517 temp = gen_reg_rtx (mode);
4518 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4519 jumpifnot (TREE_OPERAND (exp, 0), op0);
4520 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4524 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4525 comparison operator. If we have one of these cases, set the
4526 output to A, branch on A (cse will merge these two references),
4527 then set the output to FOO. */
4529 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4530 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4531 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4532 TREE_OPERAND (exp, 1), 0)
4533 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4534 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4536 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4537 temp = gen_reg_rtx (mode);
4538 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4539 jumpif (TREE_OPERAND (exp, 0), op0);
4540 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4544 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4545 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4546 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4547 TREE_OPERAND (exp, 2), 0)
4548 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4549 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4551 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4552 temp = gen_reg_rtx (mode);
4553 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4554 jumpifnot (TREE_OPERAND (exp, 0), op0);
4555 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4560 op1 = gen_label_rtx ();
4561 jumpifnot (TREE_OPERAND (exp, 0), op0);
if (temp != 0)
4563 store_expr (TREE_OPERAND (exp, 1), temp, 0);
else
4565 expand_expr (TREE_OPERAND (exp, 1),
4566 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4567 if (cleanups_this_call)
4569 sorry ("aggregate value in COND_EXPR");
4570 cleanups_this_call = 0;
4574 emit_jump_insn (gen_jump (op1));
if (temp != 0)
4578 store_expr (TREE_OPERAND (exp, 2), temp, 0);
else
4580 expand_expr (TREE_OPERAND (exp, 2),
4581 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4584 if (cleanups_this_call)
4586 sorry ("aggregate value in COND_EXPR");
4587 cleanups_this_call = 0;
4593 cleanups_this_call = old_cleanups;
case TARGET_EXPR:
{
4599 /* Something needs to be initialized, but we didn't know
4600 where that thing was when building the tree. For example,
4601 it could be the return value of a function, or a parameter
4602 to a function which lays down in the stack, or a temporary
4603 variable which must be passed by reference.
4605 We guarantee that the expression will either be constructed
4606 or copied into our original target. */
4608 tree slot = TREE_OPERAND (exp, 0);
4610 if (TREE_CODE (slot) != VAR_DECL)
abort ();
4615 if (DECL_RTL (slot) != 0)
4616 target = DECL_RTL (slot);
4619 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4620 /* All temp slots at this level must not conflict. */
4621 preserve_temp_slots (target);
4622 DECL_RTL (slot) = target;
4626 /* Since SLOT is not known to the called function
4627 to belong to its stack frame, we must build an explicit
4628 cleanup. This case occurs when we must build up a reference
4629 to pass the reference as an argument. In this case,
4630 it is very likely that such a reference need not be built here. */
4633 if (TREE_OPERAND (exp, 2) == 0)
4634 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4635 if (TREE_OPERAND (exp, 2))
4636 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4637 cleanups_this_call);
4642 /* This case does occur, when expanding a parameter which
4643 needs to be constructed on the stack. The target
4644 is the actual stack address that we want to initialize.
4645 The function we call will perform the cleanup in this case. */
4647 DECL_RTL (slot) = target;
4650 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
case INIT_EXPR:
{
4655 tree lhs = TREE_OPERAND (exp, 0);
4656 tree rhs = TREE_OPERAND (exp, 1);
4657 tree noncopied_parts = 0;
4658 tree lhs_type = TREE_TYPE (lhs);
4660 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4661 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4662 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4663 TYPE_NONCOPIED_PARTS (lhs_type));
4664 while (noncopied_parts != 0)
{
4666 expand_assignment (TREE_VALUE (noncopied_parts),
4667 TREE_PURPOSE (noncopied_parts), 0, 0);
4668 noncopied_parts = TREE_CHAIN (noncopied_parts);
}
return temp;
4675 /* If lhs is complex, expand calls in rhs before computing it.
4676 That's so we don't compute a pointer and save it over a call.
4677 If lhs is simple, compute it first so we can give it as a
4678 target if the rhs is just a call. This avoids an extra temp and copy
4679 and that prevents a partial-subsumption which makes bad code.
4680 Actually we could treat component_ref's of vars like vars. */
case MODIFY_EXPR:
{
4682 tree lhs = TREE_OPERAND (exp, 0);
4683 tree rhs = TREE_OPERAND (exp, 1);
4684 tree noncopied_parts = 0;
4685 tree lhs_type = TREE_TYPE (lhs);
4689 if (TREE_CODE (lhs) != VAR_DECL
4690 && TREE_CODE (lhs) != RESULT_DECL
4691 && TREE_CODE (lhs) != PARM_DECL)
4692 preexpand_calls (exp);
4694 /* Check for |= or &= of a bitfield of size one into another bitfield
4695 of size 1. In this case, (unless we need the result of the
4696 assignment) we can do this more efficiently with a
4697 test followed by an assignment, if necessary.
4699 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4700 things change so we do, this code should be enhanced to support it. */
4703 && TREE_CODE (lhs) == COMPONENT_REF
4704 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4705 || TREE_CODE (rhs) == BIT_AND_EXPR)
4706 && TREE_OPERAND (rhs, 0) == lhs
4707 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4708 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4709 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4711 rtx label = gen_label_rtx ();
4713 do_jump (TREE_OPERAND (rhs, 1),
4714 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4715 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4716 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4717 (TREE_CODE (rhs) == BIT_IOR_EXPR
? integer_one_node
4719 : integer_zero_node)),
0, 0);
4721 do_pending_stack_adjust ();
emit_label (label);
return const0_rtx;
4726 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4727 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4728 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4729 TYPE_NONCOPIED_PARTS (lhs_type));
4731 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4732 while (noncopied_parts != 0)
{
4734 expand_assignment (TREE_PURPOSE (noncopied_parts),
4735 TREE_VALUE (noncopied_parts), 0, 0);
4736 noncopied_parts = TREE_CHAIN (noncopied_parts);
}
return temp;
4741 case PREINCREMENT_EXPR:
4742 case PREDECREMENT_EXPR:
4743 return expand_increment (exp, 0);
4745 case POSTINCREMENT_EXPR:
4746 case POSTDECREMENT_EXPR:
4747 /* Faster to treat as pre-increment if result is not used. */
4748 return expand_increment (exp, ! ignore);
4751 /* Are we taking the address of a nested function? */
4752 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4753 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4755 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4756 op0 = force_operand (op0, target);
4760 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
4761 (modifier == EXPAND_INITIALIZER
4762 ? modifier : EXPAND_CONST_ADDRESS));
4763 if (GET_CODE (op0) != MEM)
abort ();
4766 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4767 return XEXP (op0, 0);
4768 op0 = force_operand (XEXP (op0, 0), target);
4770 if (flag_force_addr && GET_CODE (op0) != REG)
4771 return force_reg (Pmode, op0);
return op0;
4774 case ENTRY_VALUE_EXPR:
abort ();
default:
4781 return (*lang_expand_expr) (exp, target, tmode, modifier);
4784 /* Here to do an ordinary binary operator, generating an instruction
4785 from the optab already placed in `this_optab'. */
binop:
4787 preexpand_calls (exp);
4788 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
subtarget = 0;
4790 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4791 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4793 temp = expand_binop (mode, this_optab, op0, op1, target,
4794 unsignedp, OPTAB_LIB_WIDEN);
if (temp == 0)
abort ();
return temp;
}
4800 /* Return the alignment in bits of EXP, a pointer valued expression.
4801 But don't return more than MAX_ALIGN no matter what.
4802 The alignment returned is, by default, the alignment of the thing that
4803 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4805 Otherwise, look at the expression to see if we can do better, i.e., if the
4806 expression is actually pointing at an object whose alignment is tighter. */
4809 get_pointer_alignment (exp, max_align)
4813 unsigned align, inner;
4815 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
return 0;
4818 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4819 align = MIN (align, max_align);
4823 switch (TREE_CODE (exp))
4827 case NON_LVALUE_EXPR:
4828 exp = TREE_OPERAND (exp, 0);
4829 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
return align;
4831 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4832 inner = MIN (inner, max_align);
4833 align = MAX (align, inner);
4837 /* If sum of pointer + int, restrict our maximum alignment to that
4838 imposed by the integer. If not, we can't do any better than ALIGN. */
4840 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
return align;
4843 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT) & (max_align - 1)) != 0)
max_align >>= 1;
4848 exp = TREE_OPERAND (exp, 0);
4852 /* See what we are pointing at and look at its alignment. */
4853 exp = TREE_OPERAND (exp, 0);
4854 if (TREE_CODE (exp) == FUNCTION_DECL)
4855 align = MAX (align, FUNCTION_BOUNDARY);
4856 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4857 align = MAX (align, DECL_ALIGN (exp));
4858 #ifdef CONSTANT_ALIGNMENT
4859 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4860 align = CONSTANT_ALIGNMENT (exp, align);
#endif
4862 return MIN (align, max_align);
4870 /* Return the tree node and offset if a given argument corresponds to
4871 a string constant. */
4874 string_constant (arg, ptr_offset)
4880 if (TREE_CODE (arg) == ADDR_EXPR
4881 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4883 *ptr_offset = integer_zero_node;
4884 return TREE_OPERAND (arg, 0);
4886 else if (TREE_CODE (arg) == PLUS_EXPR)
4888 tree arg0 = TREE_OPERAND (arg, 0);
4889 tree arg1 = TREE_OPERAND (arg, 1);
4894 if (TREE_CODE (arg0) == ADDR_EXPR
4895 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4898 return TREE_OPERAND (arg0, 0);
4900 else if (TREE_CODE (arg1) == ADDR_EXPR
4901 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4904 return TREE_OPERAND (arg1, 0);
4911 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4912 way, because it could contain a zero byte in the middle.
4913 TREE_STRING_LENGTH is the size of the character array, not the string.
4915 Unfortunately, string_constant can't access the values of const char
4916 arrays with initializers, so neither can we do so here. */
4926 src = string_constant (src, &offset_node);
4929 max = TREE_STRING_LENGTH (src);
4930 ptr = TREE_STRING_POINTER (src);
4931 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4933 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4934 compute the offset to the following null if we don't know where to
4935 start searching for it. */
4937 for (i = 0; i < max; i++)
4940 /* We don't know the starting offset, but we do know that the string
4941 has no internal zero bytes. We can assume that the offset falls
4942 within the bounds of the string; otherwise, the programmer deserves
4943 what he gets. Subtract the offset from the length of the string, and return that. */
4945 /* This would perhaps not be valid if we were dealing with named
4946 arrays in addition to literal string constants. */
4947 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4950 /* We have a known offset into the string. Start searching there for
4951 a null character. */
4952 if (offset_node == 0)
return size_int (strlen (ptr));
4956 /* Did we get a long long offset? If so, punt. */
4957 if (TREE_INT_CST_HIGH (offset_node) != 0)
return 0;
4959 offset = TREE_INT_CST_LOW (offset_node);
4961 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
4963 if (offset < 0 || offset > max)
{
4965 warning ("offset outside bounds of constant string");
return 0;
}
4968 /* Use strlen to search for the first zero byte. Since any strings
4969 constructed with build_string will have nulls appended, we win even
4970 if we get handed something like (char[4])"abcd".
4972 Since OFFSET is our starting index into the string, no further
4973 calculation is needed. */
4974 return size_int (strlen (ptr + offset));
4977 /* Expand an expression EXP that calls a built-in function,
4978 with result going to TARGET if that's convenient
4979 (and in mode MODE if that's convenient).
4980 SUBTARGET may be used as the target for computing one of EXP's operands.
4981 IGNORE is nonzero if the value is to be ignored. */
4984 expand_builtin (exp, target, subtarget, mode, ignore)
4988 enum machine_mode mode;
4991 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4992 tree arglist = TREE_OPERAND (exp, 1);
4995 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
4997 switch (DECL_FUNCTION_CODE (fndecl))
5002 /* build_function_call changes these into ABS_EXPR. */
5005 case BUILT_IN_FSQRT:
5006 /* If not optimizing, call the library function. */
if (arglist == 0
5011 /* Arg could be wrong type if user redeclared this fcn wrong. */
5012 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5013 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5015 /* Stabilize and compute the argument. */
5016 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5017 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5019 exp = copy_node (exp);
5020 arglist = copy_node (arglist);
5021 TREE_OPERAND (exp, 1) = arglist;
5022 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5024 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5026 /* Make a suitable register to place result in. */
5027 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5032 /* Compute sqrt into TARGET.
5033 Set TARGET to wherever the result comes back. */
5034 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5035 sqrt_optab, op0, target, 0);
5037 /* If we were unable to expand via the builtin, stop the
5038 sequence (without outputting the insns) and break, causing
5039 a call to the library function. */
5046 /* Check the results by default. But if flag_fast_math is turned on,
5047 then assume sqrt will always be called with valid arguments. */
5049 if (! flag_fast_math)
5051 /* Don't define the sqrt instructions
5052 if your machine is not IEEE. */
5053 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5056 lab1 = gen_label_rtx ();
5058 /* Test the result; if it is NaN, set errno=EDOM because
5059 the argument was not in the domain. */
5060 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5061 emit_jump_insn (gen_beq (lab1));
5065 #ifdef GEN_ERRNO_RTX
5066 rtx errno_rtx = GEN_ERRNO_RTX;
#else
5069 rtx errno_rtx = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
#endif
5072 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5075 /* We can't set errno=EDOM directly; let the library call do it.
5076 Pop the arguments right away in case the call gets deleted. */
5078 expand_call (exp, target, 0);
5085 /* Output the entire sequence. */
5086 insns = get_insns ();
5092 case BUILT_IN_SAVEREGS:
5093 /* Don't do __builtin_saveregs more than once in a function.
5094 Save the result of the first call and reuse it. */
5095 if (saveregs_value != 0)
5096 return saveregs_value;
5098 /* When this function is called, it means that registers must be
5099 saved on entry to this function. So we migrate the
5100 call to the first insn of this function. */
5103 rtx valreg, saved_valreg;
5105 /* Now really call the function. `expand_call' does not call
5106 expand_builtin, so there is no danger of infinite recursion here. */
5109 #ifdef EXPAND_BUILTIN_SAVEREGS
5110 /* Do whatever the machine needs done in this case. */
5111 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
5113 /* The register where the function returns its value
5114 is likely to have something else in it, such as an argument.
5115 So preserve that register around the call. */
5116 if (value_mode != VOIDmode)
5118 valreg = hard_libcall_value (value_mode);
5119 saved_valreg = gen_reg_rtx (value_mode);
5120 emit_move_insn (saved_valreg, valreg);
5123 /* Generate the call, putting the value in a pseudo. */
5124 temp = expand_call (exp, target, ignore);
5126 if (value_mode != VOIDmode)
5127 emit_move_insn (valreg, saved_valreg);
#endif
5133 saveregs_value = temp;
5135 /* This won't work inside a SEQUENCE--it really has to be
5136 at the start of the function. */
5137 if (in_sequence_p ())
5139 /* Better to do this than to crash. */
5140 error ("`va_start' used within `({...})'");
5144 /* Put the sequence after the NOTE that starts the function. */
5145 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5149 /* __builtin_args_info (N) returns word N of the arg space info
5150 for the current function. The number and meanings of words
5151 are controlled by the definition of CUMULATIVE_ARGS. */
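/* Illustrative only: on a hypothetical target whose CUMULATIVE_ARGS is a
single int counting argument registers used so far, NWORDS below would be
1 and __builtin_args_info (0) would return that count as a constant. */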
5152 case BUILT_IN_ARGS_INFO:
5154 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5156 int *word_ptr = (int *) &current_function_args_info;
5157 tree type, elts, result;
5159 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5160 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5161 __FILE__, __LINE__);
5165 tree arg = TREE_VALUE (arglist);
5166 if (TREE_CODE (arg) != INTEGER_CST)
5167 error ("argument of __builtin_args_info must be constant");
5170 int wordnum = TREE_INT_CST_LOW (arg);
5172 if (wordnum < 0 || wordnum >= nwords)
5173 error ("argument of __builtin_args_info out of range");
5175 return GEN_INT (word_ptr[wordnum]);
5179 error ("missing argument in __builtin_args_info");
5184 for (i = 0; i < nwords; i++)
5185 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
5187 type = build_array_type (integer_type_node,
5188 build_index_type (build_int_2 (nwords, 0)));
5189 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5190 TREE_CONSTANT (result) = 1;
5191 TREE_STATIC (result) = 1;
5192 result = build (INDIRECT_REF, build_pointer_type (type), result);
5193 TREE_CONSTANT (result) = 1;
5194 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5198 /* Return the address of the first anonymous stack arg. */
5199 case BUILT_IN_NEXT_ARG:
5201 tree fntype = TREE_TYPE (current_function_decl);
5202 if (!(TYPE_ARG_TYPES (fntype) != 0
5203 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5204 != void_type_node)))
5206 error ("`va_start' used in function with fixed args");
5211 return expand_binop (Pmode, add_optab,
5212 current_function_internal_arg_pointer,
5213 current_function_arg_offset_rtx,
5214 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5216 case BUILT_IN_CLASSIFY_TYPE:
5219 tree type = TREE_TYPE (TREE_VALUE (arglist));
5220 enum tree_code code = TREE_CODE (type);
5221 if (code == VOID_TYPE)
5222 return GEN_INT (void_type_class);
5223 if (code == INTEGER_TYPE)
5224 return GEN_INT (integer_type_class);
5225 if (code == CHAR_TYPE)
5226 return GEN_INT (char_type_class);
5227 if (code == ENUMERAL_TYPE)
5228 return GEN_INT (enumeral_type_class);
5229 if (code == BOOLEAN_TYPE)
5230 return GEN_INT (boolean_type_class);
5231 if (code == POINTER_TYPE)
5232 return GEN_INT (pointer_type_class);
5233 if (code == REFERENCE_TYPE)
5234 return GEN_INT (reference_type_class);
5235 if (code == OFFSET_TYPE)
5236 return GEN_INT (offset_type_class);
5237 if (code == REAL_TYPE)
5238 return GEN_INT (real_type_class);
5239 if (code == COMPLEX_TYPE)
5240 return GEN_INT (complex_type_class);
5241 if (code == FUNCTION_TYPE)
5242 return GEN_INT (function_type_class);
5243 if (code == METHOD_TYPE)
5244 return GEN_INT (method_type_class);
5245 if (code == RECORD_TYPE)
5246 return GEN_INT (record_type_class);
5247 if (code == UNION_TYPE)
5248 return GEN_INT (union_type_class);
5249 if (code == ARRAY_TYPE)
5250 return GEN_INT (array_type_class);
5251 if (code == STRING_TYPE)
5252 return GEN_INT (string_type_class);
5253 if (code == SET_TYPE)
5254 return GEN_INT (set_type_class);
5255 if (code == FILE_TYPE)
5256 return GEN_INT (file_type_class);
5257 if (code == LANG_TYPE)
5258 return GEN_INT (lang_type_class);
5260 return GEN_INT (no_type_class);
5262 case BUILT_IN_CONSTANT_P:
5266 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5267 ? const1_rtx : const0_rtx);
5269 case BUILT_IN_FRAME_ADDRESS:
5270 /* The argument must be a nonnegative integer constant.
5271 It counts the number of frames to scan up the stack.
5272 The value is the address of that frame. */
5273 case BUILT_IN_RETURN_ADDRESS:
5274 /* The argument must be a nonnegative integer constant.
5275 It counts the number of frames to scan up the stack.
5276 The value is the return address saved in that frame. */
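/* For example, __builtin_return_address (0) is the return address of the
current function; each additional level follows the dynamic chain one
frame further up the stack. */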
if (arglist == 0)
5278 /* Warning about missing arg was already issued. */
return const0_rtx;
5280 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
{
5282 error ("invalid arg to __builtin_return_address");
return const0_rtx;
}
5285 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
{
5287 error ("invalid arg to __builtin_return_address");
return const0_rtx;
}
5292 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5293 rtx tem = frame_pointer_rtx;
5296 /* Scan back COUNT frames to the specified frame. */
5297 for (i = 0; i < count; i++)
5299 /* Assume the dynamic chain pointer is in the word that
5300 the frame address points to, unless otherwise specified. */
5301 #ifdef DYNAMIC_CHAIN_ADDRESS
5302 tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
5304 tem = memory_address (Pmode, tem);
5305 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5308 /* For __builtin_frame_address, return what we've got. */
5309 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5312 /* For __builtin_return_address,
5313 get the return address from that frame. */
5314 #ifdef RETURN_ADDR_RTX
5315 return RETURN_ADDR_RTX (count, tem);
#else
5317 tem = memory_address (Pmode,
5318 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5319 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
#endif
5323 case BUILT_IN_ALLOCA:
if (arglist == 0
5325 /* Arg could be non-integer if user redeclared this fcn wrong. */
5326 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
break;
5328 current_function_calls_alloca = 1;
5329 /* Compute the argument. */
5330 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5332 /* Allocate the desired space. */
5333 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5335 /* Record the new stack level for nonlocal gotos. */
5336 if (nonlocal_goto_handler_slot != 0)
5337 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
return target;
case BUILT_IN_FFS:
5341 /* If not optimizing, call the library function. */
if (arglist == 0
5346 /* Arg could be non-integer if user redeclared this fcn wrong. */
5347 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
break;
5350 /* Compute the argument. */
5351 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5352 /* Compute ffs, into TARGET if possible.
5353 Set TARGET to wherever the result comes back. */
5354 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5355 ffs_optab, op0, target, 1);
return target;
5360 case BUILT_IN_STRLEN:
5361 /* If not optimizing, call the library function. */
if (arglist == 0
5366 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5367 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
break;
5371 tree src = TREE_VALUE (arglist);
5372 tree len = c_strlen (src);
5375 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5377 rtx result, src_rtx, char_rtx;
5378 enum machine_mode insn_mode = value_mode, char_mode;
5379 enum insn_code icode;
5381 /* If the length is known, just return it. */
if (len != 0)
5383 return expand_expr (len, target, mode, 0);
5385 /* If SRC is not a pointer type, don't do this operation inline. */
if (align == 0)
break;
5389 /* Call a function if we can't compute strlen in the right mode. */
5391 while (insn_mode != VOIDmode)
5393 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5394 if (icode != CODE_FOR_nothing)
break;
5397 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5399 if (insn_mode == VOIDmode)
break;
5402 /* Make a place to write the result of the instruction. */
result = target;
if (! (result != 0
5405 && GET_CODE (result) == REG
5406 && GET_MODE (result) == insn_mode
5407 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5408 result = gen_reg_rtx (insn_mode);
5410 /* Make sure the operands are acceptable to the predicates. */
5412 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5413 result = gen_reg_rtx (insn_mode);
5415 src_rtx = memory_address (BLKmode,
5416 expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL));
5418 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5419 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5421 char_rtx = const0_rtx;
5422 char_mode = insn_operand_mode[(int)icode][2];
5423 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5424 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5426 emit_insn (GEN_FCN (icode) (result,
5427 gen_rtx (MEM, BLKmode, src_rtx),
5428 char_rtx, GEN_INT (align)));
5430 /* Return the value in the proper mode for this function. */
5431 if (GET_MODE (result) == value_mode)
return result;
5433 else if (target != 0)
{
5435 convert_move (target, result, 0);
return target;
}
else
5439 return convert_to_mode (value_mode, result, 0);
5442 case BUILT_IN_STRCPY:
5443 /* If not optimizing, call the library function. */
if (arglist == 0
5448 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5449 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5450 || TREE_CHAIN (arglist) == 0
5451 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
break;
5455 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5460 len = size_binop (PLUS_EXPR, len, integer_one_node);
5462 chainon (arglist, build_tree_list (NULL_TREE, len));
5466 case BUILT_IN_MEMCPY:
5467 /* If not optimizing, call the library function. */
if (arglist == 0
5472 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5473 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5474 || TREE_CHAIN (arglist) == 0
5475 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5476 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5477 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
break;
5481 tree dest = TREE_VALUE (arglist);
5482 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5483 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5486 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5488 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5491 /* If either SRC or DEST is not a pointer type, don't do
5492 this operation in-line. */
5493 if (src_align == 0 || dest_align == 0)
{
5495 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5496 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
break;
}
5500 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5502 /* Copy word part most expediently. */
5503 emit_block_move (gen_rtx (MEM, BLKmode,
5504 memory_address (BLKmode, dest_rtx)),
5505 gen_rtx (MEM, BLKmode,
5506 memory_address (BLKmode,
5507 expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL))),
5510 expand_expr (len, NULL_RTX, VOIDmode, 0),
5511 MIN (src_align, dest_align));
return dest_rtx;
5515 /* These comparison functions need an instruction that returns an actual
5516 index. An ordinary compare that just sets the condition codes is not enough. */
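/* cmpstrsi produces a negative, zero, or positive value, which is the
form of result strcmp and memcmp must return; a plain condition-code
compare would need extra code to materialize such a value. */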
5518 #ifdef HAVE_cmpstrsi
5519 case BUILT_IN_STRCMP:
5520 /* If not optimizing, call the library function. */
if (arglist == 0
5525 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5526 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5527 || TREE_CHAIN (arglist) == 0
5528 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
break;
5530 else if (!HAVE_cmpstrsi)
break;
5533 tree arg1 = TREE_VALUE (arglist);
5534 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5538 len = c_strlen (arg1);
if (len)
5540 len = size_binop (PLUS_EXPR, integer_one_node, len);
5541 len2 = c_strlen (arg2);
if (len2)
5543 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5545 /* If we don't have a constant length for the first, use the length
5546 of the second, if we know it. We don't require a constant for
5547 this case; some cost analysis could be done if both are available
5548 but neither is constant. For now, assume they're equally cheap.
5550 If both strings have constant lengths, use the smaller. This
5551 could arise if optimization results in strcpy being called with
5552 two fixed strings, or if the code was machine-generated. We should
5553 add some code to the `memcmp' handler below to deal with such
5554 situations, someday. */
5555 if (!len || TREE_CODE (len) != INTEGER_CST)
{
len = len2;
if (len == 0)
break;
}
5562 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5564 if (tree_int_cst_lt (len2, len))
len = len2;
5568 chainon (arglist, build_tree_list (NULL_TREE, len));
5572 case BUILT_IN_MEMCMP:
5573 /* If not optimizing, call the library function. */
if (arglist == 0
5578 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5579 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5580 || TREE_CHAIN (arglist) == 0
5581 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5582 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5583 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
break;
5585 else if (!HAVE_cmpstrsi)
break;
5588 tree arg1 = TREE_VALUE (arglist);
5589 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5590 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5594 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5596 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5597 enum machine_mode insn_mode
5598 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5600 /* If we don't have POINTER_TYPE, call the function. */
5601 if (arg1_align == 0 || arg2_align == 0)
{
5603 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5604 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
break;
}
5608 /* Make a place to write the result of the instruction. */
result = target;
if (! (result != 0
5611 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5612 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5613 result = gen_reg_rtx (insn_mode);
5615 emit_insn (gen_cmpstrsi (result,
5616 gen_rtx (MEM, BLKmode,
5617 expand_expr (arg1, NULL_RTX, Pmode, EXPAND_NORMAL)),
5619 gen_rtx (MEM, BLKmode,
5620 expand_expr (arg2, NULL_RTX, Pmode, EXPAND_NORMAL)),
5622 expand_expr (len, NULL_RTX, VOIDmode, 0),
5623 GEN_INT (MIN (arg1_align, arg2_align))));
5625 /* Return the value in the proper mode for this function. */
5626 mode = TYPE_MODE (TREE_TYPE (exp));
5627 if (GET_MODE (result) == mode)
return result;
5629 else if (target != 0)
{
5631 convert_move (target, result, 0);
return target;
}
else
5635 return convert_to_mode (mode, result, 0);
#else
5638 case BUILT_IN_STRCMP:
5639 case BUILT_IN_MEMCMP:
break;
#endif
5643 default: /* just do library call, if unknown builtin */
5644 error ("built-in function %s not currently supported",
5645 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5648 /* The switch statement above can drop through to cause the function
5649 to be called normally. */
5651 return expand_call (exp, target, ignore);
5654 /* Expand code for a post- or pre- increment or decrement
5655 and return the RTX for the result.
5656 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
5659 expand_increment (exp, post)
5663 register rtx op0, op1;
5664 register rtx temp, value;
5665 register tree incremented = TREE_OPERAND (exp, 0);
5666 optab this_optab = add_optab;
5668 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5669 int op0_is_copy = 0;
5671 /* Stabilize any component ref that might need to be
5672 evaluated more than once below. */
5673 if (TREE_CODE (incremented) == BIT_FIELD_REF
5674 || (TREE_CODE (incremented) == COMPONENT_REF
5675 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5676 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5677 incremented = stabilize_reference (incremented);
5679 /* Compute the operands as RTX.
5680 Note whether OP0 is the actual lvalue or a copy of it:
5681 I believe it is a copy iff it is a register or subreg
5682 and insns were generated in computing it. */
5683 temp = get_last_insn ();
5684 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
5685 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5686 && temp != get_last_insn ());
5687 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5689 /* Decide whether incrementing or decrementing. */
5690 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5691 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5692 this_optab = sub_optab;
5694 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5695 then we cannot just increment OP0. We must
5696 therefore contrive to increment the original value.
5697 Then we can return OP0 since it is a copy of the old value. */
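/* E.g., for a post-increment of a bit-field, OP0 holds a copy of the old
value in a register; the assignment built below stores the incremented
value back, and the copy is handed back as the result. */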
5700 /* This is the easiest way to increment the value wherever it is.
5701 Problems with multiple evaluation of INCREMENTED
5702 are prevented because either (1) it is a component_ref,
5703 in which case it was stabilized above, or (2) it is an array_ref
5704 with constant index in an array in a register, which is
5705 safe to reevaluate. */
5706 tree newexp = build ((this_optab == add_optab
5707 ? PLUS_EXPR : MINUS_EXPR),
5710 TREE_OPERAND (exp, 1));
5711 temp = expand_assignment (incremented, newexp, ! post, 0);
5712 return post ? op0 : temp;
5715 /* Convert decrement by a constant into a negative increment. */
5716 if (this_optab == sub_optab
5717 && GET_CODE (op1) == CONST_INT)
5719 op1 = GEN_INT (- INTVAL (op1));
5720 this_optab = add_optab;
5725 /* We have a true reference to the value in OP0.
5726 If there is an insn to add or subtract in this mode, queue it. */
5728 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
5729 op0 = stabilize (op0);
#endif
5732 icode = (int) this_optab->handlers[(int) mode].insn_code;
5733 if (icode != (int) CODE_FOR_nothing
5734 /* Make sure that OP0 is valid for operands 0 and 1
5735 of the insn we want to queue. */
5736 && (*insn_operand_predicate[icode][0]) (op0, mode)
5737 && (*insn_operand_predicate[icode][1]) (op0, mode))
5739 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5740 op1 = force_reg (mode, op1);
5742 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5746 /* Preincrement, or we can't increment with one simple insn. */
5748 /* Save a copy of the value before inc or dec, to return it later. */
5749 temp = value = copy_to_reg (op0);
5751 /* Arrange to return the incremented value. */
5752 /* Copy the rtx because expand_binop will protect from the queue,
5753 and the results of that would be invalid for us to return
5754 if our caller does emit_queue before using our result. */
5755 temp = copy_rtx (value = op0);
5757 /* Increment however we can. */
5758 op1 = expand_binop (mode, this_optab, value, op1, op0,
5759 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5760 /* Make sure the value is stored into OP0. */
5762 emit_move_insn (op0, op1);
5767 /* Expand all function calls contained within EXP, innermost ones first.
5768 But don't look within expressions that have sequence points.
5769 For each CALL_EXPR, record the rtx for its value
5770 in the CALL_EXPR_RTL field. */
5773 preexpand_calls (exp)
5776 register int nops, i;
5777 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5779 if (! do_preexpand_calls)
return;
5782 /* Only expressions and references can contain calls. */
5784 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
return;
5787 switch (TREE_CODE (exp))
{
case CALL_EXPR:
5790 /* Do nothing if already expanded. */
5791 if (CALL_EXPR_RTL (exp) != 0)
return;
5794 /* Do nothing to built-in functions. */
5795 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5796 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5797 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5798 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
return;
5803 case TRUTH_ANDIF_EXPR:
5804 case TRUTH_ORIF_EXPR:
5805 /* If we find one of these, then we can be sure
5806 the adjust will be done for it (since it makes jumps).
5807 Do it now, so that if this is inside an argument
5808 of a function, we don't get the stack adjustment
5809 after some other args have already been pushed. */
5810 do_pending_stack_adjust ();
return;
5815 case WITH_CLEANUP_EXPR:
return;
case SAVE_EXPR:
5819 if (SAVE_EXPR_RTL (exp) != 0)
return;
}
5823 nops = tree_code_length[(int) TREE_CODE (exp)];
5824 for (i = 0; i < nops; i++)
5825 if (TREE_OPERAND (exp, i) != 0)
{
5827 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5828 if (type == 'e' || type == '<' || type == '1' || type == '2' || type == 'r')
5830 preexpand_calls (TREE_OPERAND (exp, i));
}
5834 /* At the start of a function, record that we have no previously-pushed
5835 arguments waiting to be popped. */
5838 init_pending_stack_adjust ()
5840 pending_stack_adjust = 0;
5843 /* When exiting from function, if safe, clear out any pending stack adjust
5844 so the adjustment won't get done. */
5847 clear_pending_stack_adjust ()
5849 #ifdef EXIT_IGNORE_STACK
5850 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5851 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
5852 && ! flag_inline_functions)
5853 pending_stack_adjust = 0;
#endif
5857 /* Pop any previously-pushed arguments that have not been popped yet. */
5860 do_pending_stack_adjust ()
5862 if (inhibit_defer_pop == 0)
{
5864 if (pending_stack_adjust != 0)
5865 adjust_stack (GEN_INT (pending_stack_adjust));
5866 pending_stack_adjust = 0;
}
5870 /* Expand all cleanups up to OLD_CLEANUPS.
5871 Needed here, and also for language-dependent calls. */
5874 expand_cleanups_to (old_cleanups)
5877 while (cleanups_this_call != old_cleanups)
{
5879 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
5880 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
}
5884 /* Expand conditional expressions. */
5886 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5887 LABEL is an rtx of code CODE_LABEL, in this function. */
5891 jumpifnot (exp, label)
5895 do_jump (exp, label, NULL_RTX);
5898 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5905 do_jump (exp, NULL_RTX, label);
5908 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5909 the result is zero, or IF_TRUE_LABEL if the result is one.
5910 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5911 meaning fall through in that case.
5913 do_jump always does any pending stack adjust except when it does not
5914 actually perform a jump. An example where there is no jump
5915 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5917 This function is responsible for optimizing cases such as
5918 &&, || and comparison operators in EXP. */
5921 do_jump (exp, if_false_label, if_true_label)
5923 rtx if_false_label, if_true_label;
5925 register enum tree_code code = TREE_CODE (exp);
5926 /* Some cases need to create a label to jump to
5927 in order to properly fall through.
5928 These cases set DROP_THROUGH_LABEL nonzero. */
5929 rtx drop_through_label = 0;
5943 temp = integer_zerop (exp) ? if_false_label : if_true_label;
if (temp)
emit_jump (temp);
break;
case ADDR_EXPR:
5949 /* This is not true with #pragma weak */
5951 /* The address of something can never be zero. */
if (if_true_label)
5953 emit_jump (if_true_label);
break;
case NOP_EXPR:
5958 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
5959 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
5960 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
goto normal;
case CONVERT_EXPR:
5963 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
5965 if ((TYPE_PRECISION (TREE_TYPE (exp))
5966 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
goto normal;
5968 case NON_LVALUE_EXPR:
5969 case REFERENCE_EXPR:
5974 /* These cannot change zero->non-zero or vice versa. */
5975 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5979 /* This is never less insns than evaluating the PLUS_EXPR followed by
5980 a test and can be longer if the test is eliminated. */
5982 /* Reduce to minus. */
5983 exp = build (MINUS_EXPR, TREE_TYPE (exp),
5984 TREE_OPERAND (exp, 0),
5985 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
5986 TREE_OPERAND (exp, 1))));
5987 /* Process as MINUS. */
5991 /* Non-zero iff operands of minus differ. */
5992 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
5993 TREE_OPERAND (exp, 0),
5994 TREE_OPERAND (exp, 1)),
5999 /* If we are AND'ing with a small constant, do this comparison in the
6000 smallest type that fits. If the machine doesn't have comparisons
6001 that small, it will be converted back to the wider comparison.
6002 This helps if we are testing the sign bit of a narrower object.
6003 combine can't do this for us because it can't know whether a
6004 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
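/* For example, testing (x & 0x80) != 0 on a 32-bit int gives I == 7, so
an 8-bit type fits; if the machine can compare in that narrow mode, the
test becomes a one-byte sign-bit check. */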
6006 if (! SLOW_BYTE_ACCESS
6007 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6008 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6009 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6010 && (type = type_for_size (i + 1, 1)) != 0
6011 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6012 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6013 != CODE_FOR_nothing))
6015 do_jump (convert (type, exp), if_false_label, if_true_label);
break;
6020 case TRUTH_NOT_EXPR:
6021 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
break;
6024 case TRUTH_ANDIF_EXPR:
6025 if (if_false_label == 0)
6026 if_false_label = drop_through_label = gen_label_rtx ();
6027 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6028 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
break;
6031 case TRUTH_ORIF_EXPR:
6032 if (if_true_label == 0)
6033 if_true_label = drop_through_label = gen_label_rtx ();
6034 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6035 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
break;
case COMPOUND_EXPR:
6039 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6042 do_pending_stack_adjust ();
6043 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6050 int bitsize, bitpos, unsignedp;
6051 enum machine_mode mode;
6056 /* Get description of this reference. We don't actually care
6057 about the underlying object here. */
6058 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6059 &mode, &unsignedp, &volatilep);
6061 type = type_for_size (bitsize, unsignedp);
6062 if (! SLOW_BYTE_ACCESS
6063 && type != 0 && bitsize >= 0
6064 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6065 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6066 != CODE_FOR_nothing))
{
6068 do_jump (convert (type, exp), if_false_label, if_true_label);
break;
}
goto normal;
case COND_EXPR:
6075 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6076 if (integer_onep (TREE_OPERAND (exp, 1))
6077 && integer_zerop (TREE_OPERAND (exp, 2)))
6078 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6080 else if (integer_zerop (TREE_OPERAND (exp, 1))
6081 && integer_onep (TREE_OPERAND (exp, 2)))
6082 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
else
{
6086 register rtx label1 = gen_label_rtx ();
6087 drop_through_label = gen_label_rtx ();
6088 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6089 /* Now the THEN-expression. */
6090 do_jump (TREE_OPERAND (exp, 1),
6091 if_false_label ? if_false_label : drop_through_label,
6092 if_true_label ? if_true_label : drop_through_label);
6093 /* In case the do_jump just above never jumps. */
6094 do_pending_stack_adjust ();
6095 emit_label (label1);
6096 /* Now the ELSE-expression. */
6097 do_jump (TREE_OPERAND (exp, 2),
6098 if_false_label ? if_false_label : drop_through_label,
6099 if_true_label ? if_true_label : drop_through_label);
}
break;
case EQ_EXPR:
6104 if (integer_zerop (TREE_OPERAND (exp, 1)))
6105 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6106 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
== MODE_INT) &&
6109 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6110 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
else
6112 comparison = compare (exp, EQ, EQ);
break;
case NE_EXPR:
6116 if (integer_zerop (TREE_OPERAND (exp, 1)))
6117 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6118 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
== MODE_INT) &&
6121 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6122 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
else
6124 comparison = compare (exp, NE, NE);
break;
case LT_EXPR:
6128 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
== MODE_INT)
6130 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6131 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
else
6133 comparison = compare (exp, LT, LTU);
break;
case LE_EXPR:
6137 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
== MODE_INT)
6139 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6140 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
else
6142 comparison = compare (exp, LE, LEU);
break;
case GT_EXPR:
6146 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
== MODE_INT)
6148 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6149 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
else
6151 comparison = compare (exp, GT, GTU);
break;
case GE_EXPR:
6155 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
== MODE_INT)
6157 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6158 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
else
6160 comparison = compare (exp, GE, GEU);
break;
default:
normal:
6165 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6167 /* This is not needed any more and causes poor code since it causes
6168 comparisons and tests from non-SI objects to have different code sequences. */
6170 /* Copy to register to avoid generating bad insns by cse
6171 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6172 if (!cse_not_expected && GET_CODE (temp) == MEM)
6173 temp = copy_to_reg (temp);
6175 do_pending_stack_adjust ();
6176 if (GET_CODE (temp) == CONST_INT)
6177 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6178 else if (GET_CODE (temp) == LABEL_REF)
6179 comparison = const_true_rtx;
6180 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6181 && !can_compare_p (GET_MODE (temp)))
6182 /* Note swapping the labels gives us not-equal. */
6183 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6184 else if (GET_MODE (temp) != VOIDmode)
6185 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6186 NE, 1, GET_MODE (temp), NULL_RTX, 0);
  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
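#if 0
/* Illustrative sketch only, not part of GNU CC: how a statement expander
   typically uses do_jump for `if (cond) then-clause'.  The function name
   is hypothetical.  */

static void
example_expand_if (cond, then_clause)
     tree cond, then_clause;
{
  rtx after_label = gen_label_rtx ();

  /* Branch to AFTER_LABEL when COND is false; fall through when true.  */
  do_jump (cond, after_label, NULL_RTX);
  expand_expr_stmt (then_clause);
  emit_label (after_label);
}
#endif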
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
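#if 0
/* Illustrative sketch only, not part of GNU CC: the word-at-a-time
   `greater than' test above, written at the C level for a two-word
   signed value.  The function name is hypothetical.  */

static int
example_gt_by_parts (hi0, lo0, hi1, lo1)
     long hi0, hi1;			/* high-order words, compared signed */
     unsigned long lo0, lo1;		/* low-order words, compared unsigned */
{
  if (hi0 > hi1)
    return 1;				/* jump to if_true_label */
  if (hi0 != hi1)
    return 0;				/* jump to if_false_label */
  return lo0 > lo1;			/* lower words decide */
}
#endif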
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, 0, word_mode, NULL_RTX, 0);
      /* A word pair known to be unequal decides the whole test;
	 one known to be equal leaves it to the remaining words.  */
      if (comp == const0_rtx)
	emit_jump (if_false_label);
      else if (comp != const_true_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
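#if 0
/* Illustrative sketch only, not part of GNU CC: the multiword equality
   test above at the C level.  The function name is hypothetical.  */

static int
example_eq_by_parts (w0, w1, nwords)
     unsigned long *w0, *w1;
     int nwords;
{
  int i;

  for (i = 0; i < nwords; i++)
    if (w0[i] != w1[i])
      return 0;				/* jump to if_false_label */
  return 1;				/* fall through; values are equal */
}
#endif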
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 0, word_mode, NULL_RTX, 0);
      /* A word known to be nonzero decides the test; a word known to be
	 zero leaves it to the remaining words.  */
      if (comp == const0_rtx)
	emit_jump (if_false_label);
      else if (comp != const_true_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
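/* Illustrative note, not in the original source: do_jump gets a
   `not equal to zero' test out of this function by swapping the labels,
   calling do_jump_by_parts_equality_rtx (temp, if_true_label,
   if_false_label), so that the equality test's failure path becomes
   the not-equal jump.  */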
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = PREV_INSN (get_last_insn ());
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();

      /* Here we get the insn before what was just emitted.
	 On some machines, emitting the branch can discard
	 the previous compare insn and emit a replacement.  */
      if (prev == 0)
	/* If there's only one preceding insn...  */
	insn = get_insns ();
      else
	insn = NEXT_INSN (prev);

      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
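/* Illustrative note, not in the original source: when invert_jump fails,
   the fallback above rewrites

	(branch if COMPARISON) -> IF_FALSE_LABEL

   as

	(branch if COMPARISON) -> NEW_TRUE_LABEL
	(unconditional jump)   -> IF_FALSE_LABEL
     NEW_TRUE_LABEL:

   which preserves the `jump to IF_FALSE_LABEL only when COMPARISON is
   false' meaning without needing an inverted branch insn.  */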
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
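/* Illustrative note, not in the original source: the signedness of the
   operand type selects the comparison, so do_jump's call compare (exp,
   LT, LTU) emits a signed LT test for `int' operands but an unsigned
   LTU test for `unsigned int' operands.  */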
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  /* If one operand is constant, make it the second one.  */

  if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
    return simplify_relational_operation (code, mode, op0, op1);

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
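/* Illustrative note, not in the original source: the constant adjustment
   above is what makes e.g. a signed QImode test `x == -1' valid as an
   unsigned one; -1 & GET_MODE_MASK (QImode) is 255, so the comparison
   becomes `x == 255' with UNSIGNEDP set.  */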
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }
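  /* Illustrative note, not in the original source: e.g. a signed `x < 1'
     is rewritten above as `x <= 0', and a signed `x > -1' as `x >= 0',
     so the zero-comparison special cases below can recognize them.  */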
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
						    NULL_RTX, VOIDmode, 0)));

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), target, 1);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, 1);

      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, target);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
			    OPTAB_LIB_WIDEN);

      return op0;
    }
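  /* Illustrative note, not in the original source: for `(x & 8) != 0'
     the code above computes (x >> 3) & 1, and for `(x & 8) == 0' it
     computes ((x >> 3) & 1) ^ 1; no store-flag insn is needed.  */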
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1, operand_mode,
			    unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
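/* Illustrative note, not in the original source: the set/jump/set
   fallback above emits, for a non-inverted test,

	target = 1;
	(branch if comparison) -> LABEL
	target = 0;
     LABEL:

   so TARGET ends up 1 exactly when the comparison holds.  */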
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */

  index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
	      gen_rtx (MULT, Pmode, index,
		       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
	      gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
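  /* Illustrative note, not in the original source: with a 4-byte
     CASE_VECTOR_MODE the dispatch address computed above is
     TABLE_LABEL + INDEX * 4, i.e. the INDEXth entry of the jump table;
     that entry is loaded into TEMP and jumped through.  */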
#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */