/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
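
/* For example, CEIL (7, 4) == 2 and CEIL (8, 4) == 2: the quotient is
   rounded up, never down.  convert_move below uses this to count the
   words needed to hold a multiword value.  */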
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;
/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi)
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory.  */

  enum machine_mode mode;
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);

  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);

      /* Find a register that can be used in this mode, if any.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (HARD_REGNO_MODE_OK (regno, mode))
          break;

      if (regno == FIRST_PSEUDO_REGISTER)
        continue;

      reg = gen_rtx (REG, mode, regno);

      SET_SRC (pat) = mem;
      SET_DEST (pat) = reg;
      direct_load[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;

      SET_SRC (pat) = reg;
      SET_DEST (pat) = mem;
      direct_store[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;
    }
/* This is run at the start of compiling a function.  */

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */
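
/* For example (an illustrative sketch, not a quotation of any caller):
   code expanding V++ where the old value of V is still wanted could do

     rtx old = enqueue_insn (v, gen_add2_insn (v, const1_rtx));

   and then use OLD, the QUEUED rtx, wherever the pre-increment value
   of V is needed.  */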
static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
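
/* A typical (illustrative) use fetches the operands and emits the insn
   at once, with nothing in between that could flush the queue:

     rtx op0 = protect_from_queue (x, 0);
     rtx op1 = protect_from_queue (y, 0);
     emit_insn (gen_move_insn (op0, op1));  */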
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }

  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;

  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
#ifdef HAVE_extendsfdf2
  if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
    { emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_extendsfxf2
  if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
    { emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_extendsftf2
  if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
    { emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_extenddfxf2
  if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
    { emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_extenddftf2
  if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
    { emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
    { emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
    { emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
    { emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
    { emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
    { emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN); return; }
#endif
      libcall = extendsfdf2_libfunc;
      libcall = extendsfxf2_libfunc;
      libcall = extendsftf2_libfunc;

      libcall = truncdfsf2_libfunc;
      libcall = extenddfxf2_libfunc;
      libcall = extenddftf2_libfunc;

      libcall = truncxfsf2_libfunc;
      libcall = truncxfdf2_libfunc;

      libcall = trunctfsf2_libfunc;
      libcall = trunctfdf2_libfunc;

  if (libcall == (rtx) 0)
    /* This conversion is not implemented yet.  */
    abort ();

  emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
  emit_move_insn (to, hard_libcall_value (to_mode));
  return;
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }
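
      /* For instance, sign-extending an SImode -1 on a 32-bit-word machine:
         the arithmetic shift by 31 yields all ones, so each remaining word
         is filled with 0xffffffff.  Had UNSIGNEDP been nonzero, fill_value
         would be const0_rtx and the high words would become zero.  */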
      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, from));
      return;
    }

  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }

#ifdef HAVE_extendpsisi
      if (HAVE_extendpsisi)
        {
          emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_extendpsisi */
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && direct_load[(int) to_mode]
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        { emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        { emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        { emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        { emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        { emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        { emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  x = protect_from_queue (x, 0);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
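
  /* For example, with HOST_BITS_PER_WIDE_INT == 32, converting the
     CONST_INT -1 to an unsigned 64-bit mode must yield the constant
     0x00000000ffffffff; gen_lowpart would have produced all ones.  */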
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
                      && direct_load[(int) mode]
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to = to;
  data.from = from;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }
  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */
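  /* For instance, with 4-byte words and sufficient alignment, an 11-byte
     copy becomes two SImode moves, then one HImode move, then one QImode
     move.  */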
  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
               ? gen_rtx (MEM, mode, data->from_addr)
               : change_address (data->from, mode,
                                 plus_constant (data->from_addr,
                                                data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
      if (HAVE_movstrqi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
        {
          rtx insn = gen_movstrqi (x, y, size, GEN_INT (align));
          if (insn)
            { emit_insn (insn); return; }
        }
#endif
#ifdef HAVE_movstrhi
      if (HAVE_movstrhi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
        {
          rtx insn = gen_movstrhi (x, y, size, GEN_INT (align));
          if (insn)
            { emit_insn (insn); return; }
        }
#endif
#ifdef HAVE_movstrsi
      if (HAVE_movstrsi)
        {
          rtx insn = gen_movstrsi (x, y, size, GEN_INT (align));
          if (insn)
            { emit_insn (insn); return; }
        }
#endif
#ifdef HAVE_movstrdi
      if (HAVE_movstrdi)
        {
          rtx insn = gen_movstrdi (x, y, size, GEN_INT (align));
          if (insn)
            { emit_insn (insn); return; }
        }
#endif
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 1,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           GEN_INT (nregs));
  if (pat)
    { emit_insn (pat); return; }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            GEN_INT (nregs));
  if (pat)
    { emit_insn (pat); return; }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 1,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 1,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      register int i;

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
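
/* For example, PARTIAL == 3 on a machine with 4-byte words and
   PARM_BOUNDARY == 64: the first three words of X go in registers
   starting at REG, and the stack space pushed for X shrinks by
   12 bytes rounded down to 8.  */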
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
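
      /* Continuing the example above (4-byte words, PARM_BOUNDARY == 64,
         partial == 3): used == 12 bytes, so offset == 12 % 8 == 4; the
         stack part of the argument starts 4 bytes past a parameter
         boundary.  */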
      int skip;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      xinner = change_address (xinner, BLKmode,
                               plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (args_addr == 0)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip
                                                  + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr,
                                                           args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                              INTVAL (size), align);
            }
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
          if (HAVE_movstrqi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
            emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                     xinner, size, GEN_INT (align)));
#endif
#ifdef HAVE_movstrhi
          if (HAVE_movstrhi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
            emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                     xinner, size, GEN_INT (align)));
#endif
#ifdef HAVE_movstrsi
          if (HAVE_movstrsi)
            emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                     xinner, size, GEN_INT (align)));
#endif
#ifdef HAVE_movstrdi
          if (HAVE_movstrdi)
            emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                     xinner, size, GEN_INT (align)));
#endif
#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 1,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0),
                             Pmode,
                             convert_to_mode (Pmode, size, 1), Pmode);
#else
          emit_library_call (bcopy_libfunc, 1,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp,
                             Pmode,
                             convert_to_mode (Pmode, size, 1), Pmode);
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;
      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);
      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr
            = memory_address (mode,
                              plus_constant (args_addr,
                                             INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.  */
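
/* For example, the block-move code above calls

     emit_library_call (memcpy_libfunc, 1, VOIDmode, 3,
                        XEXP (x, 0), Pmode,
                        XEXP (y, 0), Pmode,
                        convert_to_mode (Pmode, size, 1), Pmode);

   that is, three (rtx, machine_mode) pairs following the fixed
   arguments FUN, NO_QUEUE, OUTMODE and NARGS.  */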
void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
               struct args_size offset; struct args_size size; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);

  args_size.constant = 0;
  args_size.var = 0;
  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
        val = convert_to_mode (DFmode, val), mode = DFmode;
#endif

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, NULL_RTX);

      argvec[count].value = val;
      argvec[count].mode = mode;

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
        abort ();
#endif

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
        abort ();
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
        = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, NULL_TREE,
                           argvec[count].reg && argvec[count].partial == 0,
                           NULL_TREE, &args_size, &argvec[count].offset,
                           &argvec[count].size);

      if (argvec[count].size.var)
        abort ();

#ifndef REG_PARM_STACK_SPACE
      if (argvec[count].partial)
        argvec[count].size.constant
          -= argvec[count].partial * UNITS_PER_WORD;
#endif

      if (argvec[count].reg == 0 || argvec[count].partial != 0
#ifdef REG_PARM_STACK_SPACE
          || 1
#endif
          )
        args_size.constant += argvec[count].size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
      /* If this arg is actually passed on the stack, it might be
         clobbering something we already put there (this library call might
         be inside the evaluation of an argument to a function whose call
         requires the stack).  This will only occur when the library call
         has sufficient args to run out of argument registers.  Abort in
         this case; if this ever occurs, code must be added to save and
         restore the arg slot.  */

      if (argvec[count].reg == 0 || argvec[count].partial != 0)
        abort ();
#endif

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
    }
  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

#ifdef STACK_BOUNDARY
  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
                         / STACK_BYTES) * STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
  args_size.constant = MAX (args_size.constant,
                            REG_PARM_STACK_SPACE ((tree) 0));
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;
  args_size.constant = 0;
#endif

#ifndef PUSH_ROUNDING
  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
#endif
#ifdef PUSH_ARGS_REVERSED
  inc = -1;
  argnum = nargs - 1;
#else
  inc = 1;
  argnum = 0;
#endif

  /* Push the args that need to be pushed.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (! (reg != 0 && partial == 0))
        emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
                        argblock, GEN_INT (argvec[count].offset.constant));
    }
#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
#else
  argnum = 0;
#endif

  /* Now load any reg parms into their regs.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);
    }
  /* For version 1.37, try deleting this entirely.  */
  if (! no_queue)
    emit_queue ();

  /* Any regs containing parms remain in use through the call.  */
  start_sequence ();
  for (count = 0; count < nargs; count++)
    if (argvec[count].reg != 0)
      emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));

  use_insns = get_insns ();
  end_sequence ();

  fun = prepare_call_address (fun, NULL_TREE, &use_insns);

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */

  emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
               old_inhibit_defer_pop + 1, use_insns, no_queue);

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, NULL_RTX, VOIDmode, 0);
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
          && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);
      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

          if (GET_CODE (to_rtx) != MEM)
            abort ();
          to_rtx = change_address (to_rtx, VOIDmode,
                                   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
                                            force_reg (Pmode, offset_rtx)));
        }

      if (volatilep)
        {
          if (GET_CODE (to_rtx) == MEM)
            MEM_VOLATILE_P (to_rtx) = 1;
#if 0 /* This was turned off because, when a field is volatile
         in an object which is not volatile, the object may be in a register,
         and then we would abort over here.  */
          else
            abort ();
#endif
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast makes HPUX compiler happy.  */
                             ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
                             : VOIDmode),
                            unsignedp,
                            /* Required alignment of containing datum.  */
                            TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
                            int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();

      return result;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
      rtx size = expr_size (from);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 1,
                         VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                         XEXP (to_rtx, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  return result;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   Returns TARGET or an equivalent value.
   TARGET may contain a QUEUED rtx.

   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.

   If the value stored is a constant, we return the constant.  */

rtx
store_expr (exp, target, suggest_reg)
     register tree exp;
     register rtx target;
     int suggest_reg;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;
      return target;
    }
  else if (suggest_reg && GET_CODE (target) == MEM
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, SUGGEST_REG will be nonzero for it.
       We know expand_expr will not use the target in that case.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
                          GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, it is not safe
       to use as the returned value.  It would access the wrong
       place by the time the queued increment gets output.
       So copy the value through a temporary and use that temp
       as the result.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target), 0);
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
      dont_return_target = 1;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* DO return TARGET if it's a specified hardware register.
         expand_return relies on this.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && CONSTANT_P (temp))
        dont_return_target = 1;
    }
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (temp != target && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.
             The string constant may be shorter than the array.
             So copy just the string's actual length, and clear the rest.  */
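
          /* For example, for `char buf[10] = "abc";' the string data is
             4 bytes (counting the terminating null), so 4 bytes are copied
             and the remaining 6 are cleared.  */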
          rtx size;

          /* Get the size of the data type of the string,
             which is actually the size of the target.  */
          size = expr_size (exp);
          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = fold (build (MIN_EXPR, sizetype,
                               size_binop (CEIL_DIV_EXPR,
                                           TYPE_SIZE (TREE_TYPE (exp)),
                                           size_int (BITS_PER_UNIT)),
                               convert (sizetype,
                                        build_int_2 (TREE_STRING_LENGTH (exp),
                                                     0))));
              rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
                                               VOIDmode, 0);
              rtx label = 0;

              /* Copy that much.  */
              emit_block_move (target, temp, copy_size_rtx,
                               TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2286 /* Figure out how much is left in TARGET
2287 that we have to clear. */
2288 if (GET_CODE (copy_size_rtx) == CONST_INT)
2290 temp = plus_constant (XEXP (target, 0),
2291 TREE_STRING_LENGTH (exp));
2292 size = plus_constant (size,
2293 - TREE_STRING_LENGTH (exp));
2297 enum machine_mode size_mode = Pmode;
2299 temp = force_reg (Pmode, XEXP (target, 0));
2300 temp = expand_binop (size_mode, add_optab, temp,
2301 copy_size_rtx, NULL_RTX, 0,
2304 size = expand_binop (size_mode, sub_optab, size,
2305 copy_size_rtx, NULL_RTX, 0,
2308 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2309 GET_MODE (size), 0, 0);
2310 label = gen_label_rtx ();
2311 emit_jump_insn (gen_blt (label));
2314 if (size != const0_rtx)
2316 #ifdef TARGET_MEM_FUNCTIONS
2317 emit_library_call (memset_libfunc, 1, VOIDmode, 3,
2318 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2319 #else
2320 emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
2321 temp, Pmode, size, Pmode);
2322 #endif
2328 else if (GET_MODE (temp) == BLKmode)
2329 emit_block_move (target, temp, expr_size (exp),
2330 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2332 emit_move_insn (target, temp);
2334 if (dont_return_target)
2339 /* Store the value of constructor EXP into the rtx TARGET.
2340 TARGET is either a REG or a MEM. */
2343 store_constructor (exp, target)
2347 tree type = TREE_TYPE (exp);
2349 /* We know our target cannot conflict, since safe_from_p has been called. */
2351 /* Don't try copying piece by piece into a hard register
2352 since that is vulnerable to being clobbered by EXP.
2353 Instead, construct in a pseudo register and then copy it all. */
2354 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2356 rtx temp = gen_reg_rtx (GET_MODE (target));
2357 store_constructor (exp, temp);
2358 emit_move_insn (target, temp);
2363 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2367 /* Inform later passes that the whole union value is dead. */
2368 if (TREE_CODE (type) == UNION_TYPE)
2369 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2371 /* If we are building a static constructor into a register,
2372 set the initial value as zero so we can fold the value into
2373 a constant. */
2374 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2375 emit_move_insn (target, const0_rtx);
2377 /* If the constructor has fewer fields than the structure,
2378 clear the whole structure first. */
2379 else if (list_length (CONSTRUCTOR_ELTS (exp))
2380 != list_length (TYPE_FIELDS (type)))
2381 clear_storage (target, int_size_in_bytes (type));
2383 /* Inform later passes that the old value is dead. */
2384 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2386 /* Store each element of the constructor into
2387 the corresponding field of TARGET. */
2389 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2391 register tree field = TREE_PURPOSE (elt);
2392 register enum machine_mode mode;
2397 /* Just ignore missing fields.
2398 We cleared the whole structure, above,
2399 if any fields are missing. */
2403 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2404 unsignedp = TREE_UNSIGNED (field);
2405 mode = DECL_MODE (field);
2406 if (DECL_BIT_FIELD (field))
2409 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2410 /* ??? This case remains to be written. */
2411 abort ();
2413 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2415 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2416 /* The alignment of TARGET is
2417 at least what its type requires. */
2419 TYPE_ALIGN (type) / BITS_PER_UNIT,
2420 int_size_in_bytes (type));
2423 else if (TREE_CODE (type) == ARRAY_TYPE)
2427 tree domain = TYPE_DOMAIN (type);
2428 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2429 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2430 tree elttype = TREE_TYPE (type);
2432 /* If the constructor has fewer fields than the structure,
2433 clear the whole structure first. Similarly if this is a
2434 static constructor of a non-BLKmode object. */
2436 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2437 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2438 clear_storage (target, maxelt - minelt + 1);
2440 /* Inform later passes that the old value is dead. */
2441 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2443 /* Store each element of the constructor into
2444 the corresponding element of TARGET, determined
2445 by counting the elements. */
2446 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2448 elt = TREE_CHAIN (elt), i++)
2450 register enum machine_mode mode;
2455 mode = TYPE_MODE (elttype);
2456 bitsize = GET_MODE_BITSIZE (mode);
2457 unsignedp = TREE_UNSIGNED (elttype);
2459 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2461 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2462 /* The alignment of TARGET is
2463 at least what its type requires. */
2465 TYPE_ALIGN (type) / BITS_PER_UNIT,
2466 int_size_in_bytes (type));
2474 /* Store the value of EXP (an expression tree)
2475 into a subfield of TARGET which has mode MODE and occupies
2476 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2477 If MODE is VOIDmode, it means that we are storing into a bit-field.
2479 If VALUE_MODE is VOIDmode, return nothing in particular.
2480 UNSIGNEDP is not used in this case.
2482 Otherwise, return an rtx for the value stored. This rtx
2483 has mode VALUE_MODE if that is convenient to do.
2484 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2486 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2487 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2490 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2491 unsignedp, align, total_size)
2493 int bitsize, bitpos;
2494 enum machine_mode mode;
2496 enum machine_mode value_mode;
2501 HOST_WIDE_INT width_mask = 0;
2503 if (bitsize < HOST_BITS_PER_WIDE_INT)
2504 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2506 /* If we are storing into an unaligned field of an aligned union that is
2507 in a register, we may have the mode of TARGET being an integer mode but
2508 MODE == BLKmode. In that case, get an aligned object whose size and
2509 alignment are the same as TARGET and store TARGET into it (we can avoid
2510 the store if the field being stored is the entire width of TARGET). Then
2511 call ourselves recursively to store the field into a BLKmode version of
2512 that object. Finally, load from the object into TARGET. This is not
2513 very efficient in general, but should only be slightly more expensive
2514 than the otherwise-required unaligned accesses. Perhaps this can be
2515 cleaned up later. */
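/* Sketch of the steps below, for (say) a 2-byte BLKmode field being
   stored into a union held in an SImode register:
   object = an SImode stack slot;
   blk_object = the same slot, viewed in BLKmode;
   object <- TARGET (skipped when the field fills all of TARGET);
   store the field into blk_object (the recursive call);
   TARGET <- object. */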
2517 if (mode == BLKmode
2518 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2520 rtx object = assign_stack_temp (GET_MODE (target),
2521 GET_MODE_SIZE (GET_MODE (target)), 0);
2522 rtx blk_object = copy_rtx (object);
2524 PUT_MODE (blk_object, BLKmode);
2526 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2527 emit_move_insn (object, target);
2529 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2530 align, total_size);
2532 emit_move_insn (target, object);
2537 /* If the structure is in a register or if the component
2538 is a bit field, we cannot use addressing to access it.
2539 Use bit-field techniques or SUBREG to store in it. */
2541 if (mode == VOIDmode
2542 || (mode != BLKmode && ! direct_store[(int) mode])
2543 || GET_CODE (target) == REG
2544 || GET_CODE (target) == SUBREG)
2546 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2547 /* Store the value in the bitfield. */
2548 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2549 if (value_mode != VOIDmode)
2551 /* The caller wants an rtx for the value. */
2552 /* If possible, avoid refetching from the bitfield itself. */
2553 if (width_mask != 0
2554 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2555 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2556 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2557 NULL_RTX, value_mode, 0, align,
2564 rtx addr = XEXP (target, 0);
2567 /* If a value is wanted, it must be the lhs;
2568 so make the address stable for multiple use. */
2570 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2571 && ! CONSTANT_ADDRESS_P (addr)
2572 /* A frame-pointer reference is already stable. */
2573 && ! (GET_CODE (addr) == PLUS
2574 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2575 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2576 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2577 addr = copy_to_reg (addr);
2579 /* Now build a reference to just the desired component. */
2581 to_rtx = change_address (target, mode,
2582 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2583 MEM_IN_STRUCT_P (to_rtx) = 1;
2585 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2589 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2590 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2591 ARRAY_REFs at constant positions and find the ultimate containing object,
2592 which we return.
2594 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2595 bit position, and *PUNSIGNEDP to the signedness of the field.
2596 If the position of the field is variable, we store a tree
2597 giving the variable offset (in units) in *POFFSET.
2598 This offset is in addition to the bit position.
2599 If the position is not variable, we store 0 in *POFFSET.
2601 If any of the extraction expressions is volatile,
2602 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2604 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2605 is a mode that can be used to access the field. In that case, *PBITSIZE
2606 is redundant.
2608 If the field describes a variable-sized object, *PMODE is set to
2609 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2610 this case, but the address of the object can be found. */
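/* Example: for a COMPONENT_REF S.F where F is a bit-field of 5 bits
   starting at bit 3, this returns the tree for S and sets
   *PBITSIZE = 5, *PBITPOS = 3 and *POFFSET = 0; since 3 is not a
   multiple of 5, the search for a direct-access mode below fails and
   *PMODE is left as VOIDmode. */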
2613 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2618 enum machine_mode *pmode;
2623 enum machine_mode mode = VOIDmode;
2626 if (TREE_CODE (exp) == COMPONENT_REF)
2628 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2629 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2630 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2631 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2633 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2635 size_tree = TREE_OPERAND (exp, 1);
2636 *punsignedp = TREE_UNSIGNED (exp);
2640 mode = TYPE_MODE (TREE_TYPE (exp));
2641 *pbitsize = GET_MODE_BITSIZE (mode);
2642 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2647 if (TREE_CODE (size_tree) != INTEGER_CST)
2648 mode = BLKmode, *pbitsize = -1;
2649 else
2650 *pbitsize = TREE_INT_CST_LOW (size_tree);
2653 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2654 and find the ultimate containing object. */
2660 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2662 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2663 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2664 : TREE_OPERAND (exp, 2));
2666 if (TREE_CODE (pos) == PLUS_EXPR)
2669 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2671 constant = TREE_OPERAND (pos, 0);
2672 var = TREE_OPERAND (pos, 1);
2674 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2676 constant = TREE_OPERAND (pos, 1);
2677 var = TREE_OPERAND (pos, 0);
2681 *pbitpos += TREE_INT_CST_LOW (constant);
2683 offset = size_binop (PLUS_EXPR, offset,
2684 size_binop (FLOOR_DIV_EXPR, var,
2685 size_int (BITS_PER_UNIT)));
2687 offset = size_binop (FLOOR_DIV_EXPR, var,
2688 size_int (BITS_PER_UNIT));
2690 else if (TREE_CODE (pos) == INTEGER_CST)
2691 *pbitpos += TREE_INT_CST_LOW (pos);
2694 /* Assume here that the offset is a multiple of a unit.
2695 If not, there should be an explicitly added constant. */
2697 offset = size_binop (PLUS_EXPR, offset,
2698 size_binop (FLOOR_DIV_EXPR, pos,
2699 size_int (BITS_PER_UNIT)));
2701 offset = size_binop (FLOOR_DIV_EXPR, pos,
2702 size_int (BITS_PER_UNIT));
2706 else if (TREE_CODE (exp) == ARRAY_REF
2707 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2708 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2710 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2711 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2713 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2714 && ! ((TREE_CODE (exp) == NOP_EXPR
2715 || TREE_CODE (exp) == CONVERT_EXPR)
2716 && (TYPE_MODE (TREE_TYPE (exp))
2717 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2720 /* If any reference in the chain is volatile, the effect is volatile. */
2721 if (TREE_THIS_VOLATILE (exp))
2723 exp = TREE_OPERAND (exp, 0);
2726 /* If this was a bit-field, see if there is a mode that allows direct
2727 access in case EXP is in memory. */
2728 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2730 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2731 if (mode == BLKmode)
2738 /* We aren't finished fixing the callers to really handle nonzero offset. */
2746 /* Given an rtx VALUE that may contain additions and multiplications,
2747 return an equivalent value that just refers to a register or memory.
2748 This is done by generating instructions to perform the arithmetic
2749 and returning a pseudo-register containing the value. */
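/* Example: given VALUE = (plus:SI (mult:SI (reg 60) (const_int 4))
   (reg 61)), as expand_expr can return under EXPAND_SUM (the pseudo
   register numbers are arbitrary), force_operand emits a multiply and
   an add and returns a pseudo register holding the final sum. */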
2752 force_operand (value, target)
2755 register optab binoptab = 0;
2756 /* Use a temporary to force order of execution of calls to
2757 `force_operand'. */
2758 rtx tmp;
2759 register rtx op2;
2760 /* Use subtarget as the target for operand 0 of a binary operation. */
2761 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2763 if (GET_CODE (value) == PLUS)
2764 binoptab = add_optab;
2765 else if (GET_CODE (value) == MINUS)
2766 binoptab = sub_optab;
2767 else if (GET_CODE (value) == MULT)
2769 op2 = XEXP (value, 1);
2770 if (!CONSTANT_P (op2)
2771 && !(GET_CODE (op2) == REG && op2 != subtarget))
2772 subtarget = 0;
2773 tmp = force_operand (XEXP (value, 0), subtarget);
2774 return expand_mult (GET_MODE (value), tmp,
2775 force_operand (op2, NULL_RTX),
2781 op2 = XEXP (value, 1);
2782 if (!CONSTANT_P (op2)
2783 && !(GET_CODE (op2) == REG && op2 != subtarget))
2784 subtarget = 0;
2785 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2787 binoptab = add_optab;
2788 op2 = negate_rtx (GET_MODE (value), op2);
2791 /* Check for an addition with OP2 a constant integer and our first
2792 operand a PLUS of a virtual register and something else. In that
2793 case, we want to emit the sum of the virtual register and the
2794 constant first and then add the other value. This allows virtual
2795 register instantiation to simply modify the constant rather than
2796 creating another one around this addition. */
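/* Example: for (plus (plus (reg virtual-stack-vars) (reg 66))
   (const_int 8)), we first compute virtual-stack-vars + 8, which
   instantiation can rewrite as a plain frame-pointer offset, and only
   then add in (reg 66). */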
2797 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2798 && GET_CODE (XEXP (value, 0)) == PLUS
2799 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2800 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2801 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2803 rtx temp = expand_binop (GET_MODE (value), binoptab,
2804 XEXP (XEXP (value, 0), 0), op2,
2805 subtarget, 0, OPTAB_LIB_WIDEN);
2806 return expand_binop (GET_MODE (value), binoptab, temp,
2807 force_operand (XEXP (XEXP (value, 0), 1), 0),
2808 target, 0, OPTAB_LIB_WIDEN);
2811 tmp = force_operand (XEXP (value, 0), subtarget);
2812 return expand_binop (GET_MODE (value), binoptab, tmp,
2813 force_operand (op2, NULL_RTX),
2814 target, 0, OPTAB_LIB_WIDEN);
2815 /* We give UNSIGNEDP = 0 to expand_binop
2816 because the only operations we are expanding here are signed ones. */
2821 /* Subroutine of expand_expr:
2822 save the non-copied parts (LIST) of an expr (LHS), and return a list
2823 which can restore these values to their previous values,
2824 should something modify their storage. */
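/* (Concretely, each part is copied into a fresh stack temporary here;
   the returned list pairs the COMPONENT_REF for the part with an
   RTL_EXPR wrapping that temporary, so a caller can later store the
   saved value back into the part if its storage gets clobbered.) */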
2827 save_noncopied_parts (lhs, list)
2834 for (tail = list; tail; tail = TREE_CHAIN (tail))
2835 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2836 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2839 tree part = TREE_VALUE (tail);
2840 tree part_type = TREE_TYPE (part);
2841 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2842 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2843 int_size_in_bytes (part_type), 0);
2844 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2845 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2846 parts = tree_cons (to_be_saved,
2847 build (RTL_EXPR, part_type, NULL_TREE,
2850 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2855 /* Subroutine of expand_expr:
2856 record the non-copied parts (LIST) of an expr (LHS), and return a list
2857 which specifies the initial values of these parts. */
2860 init_noncopied_parts (lhs, list)
2867 for (tail = list; tail; tail = TREE_CHAIN (tail))
2868 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2869 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2872 tree part = TREE_VALUE (tail);
2873 tree part_type = TREE_TYPE (part);
2874 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2875 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2880 /* Subroutine of expand_expr: return nonzero iff there is no way that
2881 EXP can reference X, which is being modified. */
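/* Example: before expanding the right-hand side of X = Y + Z directly
   into X's rtx, callers check safe_from_p (DECL_RTL of X, the
   PLUS_EXPR); if the sum mentioned X itself, computing it in place
   could clobber an operand before it had been read. */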
2884 safe_from_p (x, exp)
2894 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
2895 find the underlying pseudo. */
2896 if (GET_CODE (x) == SUBREG)
2899 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2903 /* If X is a location in the outgoing argument area, it is always safe. */
2904 if (GET_CODE (x) == MEM
2905 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2906 || (GET_CODE (XEXP (x, 0)) == PLUS
2907 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2910 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2913 exp_rtl = DECL_RTL (exp);
2920 if (TREE_CODE (exp) == TREE_LIST)
2921 return ((TREE_VALUE (exp) == 0
2922 || safe_from_p (x, TREE_VALUE (exp)))
2923 && (TREE_CHAIN (exp) == 0
2924 || safe_from_p (x, TREE_CHAIN (exp))));
2929 return safe_from_p (x, TREE_OPERAND (exp, 0));
2933 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2934 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2938 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2939 the expression. If it is set, we conflict iff we are that rtx or
2940 both are in memory. Otherwise, we check all operands of the
2941 expression recursively. */
2943 switch (TREE_CODE (exp))
2946 return staticp (TREE_OPERAND (exp, 0));
2949 if (GET_CODE (x) == MEM)
2954 exp_rtl = CALL_EXPR_RTL (exp);
2957 /* Assume that the call will clobber all hard registers and
2958 all of memory. */
2959 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2960 || GET_CODE (x) == MEM)
2967 exp_rtl = RTL_EXPR_RTL (exp);
2969 /* We don't know what this can modify. */
2974 case WITH_CLEANUP_EXPR:
2975 exp_rtl = RTL_EXPR_RTL (exp);
2979 exp_rtl = SAVE_EXPR_RTL (exp);
2983 /* The only operand we look at is operand 1. The rest aren't
2984 part of the expression. */
2985 return safe_from_p (x, TREE_OPERAND (exp, 1));
2987 case METHOD_CALL_EXPR:
2988 /* This takes a rtx argument, but shouldn't appear here. */
2992 /* If we have an rtx, we do not need to scan our operands. */
2996 nops = tree_code_length[(int) TREE_CODE (exp)];
2997 for (i = 0; i < nops; i++)
2998 if (TREE_OPERAND (exp, i) != 0
2999 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3003 /* If we have an rtl, find any enclosed object. Then see if we conflict
3004 with it. */
3007 if (GET_CODE (exp_rtl) == SUBREG)
3009 exp_rtl = SUBREG_REG (exp_rtl);
3010 if (GET_CODE (exp_rtl) == REG
3011 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3015 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3016 are memory and EXP is not readonly. */
3017 return ! (rtx_equal_p (x, exp_rtl)
3018 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3019 && ! TREE_READONLY (exp)));
3022 /* If we reach here, it is safe. */
3026 /* Subroutine of expand_expr: return nonzero iff EXP is an
3027 expression whose type is statically determinable. */
3033 if (TREE_CODE (exp) == PARM_DECL
3034 || TREE_CODE (exp) == VAR_DECL
3035 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3036 || TREE_CODE (exp) == COMPONENT_REF
3037 || TREE_CODE (exp) == ARRAY_REF)
3042 /* expand_expr: generate code for computing expression EXP.
3043 An rtx for the computed value is returned. The value is never null.
3044 In the case of a void EXP, const0_rtx is returned.
3046 The value may be stored in TARGET if TARGET is nonzero.
3047 TARGET is just a suggestion; callers must assume that
3048 the rtx returned may not be the same as TARGET.
3050 If TARGET is CONST0_RTX, it means that the value will be ignored.
3052 If TMODE is not VOIDmode, it suggests generating the
3053 result in mode TMODE. But this is done only when convenient.
3054 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3055 TMODE is just a suggestion; callers must assume that
3056 the rtx returned may not have mode TMODE.
3058 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3059 with a constant address even if that address is not normally legitimate.
3060 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3062 If MODIFIER is EXPAND_SUM then when EXP is an addition
3063 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3064 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3065 products as above, or REG or MEM, or constant.
3066 Ordinarily in such cases we would output mul or add instructions
3067 and then return a pseudo reg containing the sum.
3069 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3070 it also marks a label as absolutely required (it can't be dead).
3071 This is used for outputting expressions used in initializers. */
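/* Example of the EXPAND_SUM convention: expanding the address
   computation for A[I] with EXPAND_SUM may yield something like
   (plus (reg 65) (mult (reg 66) (const_int 4))) (register numbers
   arbitrary), which the caller can hand to memory_address and thereby
   use an indexed addressing mode instead of first forcing the sum
   into a register. */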
3074 expand_expr (exp, target, tmode, modifier)
3077 enum machine_mode tmode;
3078 enum expand_modifier modifier;
3080 register rtx op0, op1, temp;
3081 tree type = TREE_TYPE (exp);
3082 int unsignedp = TREE_UNSIGNED (type);
3083 register enum machine_mode mode = TYPE_MODE (type);
3084 register enum tree_code code = TREE_CODE (exp);
3086 /* Use subtarget as the target for operand 0 of a binary operation. */
3087 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3088 rtx original_target = target;
3089 int ignore = target == const0_rtx;
3092 /* Don't use hard regs as subtargets, because the combiner
3093 can only handle pseudo regs. */
3094 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3096 /* Avoid subtargets inside loops,
3097 since they hide some invariant expressions. */
3098 if (preserve_subexpressions_p ())
3101 if (ignore) target = 0, original_target = 0;
3103 /* If will do cse, generate all results into pseudo registers
3104 since 1) that allows cse to find more things
3105 and 2) otherwise cse could produce an insn the machine
3106 cannot support. */
3108 if (! cse_not_expected && mode != BLKmode && target
3109 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3110 target = subtarget;
3112 /* Ensure we reference a volatile object even if value is ignored. */
3113 if (ignore && TREE_THIS_VOLATILE (exp)
3114 && mode != VOIDmode && mode != BLKmode)
3116 target = gen_reg_rtx (mode);
3117 temp = expand_expr (exp, target, VOIDmode, modifier);
3118 if (temp != target)
3119 emit_move_insn (target, temp);
3127 tree function = decl_function_context (exp);
3128 /* Handle using a label in a containing function. */
3129 if (function != current_function_decl && function != 0)
3131 struct function *p = find_function_data (function);
3132 /* Allocate in the memory associated with the function
3133 that the label is in. */
3134 push_obstacks (p->function_obstack,
3135 p->function_maybepermanent_obstack);
3137 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3138 label_rtx (exp), p->forced_labels);
3141 else if (modifier == EXPAND_INITIALIZER)
3142 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3143 label_rtx (exp), forced_labels);
3144 return gen_rtx (MEM, FUNCTION_MODE,
3145 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3149 if (DECL_RTL (exp) == 0)
3151 error_with_decl (exp, "prior parameter's size depends on `%s'");
3152 return CONST0_RTX (mode);
3158 if (DECL_RTL (exp) == 0)
3160 /* Ensure variable marked as used
3161 even if it doesn't go through a parser. */
3162 TREE_USED (exp) = 1;
3163 /* Handle variables inherited from containing functions. */
3164 context = decl_function_context (exp);
3166 /* We treat inline_function_decl as an alias for the current function
3167 because that is the inline function whose vars, types, etc.
3168 are being merged into the current function.
3169 See expand_inline_function. */
3170 if (context != 0 && context != current_function_decl
3171 && context != inline_function_decl
3172 /* If var is static, we don't need a static chain to access it. */
3173 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3174 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3178 /* Mark as non-local and addressable. */
3179 TREE_NONLOCAL (exp) = 1;
3180 mark_addressable (exp);
3181 if (GET_CODE (DECL_RTL (exp)) != MEM)
3183 addr = XEXP (DECL_RTL (exp), 0);
3184 if (GET_CODE (addr) == MEM)
3185 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3187 addr = fix_lexical_addr (addr, exp);
3188 return change_address (DECL_RTL (exp), mode, addr);
3191 /* This is the case of an array whose size is to be determined
3192 from its initializer, while the initializer is still being parsed.
3194 if (GET_CODE (DECL_RTL (exp)) == MEM
3195 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3196 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3197 XEXP (DECL_RTL (exp), 0));
3198 if (GET_CODE (DECL_RTL (exp)) == MEM
3199 && modifier != EXPAND_CONST_ADDRESS
3200 && modifier != EXPAND_SUM
3201 && modifier != EXPAND_INITIALIZER)
3203 /* DECL_RTL probably contains a constant address.
3204 On RISC machines where a constant address isn't valid,
3205 make some insns to get that address into a register. */
3206 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3207 || (flag_force_addr
3208 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3209 return change_address (DECL_RTL (exp), VOIDmode,
3210 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3212 return DECL_RTL (exp);
3215 return immed_double_const (TREE_INT_CST_LOW (exp),
3216 TREE_INT_CST_HIGH (exp),
3220 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3223 /* If optimized, generate immediate CONST_DOUBLE
3224 which will be turned into memory by reload if necessary.
3226 We used to force a register so that loop.c could see it. But
3227 this does not allow gen_* patterns to perform optimizations with
3228 the constants. It also produces two insns in cases like "x = 1.0;".
3229 On most machines, floating-point constants are not permitted in
3230 many insns, so we'd end up copying it to a register in any case.
3232 Now, we do the copying in expand_binop, if appropriate. */
3233 return immed_real_const (exp);
3237 if (! TREE_CST_RTL (exp))
3238 output_constant_def (exp);
3240 /* TREE_CST_RTL probably contains a constant address.
3241 On RISC machines where a constant address isn't valid,
3242 make some insns to get that address into a register. */
3243 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3244 && modifier != EXPAND_CONST_ADDRESS
3245 && modifier != EXPAND_INITIALIZER
3246 && modifier != EXPAND_SUM
3247 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3248 return change_address (TREE_CST_RTL (exp), VOIDmode,
3249 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3250 return TREE_CST_RTL (exp);
3253 context = decl_function_context (exp);
3254 /* We treat inline_function_decl as an alias for the current function
3255 because that is the inline function whose vars, types, etc.
3256 are being merged into the current function.
3257 See expand_inline_function. */
3258 if (context == current_function_decl || context == inline_function_decl)
3261 /* If this is non-local, handle it. */
3264 temp = SAVE_EXPR_RTL (exp);
3265 if (temp && GET_CODE (temp) == REG)
3267 put_var_into_stack (exp);
3268 temp = SAVE_EXPR_RTL (exp);
3270 if (temp == 0 || GET_CODE (temp) != MEM)
3272 return change_address (temp, mode,
3273 fix_lexical_addr (XEXP (temp, 0), exp));
3275 if (SAVE_EXPR_RTL (exp) == 0)
3277 if (mode == BLKmode)
3278 temp
3279 = assign_stack_temp (mode,
3280 int_size_in_bytes (TREE_TYPE (exp)), 0);
3281 else
3282 temp = gen_reg_rtx (mode);
3283 SAVE_EXPR_RTL (exp) = temp;
3284 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3285 if (!optimize && GET_CODE (temp) == REG)
3286 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3289 return SAVE_EXPR_RTL (exp);
3292 /* Exit the current loop if the body-expression is true. */
3294 rtx label = gen_label_rtx ();
3295 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3296 expand_exit_loop (NULL_PTR);
3302 expand_start_loop (1);
3303 expand_expr_stmt (TREE_OPERAND (exp, 0));
3310 tree vars = TREE_OPERAND (exp, 0);
3311 int vars_need_expansion = 0;
3313 /* Need to open a binding contour here because
3314 if there are any cleanups they must be contained here. */
3315 expand_start_bindings (0);
3317 /* Mark the corresponding BLOCK for output. */
3318 if (TREE_OPERAND (exp, 2) != 0)
3319 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3321 /* If VARS have not yet been expanded, expand them now. */
3324 if (DECL_RTL (vars) == 0)
3326 vars_need_expansion = 1;
3329 expand_decl_init (vars);
3330 vars = TREE_CHAIN (vars);
3333 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3335 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3341 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3342 abort ();
3343 emit_insns (RTL_EXPR_SEQUENCE (exp));
3344 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3345 return RTL_EXPR_RTL (exp);
3348 /* All elts simple constants => refer to a constant in memory. But
3349 if this is a non-BLKmode mode, let it store a field at a time
3350 since that should make a CONST_INT or CONST_DOUBLE when we
3352 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3354 rtx constructor = output_constant_def (exp);
3355 if (modifier != EXPAND_CONST_ADDRESS
3356 && modifier != EXPAND_INITIALIZER
3357 && modifier != EXPAND_SUM
3358 && !memory_address_p (GET_MODE (constructor),
3359 XEXP (constructor, 0)))
3360 constructor = change_address (constructor, VOIDmode,
3361 XEXP (constructor, 0));
3368 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3369 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3374 if (target == 0 || ! safe_from_p (target, exp))
3376 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3377 target = gen_reg_rtx (mode);
3380 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3382 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3383 target = safe_target;
3386 store_constructor (exp, target);
3392 tree exp1 = TREE_OPERAND (exp, 0);
3395 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3396 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3397 This code has the same general effect as simply doing
3398 expand_expr on the save expr, except that the expression PTR
3399 is computed for use as a memory address. This means different
3400 code, suitable for indexing, may be generated. */
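/* Example: for *p += 1 the front end builds, in effect,
   *SAVE_EXPR (p) = *SAVE_EXPR (p) + 1; expanding the SAVE_EXPR here
   through memory_address lets both references share one address that
   is already in a form valid for indexing. */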
3401 if (TREE_CODE (exp1) == SAVE_EXPR
3402 && SAVE_EXPR_RTL (exp1) == 0
3403 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3404 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3405 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3407 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3408 VOIDmode, EXPAND_SUM);
3409 op0 = memory_address (mode, temp);
3410 op0 = copy_all_regs (op0);
3411 SAVE_EXPR_RTL (exp1) = op0;
3415 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3416 op0 = memory_address (mode, op0);
3419 temp = gen_rtx (MEM, mode, op0);
3420 /* If address was computed by addition,
3421 mark this as an element of an aggregate. */
3422 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3423 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3424 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3425 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3426 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3427 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3428 || (TREE_CODE (exp1) == ADDR_EXPR
3429 && (exp2 = TREE_OPERAND (exp1, 0))
3430 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3431 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3432 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3433 MEM_IN_STRUCT_P (temp) = 1;
3434 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3435 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3436 a location is accessed through a pointer to const does not mean
3437 that the value there can never change. */
3438 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3444 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3445 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3447 /* Nonconstant array index or nonconstant element size.
3448 Generate the tree for *(&array+index) and expand that,
3449 except do it in a language-independent way
3450 and don't complain about non-lvalue arrays.
3451 `mark_addressable' should already have been called
3452 for any array for which this case will be reached. */
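/* Example: with int a[n], the reference a[i] is rewritten below as
   *(&a + i * sizeof (int)), built as trees, and expanded that way. */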
3454 /* Don't forget the const or volatile flag from the array element. */
3455 tree variant_type = build_type_variant (type,
3456 TREE_READONLY (exp),
3457 TREE_THIS_VOLATILE (exp));
3458 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3459 TREE_OPERAND (exp, 0));
3460 tree index = TREE_OPERAND (exp, 1);
3463 /* Convert the integer argument to a type the same size as a pointer
3464 so the multiply won't overflow spuriously. */
3465 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3466 index = convert (type_for_size (POINTER_SIZE, 0), index);
3468 /* Don't think the address has side effects
3469 just because the array does.
3470 (In some cases the address might have side effects,
3471 and we fail to record that fact here. However, it should not
3472 matter, since expand_expr should not care.) */
3473 TREE_SIDE_EFFECTS (array_adr) = 0;
3475 elt = build1 (INDIRECT_REF, type,
3476 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3478 fold (build (MULT_EXPR,
3479 TYPE_POINTER_TO (variant_type),
3480 index, size_in_bytes (type))))));
3482 /* Volatility, etc., of new expression is same as old expression. */
3483 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3484 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3485 TREE_READONLY (elt) = TREE_READONLY (exp);
3487 return expand_expr (elt, target, tmode, modifier);
3490 /* Fold an expression like: "foo"[2].
3491 This is not done in fold so it won't happen inside &. */
3494 tree arg0 = TREE_OPERAND (exp, 0);
3495 tree arg1 = TREE_OPERAND (exp, 1);
3497 if (TREE_CODE (arg0) == STRING_CST
3498 && TREE_CODE (arg1) == INTEGER_CST
3499 && !TREE_INT_CST_HIGH (arg1)
3500 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3502 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3504 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3505 TREE_TYPE (exp) = integer_type_node;
3506 return expand_expr (exp, target, tmode, modifier);
3508 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3510 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3511 TREE_TYPE (exp) = integer_type_node;
3512 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3517 /* If this is a constant index into a constant array,
3518 just get the value from the array. Handle both the cases when
3519 we have an explicit constructor and when our operand is a variable
3520 that was declared const. */
3522 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3523 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3525 tree index = fold (TREE_OPERAND (exp, 1));
3526 if (TREE_CODE (index) == INTEGER_CST
3527 && TREE_INT_CST_HIGH (index) == 0)
3529 int i = TREE_INT_CST_LOW (index);
3530 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3532 while (elem && i--)
3533 elem = TREE_CHAIN (elem);
3534 if (elem)
3535 return expand_expr (fold (TREE_VALUE (elem)), target,
3540 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3541 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3542 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3543 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3544 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3546 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3549 tree index = fold (TREE_OPERAND (exp, 1));
3550 if (TREE_CODE (index) == INTEGER_CST
3551 && TREE_INT_CST_HIGH (index) == 0)
3553 int i = TREE_INT_CST_LOW (index);
3554 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3556 if (TREE_CODE (init) == CONSTRUCTOR)
3558 tree elem = CONSTRUCTOR_ELTS (init);
3560 while (elem && i--)
3561 elem = TREE_CHAIN (elem);
3562 if (elem)
3563 return expand_expr (fold (TREE_VALUE (elem)), target,
3566 else if (TREE_CODE (init) == STRING_CST
3567 && i < TREE_STRING_LENGTH (init))
3569 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3570 return convert_to_mode (mode, temp, 0);
3574 /* Treat array-ref with constant index as a component-ref. */
3578 /* If the operand is a CONSTRUCTOR, we can just extract the
3579 appropriate field if it is present. */
3580 if (code != ARRAY_REF
3581 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3585 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3586 elt = TREE_CHAIN (elt))
3587 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3588 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3592 enum machine_mode mode1;
3597 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3598 &mode1, &unsignedp, &volatilep);
3600 /* In some cases, we will be offsetting OP0's address by a constant.
3601 So get it as a sum, if possible. If we will be using it
3602 directly in an insn, we validate it. */
3603 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3605 /* If this is a constant, put it into a register if it is a
3606 legitimate constant and memory if it isn't. */
3607 if (CONSTANT_P (op0))
3609 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3610 if (LEGITIMATE_CONSTANT_P (op0))
3611 op0 = force_reg (mode, op0);
3613 op0 = validize_mem (force_const_mem (mode, op0));
3618 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3620 if (GET_CODE (op0) != MEM)
3622 op0 = change_address (op0, VOIDmode,
3623 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3624 force_reg (Pmode, offset_rtx)));
3627 /* Don't forget about volatility even if this is a bitfield. */
3628 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3630 op0 = copy_rtx (op0);
3631 MEM_VOLATILE_P (op0) = 1;
3634 if (mode1 == VOIDmode
3635 || (mode1 != BLKmode && ! direct_load[(int) mode1])
3636 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3638 /* In cases where an aligned union has an unaligned object
3639 as a field, we might be extracting a BLKmode value from
3640 an integer-mode (e.g., SImode) object. Handle this case
3641 by doing the extract into an object as wide as the field
3642 (which we know to be the width of a basic mode), then
3643 storing into memory, and changing the mode to BLKmode. */
3644 enum machine_mode ext_mode = mode;
3646 if (ext_mode == BLKmode)
3647 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3649 if (ext_mode == BLKmode)
3652 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3653 unsignedp, target, ext_mode, ext_mode,
3654 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3655 int_size_in_bytes (TREE_TYPE (tem)));
3656 if (mode == BLKmode)
3658 rtx new = assign_stack_temp (ext_mode,
3659 bitsize / BITS_PER_UNIT, 0);
3661 emit_move_insn (new, op0);
3662 op0 = copy_rtx (new);
3663 PUT_MODE (op0, BLKmode);
3669 /* Get a reference to just this component. */
3670 if (modifier == EXPAND_CONST_ADDRESS
3671 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3672 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3673 (bitpos / BITS_PER_UNIT)));
3674 else
3675 op0 = change_address (op0, mode1,
3676 plus_constant (XEXP (op0, 0),
3677 (bitpos / BITS_PER_UNIT)));
3678 MEM_IN_STRUCT_P (op0) = 1;
3679 MEM_VOLATILE_P (op0) |= volatilep;
3680 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3681 return op0;
3683 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3684 convert_move (target, op0, unsignedp);
3690 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3691 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3692 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3693 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3694 MEM_IN_STRUCT_P (temp) = 1;
3695 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3696 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3697 a location is accessed through a pointer to const does not mean
3698 that the value there can never change. */
3699 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3704 /* Intended for a reference to a buffer of a file-object in Pascal.
3705 But it's not certain that a special tree code will really be
3706 necessary for these. INDIRECT_REF might work for them. */
3710 case WITH_CLEANUP_EXPR:
3711 if (RTL_EXPR_RTL (exp) == 0)
3714 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3716 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3717 /* That's it for this cleanup. */
3718 TREE_OPERAND (exp, 2) = 0;
3720 return RTL_EXPR_RTL (exp);
3723 /* Check for a built-in function. */
3724 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3725 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3726 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3727 return expand_builtin (exp, target, subtarget, tmode, ignore);
3728 /* If this call was expanded already by preexpand_calls,
3729 just return the result we got. */
3730 if (CALL_EXPR_RTL (exp) != 0)
3731 return CALL_EXPR_RTL (exp);
3732 return expand_call (exp, target, ignore);
3734 case NON_LVALUE_EXPR:
3735 case NOP_EXPR:
3736 case CONVERT_EXPR:
3737 case REFERENCE_EXPR:
3738 if (TREE_CODE (type) == VOID_TYPE || ignore)
3740 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3743 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3744 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3745 if (TREE_CODE (type) == UNION_TYPE)
3747 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3750 if (mode == BLKmode)
3752 if (TYPE_SIZE (type) == 0
3753 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3754 abort ();
3755 target = assign_stack_temp (BLKmode,
3756 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3757 + BITS_PER_UNIT - 1)
3758 / BITS_PER_UNIT, 0);
3760 else
3761 target = gen_reg_rtx (mode);
3763 if (GET_CODE (target) == MEM)
3764 /* Store data into beginning of memory target. */
3765 store_expr (TREE_OPERAND (exp, 0),
3766 change_address (target, TYPE_MODE (valtype), 0),
3768 else if (GET_CODE (target) == REG)
3769 /* Store this field into a union of the proper type. */
3770 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3771 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3773 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3777 /* Return the entire union. */
3780 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
3781 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3782 return op0;
3783 if (flag_force_mem && GET_CODE (op0) == MEM)
3784 op0 = copy_to_reg (op0);
3786 if (target == 0)
3787 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3788 else
3789 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3793 /* We come here from MINUS_EXPR when the second operand is a constant. */
3795 this_optab = add_optab;
3797 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3798 something else, make sure we add the register to the constant and
3799 then to the other thing. This case can occur during strength
3800 reduction and doing it this way will produce better code if the
3801 frame pointer or argument pointer is eliminated.
3803 fold-const.c will ensure that the constant is always in the inner
3804 PLUS_EXPR, so the only case we need to do anything about is if
3805 sp, ap, or fp is our second argument, in which case we must swap
3806 the innermost first argument and our second argument. */
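/* Example: (A + const) + fp arrives here with (A + const) as operand 0
   and fp as operand 1; the swap below turns it into (fp + const) + A,
   so eliminating fp only has to adjust the constant. */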
3808 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3809 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3810 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3811 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3812 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3813 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3815 tree t = TREE_OPERAND (exp, 1);
3817 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3818 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3821 /* If the result is to be Pmode and we are adding an integer to
3822 something, we might be forming a constant. So try to use
3823 plus_constant. If it produces a sum and we can't accept it,
3824 use force_operand. This allows P = &ARR[const] to generate
3825 efficient code on machines where a SYMBOL_REF is not a valid
3826 index.
3828 If this is an EXPAND_SUM call, always return the sum. */
3829 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3830 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3831 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3834 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3836 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3837 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3838 op1 = force_operand (op1, target);
3842 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3843 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3844 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3847 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3849 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3850 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3851 op0 = force_operand (op0, target);
3855 /* No sense saving up arithmetic to be done
3856 if it's all in the wrong mode to form part of an address.
3857 And force_operand won't know whether to sign-extend or
3858 zero-extend. */
3859 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3860 || mode != Pmode) goto binop;
3862 preexpand_calls (exp);
3863 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3866 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3867 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
3869 /* Make sure any term that's a sum with a constant comes last. */
3870 if (GET_CODE (op0) == PLUS
3871 && CONSTANT_P (XEXP (op0, 1)))
3877 /* If adding to a sum including a constant,
3878 associate it to put the constant outside. */
3879 if (GET_CODE (op1) == PLUS
3880 && CONSTANT_P (XEXP (op1, 1)))
3882 rtx constant_term = const0_rtx;
3884 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3887 /* Ensure that MULT comes first if there is one. */
3888 else if (GET_CODE (op0) == MULT)
3889 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3891 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3893 /* Let's also eliminate constants from op0 if possible. */
3894 op0 = eliminate_constant_term (op0, &constant_term);
3896 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3897 their sum should be a constant. Form it into OP1, since the
3898 result we want will then be OP0 + OP1. */
3900 temp = simplify_binary_operation (PLUS, mode, constant_term,
3905 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3908 /* Put a constant term last and put a multiplication first. */
3909 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3910 temp = op1, op1 = op0, op0 = temp;
3912 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3913 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3916 /* Handle difference of two symbolic constants,
3917 for the sake of an initializer. */
3918 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3919 && really_constant_p (TREE_OPERAND (exp, 0))
3920 && really_constant_p (TREE_OPERAND (exp, 1)))
3922 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
3923 VOIDmode, modifier);
3924 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
3925 VOIDmode, modifier);
3926 return gen_rtx (MINUS, mode, op0, op1);
3928 /* Convert A - const to A + (-const). */
3929 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3931 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3932 fold (build1 (NEGATE_EXPR, type,
3933 TREE_OPERAND (exp, 1))));
3936 this_optab = sub_optab;
3940 preexpand_calls (exp);
3941 /* If first operand is constant, swap them.
3942 Thus the following special case checks need only
3943 check the second operand. */
3944 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3946 register tree t1 = TREE_OPERAND (exp, 0);
3947 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3948 TREE_OPERAND (exp, 1) = t1;
3951 /* Attempt to return something suitable for generating an
3952 indexed address, for machines that support that. */
3954 if (modifier == EXPAND_SUM && mode == Pmode
3955 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3956 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3958 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3960 /* Apply distributive law if OP0 is x+c. */
3961 if (GET_CODE (op0) == PLUS
3962 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
3963 return gen_rtx (PLUS, mode,
3964 gen_rtx (MULT, mode, XEXP (op0, 0),
3965 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
3966 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3967 * INTVAL (XEXP (op0, 1))));
3969 if (GET_CODE (op0) != REG)
3970 op0 = force_operand (op0, NULL_RTX);
3971 if (GET_CODE (op0) != REG)
3972 op0 = copy_to_mode_reg (mode, op0);
3974 return gen_rtx (MULT, mode, op0,
3975 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
3978 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3981 /* Check for multiplying things that have been extended
3982 from a narrower type. If this machine supports multiplying
3983 in that narrower type with a result in the desired type,
3984 do it that way, and avoid the explicit type-conversion. */
3985 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
3986 && TREE_CODE (type) == INTEGER_TYPE
3987 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3988 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
3989 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3990 && int_fits_type_p (TREE_OPERAND (exp, 1),
3991 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3992 /* Don't use a widening multiply if a shift will do. */
3993 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
3994 > HOST_BITS_PER_WIDE_INT)
3995 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
3997 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
3998 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4000 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4001 /* If both operands are extended, they must either both
4002 be zero-extended or both be sign-extended. */
4003 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4005 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4007 enum machine_mode innermode
4008 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4009 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4010 ? umul_widen_optab : smul_widen_optab);
4011 if (mode == GET_MODE_WIDER_MODE (innermode)
4012 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4014 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4015 NULL_RTX, VOIDmode, 0);
4016 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4017 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4020 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4021 NULL_RTX, VOIDmode, 0);
4025 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4026 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4027 return expand_mult (mode, op0, op1, target, unsignedp);
4029 case TRUNC_DIV_EXPR:
4030 case FLOOR_DIV_EXPR:
4032 case ROUND_DIV_EXPR:
4033 case EXACT_DIV_EXPR:
4034 preexpand_calls (exp);
4035 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4037 /* Possible optimization: compute the dividend with EXPAND_SUM;
4038 then, if the divisor is constant, optimize the case
4039 where some terms of the dividend have coefficients divisible by it. */
4040 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4041 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4042 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4045 this_optab = flodiv_optab;
4048 case TRUNC_MOD_EXPR:
4049 case FLOOR_MOD_EXPR:
4051 case ROUND_MOD_EXPR:
4052 preexpand_calls (exp);
4053 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4055 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4056 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4057 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4059 case FIX_ROUND_EXPR:
4060 case FIX_FLOOR_EXPR:
4062 abort (); /* Not used for C. */
4064 case FIX_TRUNC_EXPR:
4065 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4066 if (target == 0)
4067 target = gen_reg_rtx (mode);
4068 expand_fix (target, op0, unsignedp);
4072 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4073 if (target == 0)
4074 target = gen_reg_rtx (mode);
4075 /* expand_float can't figure out what to do if FROM has VOIDmode.
4076 So give it the correct mode. With -O, cse will optimize this. */
4077 if (GET_MODE (op0) == VOIDmode)
4078 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4080 expand_float (target, op0,
4081 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4085 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4086 temp = expand_unop (mode, neg_optab, op0, target, 0);
4092 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4094 /* Unsigned abs is simply the operand. Testing here means we don't
4095 risk generating incorrect code below. */
4096 if (TREE_UNSIGNED (type))
4099 /* First try to do it with a special abs instruction. */
4100 temp = expand_unop (mode, abs_optab, op0, target, 0);
4104 /* If this machine has expensive jumps, we can do integer absolute
4105 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4106 where W is the width of MODE. */
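/* Worked example for a 32-bit MODE: let t = x >> 31 (an arithmetic
   shift, so t is 0 when x >= 0 and -1 when x < 0). Then (t ^ x) - t
   is x - 0 = x when x >= 0, and (~x) - (-1) = ~x + 1 = -x when
   x < 0. */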
4108 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4110 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4111 size_int (GET_MODE_BITSIZE (mode) - 1),
4112 NULL_RTX, 0);
4114 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4115 OPTAB_LIB_WIDEN);
4116 if (temp != 0)
4117 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4118 OPTAB_LIB_WIDEN);
4124 /* If that does not win, use conditional jump and negate. */
4125 target = original_target;
4126 temp = gen_label_rtx ();
4127 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4128 || (GET_CODE (target) == REG
4129 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4130 target = gen_reg_rtx (mode);
4131 emit_move_insn (target, op0);
4132 emit_cmp_insn (target,
4133 expand_expr (convert (type, integer_zero_node),
4134 NULL_RTX, VOIDmode, 0),
4135 GE, NULL_RTX, mode, 0, 0);
4137 emit_jump_insn (gen_bge (temp));
4138 op0 = expand_unop (mode, neg_optab, target, target, 0);
4140 emit_move_insn (target, op0);
4147 target = original_target;
4148 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4149 || (GET_CODE (target) == REG
4150 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4151 target = gen_reg_rtx (mode);
4152 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4153 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4155 /* First try to do it with a special MIN or MAX instruction.
4156 If that does not win, use a conditional jump to select the proper
4157 value. */
4158 this_optab = (TREE_UNSIGNED (type)
4159 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4160 : (code == MIN_EXPR ? smin_optab : smax_optab));
4162 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4168 emit_move_insn (target, op0);
4169 op0 = gen_label_rtx ();
4170 if (code == MAX_EXPR)
4171 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4172 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4173 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4175 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4176 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4177 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4178 if (temp == const0_rtx)
4179 emit_move_insn (target, op1);
4180 else if (temp != const_true_rtx)
4182 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4183 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4186 emit_move_insn (target, op1);
4191 /* ??? Can optimize when the operand of this is a bitwise operation,
4192 by using a different bitwise operation. */
4194 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4195 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4201 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4202 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4207 /* ??? Can optimize bitwise operations with one arg constant.
4208 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4209 and (a bitwise1 b) bitwise2 b (etc)
4210 but that is probably not worth while. */
4212 /* BIT_AND_EXPR is for bitwise anding.
4213 TRUTH_AND_EXPR is for anding two boolean values
4214 when we want in all cases to compute both of them.
4215 In general it is fastest to do TRUTH_AND_EXPR by
4216 computing both operands as actual zero-or-1 values
4217 and then bitwise anding. In cases where there cannot
4218 be any side effects, better code would be made by
4219 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4220 but the question is how to recognize those cases. */
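/* For illustration only: a sketch of the distinction.  `truth_and' and
   `truth_andif' are hypothetical names; both operands are assumed to be
   0 or 1 already.  */
#if 0
static int
truth_and (a, b)		/* TRUTH_AND_EXPR: no branches needed */
     int a, b;
{
  return a & b;			/* both operands always evaluated */
}

static int
truth_andif (a, b)		/* TRUTH_ANDIF_EXPR: skips B when A is 0 */
     int a, b;
{
  return a ? b : 0;
}
#endif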
4222 case TRUTH_AND_EXPR:
4224 this_optab = and_optab;
4227 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4230 this_optab = ior_optab;
4234 this_optab = xor_optab;
4241 preexpand_calls (exp);
4242 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4244 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4245 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4248 /* Could determine the answer when only additive constants differ.
4249 Also, the addition of one can be handled by changing the condition. */
4256 preexpand_calls (exp);
4257 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4260 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4261 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4263 && GET_CODE (original_target) == REG
4264 && (GET_MODE (original_target)
4265 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4267 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4268 if (temp != original_target)
4269 temp = copy_to_reg (temp);
4270 op1 = gen_label_rtx ();
4271 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4272 GET_MODE (temp), unsignedp, 0);
4273 emit_jump_insn (gen_beq (op1));
4274 emit_move_insn (temp, const1_rtx);
4278 /* If no set-flag instruction, must generate a conditional
4279 store into a temporary variable. Drop through
4280 and handle this like && and ||. */
4282 case TRUTH_ANDIF_EXPR:
4283 case TRUTH_ORIF_EXPR:
4284 if (target == 0 || ! safe_from_p (target, exp)
4285 /* Make sure we don't have a hard reg (such as function's return
4286 value) live across basic blocks, if not optimizing. */
4287 || (!optimize && GET_CODE (target) == REG
4288 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4289 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4290 emit_clr_insn (target);
4291 op1 = gen_label_rtx ();
4292 jumpifnot (exp, op1);
4293 emit_0_to_1_insn (target);
4297 case TRUTH_NOT_EXPR:
4298 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4299 /* The parser is careful to generate TRUTH_NOT_EXPR
4300 only with operands that are always zero or one. */
4301 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4302 target, 1, OPTAB_LIB_WIDEN);
4308 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4310 return expand_expr (TREE_OPERAND (exp, 1),
4311 (ignore ? const0_rtx : target),
4316 /* Note that COND_EXPRs whose type is a structure or union
4317 are required to be constructed to contain assignments of
4318 a temporary variable, so that we can evaluate them here
4319 for side effect only. If type is void, we must do likewise. */
4321 /* If an arm of the branch requires a cleanup,
4322 only that cleanup is performed. */
4325 tree binary_op = 0, unary_op = 0;
4326 tree old_cleanups = cleanups_this_call;
4327 cleanups_this_call = 0;
4329 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4330 convert it to our mode, if necessary. */
4331 if (integer_onep (TREE_OPERAND (exp, 1))
4332 && integer_zerop (TREE_OPERAND (exp, 2))
4333 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4335 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4336 if (GET_MODE (op0) == mode)
4339 target = gen_reg_rtx (mode);
4340 convert_move (target, op0, unsignedp);
4344 /* If we are not to produce a result, we have no target. Otherwise,
4345 if a target was specified use it; it will not be used as an
4346 intermediate target unless it is safe. If no target, use a temporary. */
4349 if (mode == VOIDmode || ignore)
4351 else if (original_target
4352 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4353 temp = original_target;
4354 else if (mode == BLKmode)
4356 if (TYPE_SIZE (type) == 0
4357 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4359 temp = assign_stack_temp (BLKmode,
4360 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4361 + BITS_PER_UNIT - 1)
4362 / BITS_PER_UNIT, 0);
4365 temp = gen_reg_rtx (mode);
4367 /* Check for X ? A + B : A. If we have this, we can copy
4368 A to the output and conditionally add B. Similarly for unary
4369 operations. Don't do this if X has side-effects because
4370 those side effects might affect A or B and the "?" operation is
4371 a sequence point in ANSI. (We test for side effects later.) */
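/* For illustration only: the transformation at the source level.
   `cond_add' is a hypothetical name.  */
#if 0
static int
cond_add (x, a, b)
     int x, a, b;
{
  int r = a;			/* copy the common operand A to the output */
  if (x)
    r = r + b;			/* conditionally apply the binary op */
  return r;			/* equivalent to x ? a + b : a */
}
#endif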
4373 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4374 && operand_equal_p (TREE_OPERAND (exp, 2),
4375 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4376 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4377 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4378 && operand_equal_p (TREE_OPERAND (exp, 1),
4379 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4380 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4381 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4382 && operand_equal_p (TREE_OPERAND (exp, 2),
4383 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4384 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4385 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4386 && operand_equal_p (TREE_OPERAND (exp, 1),
4387 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4388 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4390 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4391 operation, do this as A + (X != 0). Similarly for other simple
4392 binary operators. */
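/* For illustration only: the store-flag form at the source level.
   `cond_incr' is a hypothetical name.  */
#if 0
static int
cond_incr (x, a)
     int x, a;
{
  /* No branch at all: the comparison result (0 or 1) is added in.  */
  return a + (x != 0);		/* equivalent to x ? a + 1 : a */
}
#endif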
4393 if (singleton && binary_op
4394 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4395 && (TREE_CODE (binary_op) == PLUS_EXPR
4396 || TREE_CODE (binary_op) == MINUS_EXPR
4397 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4398 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4399 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4400 && integer_onep (TREE_OPERAND (binary_op, 1))
4401 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4404 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4405 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4406 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4407 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4410 /* If we had X ? A : A + 1, do this as A + (X == 0).
4412 We have to invert the truth value here and then put it
4413 back later if do_store_flag fails. We cannot simply copy
4414 TREE_OPERAND (exp, 0) to another variable and modify that
4415 because invert_truthvalue can modify the tree pointed to by its argument. */
4417 if (singleton == TREE_OPERAND (exp, 1))
4418 TREE_OPERAND (exp, 0)
4419 = invert_truthvalue (TREE_OPERAND (exp, 0));
4421 result = do_store_flag (TREE_OPERAND (exp, 0),
4422 (safe_from_p (temp, singleton)
4424 mode, BRANCH_COST <= 1);
4428 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4429 return expand_binop (mode, boptab, op1, result, temp,
4430 unsignedp, OPTAB_LIB_WIDEN);
4432 else if (singleton == TREE_OPERAND (exp, 1))
4433 TREE_OPERAND (exp, 0)
4434 = invert_truthvalue (TREE_OPERAND (exp, 0));
4438 op0 = gen_label_rtx ();
4440 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4444 /* If the target conflicts with the other operand of the
4445 binary op, we can't use it. Also, we can't use the target
4446 if it is a hard register, because evaluating the condition
4447 might clobber it. */
4449 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4450 || (GET_CODE (temp) == REG
4451 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4452 temp = gen_reg_rtx (mode);
4453 store_expr (singleton, temp, 0);
4456 expand_expr (singleton,
4457 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4458 if (cleanups_this_call)
4460 sorry ("aggregate value in COND_EXPR");
4461 cleanups_this_call = 0;
4463 if (singleton == TREE_OPERAND (exp, 1))
4464 jumpif (TREE_OPERAND (exp, 0), op0);
4466 jumpifnot (TREE_OPERAND (exp, 0), op0);
4468 if (binary_op && temp == 0)
4469 /* Just touch the other operand. */
4470 expand_expr (TREE_OPERAND (binary_op, 1),
4471 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4473 store_expr (build (TREE_CODE (binary_op), type,
4474 make_tree (type, temp),
4475 TREE_OPERAND (binary_op, 1)),
4478 store_expr (build1 (TREE_CODE (unary_op), type,
4479 make_tree (type, temp)),
4484 /* This is now done in jump.c and is better done there because it
4485 produces shorter register lifetimes. */
4487 /* Check for both possibilities either constants or variables
4488 in registers (but not the same as the target!). If so, can
4490 save branches by assigning one, branching, and assigning the other. */
4491 else if (temp && GET_MODE (temp) != BLKmode
4492 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4493 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4494 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4495 && DECL_RTL (TREE_OPERAND (exp, 1))
4496 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4497 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4498 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4499 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4500 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4501 && DECL_RTL (TREE_OPERAND (exp, 2))
4502 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4503 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4505 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4506 temp = gen_reg_rtx (mode);
4507 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4508 jumpifnot (TREE_OPERAND (exp, 0), op0);
4509 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4513 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4514 comparison operator. If we have one of these cases, set the
4515 output to A, branch on A (cse will merge these two references),
4516 then set the output to FOO. */
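/* For illustration only: the pattern at the source level.  `cond_same'
   is a hypothetical name.  */
#if 0
static int
cond_same (a, foo)
     int a, foo;
{
  int r = a;			/* set the output to A */
  if (! (a != 0))		/* branch on the same A; cse merges the refs */
    r = foo;			/* otherwise set the output to FOO */
  return r;			/* equivalent to a != 0 ? a : foo */
}
#endif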
4518 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4519 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4520 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4521 TREE_OPERAND (exp, 1), 0)
4522 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4523 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4525 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4526 temp = gen_reg_rtx (mode);
4527 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4528 jumpif (TREE_OPERAND (exp, 0), op0);
4529 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4533 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4534 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4535 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4536 TREE_OPERAND (exp, 2), 0)
4537 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4538 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4540 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4541 temp = gen_reg_rtx (mode);
4542 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4543 jumpifnot (TREE_OPERAND (exp, 0), op0);
4544 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4549 op1 = gen_label_rtx ();
4550 jumpifnot (TREE_OPERAND (exp, 0), op0);
4552 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4554 expand_expr (TREE_OPERAND (exp, 1),
4555 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4556 if (cleanups_this_call)
4558 sorry ("aggregate value in COND_EXPR");
4559 cleanups_this_call = 0;
4563 emit_jump_insn (gen_jump (op1));
4567 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4569 expand_expr (TREE_OPERAND (exp, 2),
4570 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4573 if (cleanups_this_call)
4575 sorry ("aggregate value in COND_EXPR");
4576 cleanups_this_call = 0;
4582 cleanups_this_call = old_cleanups;
4588 /* Something needs to be initialized, but we didn't know
4589 where that thing was when building the tree. For example,
4590 it could be the return value of a function, or a parameter
4591 to a function which lays down in the stack, or a temporary
4592 variable which must be passed by reference.
4594 We guarantee that the expression will either be constructed
4595 or copied into our original target. */
4597 tree slot = TREE_OPERAND (exp, 0);
4599 if (TREE_CODE (slot) != VAR_DECL)
4604 if (DECL_RTL (slot) != 0)
4605 target = DECL_RTL (slot);
4608 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4609 /* All temp slots at this level must not conflict. */
4610 preserve_temp_slots (target);
4611 DECL_RTL (slot) = target;
4615 /* Since SLOT is not known to the called function
4616 to belong to its stack frame, we must build an explicit
4617 cleanup. This case occurs when we must build up a reference
4618 to pass the reference as an argument. In this case,
4619 it is very likely that such a reference need not be built here. */
4622 if (TREE_OPERAND (exp, 2) == 0)
4623 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4624 if (TREE_OPERAND (exp, 2))
4625 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4626 cleanups_this_call);
4631 /* This case does occur, when expanding a parameter which
4632 needs to be constructed on the stack. The target
4633 is the actual stack address that we want to initialize.
4634 The function we call will perform the cleanup in this case. */
4636 DECL_RTL (slot) = target;
4639 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4644 tree lhs = TREE_OPERAND (exp, 0);
4645 tree rhs = TREE_OPERAND (exp, 1);
4646 tree noncopied_parts = 0;
4647 tree lhs_type = TREE_TYPE (lhs);
4649 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4650 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4651 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4652 TYPE_NONCOPIED_PARTS (lhs_type));
4653 while (noncopied_parts != 0)
4655 expand_assignment (TREE_VALUE (noncopied_parts),
4656 TREE_PURPOSE (noncopied_parts), 0, 0);
4657 noncopied_parts = TREE_CHAIN (noncopied_parts);
4664 /* If lhs is complex, expand calls in rhs before computing it.
4665 That's so we don't compute a pointer and save it over a call.
4666 If lhs is simple, compute it first so we can give it as a
4667 target if the rhs is just a call. This avoids an extra temp and copy
4668 and that prevents a partial-subsumption which makes bad code.
4669 Actually we could treat component_ref's of vars like vars. */
4671 tree lhs = TREE_OPERAND (exp, 0);
4672 tree rhs = TREE_OPERAND (exp, 1);
4673 tree noncopied_parts = 0;
4674 tree lhs_type = TREE_TYPE (lhs);
4678 if (TREE_CODE (lhs) != VAR_DECL
4679 && TREE_CODE (lhs) != RESULT_DECL
4680 && TREE_CODE (lhs) != PARM_DECL)
4681 preexpand_calls (exp);
4683 /* Check for |= or &= of a bitfield of size one into another bitfield
4684 of size 1. In this case, (unless we need the result of the
4685 assignment) we can do this more efficiently with a
4686 test followed by an assignment, if necessary.
4688 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4689 things change so we do, this code should be enhanced to handle it. */
4692 && TREE_CODE (lhs) == COMPONENT_REF
4693 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4694 || TREE_CODE (rhs) == BIT_AND_EXPR)
4695 && TREE_OPERAND (rhs, 0) == lhs
4696 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4697 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4698 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4700 rtx label = gen_label_rtx ();
4702 do_jump (TREE_OPERAND (rhs, 1),
4703 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4704 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4705 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4706 (TREE_CODE (rhs) == BIT_IOR_EXPR
4708 : integer_zero_node)),
4710 do_pending_stack_adjust ();
4715 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4716 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4717 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4718 TYPE_NONCOPIED_PARTS (lhs_type));
4720 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4721 while (noncopied_parts != 0)
4723 expand_assignment (TREE_PURPOSE (noncopied_parts),
4724 TREE_VALUE (noncopied_parts), 0, 0);
4725 noncopied_parts = TREE_CHAIN (noncopied_parts);
4730 case PREINCREMENT_EXPR:
4731 case PREDECREMENT_EXPR:
4732 return expand_increment (exp, 0);
4734 case POSTINCREMENT_EXPR:
4735 case POSTDECREMENT_EXPR:
4736 /* Faster to treat as pre-increment if result is not used. */
4737 return expand_increment (exp, ! ignore);
4740 /* Are we taking the address of a nested function? */
4741 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4742 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4744 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4745 op0 = force_operand (op0, target);
4749 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
4750 (modifier == EXPAND_INITIALIZER
4751 ? modifier : EXPAND_CONST_ADDRESS));
4752 if (GET_CODE (op0) != MEM)
4755 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4756 return XEXP (op0, 0);
4757 op0 = force_operand (XEXP (op0, 0), target);
4759 if (flag_force_addr && GET_CODE (op0) != REG)
4760 return force_reg (Pmode, op0);
4763 case ENTRY_VALUE_EXPR:
4770 return (*lang_expand_expr) (exp, target, tmode, modifier);
4773 /* Here to do an ordinary binary operator, generating an instruction
4774 from the optab already placed in `this_optab'. */
4776 preexpand_calls (exp);
4777 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4779 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4780 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4782 temp = expand_binop (mode, this_optab, op0, op1, target,
4783 unsignedp, OPTAB_LIB_WIDEN);
4789 /* Return the alignment in bits of EXP, a pointer valued expression.
4790 But don't return more than MAX_ALIGN no matter what.
4791 The alignment returned is, by default, the alignment of the thing that
4792 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4794 Otherwise, look at the expression to see if we can do better, i.e., if the
4795 expression is actually pointing at an object whose alignment is tighter. */
4798 get_pointer_alignment (exp, max_align)
4802 unsigned align, inner;
4804 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4807 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4808 align = MIN (align, max_align);
4812 switch (TREE_CODE (exp))
4816 case NON_LVALUE_EXPR:
4817 exp = TREE_OPERAND (exp, 0);
4818 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4820 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4821 inner = MIN (inner, max_align);
4822 align = MAX (align, inner);
4826 /* If sum of pointer + int, restrict our maximum alignment to that
4827 imposed by the integer. If not, we can't do any better than
4829 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4832 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4837 exp = TREE_OPERAND (exp, 0);
4841 /* See what we are pointing at and look at its alignment. */
4842 exp = TREE_OPERAND (exp, 0);
4843 if (TREE_CODE (exp) == FUNCTION_DECL)
4844 align = MAX (align, FUNCTION_BOUNDARY);
4845 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4846 align = MAX (align, DECL_ALIGN (exp));
4847 #ifdef CONSTANT_ALIGNMENT
4848 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4849 align = CONSTANT_ALIGNMENT (exp, align);
4851 return MIN (align, max_align);
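/* For illustration only: the arithmetic used by the PLUS_EXPR case
   above, isolated as a sketch.  `align_from_offset' is a hypothetical
   name; ALIGN is assumed to be a power of two.  Halving ALIGN until it
   divides the bit offset yields the largest provable alignment of
   pointer + constant.  */
#if 0
static unsigned
align_from_offset (align, offset_bits)
     unsigned align, offset_bits;
{
  while (offset_bits % align != 0)
    align >>= 1;		/* e.g. align 64, offset 32 bits -> 32 */
  return align;
}
#endif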
4859 /* Return the tree node and offset if a given argument corresponds to
4860 a string constant. */
4863 string_constant (arg, ptr_offset)
4869 if (TREE_CODE (arg) == ADDR_EXPR
4870 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4872 *ptr_offset = integer_zero_node;
4873 return TREE_OPERAND (arg, 0);
4875 else if (TREE_CODE (arg) == PLUS_EXPR)
4877 tree arg0 = TREE_OPERAND (arg, 0);
4878 tree arg1 = TREE_OPERAND (arg, 1);
4883 if (TREE_CODE (arg0) == ADDR_EXPR
4884 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4887 return TREE_OPERAND (arg0, 0);
4889 else if (TREE_CODE (arg1) == ADDR_EXPR
4890 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4893 return TREE_OPERAND (arg1, 0);
4900 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4901 way, because it could contain a zero byte in the middle.
4902 TREE_STRING_LENGTH is the size of the character array, not the string.
4904 Unfortunately, string_constant can't access the values of const char
4905 arrays with initializers, so neither can we do so here. */
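/* For illustration only: the distinction in concrete terms.  `demo' is
   a hypothetical name and assumes strlen from <string.h>.  */
#if 0
static int
demo ()
{
  static char s[] = "foo\0bar";
  /* sizeof s == 8, the whole array (what TREE_STRING_LENGTH gives);
     strlen (s) == 3, the distance to the first nul (what we want).  */
  return strlen (s);
}
#endif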
4915 src = string_constant (src, &offset_node);
4918 max = TREE_STRING_LENGTH (src);
4919 ptr = TREE_STRING_POINTER (src);
4920 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4922 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4923 compute the offset to the following null if we don't know where to
4924 start searching for it. */
4926 for (i = 0; i < max; i++)
4929 /* We don't know the starting offset, but we do know that the string
4930 has no internal zero bytes. We can assume that the offset falls
4931 within the bounds of the string; otherwise, the programmer deserves
4932 what he gets. Subtract the offset from the length of the string, and return that. */
4934 /* This would perhaps not be valid if we were dealing with named
4935 arrays in addition to literal string constants. */
4936 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4939 /* We have a known offset into the string. Start searching there for
4940 a null character. */
4941 if (offset_node == 0)
4945 /* Did we get a long long offset? If so, punt. */
4946 if (TREE_INT_CST_HIGH (offset_node) != 0)
4948 offset = TREE_INT_CST_LOW (offset_node);
4950 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
4952 if (offset < 0 || offset > max)
4954 warning ("offset outside bounds of constant string");
4957 /* Use strlen to search for the first zero byte. Since any strings
4958 constructed with build_string will have nulls appended, we win even
4959 if we get handed something like (char[4])"abcd".
4961 Since OFFSET is our starting index into the string, no further
4962 calculation is needed. */
4963 return size_int (strlen (ptr + offset));
4966 /* Expand an expression EXP that calls a built-in function,
4967 with result going to TARGET if that's convenient
4968 (and in mode MODE if that's convenient).
4969 SUBTARGET may be used as the target for computing one of EXP's operands.
4970 IGNORE is nonzero if the value is to be ignored. */
4973 expand_builtin (exp, target, subtarget, mode, ignore)
4977 enum machine_mode mode;
4980 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4981 tree arglist = TREE_OPERAND (exp, 1);
4983 rtx lab1, lab2, insns;
4984 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
4986 switch (DECL_FUNCTION_CODE (fndecl))
4991 /* build_function_call changes these into ABS_EXPR. */
4994 case BUILT_IN_FSQRT:
4995 /* If not optimizing, call the library function. */
5000 /* Arg could be wrong type if user redeclared this fcn wrong. */
5001 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5002 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5004 /* Stabilize and compute the argument. */
5005 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5006 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5008 exp = copy_node (exp);
5009 arglist = copy_node (arglist);
5010 TREE_OPERAND (exp, 1) = arglist;
5011 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5013 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5015 /* Make a suitable register to place result in. */
5016 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5018 /* Test the argument to make sure it is in the proper domain for
5019 the sqrt function. If it is not in the domain, branch to a library call. */
5023 lab1 = gen_label_rtx ();
5024 lab2 = gen_label_rtx ();
5026 /* By default check the arguments. If flag_fast_math is turned on,
5027 then assume sqrt will always be called with valid arguments.
5028 Note changing the test below from "> 0" to ">= 0" would cause
5029 incorrect results when computing sqrt(-0.0). */
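/* For illustration only: the shape of the emitted check.
   `guarded_sqrt' and `inline_sqrt' are hypothetical names; sqrt is
   assumed from <math.h>.  */
#if 0
static double
guarded_sqrt (x)
     double x;
{
  /* A single x > 0 test rejects negatives, NaNs, and both zeros, so
     the open-coded path never sees them; in particular the library,
     not the insn, handles sqrt (-0.0).  */
  if (x > 0)
    return inline_sqrt (x);	/* the open-coded sqrt insn */
  return sqrt (x);		/* library fallback */
}
#endif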
5031 if (! flag_fast_math)
5033 /* By checking op > 0 we are able to catch all of the
5034 IEEE special cases with a single if conditional. */
5035 emit_cmp_insn (op0, CONST0_RTX (GET_MODE (op0)), GT, NULL_RTX,
5036 GET_MODE (op0), 0, 0);
5037 emit_jump_insn (gen_bgt (lab1));
5039 /* The argument was not in the domain; do this via library call.
5040 Pop the arguments right away in case the call gets deleted. */
5042 expand_call (exp, target, 0);
5045 /* Branch around the open-coded version. */
5046 emit_jump_insn (gen_jump (lab2));
5050 /* Arg is in the domain, compute sqrt, into TARGET.
5051 Set TARGET to wherever the result comes back. */
5052 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5053 sqrt_optab, op0, target, 0);
5055 /* If we were unable to expand via the builtin, stop the
5056 sequence (without outputting the insns) and break, causing
5057 a call to the library function. */
5066 /* Output the entire sequence. */
5067 insns = get_insns ();
5073 case BUILT_IN_SAVEREGS:
5074 /* Don't do __builtin_saveregs more than once in a function.
5075 Save the result of the first call and reuse it. */
5076 if (saveregs_value != 0)
5077 return saveregs_value;
5079 /* When this function is called, it means that registers must be
5080 saved on entry to this function. So we migrate the
5081 call to the first insn of this function. */
5084 rtx valreg, saved_valreg;
5086 /* Now really call the function. `expand_call' does not call
5087 expand_builtin, so there is no danger of infinite recursion here. */
5090 #ifdef EXPAND_BUILTIN_SAVEREGS
5091 /* Do whatever the machine needs done in this case. */
5092 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5094 /* The register where the function returns its value
5095 is likely to have something else in it, such as an argument.
5096 So preserve that register around the call. */
5097 if (value_mode != VOIDmode)
5099 valreg = hard_libcall_value (value_mode);
5100 saved_valreg = gen_reg_rtx (value_mode);
5101 emit_move_insn (saved_valreg, valreg);
5104 /* Generate the call, putting the value in a pseudo. */
5105 temp = expand_call (exp, target, ignore);
5107 if (value_mode != VOIDmode)
5108 emit_move_insn (valreg, saved_valreg);
5114 saveregs_value = temp;
5116 /* This won't work inside a SEQUENCE--it really has to be
5117 at the start of the function. */
5118 if (in_sequence_p ())
5120 /* Better to do this than to crash. */
5121 error ("`va_start' used within `({...})'");
5125 /* Put the sequence after the NOTE that starts the function. */
5126 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5130 /* __builtin_args_info (N) returns word N of the arg space info
5131 for the current function. The number and meanings of words
5132 are controlled by the definition of CUMULATIVE_ARGS. */
5133 case BUILT_IN_ARGS_INFO:
5135 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5137 int *word_ptr = (int *) &current_function_args_info;
5138 tree type, elts, result;
5140 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5141 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5142 __FILE__, __LINE__);
5146 tree arg = TREE_VALUE (arglist);
5147 if (TREE_CODE (arg) != INTEGER_CST)
5148 error ("argument of __builtin_args_info must be constant");
5151 int wordnum = TREE_INT_CST_LOW (arg);
5153 if (wordnum < 0 || wordnum >= nwords)
5154 error ("argument of __builtin_args_info out of range");
5156 return GEN_INT (word_ptr[wordnum]);
5160 error ("missing argument in __builtin_args_info");
5165 for (i = 0; i < nwords; i++)
5166 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
5168 type = build_array_type (integer_type_node,
5169 build_index_type (build_int_2 (nwords, 0)));
5170 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5171 TREE_CONSTANT (result) = 1;
5172 TREE_STATIC (result) = 1;
5173 result = build (INDIRECT_REF, build_pointer_type (type), result);
5174 TREE_CONSTANT (result) = 1;
5175 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5179 /* Return the address of the first anonymous stack arg. */
5180 case BUILT_IN_NEXT_ARG:
5182 tree fntype = TREE_TYPE (current_function_decl);
5183 if (!(TYPE_ARG_TYPES (fntype) != 0
5184 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5185 != void_type_node)))
5187 error ("`va_start' used in function with fixed args");
5192 return expand_binop (Pmode, add_optab,
5193 current_function_internal_arg_pointer,
5194 current_function_arg_offset_rtx,
5195 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5197 case BUILT_IN_CLASSIFY_TYPE:
5200 tree type = TREE_TYPE (TREE_VALUE (arglist));
5201 enum tree_code code = TREE_CODE (type);
5202 if (code == VOID_TYPE)
5203 return GEN_INT (void_type_class);
5204 if (code == INTEGER_TYPE)
5205 return GEN_INT (integer_type_class);
5206 if (code == CHAR_TYPE)
5207 return GEN_INT (char_type_class);
5208 if (code == ENUMERAL_TYPE)
5209 return GEN_INT (enumeral_type_class);
5210 if (code == BOOLEAN_TYPE)
5211 return GEN_INT (boolean_type_class);
5212 if (code == POINTER_TYPE)
5213 return GEN_INT (pointer_type_class);
5214 if (code == REFERENCE_TYPE)
5215 return GEN_INT (reference_type_class);
5216 if (code == OFFSET_TYPE)
5217 return GEN_INT (offset_type_class);
5218 if (code == REAL_TYPE)
5219 return GEN_INT (real_type_class);
5220 if (code == COMPLEX_TYPE)
5221 return GEN_INT (complex_type_class);
5222 if (code == FUNCTION_TYPE)
5223 return GEN_INT (function_type_class);
5224 if (code == METHOD_TYPE)
5225 return GEN_INT (method_type_class);
5226 if (code == RECORD_TYPE)
5227 return GEN_INT (record_type_class);
5228 if (code == UNION_TYPE)
5229 return GEN_INT (union_type_class);
5230 if (code == ARRAY_TYPE)
5231 return GEN_INT (array_type_class);
5232 if (code == STRING_TYPE)
5233 return GEN_INT (string_type_class);
5234 if (code == SET_TYPE)
5235 return GEN_INT (set_type_class);
5236 if (code == FILE_TYPE)
5237 return GEN_INT (file_type_class);
5238 if (code == LANG_TYPE)
5239 return GEN_INT (lang_type_class);
5241 return GEN_INT (no_type_class);
5243 case BUILT_IN_CONSTANT_P:
5247 return (TREE_CODE_CLASS (TREE_VALUE (arglist)) == 'c'
5248 ? const1_rtx : const0_rtx);
5250 case BUILT_IN_FRAME_ADDRESS:
5251 /* The argument must be a nonnegative integer constant.
5252 It counts the number of frames to scan up the stack.
5253 The value is the address of that frame. */
5254 case BUILT_IN_RETURN_ADDRESS:
5255 /* The argument must be a nonnegative integer constant.
5256 It counts the number of frames to scan up the stack.
5257 The value is the return address saved in that frame. */
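/* For illustration only: what the expansion computes, in source form.
   `nth_frame' is a hypothetical name; FP is assumed to be this
   function's own frame pointer.  */
#if 0
static char *
nth_frame (fp, count)
     char **fp;
     int count;
{
  /* Follow the default dynamic chain: each frame's saved frame
     pointer sits in the word its frame address points to.  */
  while (count-- > 0)
    fp = (char **) *fp;
  return (char *) fp;
}
#endif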
5259 /* Warning about missing arg was already issued. */
5261 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5263 error ("invalid arg to __builtin_return_address");
5266 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5268 error ("invalid arg to __builtin_return_address");
5273 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5274 rtx tem = frame_pointer_rtx;
5277 /* Scan back COUNT frames to the specified frame. */
5278 for (i = 0; i < count; i++)
5280 /* Assume the dynamic chain pointer is in the word that
5281 the frame address points to, unless otherwise specified. */
5282 #ifdef DYNAMIC_CHAIN_ADDRESS
5283 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5285 tem = memory_address (Pmode, tem);
5286 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5289 /* For __builtin_frame_address, return what we've got. */
5290 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5293 /* For __builtin_return_address,
5294 get the return address from that frame. */
5295 #ifdef RETURN_ADDR_RTX
5296 return RETURN_ADDR_RTX (count, tem);
5298 tem = memory_address (Pmode,
5299 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5300 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5304 case BUILT_IN_ALLOCA:
5306 /* Arg could be non-integer if user redeclared this fcn wrong. */
5307 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5309 current_function_calls_alloca = 1;
5310 /* Compute the argument. */
5311 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5313 /* Allocate the desired space. */
5314 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5316 /* Record the new stack level for nonlocal gotos. */
5317 if (nonlocal_goto_handler_slot != 0)
5318 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5322 /* If not optimizing, call the library function. */
5327 /* Arg could be non-integer if user redeclared this fcn wrong. */
5328 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5331 /* Compute the argument. */
5332 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5333 /* Compute ffs, into TARGET if possible.
5334 Set TARGET to wherever the result comes back. */
5335 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5336 ffs_optab, op0, target, 1);
5341 case BUILT_IN_STRLEN:
5342 /* If not optimizing, call the library function. */
5347 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5348 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5352 tree src = TREE_VALUE (arglist);
5353 tree len = c_strlen (src);
5356 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5358 rtx result, src_rtx, char_rtx;
5359 enum machine_mode insn_mode = value_mode, char_mode;
5360 enum insn_code icode;
5362 /* If the length is known, just return it. */
5364 return expand_expr (len, target, mode, 0);
5366 /* If SRC is not a pointer type, don't do this operation inline. */
5370 /* Call a function if we can't compute strlen in the right mode. */
5372 while (insn_mode != VOIDmode)
5374 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5375 if (icode != CODE_FOR_nothing)
5378 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5380 if (insn_mode == VOIDmode)
5383 /* Make a place to write the result of the instruction. */
5386 && GET_CODE (result) == REG
5387 && GET_MODE (result) == insn_mode
5388 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5389 result = gen_reg_rtx (insn_mode);
5391 /* Make sure the operands are acceptable to the predicates. */
5393 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5394 result = gen_reg_rtx (insn_mode);
5396 src_rtx = memory_address (BLKmode,
5397 expand_expr (src, NULL_RTX, Pmode,
5399 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5400 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5402 char_rtx = const0_rtx;
5403 char_mode = insn_operand_mode[(int)icode][2];
5404 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5405 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5407 emit_insn (GEN_FCN (icode) (result,
5408 gen_rtx (MEM, BLKmode, src_rtx),
5409 char_rtx, GEN_INT (align)));
5411 /* Return the value in the proper mode for this function. */
5412 if (GET_MODE (result) == value_mode)
5414 else if (target != 0)
5416 convert_move (target, result, 0);
5420 return convert_to_mode (value_mode, result, 0);
5423 case BUILT_IN_STRCPY:
5424 /* If not optimizing, call the library function. */
5429 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5430 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5431 || TREE_CHAIN (arglist) == 0
5432 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5436 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5441 len = size_binop (PLUS_EXPR, len, integer_one_node);
5443 chainon (arglist, build_tree_list (NULL_TREE, len));
5447 case BUILT_IN_MEMCPY:
5448 /* If not optimizing, call the library function. */
5453 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5454 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5455 || TREE_CHAIN (arglist) == 0
5456 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5457 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5458 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5462 tree dest = TREE_VALUE (arglist);
5463 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5464 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5467 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5469 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5472 /* If either SRC or DEST is not a pointer type, don't do
5473 this operation in-line. */
5474 if (src_align == 0 || dest_align == 0)
5476 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5477 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5481 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5483 /* Copy word part most expediently. */
5484 emit_block_move (gen_rtx (MEM, BLKmode,
5485 memory_address (BLKmode, dest_rtx)),
5486 gen_rtx (MEM, BLKmode,
5487 memory_address (BLKmode,
5488 expand_expr (src, NULL_RTX,
5491 expand_expr (len, NULL_RTX, VOIDmode, 0),
5492 MIN (src_align, dest_align));
5496 /* These comparison functions need an instruction that returns an actual
5497 index. An ordinary compare that just sets the condition codes is not enough. */
5499 #ifdef HAVE_cmpstrsi
5500 case BUILT_IN_STRCMP:
5501 /* If not optimizing, call the library function. */
5506 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5507 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5508 || TREE_CHAIN (arglist) == 0
5509 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5511 else if (!HAVE_cmpstrsi)
5514 tree arg1 = TREE_VALUE (arglist);
5515 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5519 len = c_strlen (arg1);
5521 len = size_binop (PLUS_EXPR, integer_one_node, len);
5522 len2 = c_strlen (arg2);
5524 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5526 /* If we don't have a constant length for the first, use the length
5527 of the second, if we know it. We don't require a constant for
5528 this case; some cost analysis could be done if both are available
5529 but neither is constant. For now, assume they're equally cheap.
5531 If both strings have constant lengths, use the smaller. This
5532 could arise if optimization results in strcmp being called with
5533 two fixed strings, or if the code was machine-generated. We should
5534 add some code to the `memcmp' handler below to deal with such
5535 situations, someday. */
5536 if (!len || TREE_CODE (len) != INTEGER_CST)
5543 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5545 if (tree_int_cst_lt (len2, len))
5549 chainon (arglist, build_tree_list (NULL_TREE, len));
5553 case BUILT_IN_MEMCMP:
5554 /* If not optimizing, call the library function. */
5559 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5560 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5561 || TREE_CHAIN (arglist) == 0
5562 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5563 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5564 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5566 else if (!HAVE_cmpstrsi)
5569 tree arg1 = TREE_VALUE (arglist);
5570 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5571 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5575 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5577 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5578 enum machine_mode insn_mode
5579 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5581 /* If we don't have POINTER_TYPE, call the function. */
5582 if (arg1_align == 0 || arg2_align == 0)
5584 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5585 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5589 /* Make a place to write the result of the instruction. */
5592 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5593 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5594 result = gen_reg_rtx (insn_mode);
5596 emit_insn (gen_cmpstrsi (result,
5597 gen_rtx (MEM, BLKmode,
5598 expand_expr (arg1, NULL_RTX, Pmode,
5600 gen_rtx (MEM, BLKmode,
5601 expand_expr (arg2, NULL_RTX, Pmode,
5603 expand_expr (len, NULL_RTX, VOIDmode, 0),
5604 GEN_INT (MIN (arg1_align, arg2_align))));
5606 /* Return the value in the proper mode for this function. */
5607 mode = TYPE_MODE (TREE_TYPE (exp));
5608 if (GET_MODE (result) == mode)
5610 else if (target != 0)
5612 convert_move (target, result, 0);
5616 return convert_to_mode (mode, result, 0);
5619 case BUILT_IN_STRCMP:
5620 case BUILT_IN_MEMCMP:
5624 default: /* just do library call, if unknown builtin */
5625 error ("built-in function %s not currently supported",
5626 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5629 /* The switch statement above can drop through to cause the function
5630 to be called normally. */
5632 return expand_call (exp, target, ignore);
5635 /* Expand code for a post- or pre- increment or decrement
5636 and return the RTX for the result.
5637 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
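/* For illustration only: the semantics being expanded, in source form.
   `post_incr' is a hypothetical name.  */
#if 0
static int
post_incr (p)
     int *p;
{
  int old = *p;			/* save the old value (cf. copy_to_reg below) */
  *p = old + 1;			/* do the increment */
  return old;			/* POST == 1 yields the old value; a
				   preincrement would return *p instead */
}
#endif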
5640 expand_increment (exp, post)
5644 register rtx op0, op1;
5645 register rtx temp, value;
5646 register tree incremented = TREE_OPERAND (exp, 0);
5647 optab this_optab = add_optab;
5649 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5650 int op0_is_copy = 0;
5652 /* Stabilize any component ref that might need to be
5653 evaluated more than once below. */
5654 if (TREE_CODE (incremented) == BIT_FIELD_REF
5655 || (TREE_CODE (incremented) == COMPONENT_REF
5656 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5657 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5658 incremented = stabilize_reference (incremented);
5660 /* Compute the operands as RTX.
5661 Note whether OP0 is the actual lvalue or a copy of it:
5662 I believe it is a copy iff it is a register or subreg
5663 and insns were generated in computing it. */
5664 temp = get_last_insn ();
5665 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
5666 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5667 && temp != get_last_insn ());
5668 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5670 /* Decide whether incrementing or decrementing. */
5671 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5672 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5673 this_optab = sub_optab;
5675 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5676 then we cannot just increment OP0. We must
5677 therefore contrive to increment the original value.
5678 Then we can return OP0 since it is a copy of the old value. */
5681 /* This is the easiest way to increment the value wherever it is.
5682 Problems with multiple evaluation of INCREMENTED
5683 are prevented because either (1) it is a component_ref,
5684 in which case it was stabilized above, or (2) it is an array_ref
5685 with constant index in an array in a register, which is
5686 safe to reevaluate. */
5687 tree newexp = build ((this_optab == add_optab
5688 ? PLUS_EXPR : MINUS_EXPR),
5691 TREE_OPERAND (exp, 1));
5692 temp = expand_assignment (incremented, newexp, ! post, 0);
5693 return post ? op0 : temp;
5696 /* Convert decrement by a constant into a negative increment. */
5697 if (this_optab == sub_optab
5698 && GET_CODE (op1) == CONST_INT)
5700 op1 = GEN_INT (- INTVAL (op1));
5701 this_optab = add_optab;
5706 /* We have a true reference to the value in OP0.
5707 If there is an insn to add or subtract in this mode, queue it. */
5709 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
5710 op0 = stabilize (op0);
5713 icode = (int) this_optab->handlers[(int) mode].insn_code;
5714 if (icode != (int) CODE_FOR_nothing
5715 /* Make sure that OP0 is valid for operands 0 and 1
5716 of the insn we want to queue. */
5717 && (*insn_operand_predicate[icode][0]) (op0, mode)
5718 && (*insn_operand_predicate[icode][1]) (op0, mode))
5720 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5721 op1 = force_reg (mode, op1);
5723 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5727 /* Preincrement, or we can't increment with one simple insn. */
5729 /* Save a copy of the value before inc or dec, to return it later. */
5730 temp = value = copy_to_reg (op0);
5732 /* Arrange to return the incremented value. */
5733 /* Copy the rtx because expand_binop will protect from the queue,
5734 and the results of that would be invalid for us to return
5735 if our caller does emit_queue before using our result. */
5736 temp = copy_rtx (value = op0);
5738 /* Increment however we can. */
5739 op1 = expand_binop (mode, this_optab, value, op1, op0,
5740 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5741 /* Make sure the value is stored into OP0. */
5743 emit_move_insn (op0, op1);
5748 /* Expand all function calls contained within EXP, innermost ones first.
5749 But don't look within expressions that have sequence points.
5750 For each CALL_EXPR, record the rtx for its value
5751 in the CALL_EXPR_RTL field. */
5754 preexpand_calls (exp)
5757 register int nops, i;
5758 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5760 if (! do_preexpand_calls)
5763 /* Only expressions and references can contain calls. */
5765 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5768 switch (TREE_CODE (exp))
5771 /* Do nothing if already expanded. */
5772 if (CALL_EXPR_RTL (exp) != 0)
5775 /* Do nothing to built-in functions. */
5776 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5777 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5778 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5779 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
5784 case TRUTH_ANDIF_EXPR:
5785 case TRUTH_ORIF_EXPR:
5786 /* If we find one of these, then we can be sure
5787 the adjust will be done for it (since it makes jumps).
5788 Do it now, so that if this is inside an argument
5789 of a function, we don't get the stack adjustment
5790 after some other args have already been pushed. */
5791 do_pending_stack_adjust ();
5796 case WITH_CLEANUP_EXPR:
5800 if (SAVE_EXPR_RTL (exp) != 0)
5804 nops = tree_code_length[(int) TREE_CODE (exp)];
5805 for (i = 0; i < nops; i++)
5806 if (TREE_OPERAND (exp, i) != 0)
5808 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5809 if (type == 'e' || type == '<' || type == '1' || type == '2' || type == 'r')
5811 preexpand_calls (TREE_OPERAND (exp, i));
5815 /* At the start of a function, record that we have no previously-pushed
5816 arguments waiting to be popped. */
5819 init_pending_stack_adjust ()
5821 pending_stack_adjust = 0;
5824 /* When exiting from function, if safe, clear out any pending stack adjust
5825 so the adjustment won't get done. */
5828 clear_pending_stack_adjust ()
5830 #ifdef EXIT_IGNORE_STACK
5831 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5832 && ! (TREE_INLINE (current_function_decl) && ! flag_no_inline)
5833 && ! flag_inline_functions)
5834 pending_stack_adjust = 0;
5838 /* Pop any previously-pushed arguments that have not been popped yet. */
5841 do_pending_stack_adjust ()
5843 if (inhibit_defer_pop == 0)
5845 if (pending_stack_adjust != 0)
5846 adjust_stack (GEN_INT (pending_stack_adjust));
5847 pending_stack_adjust = 0;
5851 /* Expand all cleanups up to OLD_CLEANUPS.
5852 Needed here, and also for language-dependent calls. */
5855 expand_cleanups_to (old_cleanups)
5858 while (cleanups_this_call != old_cleanups)
5860 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
5861 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5865 /* Expand conditional expressions. */
5867 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5868 LABEL is an rtx of code CODE_LABEL, in this function and all the
5872 jumpifnot (exp, label)
5876 do_jump (exp, label, NULL_RTX);
5879 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5886 do_jump (exp, NULL_RTX, label);
5889 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5890 the result is zero, or IF_TRUE_LABEL if the result is one.
5891 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5892 meaning fall through in that case.
5894 do_jump always does any pending stack adjust except when it does not
5895 actually perform a jump. An example where there is no jump
5896 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5898 This function is responsible for optimizing cases such as
5899 &&, || and comparison operators in EXP. */
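/* For illustration only: the control flow do_jump produces for
   `a && b', in source form.  `andif_jump' is a hypothetical name.  */
#if 0
static int
andif_jump (a, b)
     int a, b;
{
  /* TRUTH_ANDIF_EXPR: jump to the false label as soon as A is 0,
     so B is never evaluated in that case.  */
  if (a == 0)
    goto false_label;
  if (b == 0)
    goto false_label;
  return 1;
 false_label:
  return 0;
}
#endif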
5902 do_jump (exp, if_false_label, if_true_label)
5904 rtx if_false_label, if_true_label;
5906 register enum tree_code code = TREE_CODE (exp);
5907 /* Some cases need to create a label to jump to
5908 in order to properly fall through.
5909 These cases set DROP_THROUGH_LABEL nonzero. */
5910 rtx drop_through_label = 0;
5924 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5930 /* This is not true with #pragma weak */
5932 /* The address of something can never be zero. */
5934 emit_jump (if_true_label);
5939 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
5940 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
5941 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
5944 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
5946 if ((TYPE_PRECISION (TREE_TYPE (exp))
5947 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5949 case NON_LVALUE_EXPR:
5950 case REFERENCE_EXPR:
5955 /* These cannot change zero->non-zero or vice versa. */
5956 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5960 /* This is never less insns than evaluating the PLUS_EXPR followed by
5961 a test and can be longer if the test is eliminated. */
5963 /* Reduce to minus. */
5964 exp = build (MINUS_EXPR, TREE_TYPE (exp),
5965 TREE_OPERAND (exp, 0),
5966 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
5967 TREE_OPERAND (exp, 1))));
5968 /* Process as MINUS. */
5972 /* Non-zero iff operands of minus differ. */
5973 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
5974 TREE_OPERAND (exp, 0),
5975 TREE_OPERAND (exp, 1)),
5980 /* If we are AND'ing with a small constant, do this comparison in the
5981 smallest type that fits. If the machine doesn't have comparisons
5982 that small, it will be converted back to the wider comparison.
5983 This helps if we are testing the sign bit of a narrower object.
5984 combine can't do this for us because it can't know whether a
5985 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
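/* For illustration only: an instance of the narrowing, in source form.
   `sign_bit_set' is a hypothetical name and assumes 8-bit chars with
   two's complement representation.  */
#if 0
static int
sign_bit_set (x)
     int x;
{
  /* (x & 0x80) != 0 examines only the low byte, so the test can be
     done as a QImode compare of the sign bit of that byte.  */
  return (signed char) x < 0;	/* same result as (x & 0x80) != 0 */
}
#endif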
5987 if (! SLOW_BYTE_ACCESS
5988 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5989 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
5990 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
5991 && (type = type_for_size (i + 1, 1)) != 0
5992 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
5993 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
5994 != CODE_FOR_nothing))
5996 do_jump (convert (type, exp), if_false_label, if_true_label);
6001 case TRUTH_NOT_EXPR:
6002 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6005 case TRUTH_ANDIF_EXPR:
6006 if (if_false_label == 0)
6007 if_false_label = drop_through_label = gen_label_rtx ();
6008 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6009 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6012 case TRUTH_ORIF_EXPR:
6013 if (if_true_label == 0)
6014 if_true_label = drop_through_label = gen_label_rtx ();
6015 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6016 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6020 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6023 do_pending_stack_adjust ();
6024 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6031 int bitsize, bitpos, unsignedp;
6032 enum machine_mode mode;
6037 /* Get description of this reference. We don't actually care
6038 about the underlying object here. */
6039 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6040 &mode, &unsignedp, &volatilep);
6042 type = type_for_size (bitsize, unsignedp);
6043 if (! SLOW_BYTE_ACCESS
6044 && type != 0 && bitsize >= 0
6045 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6046 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6047 != CODE_FOR_nothing))
6049 do_jump (convert (type, exp), if_false_label, if_true_label);
6056 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6057 if (integer_onep (TREE_OPERAND (exp, 1))
6058 && integer_zerop (TREE_OPERAND (exp, 2)))
6059 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6061 else if (integer_zerop (TREE_OPERAND (exp, 1))
6062 && integer_onep (TREE_OPERAND (exp, 2)))
6063 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6067 register rtx label1 = gen_label_rtx ();
6068 drop_through_label = gen_label_rtx ();
6069 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6070 /* Now the THEN-expression. */
6071 do_jump (TREE_OPERAND (exp, 1),
6072 if_false_label ? if_false_label : drop_through_label,
6073 if_true_label ? if_true_label : drop_through_label);
6074 /* In case the do_jump just above never jumps. */
6075 do_pending_stack_adjust ();
6076 emit_label (label1);
6077 /* Now the ELSE-expression. */
6078 do_jump (TREE_OPERAND (exp, 2),
6079 if_false_label ? if_false_label : drop_through_label,
6080 if_true_label ? if_true_label : drop_through_label);
6085 if (integer_zerop (TREE_OPERAND (exp, 1)))
6086 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6087 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6090 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6091 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6093 comparison = compare (exp, EQ, EQ);
6097 if (integer_zerop (TREE_OPERAND (exp, 1)))
6098 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6099 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6102 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6103 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6105 comparison = compare (exp, NE, NE);
6109 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6111 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6112 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6114 comparison = compare (exp, LT, LTU);
6118 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6120 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6121 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6123 comparison = compare (exp, LE, LEU);
6127 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6129 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6130 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6132 comparison = compare (exp, GT, GTU);
6136 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6138 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6139 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6141 comparison = compare (exp, GE, GEU);
6146 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6148 /* This is not needed any more and causes poor code since it causes
6149 comparisons and tests from non-SI objects to have different code sequences. */
6151 /* Copy to register to avoid generating bad insns by cse
6152 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6153 if (!cse_not_expected && GET_CODE (temp) == MEM)
6154 temp = copy_to_reg (temp);
6156 do_pending_stack_adjust ();
6157 if (GET_CODE (temp) == CONST_INT)
6158 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6159 else if (GET_CODE (temp) == LABEL_REF)
6160 comparison = const_true_rtx;
6161 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6162 && !can_compare_p (GET_MODE (temp)))
6163 /* Note swapping the labels gives us not-equal. */
6164 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6165 else if (GET_MODE (temp) != VOIDmode)
6166 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6167 NE, 1, GET_MODE (temp), NULL_RTX, 0);
6172 /* Do any postincrements in the expression that was tested. */
6175 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6176 straight into a conditional jump instruction as the jump condition.
6177 Otherwise, all the work has been done already. */
6179 if (comparison == const_true_rtx)
6182 emit_jump (if_true_label);
6184 else if (comparison == const0_rtx)
6187 emit_jump (if_false_label);
6189 else if (comparison)
6190 do_jump_for_compare (comparison, if_false_label, if_true_label);
6194 if (drop_through_label)
6196 /* If do_jump produces code that might be jumped around,
6197 do any stack adjusts from that code, before the place
6198 where control merges in. */
6199 do_pending_stack_adjust ();
6200 emit_label (drop_through_label);
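
/* For illustration: given a source test such as `if (x == 0)', the
   EQ_EXPR case above notices the zero operand and simply recurses as
   do_jump (x, if_true_label, if_false_label) with the labels handed
   over in swapped order, since `x == 0' is true exactly when X, taken
   as a truth value, is false; no explicit comparison against zero
   need be generated.  */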
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but the high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
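
/* For illustration: on a hypothetical 32-bit target, a signed DImode
   test `a > b' handled by the loop above amounts to

	if (a.high >  b.high) goto if_true_label;    (signed compare)
	if (a.high != b.high) goto if_false_label;
	if (a.low  >  b.low)  goto if_true_label;    (unsigned compare)
	goto if_false_label;

   Only the high-order word is compared signed; the low-order word is
   a pure unsigned magnitude.  */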
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, 0, word_mode, NULL_RTX, 0);
      /* Words known unequal at compile time settle the test; words known
	 equal need no test at all.  */
      if (comp == const0_rtx)
	emit_jump (if_false_label);
      else if (comp != const_true_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
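
/* For illustration: for a two-word operand the loop above behaves as

	if (op0.word0 != op1.word0) goto if_false_label;
	if (op0.word1 != op1.word1) goto if_false_label;
	goto if_true_label;

   so the first unequal word decides the test, and the true label is
   reached only after every word has matched.  */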
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 0, word_mode, NULL_RTX, 0);
      /* A word known to be nonzero decides the test; a word known to be
	 zero needs no test.  */
      if (comp == const0_rtx)
	emit_jump (if_false_label);
      else if (comp != const_true_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
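
/* For illustration: testing a two-word OP0 against zero this way is

	if (op0.word0 != 0) goto if_false_label;
	if (op0.word1 != 0) goto if_false_label;
	goto if_true_label;

   and, as noted at the call in do_jump, handing the labels over in
   swapped order turns the same sequence into a test for nonzero.  */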
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = PREV_INSN (get_last_insn ());
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();

      /* Here we get the insn before what was just emitted.
	 On some machines, emitting the branch can discard
	 the previous compare insn and emit a replacement.  */
      if (prev == 0)
	/* If there's only one preceding insn...  */
	insn = get_insns ();
      else
	insn = NEXT_INSN (prev);

      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
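
/* For illustration: when only IF_FALSE_LABEL is given and the emitted
   branch cannot be inverted by invert_jump, the fallback above rewrites

	b<cond> if_false_label		(branches on the wrong sense)

   into

	b<cond> Ltrue			(redirected to a fresh label)
	jmp if_false_label
     Ltrue:

   preserving the intended sense at the cost of one unconditional jump.  */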
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  /* If one operand is constant, make it the second one.  */

  if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
    return simplify_relational_operation (code, mode, op0, op1);

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
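
/* For illustration: with 8-bit QImode operands, a signed test `c == -1'
   falls under the rule above; the constant is masked to 0xff and the
   compare is issued unsigned, since (unsigned char) -1 and 0xff agree
   while zero-extending C is cheaper than sign-extending it.  */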
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
						    NULL_RTX, VOIDmode, 0)));

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), target, 1);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, 1);

      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, target);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
			    OPTAB_LIB_WIDEN);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1, operand_mode,
			    unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
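
/* For illustration: the single-bit case above turns

	r = ((x & 8) != 0);

   into the equivalent of

	r = (x >> 3) & 1;

   and, had the test been `== 0', a final xor with 1 would flip the
   result; no store-flag (scc) instruction is needed for either form.  */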
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
	      gen_rtx (MULT, Pmode, index,
		       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
	      gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */
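
/* For illustration: with 4-byte table entries, i.e. GET_MODE_SIZE
   (CASE_VECTOR_MODE) == 4, the address formed in do_tablejump is

	(mem (plus (mult index 4) (label_ref table_label)))

   that is, table_label + index * 4; the entry fetched from there is
   moved into a register and handed to the tablejump pattern.  */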