/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
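/* Illustrative note, not part of the original source: CEIL rounds a
   division up, so the number of words needed to hold SIZE bytes is
   CEIL (SIZE, UNITS_PER_WORD).  With 4-byte words:

       CEIL (7, 4) => 2
       CEIL (8, 4) => 2
       CEIL (9, 4) => 3  */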
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#define PUSH_ARGS_REVERSED	/* If it's last to first */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;
/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi)
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0

/* This is run at the start of compiling a function.  */

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

  /* Instead of saving the postincrement queue, empty it.  */

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

restore_expr_status (p)
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (var, body)
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, 0, 0, body, pending_chain);
  return pending_chain;

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
protect_from_queue (x, modify)
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

  /* A special hack for read access to (MEM (QUEUED ...))
     to facilitate use of autoincrement.
     Make a copy of the contents of the memory location
     rather than a copy of the address, but not
     if the value is of mode BLKmode.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
      register rtx y = XEXP (x, 0);
      XEXP (x, 0) = QUEUED_VAR (y);
	  register rtx temp = gen_reg_rtx (GET_MODE (x));
	  emit_insn_before (gen_move_insn (temp, x),

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */
    XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
  else if (code == PLUS || code == MULT)
      XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);

  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);
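/* Illustrative sketch, not part of the original source: a typical
   client of this queue might expand V++ roughly as follows (V and the
   surrounding declarations are assumed; the routine below that flushes
   the queue is assumed to be emit_queue).  */
#if 0
  /* Queue "V = V + 1" instead of emitting it immediately.  */
  rtx q = enqueue_insn (v, gen_move_insn (v, plus_constant (v, 1)));
  /* Launder Q before putting it in an insn; while the increment is
     still pending this yields V itself.  */
  rtx safe = protect_from_queue (q, 0);
  /* ... emit insns that use SAFE ... */
  /* Finally force out the queued increment.  */
  emit_queue ();
#endif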
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  register enum rtx_code code = GET_CODE (x);

    return queued_subexp_p (XEXP (x, 0));
    return queued_subexp_p (XEXP (x, 0))
	   || queued_subexp_p (XEXP (x, 1));

/* Perform all the pending incrementations.  */

  while (p = pending_chain)
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (to, from, unsignedp)
     register rtx to, from;
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);

      if (from_mode == SFmode && to_mode == DFmode)
	libcall = extendsfdf2_libfunc;
      else if (from_mode == DFmode && to_mode == SFmode)
	libcall = truncdfsf2_libfunc;
      /* This conversion is not implemented yet.  There aren't any TFmode
	 library routines.  */

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  emit_unop_insn (code, to, from, equiv_code);
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
	fill_value = const0_rtx;

	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	      emit_cmp_insn (lowfrom, const0_rtx, NE, 0, lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));

	    = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
			    size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
	  fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, 0,
			      gen_rtx (equiv_code, to_mode, from));

  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
      convert_move (to, gen_lowpart (word_mode, from), 0);

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi */

  if (from_mode == PSImode)
      if (to_mode != SImode)
	  from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi
      if (HAVE_extendpsisi)
	  emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi */

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
	   && ! MEM_VOLATILE_P (from)
	   && ! mode_dependent_address_p (XEXP (from, 0)))
	  || GET_CODE (from) == REG
	  || GET_CODE (from) == SUBREG))
      emit_move_insn (to, gen_lowpart (to_mode, from));
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  emit_unop_insn (code, to, from, equiv_code);

	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);

	  /* No suitable intermediate mode.  */
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
#ifdef HAVE_truncdisi2
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)
#ifdef HAVE_truncdihi2
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)
#ifdef HAVE_truncdiqi2
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)
#ifdef HAVE_truncsihi2
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)
#ifdef HAVE_truncsiqi2
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)
#ifdef HAVE_trunchiqi2
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
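/* Illustrative note, not part of the original source: the UNSIGNEDP
   argument chooses between ZERO_EXTEND and SIGN_EXTEND when widening.
   Widening the 16-bit pattern 0xFFFF to 32 bits gives

       sign-extended (unsignedp == 0):  0xFFFFFFFF  (-1)
       zero-extended (unsignedp != 0):  0x0000FFFF  (65535)  */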
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
  x = protect_from_queue (x, 0);

  if (mode == GET_MODE (x))
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), 0, mode);
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
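/* Illustrative sketch, not part of the original source: a caller that
   needs an SImode copy of an rtx VAL of unsigned QImode type might
   write the following (VAL is an assumed name).  */
#if 0
  /* Widen VAL to SImode with zero-extension; the result is either a
     piece of VAL referenced in place or a fresh pseudo holding the
     converted value.  */
  rtx wide = convert_to_mode (SImode, val, 1);
#endif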
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
  int explicit_inc_from;

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

move_by_pieces (to, from, len, align)
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.to_addr = to_addr;
  data.from_addr = from_addr;
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.explicit_inc_to = -1;
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
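/* Illustrative sketch, not part of the original source: the mode
   selection above is the compile-time analogue of copying a block with
   the widest available chunk size first, then retrying the remainder
   with narrower chunks.  In plain C the same idea looks like this
   (hypothetical helper, memcpy standing in for one move insn).  */
#if 0
static void
copy_by_pieces_example (char *to, char *from, int len)
{
  int size;
  /* Copy 4-byte chunks while they fit, then 2-byte, then 1-byte,
     mirroring the widest-mode-first loop above.  */
  for (size = 4; size > 0; size /= 2)
    while (len >= size)
      {
	memcpy (to, from, size);
	to += size, from += size, len -= size;
      }
}
#endif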
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

move_by_pieces_ninsns (l, align)
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)

      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (genfun, mode, data)
     enum machine_mode mode;
     struct move_by_pieces *data;
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
	      ? gen_rtx (MEM, mode, data->from_addr)
	      : change_address (data->from, mode,
				plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  gen_rtx (CONST_INT, VOIDmode, -size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  gen_rtx (CONST_INT, VOIDmode, -size)));

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  gen_rtx (CONST_INT, VOIDmode, size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  gen_rtx (CONST_INT, VOIDmode, size)));

      if (! data->reverse) data->offset += size;

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

emit_block_move (x, y, size, align)
  if (GET_MODE (x) != BLKmode)

  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (GET_CODE (x) != MEM)
  if (GET_CODE (y) != MEM)

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
    move_by_pieces (x, y, INTVAL (size), align);

      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */
	  && GET_CODE (size) == CONST_INT
	  && ((unsigned) INTVAL (size)
	      < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	  rtx insn = gen_movstrqi (x, y, size,
				   gen_rtx (CONST_INT, VOIDmode, align));

#ifdef HAVE_movstrhi
	  && GET_CODE (size) == CONST_INT
	  && ((unsigned) INTVAL (size)
	      < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	  rtx insn = gen_movstrhi (x, y, size,
				   gen_rtx (CONST_INT, VOIDmode, align));

#ifdef HAVE_movstrsi
	  rtx insn = gen_movstrsi (x, y, size,
				   gen_rtx (CONST_INT, VOIDmode, align));

#ifdef HAVE_movstrdi
	  rtx insn = gen_movstrdi (x, y, size,
				   gen_rtx (CONST_INT, VOIDmode, align));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
      emit_library_call (bcopy_libfunc, 1,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
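/* Illustrative sketch, not part of the original source: it mirrors the
   call pattern used later in store_expr, copying a BLKmode value TEMP
   into TARGET using the type's alignment.  */
#if 0
  emit_block_move (target, temp, expr_size (exp),
		   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
#endif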
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;
  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   gen_rtx (CONST_INT, VOIDmode, nregs));
    delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_from_reg (regno, x, nregs)
  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    gen_rtx (CONST_INT, VOIDmode, nregs));
    delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

use_regs (regno, nregs)
  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

clear_storage (object, size)
  if (GET_MODE (object) == BLKmode)
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 1,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 gen_rtx (CONST_INT, VOIDmode, size), Pmode);
      emit_library_call (bzero_libfunc, 1,
			 XEXP (object, 0), Pmode,
			 gen_rtx (CONST_INT, VOIDmode, size), Pmode);

    emit_move_insn (object, const0_rtx);
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

emit_move_insn (x, y)
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);
  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));
  if (mode == BLKmode)

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)

	  last_insn = emit_move_insn (xpart, ypart);
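/* Illustrative note, not part of the original source: on a 32-bit
   target with no movdi pattern, the word loop above turns one DImode
   move between pseudos into two SImode moves, roughly

       (set (subreg:SI (reg:DI 100) 0) (subreg:SI (reg:DI 101) 0))
       (set (subreg:SI (reg:DI 100) 1) (subreg:SI (reg:DI 101) 1))

   where operand_subword produces each word-sized piece.  */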
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

push_block (size, extra, below)
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
      rtx temp = copy_to_mode_reg (Pmode, size);
      temp = expand_binop (Pmode, add_optab,
			   gen_rtx (CONST_INT, VOIDmode, extra),
			   temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);

  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
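/* Illustrative note, not part of the original source: on a machine
   whose stack grows downward, the expression built just above is

       (pre_dec:SI (reg:SI sp))

   modulo the actual Pmode; callers wrap it in a MEM, as in the BLKmode
   case of emit_push_insn below:

       (mem:BLK (pre_dec:SI (reg:SI sp)))  */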
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     enum machine_mode mode;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
      /* Copy a block into the stack, entirely or partially.  */

      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
	  && GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
#endif /* PUSH_ROUNDING */

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	      if (GET_CODE (size) == CONST_INT)
		size = gen_rtx (CONST_INT, VOIDmode, INTVAL (size) - used);
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     gen_rtx (CONST_INT, VOIDmode, used),
				     0, 0, OPTAB_LIB_WIDEN);

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	      temp = push_block (size, extra, where_pad == downward);
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr, args_so_far),

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);

	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	      emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				       gen_rtx (CONST_INT, VOIDmode, align)));
#ifdef HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	      emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				       gen_rtx (CONST_INT, VOIDmode, align)));
#ifdef HAVE_movstrsi
	      emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				       gen_rtx (CONST_INT, VOIDmode, align)));
#ifdef HAVE_movstrdi
	      emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				       gen_rtx (CONST_INT, VOIDmode, align)));

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */

#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 1,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
	  emit_library_call (bcopy_libfunc, 1,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,

  else if (partial > 0)
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;

      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, 0, 0, align, 0, 0, 0, args_addr,
			  gen_rtx (CONST_INT, VOIDmode,
				   args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

#ifdef PUSH_ROUNDING
	addr = gen_push_operand ();
      if (GET_CODE (args_so_far) == CONST_INT)
	  = memory_address (mode,
			    plus_constant (args_addr, INTVAL (args_so_far)));
	addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,

      emit_move_insn (gen_rtx (MEM, mode, addr), x);

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));
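/* Illustrative sketch, not part of the original source: the recursive
   call above shows the simplest use of emit_push_insn.  Pushing one
   word VAL with no type info, no partial registers and no padding
   looks like

       emit_push_insn (val, word_mode, 0, 0, align, 0, 0, 0,
		       args_addr, args_so_far);

   where VAL, ALIGN, ARGS_ADDR and ARGS_SO_FAR are assumed names.  */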
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.  */

emit_library_call (va_alist)
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
	       struct args_size offset; struct args_size size; };
  int old_inhibit_defer_pop = inhibit_defer_pop;

  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);

  args_size.constant = 0;

  for (count = 0; count < nargs; count++)
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
	 must do it earlier where we know the signedness of the arg.  */
	  || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))

      /* On some machines, there's no way to pass a float to a library fcn.
	 Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
	val = convert_to_mode (DFmode, val, 0), mode = DFmode;
      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
	  && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
	val = force_operand (val, 0);

      argvec[count].value = val;
      argvec[count].mode = mode;

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, (tree)0, 1))

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, (tree)0, 1);
      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
	= FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, (tree)0, 1);
      argvec[count].partial = 0;

      locate_and_pad_parm (mode, 0,
			   argvec[count].reg && argvec[count].partial == 0,
			   0, &args_size, &argvec[count].offset,
			   &argvec[count].size);

      if (argvec[count].size.var)

#ifndef REG_PARM_STACK_SPACE
      if (argvec[count].partial)
	argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;

      if (argvec[count].reg == 0 || argvec[count].partial != 0
#ifdef REG_PARM_STACK_SPACE
	args_size.constant += argvec[count].size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
      /* If this arg is actually passed on the stack, it might be
	 clobbering something we already put there (this library call might
	 be inside the evaluation of an argument to a function whose call
	 requires the stack).  This will only occur when the library call
	 has sufficient args to run out of argument registers.  Abort in
	 this case; if this ever occurs, code must be added to save and
	 restore the arg slot.  */

      if (argvec[count].reg == 0 || argvec[count].partial != 0)

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

#ifdef STACK_BOUNDARY
  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
			 / STACK_BYTES) * STACK_BYTES);

#ifdef REG_PARM_STACK_SPACE
  args_size.constant = MAX (args_size.constant,
			    REG_PARM_STACK_SPACE ((tree) 0));

#ifdef ACCUMULATE_OUTGOING_ARGS
  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;
  args_size.constant = 0;

#ifndef PUSH_ROUNDING
  argblock = push_block (gen_rtx (CONST_INT, VOIDmode, args_size.constant),

#ifdef PUSH_ARGS_REVERSED

  /* Push the args that need to be pushed.  */

  for (count = 0; count < nargs; count++, argnum += inc)
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (! (reg != 0 && partial == 0))
	emit_push_insn (val, mode, 0, 0, 0, partial, reg, 0, argblock,
			gen_rtx (CONST_INT, VOIDmode,
				 argvec[argnum].offset.constant));
#ifdef PUSH_ARGS_REVERSED

  /* Now load any reg parms into their regs.  */

  for (count = 0; count < nargs; count++, argnum += inc)
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (reg != 0 && partial == 0)
	emit_move_insn (reg, val);

  /* For version 1.37, try deleting this entirely.  */

  /* Any regs containing parms remain in use through the call.  */

  for (count = 0; count < nargs; count++)
    if (argvec[count].reg != 0)
      emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));

  use_insns = get_insns ();

  fun = prepare_call_address (fun, 0, &use_insns);

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */

  emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
	       FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
	       outmode != VOIDmode ? hard_libcall_value (outmode) : 0,
	       old_inhibit_defer_pop + 1, use_insns, no_queue);

  /* Now restore inhibit_defer_pop to its actual original value.  */
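/* Illustrative sketch, not part of the original source: a typical call,
   matching the memcpy pattern used elsewhere in this file, passes FUN,
   NO_QUEUE, OUTMODE, NARGS, then NARGS (value, mode) pairs.  */
#if 0
  emit_library_call (memcpy_libfunc, 1,
		     VOIDmode, 3,
		     dst_addr, Pmode,
		     src_addr, Pmode,
		     size_rtx, Pmode);
  /* dst_addr, src_addr and size_rtx are assumed rtx names.  */
#endif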
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */

expand_assignment (to, from, want_value, suggest_reg)
  register rtx to_rtx = 0;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, 0, VOIDmode, 0);

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
      enum machine_mode mode1;

      tree tem = get_inner_reference (to, &bitsize, &bitpos,
				      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, 0, VOIDmode, 0);

	  if (GET_CODE (to_rtx) == MEM)
	    MEM_VOLATILE_P (to_rtx) = 1;
#if 0 /* This was turned off because, when a field is volatile
	 in an object which is not volatile, the object may be in a register,
	 and then we would abort over here.  */

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    /* Spurious cast makes HPUX compiler happy.  */
			    ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			    /* Required alignment of containing datum.  */
			    TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

    to_rtx = expand_expr (to, 0, VOIDmode, 0);

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
      rtx from_rtx = expand_expr (from, 0, VOIDmode, 0);
      rtx size = expr_size (from);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
      emit_library_call (bcopy_libfunc, 1,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,

      preserve_temp_slots (to_rtx);

  /* Compute FROM and store the value in the rtx we got.  */

  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   Returns TARGET or an equivalent value.
   TARGET may contain a QUEUED rtx.

   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.

   If the value stored is a constant, we return the constant.  */

store_expr (exp, target, suggest_reg)
     register rtx target;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      target = protect_from_queue (target, 1);

      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
      emit_jump_insn (gen_jump (lab2));
      store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);

  else if (suggest_reg && GET_CODE (target) == MEM
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, SUGGEST_REG will be nonzero for it.
       We know expand_expr will not use the target in that case.  */
      temp = expand_expr (exp, cse_not_expected ? 0 : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
2068 /* If target contains a postincrement, it is not safe
2069 to use as the returned value. It would access the wrong
2070 place by the time the queued increment gets output.
2071 So copy the value through a temporary and use that temp
2074 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2076 /* Expand EXP into a new pseudo. */
2077 temp = gen_reg_rtx (GET_MODE (target));
2078 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2081 temp = expand_expr (exp, 0, GET_MODE (target), 0);
2082 dont_return_target = 1;
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* DO return TARGET if it's a specified hardware register.
	 expand_return relies on this.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && CONSTANT_P (temp))
	dont_return_target = 1;
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
  if (temp != target && TREE_CODE (exp) != ERROR_MARK)
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    convert_move (target, temp, unsignedp);

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	      /* Compute the size of the data to copy from the string.  */
		= fold (build (MIN_EXPR, sizetype,
			       size_binop (CEIL_DIV_EXPR,
					   TYPE_SIZE (TREE_TYPE (exp)),
					   size_int (BITS_PER_UNIT)),
			       build_int_2 (TREE_STRING_LENGTH (exp), 0))));
	      rtx copy_size_rtx = expand_expr (copy_size, 0, VOIDmode, 0);

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET
		 that we have to clear.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		  temp = plus_constant (XEXP (target, 0),
					TREE_STRING_LENGTH (exp));
		  size = plus_constant (size,
					- TREE_STRING_LENGTH (exp));
		  enum machine_mode size_mode = Pmode;

		  temp = force_reg (Pmode, XEXP (target, 0));
		  temp = expand_binop (size_mode, add_optab, temp,
				       copy_size_rtx, 0, 0, OPTAB_LIB_WIDEN);

		  size = expand_binop (size_mode, sub_optab, size,
				       copy_size_rtx, 0, 0, OPTAB_LIB_WIDEN);

		  emit_cmp_insn (size, const0_rtx, LT, 0,
				 GET_MODE (size), 0, 0);
		  label = gen_label_rtx ();
		  emit_jump_insn (gen_blt (label));

	      if (size != const0_rtx)
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 1, VOIDmode, 3,
				     temp, Pmode, const0_rtx, Pmode, size, Pmode);
		  emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
				     temp, Pmode, size, Pmode);

      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	emit_move_insn (target, temp);

  if (dont_return_target)

/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.  */

store_constructor (exp, target)
  /* We know our target cannot conflict, since safe_from_p has been called.  */

  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp);
      emit_move_insn (target, temp);

  if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
      || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE)

      if (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE)
	/* Inform later passes that the whole union value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */
      else if (list_length (CONSTRUCTOR_ELTS (exp))
	       != list_length (TYPE_FIELDS (TREE_TYPE (exp))))
	clear_storage (target, int_size_in_bytes (TREE_TYPE (exp)));

	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))

	  if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
	    /* ??? This case remains to be written.  */

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
		       /* The alignment of TARGET is
			  at least what its type requires.  */
		       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
		       int_size_in_bytes (TREE_TYPE (exp)));

  else if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE)
      tree domain = TYPE_DOMAIN (TREE_TYPE (exp));
      int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (TREE_TYPE (exp));
2278 /* If the constructor has fewer elements than the array,
2279 clear the whole array first. */
2281 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1)
2282 clear_storage (target, maxelt - minelt + 1);
2284 /* Inform later passes that the old value is dead. */
2285 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2287 /* Store each element of the constructor into
2288 the corresponding element of TARGET, determined
2289 by counting the elements. */
2290 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2292 elt = TREE_CHAIN (elt), i++)
2294 register enum machine_mode mode;
2299 mode = TYPE_MODE (elttype);
2300 bitsize = GET_MODE_BITSIZE (mode);
2301 unsignedp = TREE_UNSIGNED (elttype);
2303 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2305 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2306 /* The alignment of TARGET is
2307 at least what its type requires. */
2309 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
2310 int_size_in_bytes (TREE_TYPE (exp)));
2318 /* Store the value of EXP (an expression tree)
2319 into a subfield of TARGET which has mode MODE and occupies
2320 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2321 If MODE is VOIDmode, it means that we are storing into a bit-field.
2323 If VALUE_MODE is VOIDmode, return nothing in particular.
2324 UNSIGNEDP is not used in this case.
2326 Otherwise, return an rtx for the value stored. This rtx
2327 has mode VALUE_MODE if that is convenient to do.
2328 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2330 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2331 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2334 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2335 unsignedp, align, total_size)
2337 int bitsize, bitpos;
2338 enum machine_mode mode;
2340 enum machine_mode value_mode;
2347 if (bitsize < HOST_BITS_PER_INT)
2348 width_mask = (1 << bitsize) - 1;
2350 /* If we are storing into an unaligned field of an aligned union that is
2351 in a register, we may have the mode of TARGET being an integer mode but
2352 MODE == BLKmode. In that case, get an aligned object whose size and
2353 alignment are the same as TARGET and store TARGET into it (we can avoid
2354 the store if the field being stored is the entire width of TARGET). Then
2355 call ourselves recursively to store the field into a BLKmode version of
2356 that object. Finally, load from the object into TARGET. This is not
2357 very efficient in general, but should only be slightly more expensive
2358 than the otherwise-required unaligned accesses. Perhaps this can be
2359 cleaned up later. */
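/* A sketch of one case that can reach this path: a union such as

     union u { int whole; struct { char c[3]; } part; };

   may be held in an SImode register, yet a store into the three-byte
   field `part' has MODE == BLKmode and goes through the stack
   temporary built below.  */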
2362 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2364 rtx object = assign_stack_temp (GET_MODE (target),
2365 GET_MODE_SIZE (GET_MODE (target)), 0);
2366 rtx blk_object = copy_rtx (object);
2368 PUT_MODE (blk_object, BLKmode);
2370 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2371 emit_move_insn (object, target);
2373 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2376 emit_move_insn (target, object);
2381 /* If the structure is in a register or if the component
2382 is a bit field, we cannot use addressing to access it.
2383 Use bit-field techniques or SUBREG to store in it. */
2385 if (mode == VOIDmode || GET_CODE (target) == REG
2386 || GET_CODE (target) == SUBREG)
2388 rtx temp = expand_expr (exp, 0, VOIDmode, 0);
2389 /* Store the value in the bitfield. */
2390 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2391 if (value_mode != VOIDmode)
2393 /* The caller wants an rtx for the value. */
2394 /* If possible, avoid refetching from the bitfield itself. */
2396 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2397 return expand_and (temp,
2398 gen_rtx (CONST_INT, VOIDmode, width_mask), 0);
2399 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2400 0, value_mode, 0, align, total_size);
2406 rtx addr = XEXP (target, 0);
2409 /* If a value is wanted, it must be the lhs;
2410 so make the address stable for multiple use. */
2412 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2413 && ! CONSTANT_ADDRESS_P (addr)
2414 /* A frame-pointer reference is already stable. */
2415 && ! (GET_CODE (addr) == PLUS
2416 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2417 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2418 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2419 addr = copy_to_reg (addr);
2421 /* Now build a reference to just the desired component. */
2423 to_rtx = change_address (target, mode,
2424 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2425 MEM_IN_STRUCT_P (to_rtx) = 1;
2427 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2431 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2432 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2433 ARRAY_REFs at constant positions and find the ultimate containing object, which we return.
2436 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2437 bit position, and *PUNSIGNEDP to the signedness of the field.
2439 If any of the extraction expressions is volatile,
2440 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2442 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2443 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
2446 If the field describes a variable-sized object, *PMODE is set to
2447 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2448 this case, but the address of the object can be found. */
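/* For example, given

     struct s { int x; struct { char c[10]; } in; } v;

   a reference to v.in.c[3] makes the loop below walk the nested
   COMPONENT_REFs and the ARRAY_REF, accumulate their constant bit
   offsets into *PBITPOS, and return the tree for `v' as the ultimate
   containing object.  */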
2451 get_inner_reference (exp, pbitsize, pbitpos, pmode, punsignedp, pvolatilep)
2455 enum machine_mode *pmode;
2460 enum machine_mode mode = VOIDmode;
2462 if (TREE_CODE (exp) == COMPONENT_REF)
2464 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2465 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2466 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2467 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2469 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2471 size_tree = TREE_OPERAND (exp, 1);
2472 *punsignedp = TREE_UNSIGNED (exp);
2476 mode = TYPE_MODE (TREE_TYPE (exp));
2477 *pbitsize = GET_MODE_BITSIZE (mode);
2478 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2483 if (TREE_CODE (size_tree) != INTEGER_CST)
2484 mode = BLKmode, *pbitsize = -1;
2486 *pbitsize = TREE_INT_CST_LOW (size_tree);
2489 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2490 and find the ultimate containing object. */
2496 if (TREE_CODE (exp) == COMPONENT_REF)
2498 tree field = TREE_OPERAND (exp, 1);
2500 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2501 /* ??? This case remains to be written. */
2504 *pbitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2505 if (TREE_THIS_VOLATILE (exp))
2508 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2510 if (TREE_CODE (TREE_OPERAND (exp, 2)) != INTEGER_CST)
2511 /* ??? This case remains to be written. */
2514 *pbitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
2515 if (TREE_THIS_VOLATILE (exp))
2518 else if (TREE_CODE (exp) == ARRAY_REF
2519 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2520 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2522 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2523 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2524 if (TREE_THIS_VOLATILE (exp))
2527 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2528 && ! ((TREE_CODE (exp) == NOP_EXPR
2529 || TREE_CODE (exp) == CONVERT_EXPR)
2530 && (TYPE_MODE (TREE_TYPE (exp))
2531 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2533 exp = TREE_OPERAND (exp, 0);
2536 /* If this was a bit-field, see if there is a mode that allows direct
2537 access in case EXP is in memory. */
2538 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2540 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2541 if (mode == BLKmode)
2550 /* Given an rtx VALUE that may contain additions and multiplications,
2551 return an equivalent value that just refers to a register or memory.
2552 This is done by generating instructions to perform the arithmetic
2553 and returning a pseudo-register containing the value. */
2556 force_operand (value, target)
2559 register optab binoptab = 0;
2560 /* Use a temporary to force order of execution of calls to `force_operand'. */
2564 /* Use subtarget as the target for operand 0 of a binary operation. */
2565 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2567 if (GET_CODE (value) == PLUS)
2568 binoptab = add_optab;
2569 else if (GET_CODE (value) == MINUS)
2570 binoptab = sub_optab;
2571 else if (GET_CODE (value) == MULT)
2573 op2 = XEXP (value, 1);
2574 if (!CONSTANT_P (op2)
2575 && !(GET_CODE (op2) == REG && op2 != subtarget))
2577 tmp = force_operand (XEXP (value, 0), subtarget);
2578 return expand_mult (GET_MODE (value), tmp,
2579 force_operand (op2, 0),
2585 op2 = XEXP (value, 1);
2586 if (!CONSTANT_P (op2)
2587 && !(GET_CODE (op2) == REG && op2 != subtarget))
2589 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2591 binoptab = add_optab;
2592 op2 = negate_rtx (GET_MODE (value), op2);
2595 /* Check for an addition with OP2 a constant integer and our first
2596 operand a PLUS of a virtual register and something else. In that
2597 case, we want to emit the sum of the virtual register and the
2598 constant first and then add the other value. This allows virtual
2599 register instantiation to simply modify the constant rather than
2600 creating another one around this addition. */
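/* For example, given VALUE = (plus (plus VIRT X) C), we emit VIRT + C
   first, so that instantiation can fold C into the register's fixed
   offset, and then add X; computing VIRT + X first would leave a
   second constant for instantiation to rework.  */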
2601 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2602 && GET_CODE (XEXP (value, 0)) == PLUS
2603 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2604 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2605 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2607 rtx temp = expand_binop (GET_MODE (value), binoptab,
2608 XEXP (XEXP (value, 0), 0), op2,
2609 subtarget, 0, OPTAB_LIB_WIDEN);
2610 return expand_binop (GET_MODE (value), binoptab, temp,
2611 force_operand (XEXP (XEXP (value, 0), 1), 0),
2612 target, 0, OPTAB_LIB_WIDEN);
2615 tmp = force_operand (XEXP (value, 0), subtarget);
2616 return expand_binop (GET_MODE (value), binoptab, tmp,
2617 force_operand (op2, 0),
2618 target, 0, OPTAB_LIB_WIDEN);
2619 /* We give UNSIGNEDP = 0 to expand_binop
2620 because the only operations we are expanding here are signed ones. */
2625 /* Subroutine of expand_expr:
2626 save the non-copied parts (LIST) of an expr (LHS), and return a list
2627 which can restore these values to their previous values,
2628 should something modify their storage. */
2631 save_noncopied_parts (lhs, list)
2638 for (tail = list; tail; tail = TREE_CHAIN (tail))
2639 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2640 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2643 tree part = TREE_VALUE (tail);
2644 tree part_type = TREE_TYPE (part);
2645 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part, 0);
2646 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2647 int_size_in_bytes (part_type), 0);
2648 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2649 target = change_address (target, TYPE_MODE (part_type), 0);
2650 parts = tree_cons (to_be_saved,
2651 build (RTL_EXPR, part_type, 0, (tree) target),
2653 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2658 /* Subroutine of expand_expr:
2659 record the non-copied parts (LIST) of an expr (LHS), and return a list
2660 which specifies the initial values of these parts. */
2663 init_noncopied_parts (lhs, list)
2670 for (tail = list; tail; tail = TREE_CHAIN (tail))
2671 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2672 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2675 tree part = TREE_VALUE (tail);
2676 tree part_type = TREE_TYPE (part);
2677 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part, 0);
2678 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2683 /* Subroutine of expand_expr: return nonzero iff there is no way that
2684 EXP can reference X, which is being modified. */
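/* For example, when expanding the assignment `a = b + f (a)', the rtx
   holding `a' must not be used for the intermediate sum, because the
   call f (a) still reads it; this predicate detects such conflicts.  */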
2687 safe_from_p (x, exp)
2697 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
2698 find the underlying pseudo. */
2699 if (GET_CODE (x) == SUBREG)
2702 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2706 /* If X is a location in the outgoing argument area, it is always safe. */
2707 if (GET_CODE (x) == MEM
2708 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2709 || (GET_CODE (XEXP (x, 0)) == PLUS
2710 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2713 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2716 exp_rtl = DECL_RTL (exp);
2723 if (TREE_CODE (exp) == TREE_LIST)
2724 return (safe_from_p (x, TREE_VALUE (exp))
2725 && (TREE_CHAIN (exp) == 0
2726 || safe_from_p (x, TREE_CHAIN (exp))));
2731 return safe_from_p (x, TREE_OPERAND (exp, 0));
2735 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2736 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2740 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2741 the expression. If it is set, we conflict iff we are that rtx or
2742 both are in memory. Otherwise, we check all operands of the
2743 expression recursively. */
2745 switch (TREE_CODE (exp))
2748 return staticp (TREE_OPERAND (exp, 0));
2751 if (GET_CODE (x) == MEM)
2756 exp_rtl = CALL_EXPR_RTL (exp);
2759 /* Assume that the call will clobber all hard registers and all of memory. */
2761 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2762 || GET_CODE (x) == MEM)
2769 exp_rtl = RTL_EXPR_RTL (exp);
2771 /* We don't know what this can modify. */
2776 case WITH_CLEANUP_EXPR:
2777 exp_rtl = RTL_EXPR_RTL (exp);
2781 exp_rtl = SAVE_EXPR_RTL (exp);
2784 case METHOD_CALL_EXPR:
2785 /* This takes an rtx argument, but shouldn't appear here. */
2789 /* If we have an rtx, we do not need to scan our operands. */
2793 nops = tree_code_length[(int) TREE_CODE (exp)];
2794 for (i = 0; i < nops; i++)
2795 if (TREE_OPERAND (exp, i) != 0
2796 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
2800 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
2804 if (GET_CODE (exp_rtl) == SUBREG)
2806 exp_rtl = SUBREG_REG (exp_rtl);
2807 if (GET_CODE (exp_rtl) == REG
2808 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
2812 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
2813 are memory and EXP is not readonly. */
2814 return ! (rtx_equal_p (x, exp_rtl)
2815 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
2816 && ! TREE_READONLY (exp)));
2819 /* If we reach here, it is safe. */
2823 /* Subroutine of expand_expr: return nonzero iff EXP is an
2824 expression whose type is statically determinable. */
2830 if (TREE_CODE (exp) == PARM_DECL
2831 || TREE_CODE (exp) == VAR_DECL
2832 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
2833 || TREE_CODE (exp) == COMPONENT_REF
2834 || TREE_CODE (exp) == ARRAY_REF)
2839 /* expand_expr: generate code for computing expression EXP.
2840 An rtx for the computed value is returned. The value is never null.
2841 In the case of a void EXP, const0_rtx is returned.
2843 The value may be stored in TARGET if TARGET is nonzero.
2844 TARGET is just a suggestion; callers must assume that
2845 the rtx returned may not be the same as TARGET.
2847 If TARGET is CONST0_RTX, it means that the value will be ignored.
2849 If TMODE is not VOIDmode, it suggests generating the
2850 result in mode TMODE. But this is done only when convenient.
2851 Otherwise, TMODE is ignored and the value is generated in its natural mode.
2852 TMODE is just a suggestion; callers must assume that
2853 the rtx returned may not have mode TMODE.
2855 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
2856 with a constant address even if that address is not normally legitimate.
2857 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
2859 If MODIFIER is EXPAND_SUM then when EXP is an addition
2860 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
2861 or a nest of (PLUS ...) and (MINUS ...) where the terms are
2862 products as above, or REG or MEM, or constant.
2863 Ordinarily in such cases we would output mul or add instructions
2864 and then return a pseudo reg containing the sum.
2866 EXPAND_INITIALIZER is much like EXPAND_SUM except that
2867 it also marks a label as absolutely required (it can't be dead).
2868 This is used for outputting expressions used in initializers. */
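/* For example, with EXPAND_SUM, expanding `&arr[i]' for a four-byte
   element type may simply return rtl of the shape

     (plus (mult (reg) (const_int 4)) (reg))

   which the caller can fold into a single addressing mode, where
   ordinary expansion would emit the multiply and add as insns and
   return a pseudo register holding the result.  */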
2871 expand_expr (exp, target, tmode, modifier)
2874 enum machine_mode tmode;
2875 enum expand_modifier modifier;
2877 register rtx op0, op1, temp;
2878 tree type = TREE_TYPE (exp);
2879 int unsignedp = TREE_UNSIGNED (type);
2880 register enum machine_mode mode = TYPE_MODE (type);
2881 register enum tree_code code = TREE_CODE (exp);
2883 /* Use subtarget as the target for operand 0 of a binary operation. */
2884 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2885 rtx original_target = target;
2886 int ignore = target == const0_rtx;
2889 /* Don't use hard regs as subtargets, because the combiner
2890 can only handle pseudo regs. */
2891 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
2893 /* Avoid subtargets inside loops,
2894 since they hide some invariant expressions. */
2895 if (preserve_subexpressions_p ())
2898 if (ignore) target = 0, original_target = 0;
2900 /* If we will do cse, generate all results into pseudo registers
2901 since 1) that allows cse to find more things
2902 and 2) otherwise cse could produce an insn the machine cannot support. */
2905 if (! cse_not_expected && mode != BLKmode && target
2906 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
2909 /* Ensure we reference a volatile object even if value is ignored. */
2910 if (ignore && TREE_THIS_VOLATILE (exp)
2911 && mode != VOIDmode && mode != BLKmode)
2913 target = gen_reg_rtx (mode);
2914 temp = expand_expr (exp, target, VOIDmode, modifier);
2916 emit_move_insn (target, temp);
2923 if (modifier == EXPAND_INITIALIZER)
2924 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
2925 label_rtx (exp), forced_labels);
2926 return gen_rtx (MEM, FUNCTION_MODE,
2927 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
2930 if (DECL_RTL (exp) == 0)
2932 error_with_decl (exp, "prior parameter's size depends on `%s'");
2939 if (DECL_RTL (exp) == 0)
2941 /* Ensure the variable is marked as used
2942 even if it doesn't go through a parser. */
2943 TREE_USED (exp) = 1;
2944 /* Handle variables inherited from containing functions. */
2945 context = decl_function_context (exp);
2947 /* We treat inline_function_decl as an alias for the current function
2948 because that is the inline function whose vars, types, etc.
2949 are being merged into the current function.
2950 See expand_inline_function. */
2951 if (context != 0 && context != current_function_decl
2952 && context != inline_function_decl
2953 /* If var is static, we don't need a static chain to access it. */
2954 && ! (GET_CODE (DECL_RTL (exp)) == MEM
2955 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
2959 /* Mark as non-local and addressable. */
2960 TREE_NONLOCAL (exp) = 1;
2961 mark_addressable (exp);
2962 if (GET_CODE (DECL_RTL (exp)) != MEM)
2964 addr = XEXP (DECL_RTL (exp), 0);
2965 if (GET_CODE (addr) == MEM)
2966 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
2968 addr = fix_lexical_addr (addr, exp);
2969 return change_address (DECL_RTL (exp), mode, addr);
2971 /* This is the case of an array whose size is to be determined
2972 from its initializer, while the initializer is still being parsed. */
2974 if (GET_CODE (DECL_RTL (exp)) == MEM
2975 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
2976 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
2977 XEXP (DECL_RTL (exp), 0));
2978 if (GET_CODE (DECL_RTL (exp)) == MEM
2979 && modifier != EXPAND_CONST_ADDRESS
2980 && modifier != EXPAND_SUM
2981 && modifier != EXPAND_INITIALIZER)
2983 /* DECL_RTL probably contains a constant address.
2984 On RISC machines where a constant address isn't valid,
2985 make some insns to get that address into a register. */
2986 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
2988 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
2989 return change_address (DECL_RTL (exp), VOIDmode,
2990 copy_rtx (XEXP (DECL_RTL (exp), 0)));
2992 return DECL_RTL (exp);
2995 return immed_double_const (TREE_INT_CST_LOW (exp),
2996 TREE_INT_CST_HIGH (exp),
3000 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3003 /* If optimized, generate immediate CONST_DOUBLE
3004 which will be turned into memory by reload if necessary.
3006 We used to force a register so that loop.c could see it. But
3007 this does not allow gen_* patterns to perform optimizations with
3008 the constants. It also produces two insns in cases like "x = 1.0;".
3009 On most machines, floating-point constants are not permitted in
3010 many insns, so we'd end up copying it to a register in any case.
3012 Now, we do the copying in expand_binop, if appropriate. */
3013 return immed_real_const (exp);
3017 if (! TREE_CST_RTL (exp))
3018 output_constant_def (exp);
3020 /* TREE_CST_RTL probably contains a constant address.
3021 On RISC machines where a constant address isn't valid,
3022 make some insns to get that address into a register. */
3023 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3024 && modifier != EXPAND_CONST_ADDRESS
3025 && modifier != EXPAND_INITIALIZER
3026 && modifier != EXPAND_SUM
3027 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3028 return change_address (TREE_CST_RTL (exp), VOIDmode,
3029 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3030 return TREE_CST_RTL (exp);
3033 context = decl_function_context (exp);
3034 /* We treat inline_function_decl as an alias for the current function
3035 because that is the inline function whose vars, types, etc.
3036 are being merged into the current function.
3037 See expand_inline_function. */
3038 if (context == current_function_decl || context == inline_function_decl)
3041 /* If this is non-local, handle it. */
3044 temp = SAVE_EXPR_RTL (exp);
3045 if (temp && GET_CODE (temp) == REG)
3047 put_var_into_stack (exp);
3048 temp = SAVE_EXPR_RTL (exp);
3050 if (temp == 0 || GET_CODE (temp) != MEM)
3052 return change_address (temp, mode,
3053 fix_lexical_addr (XEXP (temp, 0), exp));
3055 if (SAVE_EXPR_RTL (exp) == 0)
3057 if (mode == BLKmode)
3059 = assign_stack_temp (mode,
3060 int_size_in_bytes (TREE_TYPE (exp)), 0);
3062 temp = gen_reg_rtx (mode);
3063 SAVE_EXPR_RTL (exp) = temp;
3064 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3065 if (!optimize && GET_CODE (temp) == REG)
3066 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3069 return SAVE_EXPR_RTL (exp);
3072 /* Exit the current loop if the body-expression is true. */
3074 rtx label = gen_label_rtx ();
3075 do_jump (TREE_OPERAND (exp, 0), label, 0);
3076 expand_exit_loop (0);
3082 expand_start_loop (1);
3083 expand_expr_stmt (TREE_OPERAND (exp, 0));
3090 tree vars = TREE_OPERAND (exp, 0);
3091 int vars_need_expansion = 0;
3093 /* Need to open a binding contour here because
3094 if there are any cleanups they must be contained here. */
3095 expand_start_bindings (0);
3097 /* Mark the corresponding BLOCK for output. */
3098 if (TREE_OPERAND (exp, 2) != 0)
3099 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3101 /* If VARS have not yet been expanded, expand them now. */
3104 if (DECL_RTL (vars) == 0)
3106 vars_need_expansion = 1;
3109 expand_decl_init (vars);
3110 vars = TREE_CHAIN (vars);
3113 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3115 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3121 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3123 emit_insns (RTL_EXPR_SEQUENCE (exp));
3124 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3125 return RTL_EXPR_RTL (exp);
3128 /* All elts simple constants => refer to a constant in memory. */
3129 if (TREE_STATIC (exp))
3130 /* For aggregate types with non-BLKmode modes,
3131 this should ideally construct a CONST_INT. */
3133 rtx constructor = output_constant_def (exp);
3134 if (! memory_address_p (GET_MODE (constructor),
3135 XEXP (constructor, 0)))
3136 constructor = change_address (constructor, VOIDmode,
3137 XEXP (constructor, 0));
3144 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3145 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3150 if (target == 0 || ! safe_from_p (target, exp))
3152 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3153 target = gen_reg_rtx (mode);
3156 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3158 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3159 target = safe_target;
3162 store_constructor (exp, target);
3168 tree exp1 = TREE_OPERAND (exp, 0);
3171 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3172 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3173 This code has the same general effect as simply doing
3174 expand_expr on the save expr, except that the expression PTR
3175 is computed for use as a memory address. This means different
3176 code, suitable for indexing, may be generated. */
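/* For example, for `*p += 1' the front end wraps `p' in a SAVE_EXPR;
   expanding that SAVE_EXPR here with EXPAND_SUM yields address-shaped
   rtl (say (plus (reg) (const_int 4))) which both the load and the
   later store can use directly as a memory address.  */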
3177 if (TREE_CODE (exp1) == SAVE_EXPR
3178 && SAVE_EXPR_RTL (exp1) == 0
3179 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3180 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3181 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3183 temp = expand_expr (TREE_OPERAND (exp1, 0), 0, VOIDmode, EXPAND_SUM);
3184 op0 = memory_address (mode, temp);
3185 op0 = copy_all_regs (op0);
3186 SAVE_EXPR_RTL (exp1) = op0;
3190 op0 = expand_expr (exp1, 0, VOIDmode, EXPAND_SUM);
3191 op0 = memory_address (mode, op0);
3194 temp = gen_rtx (MEM, mode, op0);
3195 /* If address was computed by addition,
3196 mark this as an element of an aggregate. */
3197 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3198 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3199 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3200 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3201 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3202 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3203 || (TREE_CODE (exp1) == ADDR_EXPR
3204 && (exp2 = TREE_OPERAND (exp1, 0))
3205 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3206 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3207 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3208 MEM_IN_STRUCT_P (temp) = 1;
3209 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3210 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3211 a location is accessed through a pointer to const does not mean
3212 that the value there can never change. */
3213 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3219 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3220 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3222 /* Nonconstant array index or nonconstant element size.
3223 Generate the tree for *(&array+index) and expand that,
3224 except do it in a language-independent way
3225 and don't complain about non-lvalue arrays.
3226 `mark_addressable' should already have been called
3227 for any array for which this case will be reached. */
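/* For example, with a non-constant index `i', the code below in
   effect rewrites `a[i]' as `*(&a + i * sizeof (a[0]))', doing the
   multiplication in a pointer-sized type so that it cannot overflow
   spuriously.  */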
3229 /* Don't forget the const or volatile flag from the array element. */
3230 tree variant_type = build_type_variant (type,
3231 TREE_READONLY (exp),
3232 TREE_THIS_VOLATILE (exp));
3233 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3234 TREE_OPERAND (exp, 0));
3235 tree index = TREE_OPERAND (exp, 1);
3238 /* Convert the integer argument to a type the same size as a pointer
3239 so the multiply won't overflow spuriously. */
3240 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3241 index = convert (type_for_size (POINTER_SIZE, 0), index);
3243 /* Don't think the address has side effects
3244 just because the array does.
3245 (In some cases the address might have side effects,
3246 and we fail to record that fact here. However, it should not
3247 matter, since expand_expr should not care.) */
3248 TREE_SIDE_EFFECTS (array_adr) = 0;
3250 elt = build1 (INDIRECT_REF, type,
3251 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3253 fold (build (MULT_EXPR,
3254 TYPE_POINTER_TO (variant_type),
3255 index, size_in_bytes (type))))));
3257 /* Volatility, etc., of new expression is same as old expression. */
3258 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3259 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3260 TREE_READONLY (elt) = TREE_READONLY (exp);
3262 return expand_expr (elt, target, tmode, modifier);
3265 /* Fold an expression like: "foo"[2].
3266 This is not done in fold so it won't happen inside &. */
3269 tree arg0 = TREE_OPERAND (exp, 0);
3270 tree arg1 = TREE_OPERAND (exp, 1);
3272 if (TREE_CODE (arg0) == STRING_CST
3273 && TREE_CODE (arg1) == INTEGER_CST
3274 && !TREE_INT_CST_HIGH (arg1)
3275 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3277 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3279 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3280 TREE_TYPE (exp) = integer_type_node;
3281 return expand_expr (exp, target, tmode, modifier);
3283 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3285 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3286 TREE_TYPE (exp) = integer_type_node;
3287 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3292 /* If this is a constant index into a constant array,
3293 just get the value from the array. */
3294 if (TREE_READONLY (TREE_OPERAND (exp, 0))
3295 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3296 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3297 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3298 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3299 && TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0))) != ERROR_MARK)
3301 tree index = fold (TREE_OPERAND (exp, 1));
3302 if (TREE_CODE (index) == INTEGER_CST
3303 && TREE_INT_CST_HIGH (index) == 0)
3305 int i = TREE_INT_CST_LOW (index);
3306 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3308 if (TREE_CODE (init) == CONSTRUCTOR)
3310 tree elem = CONSTRUCTOR_ELTS (init);
3313 elem = TREE_CHAIN (elem);
3315 return expand_expr (fold (TREE_VALUE (elem)), target,
3318 else if (TREE_CODE (init) == STRING_CST
3319 && i < TREE_STRING_LENGTH (init))
3321 temp = gen_rtx (CONST_INT, VOIDmode,
3322 TREE_STRING_POINTER (init)[i]);
3323 return convert_to_mode (mode, temp, 0);
3327 /* Treat array-ref with constant index as a component-ref. */
3332 enum machine_mode mode1;
3336 tree tem = get_inner_reference (exp, &bitsize, &bitpos,
3337 &mode1, &unsignedp, &volatilep);
3339 /* In some cases, we will be offsetting OP0's address by a constant.
3340 So get it as a sum, if possible. If we will be using it
3341 directly in an insn, we validate it. */
3342 op0 = expand_expr (tem, 0, VOIDmode, EXPAND_SUM);
3344 /* If this is a constant, put it into a register if it is a
3345 legitimate constant and into memory if it isn't. */
3346 if (CONSTANT_P (op0))
3348 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3349 if (LEGITIMATE_CONSTANT_P (op0))
3350 op0 = force_reg (mode, op0);
3352 op0 = validize_mem (force_const_mem (mode, op0));
3355 /* Don't forget about volatility even if this is a bitfield. */
3356 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3358 op0 = copy_rtx (op0);
3359 MEM_VOLATILE_P (op0) = 1;
3362 if (mode1 == VOIDmode
3363 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3365 /* In cases where an aligned union has an unaligned object
3366 as a field, we might be extracting a BLKmode value from
3367 an integer-mode (e.g., SImode) object. Handle this case
3368 by doing the extract into an object as wide as the field
3369 (which we know to be the width of a basic mode), then
3370 storing into memory, and changing the mode to BLKmode. */
3371 enum machine_mode ext_mode = mode;
3373 if (ext_mode == BLKmode)
3374 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3376 if (ext_mode == BLKmode)
3379 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3380 unsignedp, target, ext_mode, ext_mode,
3381 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3382 int_size_in_bytes (TREE_TYPE (tem)));
3383 if (mode == BLKmode)
3385 rtx new = assign_stack_temp (ext_mode,
3386 bitsize / BITS_PER_UNIT, 0);
3388 emit_move_insn (new, op0);
3389 op0 = copy_rtx (new);
3390 PUT_MODE (op0, BLKmode);
3396 /* Get a reference to just this component. */
3397 if (modifier == EXPAND_CONST_ADDRESS
3398 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3399 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3400 (bitpos / BITS_PER_UNIT)));
3402 op0 = change_address (op0, mode1,
3403 plus_constant (XEXP (op0, 0),
3404 (bitpos / BITS_PER_UNIT)));
3405 MEM_IN_STRUCT_P (op0) = 1;
3406 MEM_VOLATILE_P (op0) |= volatilep;
3407 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3410 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3411 convert_move (target, op0, unsignedp);
3417 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3418 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3419 op0 = expand_expr (addr, 0, VOIDmode, EXPAND_SUM);
3420 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3421 MEM_IN_STRUCT_P (temp) = 1;
3422 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3423 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3424 a location is accessed through a pointer to const does not mean
3425 that the value there can never change. */
3426 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3431 /* Intended for a reference to a buffer of a file-object in Pascal.
3432 But it's not certain that a special tree code will really be
3433 necessary for these. INDIRECT_REF might work for them. */
3437 case WITH_CLEANUP_EXPR:
3438 if (RTL_EXPR_RTL (exp) == 0)
3441 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3442 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2), cleanups_this_call);
3443 /* That's it for this cleanup. */
3444 TREE_OPERAND (exp, 2) = 0;
3446 return RTL_EXPR_RTL (exp);
3449 /* Check for a built-in function. */
3450 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3451 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3452 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3453 return expand_builtin (exp, target, subtarget, tmode, ignore);
3454 /* If this call was expanded already by preexpand_calls,
3455 just return the result we got. */
3456 if (CALL_EXPR_RTL (exp) != 0)
3457 return CALL_EXPR_RTL (exp);
3458 return expand_call (exp, target, ignore, modifier);
3460 case NON_LVALUE_EXPR:
3463 case REFERENCE_EXPR:
3464 if (TREE_CODE (type) == VOID_TYPE || ignore)
3466 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3469 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3470 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3471 if (TREE_CODE (type) == UNION_TYPE)
3473 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3476 if (mode == BLKmode)
3478 if (TYPE_SIZE (type) == 0
3479 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3481 target = assign_stack_temp (BLKmode,
3482 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3483 + BITS_PER_UNIT - 1)
3484 / BITS_PER_UNIT, 0);
3487 target = gen_reg_rtx (mode);
3489 if (GET_CODE (target) == MEM)
3490 /* Store data into beginning of memory target. */
3491 store_expr (TREE_OPERAND (exp, 0),
3492 change_address (target, TYPE_MODE (valtype), 0), 0);
3493 else if (GET_CODE (target) == REG)
3494 /* Store this field into a union of the proper type. */
3495 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3496 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3498 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3502 /* Return the entire union. */
3505 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, 0);
3506 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3508 if (flag_force_mem && GET_CODE (op0) == MEM)
3509 op0 = copy_to_reg (op0);
3512 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3514 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3518 /* We come here from MINUS_EXPR when the second operand is a constant. */
3520 this_optab = add_optab;
3522 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3523 something else, make sure we add the register to the constant and
3524 then to the other thing. This case can occur during strength
3525 reduction and doing it this way will produce better code if the
3526 frame pointer or argument pointer is eliminated.
3528 fold-const.c will ensure that the constant is always in the inner
3529 PLUS_EXPR, so the only case we need to do anything about is if
3530 sp, ap, or fp is our second argument, in which case we must swap
3531 the innermost first argument and our second argument. */
3533 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3534 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3535 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3536 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3537 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3538 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3540 tree t = TREE_OPERAND (exp, 1);
3542 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3543 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3546 /* If the result is to be Pmode and we are adding an integer to
3547 something, we might be forming a constant. So try to use
3548 plus_constant. If it produces a sum and we can't accept it,
3549 use force_operand. This allows P = &ARR[const] to generate
3550 efficient code on machines where a SYMBOL_REF is not a valid address.
3553 If this is an EXPAND_SUM call, always return the sum. */
3554 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3555 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3556 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3559 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3561 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3562 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3563 op1 = force_operand (op1, target);
3567 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3568 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3569 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3572 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3574 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3575 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3576 op0 = force_operand (op0, target);
3580 /* No sense saving up arithmetic to be done
3581 if it's all in the wrong mode to form part of an address.
3582 And force_operand won't know whether to sign-extend or zero-extend. */
3584 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3585 || mode != Pmode) goto binop;
3587 preexpand_calls (exp);
3588 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3591 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3592 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, modifier);
3594 /* Make sure any term that's a sum with a constant comes last. */
3595 if (GET_CODE (op0) == PLUS
3596 && CONSTANT_P (XEXP (op0, 1)))
3602 /* If adding to a sum including a constant,
3603 associate it to put the constant outside. */
3604 if (GET_CODE (op1) == PLUS
3605 && CONSTANT_P (XEXP (op1, 1)))
3607 rtx constant_term = const0_rtx;
3609 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3613 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3615 /* Let's also eliminate constants from op0 if possible. */
3616 op0 = eliminate_constant_term (op0, &constant_term);
3618 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3619 their sum should be a constant. Form it into OP1, since the
3620 result we want will then be OP0 + OP1. */
3622 temp = simplify_binary_operation (PLUS, mode, constant_term,
3627 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3630 /* Put a constant term last and put a multiplication first. */
3631 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3632 temp = op1, op1 = op0, op0 = temp;
3634 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3635 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3638 /* Handle difference of two symbolic constants,
3639 for the sake of an initializer. */
3640 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3641 && really_constant_p (TREE_OPERAND (exp, 0))
3642 && really_constant_p (TREE_OPERAND (exp, 1)))
3644 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, modifier);
3645 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, modifier);
3646 return gen_rtx (MINUS, mode, op0, op1);
3648 /* Convert A - const to A + (-const). */
3649 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3651 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3652 fold (build1 (NEGATE_EXPR, type,
3653 TREE_OPERAND (exp, 1))));
3656 this_optab = sub_optab;
3660 preexpand_calls (exp);
3661 /* If first operand is constant, swap them.
3662 Thus the following special case checks need only
3663 check the second operand. */
3664 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3666 register tree t1 = TREE_OPERAND (exp, 0);
3667 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3668 TREE_OPERAND (exp, 1) = t1;
3671 /* Attempt to return something suitable for generating an
3672 indexed address, for machines that support that. */
3674 if (modifier == EXPAND_SUM && mode == Pmode
3675 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3676 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT)
3678 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3680 /* Apply distributive law if OP0 is x+c. */
3681 if (GET_CODE (op0) == PLUS
3682 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
3683 return gen_rtx (PLUS, mode,
3684 gen_rtx (MULT, mode, XEXP (op0, 0),
3685 gen_rtx (CONST_INT, VOIDmode,
3686 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
3687 gen_rtx (CONST_INT, VOIDmode,
3688 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3689 * INTVAL (XEXP (op0, 1)))));
3691 if (GET_CODE (op0) != REG)
3692 op0 = force_operand (op0, 0);
3693 if (GET_CODE (op0) != REG)
3694 op0 = copy_to_mode_reg (mode, op0);
3696 return gen_rtx (MULT, mode, op0,
3697 gen_rtx (CONST_INT, VOIDmode,
3698 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
3701 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3704 /* Check for multiplying things that have been extended
3705 from a narrower type. If this machine supports multiplying
3706 in that narrower type with a result in the desired type,
3707 do it that way, and avoid the explicit type-conversion. */
3708 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
3709 && TREE_CODE (type) == INTEGER_TYPE
3710 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3711 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
3712 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3713 && int_fits_type_p (TREE_OPERAND (exp, 1),
3714 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3715 /* Don't use a widening multiply if a shift will do. */
3716 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
3717 > HOST_BITS_PER_INT)
3718 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
3720 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
3721 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
3723 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
3724 /* If both operands are extended, they must either both
3725 be zero-extended or both be sign-extended. */
3726 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
3728 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
3730 enum machine_mode innermode
3731 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
3732 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3733 ? umul_widen_optab : smul_widen_optab);
3734 if (mode == GET_MODE_WIDER_MODE (innermode)
3735 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3737 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
3739 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3740 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3742 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
3747 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3748 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3749 return expand_mult (mode, op0, op1, target, unsignedp);
3751 case TRUNC_DIV_EXPR:
3752 case FLOOR_DIV_EXPR:
3754 case ROUND_DIV_EXPR:
3755 case EXACT_DIV_EXPR:
3756 preexpand_calls (exp);
3757 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3759 /* Possible optimization: compute the dividend with EXPAND_SUM;
3760 then, if the divisor is constant, we can optimize the case
3761 where some terms of the dividend have coefficients divisible by it. */
3762 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3763 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3764 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
3767 this_optab = flodiv_optab;
3770 case TRUNC_MOD_EXPR:
3771 case FLOOR_MOD_EXPR:
3773 case ROUND_MOD_EXPR:
3774 preexpand_calls (exp);
3775 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3777 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3778 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3779 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
3781 case FIX_ROUND_EXPR:
3782 case FIX_FLOOR_EXPR:
3784 abort (); /* Not used for C. */
3786 case FIX_TRUNC_EXPR:
3787 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
3789 target = gen_reg_rtx (mode);
3790 expand_fix (target, op0, unsignedp);
3794 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
3796 target = gen_reg_rtx (mode);
3797 /* expand_float can't figure out what to do if FROM has VOIDmode.
3798 So give it the correct mode. With -O, cse will optimize this. */
3799 if (GET_MODE (op0) == VOIDmode)
3800 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
3802 expand_float (target, op0,
3803 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3807 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
3808 temp = expand_unop (mode, neg_optab, op0, target, 0);
3814 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3816 /* Unsigned abs is simply the operand. Testing here means we don't
3817 risk generating incorrect code below. */
3818 if (TREE_UNSIGNED (type))
3821 /* First try to do it with a special abs instruction. */
3822 temp = expand_unop (mode, abs_optab, op0, target, 0);
3826 /* If this machine has expensive jumps, we can do integer absolute
3827 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
3828 where W is the width of MODE. */
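/* For example, with W = 32 and x = -5: the arithmetic shift gives
   t = x >> 31 = -1, then (x ^ t) = 4 and 4 - t = 5.  For x >= 0 the
   shift gives t = 0 and the whole expression reduces to x.  */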
3830 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
3832 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
3833 size_int (GET_MODE_BITSIZE (mode) - 1),
3836 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
3839 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
3846 /* If that does not win, use conditional jump and negate. */
3847 target = original_target;
3848 temp = gen_label_rtx ();
3849 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
3850 || (GET_CODE (target) == REG
3851 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3852 target = gen_reg_rtx (mode);
3853 emit_move_insn (target, op0);
3854 emit_cmp_insn (target,
3855 expand_expr (convert (type, integer_zero_node),
3859 emit_jump_insn (gen_bge (temp));
3860 op0 = expand_unop (mode, neg_optab, target, target, 0);
3862 emit_move_insn (target, op0);
3869 target = original_target;
3870 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
3871 || (GET_CODE (target) == REG
3872 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3873 target = gen_reg_rtx (mode);
3874 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3875 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
3877 /* First try to do it with a special MIN or MAX instruction.
3878 If that does not win, use a conditional jump to select the proper value. */
3880 this_optab = (TREE_UNSIGNED (type)
3881 ? (code == MIN_EXPR ? umin_optab : umax_optab)
3882 : (code == MIN_EXPR ? smin_optab : smax_optab));
3884 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
3890 emit_move_insn (target, op0);
3891 op0 = gen_label_rtx ();
3892 if (code == MAX_EXPR)
3893 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
3894 ? compare_from_rtx (target, op1, GEU, 1, mode, 0, 0)
3895 : compare_from_rtx (target, op1, GE, 0, mode, 0, 0));
3897 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
3898 ? compare_from_rtx (target, op1, LEU, 1, mode, 0, 0)
3899 : compare_from_rtx (target, op1, LE, 0, mode, 0, 0));
3900 if (temp == const0_rtx)
3901 emit_move_insn (target, op1);
3902 else if (temp != const_true_rtx)
3904 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
3905 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
3908 emit_move_insn (target, op1);
3913 /* ??? Can optimize when the operand of this is a bitwise operation,
3914 by using a different bitwise operation. */
3916 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3917 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
3923 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3924 temp = expand_unop (mode, ffs_optab, op0, target, 1);
3929 /* ??? Can optimize bitwise operations with one arg constant.
3930 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
3931 and (a bitwise1 b) bitwise2 b (etc)
3932 but that is probably not worthwhile. */
3934 /* BIT_AND_EXPR is for bitwise anding.
3935 TRUTH_AND_EXPR is for anding two boolean values
3936 when we want in all cases to compute both of them.
3937 In general it is fastest to do TRUTH_AND_EXPR by
3938 computing both operands as actual zero-or-1 values
3939 and then bitwise anding. In cases where there cannot
3940 be any side effects, better code would be made by
3941 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
3942 but the question is how to recognize those cases. */
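/* For example, `p && q' (TRUTH_ANDIF_EXPR) branches around the
   evaluation of `q', while TRUTH_AND_EXPR computes both `p' and `q'
   as 0-or-1 values and bitwise-ands them, which is what the shared
   binop code below does.  */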
3944 case TRUTH_AND_EXPR:
3946 this_optab = and_optab;
3949 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
3952 this_optab = ior_optab;
3956 this_optab = xor_optab;
3963 preexpand_calls (exp);
3964 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3966 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3967 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
3970 /* Could determine the answer when only additive constants differ.
3971 Also, the addition of one can be handled by changing the condition. */
3978 preexpand_calls (exp);
3979 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
3982 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
3983 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
3985 && GET_CODE (original_target) == REG
3986 && (GET_MODE (original_target)
3987 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3989 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
3990 if (temp != original_target)
3991 temp = copy_to_reg (temp);
3992 op1 = gen_label_rtx ();
3993 emit_cmp_insn (temp, const0_rtx, EQ, 0,
3994 GET_MODE (temp), unsignedp, 0);
3995 emit_jump_insn (gen_beq (op1));
3996 emit_move_insn (temp, const1_rtx);
4000 /* If no set-flag instruction, must generate a conditional
4001 store into a temporary variable. Drop through
4002 and handle this like && and ||. */
4004 case TRUTH_ANDIF_EXPR:
4005 case TRUTH_ORIF_EXPR:
4006 if (target == 0 || ! safe_from_p (target, exp)
4007 /* Make sure we don't have a hard reg (such as function's return
4008 value) live across basic blocks, if not optimizing. */
4009 || (!optimize && GET_CODE (target) == REG
4010 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4011 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4012 emit_clr_insn (target);
4013 op1 = gen_label_rtx ();
4014 jumpifnot (exp, op1);
4015 emit_0_to_1_insn (target);
4019 case TRUTH_NOT_EXPR:
4020 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4021 /* The parser is careful to generate TRUTH_NOT_EXPR
4022 only with operands that are always zero or one. */
4023 temp = expand_binop (mode, xor_optab, op0,
4024 gen_rtx (CONST_INT, VOIDmode, 1),
4025 target, 1, OPTAB_LIB_WIDEN);
4031 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4033 return expand_expr (TREE_OPERAND (exp, 1),
4034 (ignore ? const0_rtx : target),
4039 /* Note that COND_EXPRs whose type is a structure or union
4040 are required to be constructed to contain assignments of
4041 a temporary variable, so that we can evaluate them here
4042 for side effect only. If type is void, we must do likewise. */
4044 /* If an arm of the branch requires a cleanup,
4045 only that cleanup is performed. */
4048 tree binary_op = 0, unary_op = 0;
4049 tree old_cleanups = cleanups_this_call;
4050 cleanups_this_call = 0;
4052 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4053 convert it to our mode, if necessary. */
4054 if (integer_onep (TREE_OPERAND (exp, 1))
4055 && integer_zerop (TREE_OPERAND (exp, 2))
4056 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4058 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4059 if (GET_MODE (op0) == mode)
4062 target = gen_reg_rtx (mode);
4063 convert_move (target, op0, unsignedp);
4067 /* If we are not to produce a result, we have no target. Otherwise,
4068 if a target was specified use it; it will not be used as an
4069 intermediate target unless it is safe. If no target, use a temporary. */
4072 if (mode == VOIDmode || ignore)
4074 else if (original_target
4075 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4076 temp = original_target;
4077 else if (mode == BLKmode)
4079 if (TYPE_SIZE (type) == 0
4080 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4082 temp = assign_stack_temp (BLKmode,
4083 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4084 + BITS_PER_UNIT - 1)
4085 / BITS_PER_UNIT, 0);
4088 temp = gen_reg_rtx (mode);
4090 /* Check for X ? A + B : A. If we have this, we can copy
4091 A to the output and conditionally add B. Similarly for unary
4092 operations. Don't do this if X has side-effects because
4093 those side effects might affect A or B and the "?" operation is
4094 a sequence point in ANSI. (We test for side effects later.) */
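/* For example, `x ? a + b : a' can be compiled by storing A into the
   target once and conditionally adding B, rather than branching to
   two separate stores; SINGLETON below records the bare-A arm and
   BINARY_OP the arm that operates on it.  */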
4096 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4097 && operand_equal_p (TREE_OPERAND (exp, 2),
4098 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4099 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4100 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4101 && operand_equal_p (TREE_OPERAND (exp, 1),
4102 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4103 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4104 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4105 && operand_equal_p (TREE_OPERAND (exp, 2),
4106 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4107 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4108 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4109 && operand_equal_p (TREE_OPERAND (exp, 1),
4110 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4111 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4113 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4114 operation, do this as A + (X != 0). Similarly for other simple
4115 binary operators. */
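/* Illustrative example (not part of the original source): given
   `r = x ? a + 1 : a' on a machine with a store-flag instruction,
   the code below arranges to emit, in effect,
       r = a + (x != 0);
   so the conditional needs no branch at all.  */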
4116 if (singleton && binary_op
4117 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4118 && (TREE_CODE (binary_op) == PLUS_EXPR
4119 || TREE_CODE (binary_op) == MINUS_EXPR
4120 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4121 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4122 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4123 && integer_onep (TREE_OPERAND (binary_op, 1))
4124 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4127 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4128 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4129 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4130 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4131 : and_optab);
4133 /* If we had X ? A : A + 1, do this as A + (X == 0).
4135 We have to invert the truth value here and then put it
4136 back later if do_store_flag fails. We cannot simply copy
4137 TREE_OPERAND (exp, 0) to another variable and modify that
4138 because invert_truthvalue can modify the tree pointed to
4139 by its argument. */
4140 if (singleton == TREE_OPERAND (exp, 1))
4141 TREE_OPERAND (exp, 0)
4142 = invert_truthvalue (TREE_OPERAND (exp, 0));
4144 result = do_store_flag (TREE_OPERAND (exp, 0),
4145 safe_from_p (temp, singleton) ? temp : 0,
4146 mode, BRANCH_COST <= 1);
4150 op1 = expand_expr (singleton, 0, VOIDmode, 0);
4151 return expand_binop (mode, boptab, op1, result, temp,
4152 unsignedp, OPTAB_LIB_WIDEN);
4154 else if (singleton == TREE_OPERAND (exp, 1))
4155 TREE_OPERAND (exp, 0)
4156 = invert_truthvalue (TREE_OPERAND (exp, 0));
4160 op0 = gen_label_rtx ();
4162 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4166 /* If the target conflicts with the other operand of the
4167 binary op, we can't use it. Also, we can't use the target
4168 if it is a hard register, because evaluating the condition
4169 might clobber it. */
4171 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4172 || (GET_CODE (temp) == REG
4173 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4174 temp = gen_reg_rtx (mode);
4175 store_expr (singleton, temp, 0);
4178 expand_expr (singleton, ignore ? const1_rtx : 0, VOIDmode, 0);
4179 if (cleanups_this_call)
4181 sorry ("aggregate value in COND_EXPR");
4182 cleanups_this_call = 0;
4184 if (singleton == TREE_OPERAND (exp, 1))
4185 jumpif (TREE_OPERAND (exp, 0), op0);
4187 jumpifnot (TREE_OPERAND (exp, 0), op0);
4189 if (binary_op && temp == 0)
4190 /* Just touch the other operand. */
4191 expand_expr (TREE_OPERAND (binary_op, 1),
4192 ignore ? const0_rtx : 0, VOIDmode, 0);
4194 store_expr (build (TREE_CODE (binary_op), type,
4195 make_tree (type, temp),
4196 TREE_OPERAND (binary_op, 1)),
4199 store_expr (build1 (TREE_CODE (unary_op), type,
4200 make_tree (type, temp)),
4205 /* This is now done in jump.c and is better done there because it
4206 produces shorter register lifetimes. */
4208 /* Check for both possibilities, either constants or variables
4209 in registers (but not the same as the target!). If so, can
4210 save branches by assigning one, branching, and assigning the
4211 other. */
4212 else if (temp && GET_MODE (temp) != BLKmode
4213 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4214 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4215 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4216 && DECL_RTL (TREE_OPERAND (exp, 1))
4217 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4218 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4219 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4220 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4221 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4222 && DECL_RTL (TREE_OPERAND (exp, 2))
4223 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4224 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4226 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4227 temp = gen_reg_rtx (mode);
4228 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4229 jumpifnot (TREE_OPERAND (exp, 0), op0);
4230 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4234 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4235 comparison operator. If we have one of these cases, set the
4236 output to A, branch on A (cse will merge these two references),
4237 then set the output to FOO. */
4239 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4240 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4241 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4242 TREE_OPERAND (exp, 1), 0)
4243 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4244 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4246 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4247 temp = gen_reg_rtx (mode);
4248 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4249 jumpif (TREE_OPERAND (exp, 0), op0);
4250 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4254 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4255 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4256 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4257 TREE_OPERAND (exp, 2), 0)
4258 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4259 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4261 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4262 temp = gen_reg_rtx (mode);
4263 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4264 jumpifnot (TREE_OPERAND (exp, 0), op0);
4265 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4270 op1 = gen_label_rtx ();
4271 jumpifnot (TREE_OPERAND (exp, 0), op0);
4273 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4275 expand_expr (TREE_OPERAND (exp, 1), ignore ? const0_rtx : 0,
4277 if (cleanups_this_call)
4279 sorry ("aggregate value in COND_EXPR");
4280 cleanups_this_call = 0;
4284 emit_jump_insn (gen_jump (op1));
4288 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4290 expand_expr (TREE_OPERAND (exp, 2), ignore ? const0_rtx : 0,
4294 if (cleanups_this_call)
4296 sorry ("aggregate value in COND_EXPR");
4297 cleanups_this_call = 0;
4303 cleanups_this_call = old_cleanups;
4309 /* Something needs to be initialized, but we didn't know
4310 where that thing was when building the tree. For example,
4311 it could be the return value of a function, or a parameter
4312 to a function which is laid down on the stack, or a temporary
4313 variable which must be passed by reference.
4315 We guarantee that the expression will either be constructed
4316 or copied into our original target. */
4318 tree slot = TREE_OPERAND (exp, 0);
4320 if (TREE_CODE (slot) != VAR_DECL)
4325 if (DECL_RTL (slot) != 0)
4326 target = DECL_RTL (slot);
4329 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4330 /* All temp slots at this level must not conflict. */
4331 preserve_temp_slots (target);
4332 DECL_RTL (slot) = target;
4336 /* Since SLOT is not known to the called function
4337 to belong to its stack frame, we must build an explicit
4338 cleanup. This case occurs when we must build up a reference
4339 to pass the reference as an argument. In this case,
4340 it is very likely that such a reference need not be
4341 built here. */
4343 if (TREE_OPERAND (exp, 2) == 0)
4344 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4345 if (TREE_OPERAND (exp, 2))
4346 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2),
4347 cleanups_this_call);
4352 /* This case does occur, when expanding a parameter which
4353 needs to be constructed on the stack. The target
4354 is the actual stack address that we want to initialize.
4355 The function we call will perform the cleanup in this case. */
4357 DECL_RTL (slot) = target;
4360 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4365 tree lhs = TREE_OPERAND (exp, 0);
4366 tree rhs = TREE_OPERAND (exp, 1);
4367 tree noncopied_parts = 0;
4368 tree lhs_type = TREE_TYPE (lhs);
4370 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4371 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4372 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4373 TYPE_NONCOPIED_PARTS (lhs_type));
4374 while (noncopied_parts != 0)
4376 expand_assignment (TREE_VALUE (noncopied_parts),
4377 TREE_PURPOSE (noncopied_parts), 0, 0);
4378 noncopied_parts = TREE_CHAIN (noncopied_parts);
4385 /* If lhs is complex, expand calls in rhs before computing it.
4386 That's so we don't compute a pointer and save it over a call.
4387 If lhs is simple, compute it first so we can give it as a
4388 target if the rhs is just a call. This avoids an extra temp and copy
4389 and that prevents a partial-subsumption which makes bad code.
4390 Actually we could treat component_ref's of vars like vars. */
4392 tree lhs = TREE_OPERAND (exp, 0);
4393 tree rhs = TREE_OPERAND (exp, 1);
4394 tree noncopied_parts = 0;
4395 tree lhs_type = TREE_TYPE (lhs);
4399 if (TREE_CODE (lhs) != VAR_DECL
4400 && TREE_CODE (lhs) != RESULT_DECL
4401 && TREE_CODE (lhs) != PARM_DECL)
4402 preexpand_calls (exp);
4404 /* Check for |= or &= of a bitfield of size one into another bitfield
4405 of size 1. In this case, (unless we need the result of the
4406 assignment) we can do this more efficiently with a
4407 test followed by an assignment, if necessary.
4409 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4410 things change so we do, this code should be enhanced to
4411 handle it. */
4413 && TREE_CODE (lhs) == COMPONENT_REF
4414 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4415 || TREE_CODE (rhs) == BIT_AND_EXPR)
4416 && TREE_OPERAND (rhs, 0) == lhs
4417 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4418 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4419 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4421 rtx label = gen_label_rtx ();
4423 do_jump (TREE_OPERAND (rhs, 1),
4424 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4425 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4426 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4427 (TREE_CODE (rhs) == BIT_IOR_EXPR
4429 : integer_zero_node)),
4431 do_pending_stack_adjust ();
4436 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4437 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4438 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4439 TYPE_NONCOPIED_PARTS (lhs_type));
4441 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4442 while (noncopied_parts != 0)
4444 expand_assignment (TREE_PURPOSE (noncopied_parts),
4445 TREE_VALUE (noncopied_parts), 0, 0);
4446 noncopied_parts = TREE_CHAIN (noncopied_parts);
4451 case PREINCREMENT_EXPR:
4452 case PREDECREMENT_EXPR:
4453 return expand_increment (exp, 0);
4455 case POSTINCREMENT_EXPR:
4456 case POSTDECREMENT_EXPR:
4457 /* Faster to treat as pre-increment if result is not used. */
4458 return expand_increment (exp, ! ignore);
4461 /* Are we taking the address of a nested function? */
4462 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4463 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4465 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4466 op0 = force_operand (op0, target);
4470 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode,
4471 (modifier == EXPAND_INITIALIZER
4472 ? modifier : EXPAND_CONST_ADDRESS));
4473 if (GET_CODE (op0) != MEM)
4476 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4477 return XEXP (op0, 0);
4478 op0 = force_operand (XEXP (op0, 0), target);
4480 if (flag_force_addr && GET_CODE (op0) != REG)
4481 return force_reg (Pmode, op0);
4484 case ENTRY_VALUE_EXPR:
4491 return (*lang_expand_expr) (exp, target, tmode, modifier);
4494 /* Here to do an ordinary binary operator, generating an instruction
4495 from the optab already placed in `this_optab'. */
4497 preexpand_calls (exp);
4498 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4500 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4501 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
4503 temp = expand_binop (mode, this_optab, op0, op1, target,
4504 unsignedp, OPTAB_LIB_WIDEN);
4510 /* Return the alignment in bits of EXP, a pointer valued expression.
4511 But don't return more than MAX_ALIGN no matter what.
4512 The alignment returned is, by default, the alignment of the thing that
4513 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4515 Otherwise, look at the expression to see if we can do better, i.e., if the
4516 expression is actually pointing at an object whose alignment is tighter. */
4519 get_pointer_alignment (exp, max_align)
4523 unsigned align, inner;
4525 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4528 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4529 align = MIN (align, max_align);
4533 switch (TREE_CODE (exp))
4537 case NON_LVALUE_EXPR:
4538 exp = TREE_OPERAND (exp, 0);
4539 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4541 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4542 inner = MIN (inner, max_align);
4543 align = MAX (align, inner);
4547 /* If sum of pointer + int, restrict our maximum alignment to that
4548 imposed by the integer. If not, we can't do any better than
4549 ALIGN. */
4550 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4553 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4558 exp = TREE_OPERAND (exp, 0);
4562 /* See what we are pointing at and look at its alignment. */
4563 exp = TREE_OPERAND (exp, 0);
4564 if (TREE_CODE (exp) == FUNCTION_DECL)
4565 align = MAX (align, FUNCTION_BOUNDARY);
4566 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4567 align = MAX (align, DECL_ALIGN (exp));
4568 #ifdef CONSTANT_ALIGNMENT
4569 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4570 align = CONSTANT_ALIGNMENT (exp, align);
4572 return MIN (align, max_align);
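/* Illustrative usage (a sketch, not in the original file): the builtin
   expanders below call this as, e.g.,
       align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
   and treat a result of 0 as "not of pointer type; don't open-code".  */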
4580 /* Return the tree node and offset if a given argument corresponds to
4581 a string constant. */
4584 string_constant (arg, ptr_offset)
4590 if (TREE_CODE (arg) == ADDR_EXPR
4591 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4593 *ptr_offset = integer_zero_node;
4594 return TREE_OPERAND (arg, 0);
4596 else if (TREE_CODE (arg) == PLUS_EXPR)
4598 tree arg0 = TREE_OPERAND (arg, 0);
4599 tree arg1 = TREE_OPERAND (arg, 1);
4604 if (TREE_CODE (arg0) == ADDR_EXPR
4605 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4608 return TREE_OPERAND (arg0, 0);
4610 else if (TREE_CODE (arg1) == ADDR_EXPR
4611 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4614 return TREE_OPERAND (arg1, 0);
4621 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4622 way, because the string could contain a zero byte in the middle.
4623 TREE_STRING_LENGTH is the size of the character array, not the string.
4625 Unfortunately, string_constant can't access the values of const char
4626 arrays with initializers, so neither can we here. */
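/* Example (illustrative): for the literal "foo\0bar", TREE_STRING_LENGTH
   is 8 (the size of the array, counting the trailing null), while the C
   string length computed here is 3.  */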
4636 src = string_constant (src, &offset_node);
4639 max = TREE_STRING_LENGTH (src);
4640 ptr = TREE_STRING_POINTER (src);
4641 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4643 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4644 compute the offset to the following null if we don't know where to
4645 start searching for it. */
4647 for (i = 0; i < max; i++)
4650 /* We don't know the starting offset, but we do know that the string
4651 has no internal zero bytes. We can assume that the offset falls
4652 within the bounds of the string; otherwise, the programmer deserves
4653 what he gets. Subtract the offset from the length of the string,
4654 and return that. */
4655 /* This would perhaps not be valid if we were dealing with named
4656 arrays in addition to literal string constants. */
4657 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4660 /* We have a known offset into the string. Start searching there for
4661 a null character. */
4662 if (offset_node == 0)
4666 /* Did we get a long long offset? If so, punt. */
4667 if (TREE_INT_CST_HIGH (offset_node) != 0)
4669 offset = TREE_INT_CST_LOW (offset_node);
4671 /* If the offset is known to be out of bounds, warn, and call strlen at
4672 runtime. */
4673 if (offset < 0 || offset > max)
4675 warning ("offset outside bounds of constant string");
4678 /* Use strlen to search for the first zero byte. Since any strings
4679 constructed with build_string will have nulls appended, we win even
4680 if we get handed something like (char[4])"abcd".
4682 Since OFFSET is our starting index into the string, no further
4683 calculation is needed. */
4684 return size_int (strlen (ptr + offset));
4687 /* Expand an expression EXP that calls a built-in function,
4688 with result going to TARGET if that's convenient
4689 (and in mode MODE if that's convenient).
4690 SUBTARGET may be used as the target for computing one of EXP's operands.
4691 IGNORE is nonzero if the value is to be ignored. */
4694 expand_builtin (exp, target, subtarget, mode, ignore)
4698 enum machine_mode mode;
4701 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4702 tree arglist = TREE_OPERAND (exp, 1);
4704 rtx lab1, lab2, insns;
4705 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
4707 switch (DECL_FUNCTION_CODE (fndecl))
4712 /* build_function_call changes these into ABS_EXPR. */
4715 case BUILT_IN_FSQRT:
4716 /* If not optimizing, call the library function. */
4721 /* Arg could be non-integer if user redeclared this fcn wrong. */
4722 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4725 /* Compute the argument. */
4726 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4728 /* Make a suitable register to place result in. */
4729 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4731 /* Test the argument to make sure it is in the proper domain for
4732 the sqrt function. If it is not in the domain, branch to a
4733 library call. */
4735 lab1 = gen_label_rtx ();
4736 lab2 = gen_label_rtx ();
4738 /* By default check the arguments. If flag_fast_math is turned on,
4739 then assume sqrt will always be called with valid arguments. */
4740 if (! flag_fast_math)
4742 /* By checking op > 0 we are able to catch all of the
4743 IEEE special cases with a single if conditional. */
4744 emit_cmp_insn (op0, CONST0_RTX (GET_MODE (op0)), GT, 0,
4745 GET_MODE (op0), 0, 0);
4746 emit_jump_insn (gen_bgt (lab1));
4748 /* The argument was not in the domain; do this via library call. */
4749 expand_call (exp, target, 0, 0);
4751 /* Branch around the open-coded version. */
4752 emit_jump_insn (gen_jump (lab2));
4756 /* Arg is in the domain, compute sqrt, into TARGET.
4757 Set TARGET to wherever the result comes back. */
4758 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4759 sqrt_optab, op0, target, 0);
4761 /* If we were unable to expand via the builtin, stop the
4762 sequence (without outputting the insns) and break, causing
4763 a call to the library function. */
4772 /* Output the entire sequence. */
4773 insns = get_insns ();
4779 case BUILT_IN_SAVEREGS:
4780 /* Don't do __builtin_saveregs more than once in a function.
4781 Save the result of the first call and reuse it. */
4782 if (saveregs_value != 0)
4783 return saveregs_value;
4785 /* When this function is called, it means that registers must be
4786 saved on entry to this function. So we migrate the
4787 call to the first insn of this function. */
4790 rtx valreg, saved_valreg;
4792 /* Now really call the function. `expand_call' does not call
4793 expand_builtin, so there is no danger of infinite recursion here. */
4796 #ifdef EXPAND_BUILTIN_SAVEREGS
4797 /* Do whatever the machine needs done in this case. */
4798 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
4800 /* The register where the function returns its value
4801 is likely to have something else in it, such as an argument.
4802 So preserve that register around the call. */
4803 if (value_mode != VOIDmode)
4805 valreg = hard_libcall_value (value_mode);
4806 saved_valreg = gen_reg_rtx (value_mode);
4807 emit_move_insn (saved_valreg, valreg);
4810 /* Generate the call, putting the value in a pseudo. */
4811 temp = expand_call (exp, target, ignore);
4813 if (value_mode != VOIDmode)
4814 emit_move_insn (valreg, saved_valreg);
4820 saveregs_value = temp;
4822 /* This won't work inside a SEQUENCE--it really has to be
4823 at the start of the function. */
4824 if (in_sequence_p ())
4826 /* Better to do this than to crash. */
4827 error ("`va_start' used within `({...})'");
4831 /* Put the sequence after the NOTE that starts the function. */
4832 emit_insns_before (seq, NEXT_INSN (get_insns ()));
4836 /* __builtin_args_info (N) returns word N of the arg space info
4837 for the current function. The number and meanings of words
4838 are controlled by the definition of CUMULATIVE_ARGS. */
4839 case BUILT_IN_ARGS_INFO:
4841 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4843 int *word_ptr = (int *) &current_function_args_info;
4844 tree type, elts, result;
4846 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
4847 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
4848 __FILE__, __LINE__);
4852 tree arg = TREE_VALUE (arglist);
4853 if (TREE_CODE (arg) != INTEGER_CST)
4854 error ("argument of __builtin_args_info must be constant");
4857 int wordnum = TREE_INT_CST_LOW (arg);
4859 if (wordnum < 0 || wordnum >= nwords)
4860 error ("argument of __builtin_args_info out of range");
4862 return gen_rtx (CONST_INT, VOIDmode, word_ptr[wordnum]);
4866 error ("missing argument in __builtin_args_info");
4871 for (i = 0; i < nwords; i++)
4872 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
4874 type = build_array_type (integer_type_node,
4875 build_index_type (build_int_2 (nwords, 0)));
4876 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
4877 TREE_CONSTANT (result) = 1;
4878 TREE_STATIC (result) = 1;
4879 result = build (INDIRECT_REF, build_pointer_type (type), result);
4880 TREE_CONSTANT (result) = 1;
4881 return expand_expr (result, 0, VOIDmode, 0);
4885 /* Return the address of the first anonymous stack arg. */
4886 case BUILT_IN_NEXT_ARG:
4888 tree fntype = TREE_TYPE (current_function_decl);
4889 if (!(TYPE_ARG_TYPES (fntype) != 0
4890 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4891 != void_type_node)))
4893 error ("`va_start' used in function with fixed args");
4898 return expand_binop (Pmode, add_optab,
4899 current_function_internal_arg_pointer,
4900 current_function_arg_offset_rtx,
4901 0, 0, OPTAB_LIB_WIDEN);
4903 case BUILT_IN_CLASSIFY_TYPE:
4906 tree type = TREE_TYPE (TREE_VALUE (arglist));
4907 enum tree_code code = TREE_CODE (type);
4908 if (code == VOID_TYPE)
4909 return gen_rtx (CONST_INT, VOIDmode, void_type_class);
4910 if (code == INTEGER_TYPE)
4911 return gen_rtx (CONST_INT, VOIDmode, integer_type_class);
4912 if (code == CHAR_TYPE)
4913 return gen_rtx (CONST_INT, VOIDmode, char_type_class);
4914 if (code == ENUMERAL_TYPE)
4915 return gen_rtx (CONST_INT, VOIDmode, enumeral_type_class);
4916 if (code == BOOLEAN_TYPE)
4917 return gen_rtx (CONST_INT, VOIDmode, boolean_type_class);
4918 if (code == POINTER_TYPE)
4919 return gen_rtx (CONST_INT, VOIDmode, pointer_type_class);
4920 if (code == REFERENCE_TYPE)
4921 return gen_rtx (CONST_INT, VOIDmode, reference_type_class);
4922 if (code == OFFSET_TYPE)
4923 return gen_rtx (CONST_INT, VOIDmode, offset_type_class);
4924 if (code == REAL_TYPE)
4925 return gen_rtx (CONST_INT, VOIDmode, real_type_class);
4926 if (code == COMPLEX_TYPE)
4927 return gen_rtx (CONST_INT, VOIDmode, complex_type_class);
4928 if (code == FUNCTION_TYPE)
4929 return gen_rtx (CONST_INT, VOIDmode, function_type_class);
4930 if (code == METHOD_TYPE)
4931 return gen_rtx (CONST_INT, VOIDmode, method_type_class);
4932 if (code == RECORD_TYPE)
4933 return gen_rtx (CONST_INT, VOIDmode, record_type_class);
4934 if (code == UNION_TYPE)
4935 return gen_rtx (CONST_INT, VOIDmode, union_type_class);
4936 if (code == ARRAY_TYPE)
4937 return gen_rtx (CONST_INT, VOIDmode, array_type_class);
4938 if (code == STRING_TYPE)
4939 return gen_rtx (CONST_INT, VOIDmode, string_type_class);
4940 if (code == SET_TYPE)
4941 return gen_rtx (CONST_INT, VOIDmode, set_type_class);
4942 if (code == FILE_TYPE)
4943 return gen_rtx (CONST_INT, VOIDmode, file_type_class);
4944 if (code == LANG_TYPE)
4945 return gen_rtx (CONST_INT, VOIDmode, lang_type_class);
4947 return gen_rtx (CONST_INT, VOIDmode, no_type_class);
4949 case BUILT_IN_CONSTANT_P:
4953 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
4954 ? const1_rtx : const0_rtx);
4956 case BUILT_IN_FRAME_ADDRESS:
4957 /* The argument must be a nonnegative integer constant.
4958 It counts the number of frames to scan up the stack.
4959 The value is the address of that frame. */
4960 case BUILT_IN_RETURN_ADDRESS:
4961 /* The argument must be a nonnegative integer constant.
4962 It counts the number of frames to scan up the stack.
4963 The value is the return address saved in that frame. */
4965 /* Warning about missing arg was already issued. */
4967 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
4969 error ("invalid arg to __builtin_return_address");
4972 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
4974 error ("invalid arg to __builtin_return_address");
4979 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
4980 rtx tem = frame_pointer_rtx;
4983 /* Scan back COUNT frames to the specified frame. */
4984 for (i = 0; i < count; i++)
4986 /* Assume the dynamic chain pointer is in the word that
4987 the frame address points to, unless otherwise specified. */
4988 #ifdef DYNAMIC_CHAIN_ADDRESS
4989 tem = DYNAMIC_CHAIN_ADDRESS (tem);
4991 tem = memory_address (Pmode, tem);
4992 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
4995 /* For __builtin_frame_address, return what we've got. */
4996 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4999 /* For __builtin_return_address,
5000 get the return address from that frame. */
5001 #ifdef RETURN_ADDR_RTX
5002 return RETURN_ADDR_RTX (count, tem);
5004 tem = memory_address (Pmode,
5005 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5006 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5010 case BUILT_IN_ALLOCA:
5012 /* Arg could be non-integer if user redeclared this fcn wrong. */
5013 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5015 current_function_calls_alloca = 1;
5016 /* Compute the argument. */
5017 op0 = expand_expr (TREE_VALUE (arglist), 0, VOIDmode, 0);
5019 /* Allocate the desired space. */
5020 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5022 /* Record the new stack level for nonlocal gotos. */
5023 if (nonlocal_goto_stack_level != 0)
5024 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
5028 /* If not optimizing, call the library function. */
5033 /* Arg could be non-integer if user redeclared this fcn wrong. */
5034 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5037 /* Compute the argument. */
5038 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5039 /* Compute ffs, into TARGET if possible.
5040 Set TARGET to wherever the result comes back. */
5041 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5042 ffs_optab, op0, target, 1);
5047 case BUILT_IN_STRLEN:
5048 /* If not optimizing, call the library function. */
5053 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5054 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5058 tree src = TREE_VALUE (arglist);
5059 tree len = c_strlen (src);
5062 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5064 rtx result, src_rtx, char_rtx;
5065 enum machine_mode insn_mode = value_mode, char_mode;
5066 enum insn_code icode;
5068 /* If the length is known, just return it. */
5070 return expand_expr (len, target, mode, 0);
5072 /* If SRC is not a pointer type, don't do this operation inline. */
5076 /* Call a function if we can't compute strlen in the right mode. */
5078 while (insn_mode != VOIDmode)
5080 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5081 if (icode != CODE_FOR_nothing)
5084 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5086 if (insn_mode == VOIDmode)
5089 /* Make a place to write the result of the instruction. */
5092 && GET_CODE (result) == REG
5093 && GET_MODE (result) == insn_mode
5094 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5095 result = gen_reg_rtx (insn_mode);
5097 /* Make sure the operands are acceptable to the predicates. */
5099 if (! (*insn_operand_predicate[icode][0]) (result, insn_mode))
5100 result = gen_reg_rtx (insn_mode);
5102 src_rtx = memory_address (BLKmode,
5103 expand_expr (src, 0, Pmode,
5105 if (! (*insn_operand_predicate[icode][1]) (src_rtx, Pmode))
5106 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5108 char_rtx = const0_rtx;
5109 char_mode = insn_operand_mode[icode][2];
5110 if (! (*insn_operand_predicate[icode][2]) (char_rtx, char_mode))
5111 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5113 emit_insn (GEN_FCN (icode) (result,
5114 gen_rtx (MEM, BLKmode, src_rtx),
5116 gen_rtx (CONST_INT, VOIDmode, align)));
5118 /* Return the value in the proper mode for this function. */
5119 if (GET_MODE (result) == value_mode)
5121 else if (target != 0)
5123 convert_move (target, result, 0);
5127 return convert_to_mode (value_mode, result, 0);
5130 case BUILT_IN_STRCPY:
5131 /* If not optimizing, call the library function. */
5136 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5137 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5138 || TREE_CHAIN (arglist) == 0
5139 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5143 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5148 len = size_binop (PLUS_EXPR, len, integer_one_node);
5150 chainon (arglist, build_tree_list (0, len));
5154 case BUILT_IN_MEMCPY:
5155 /* If not optimizing, call the library function. */
5160 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5161 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5162 || TREE_CHAIN (arglist) == 0
5163 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5164 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5165 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5169 tree dest = TREE_VALUE (arglist);
5170 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5171 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5174 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5176 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5179 /* If either SRC or DEST is not a pointer type, don't do
5180 this operation in-line. */
5181 if (src_align == 0 || dest_align == 0)
5183 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5184 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5188 dest_rtx = expand_expr (dest, 0, Pmode, EXPAND_NORMAL);
5190 /* Copy word part most expediently. */
5191 emit_block_move (gen_rtx (MEM, BLKmode,
5192 memory_address (BLKmode, dest_rtx)),
5193 gen_rtx (MEM, BLKmode,
5194 memory_address (BLKmode,
5195 expand_expr (src, 0, Pmode,
5197 expand_expr (len, 0, VOIDmode, 0),
5198 MIN (src_align, dest_align));
5202 /* These comparison functions need an instruction that returns an actual
5203 index. An ordinary compare that just sets the condition codes
5204 is not enough. */
5205 #ifdef HAVE_cmpstrsi
5206 case BUILT_IN_STRCMP:
5207 /* If not optimizing, call the library function. */
5212 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5213 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5214 || TREE_CHAIN (arglist) == 0
5215 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5217 else if (!HAVE_cmpstrsi)
5220 tree arg1 = TREE_VALUE (arglist);
5221 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5225 len = c_strlen (arg1);
5227 len = size_binop (PLUS_EXPR, integer_one_node, len);
5228 len2 = c_strlen (arg2);
5230 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5232 /* If we don't have a constant length for the first, use the length
5233 of the second, if we know it. We don't require a constant for
5234 this case; some cost analysis could be done if both are available
5235 but neither is constant. For now, assume they're equally cheap.
5237 If both strings have constant lengths, use the smaller. This
5238 could arise if optimization results in strcmp being called with
5239 two fixed strings, or if the code was machine-generated. We should
5240 add some code to the `memcmp' handler below to deal with such
5241 situations, someday. */
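/* E.g. (illustrative): for strcmp (x, "abc"), where the length of X is
   unknown, the comparison length passed to the cmpstrsi insn is 4 --
   the length of "abc" plus its terminating null.  */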
5242 if (!len || TREE_CODE (len) != INTEGER_CST)
5249 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5251 if (tree_int_cst_lt (len2, len))
5255 chainon (arglist, build_tree_list (0, len));
5259 case BUILT_IN_MEMCMP:
5260 /* If not optimizing, call the library function. */
5265 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5266 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5267 || TREE_CHAIN (arglist) == 0
5268 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5269 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5270 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5272 else if (!HAVE_cmpstrsi)
5275 tree arg1 = TREE_VALUE (arglist);
5276 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5277 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5281 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5283 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5284 enum machine_mode insn_mode
5285 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5287 /* If we don't have POINTER_TYPE, call the function. */
5288 if (arg1_align == 0 || arg2_align == 0)
5290 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5291 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5295 /* Make a place to write the result of the instruction. */
5298 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5299 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5300 result = gen_reg_rtx (insn_mode);
5302 emit_insn (gen_cmpstrsi (result,
5303 gen_rtx (MEM, BLKmode,
5304 expand_expr (arg1, 0, Pmode, EXPAND_NORMAL)),
5305 gen_rtx (MEM, BLKmode,
5306 expand_expr (arg2, 0, Pmode, EXPAND_NORMAL)),
5307 expand_expr (len, 0, VOIDmode, 0),
5308 gen_rtx (CONST_INT, VOIDmode,
5309 MIN (arg1_align, arg2_align))));
5311 /* Return the value in the proper mode for this function. */
5312 mode = TYPE_MODE (TREE_TYPE (exp));
5313 if (GET_MODE (result) == mode)
5315 else if (target != 0)
5317 convert_move (target, result, 0);
5321 return convert_to_mode (mode, result, 0);
5324 case BUILT_IN_STRCMP:
5325 case BUILT_IN_MEMCMP:
5329 default: /* just do library call, if unknown builtin */
5330 error ("built-in function %s not currently supported",
5331 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5334 /* The switch statement above can drop through to cause the function
5335 to be called normally. */
5337 return expand_call (exp, target, ignore);
5340 /* Expand code for a post- or pre- increment or decrement
5341 and return the RTX for the result.
5342 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
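/* For example (illustrative): expanding `i++' (POST == 1) returns an rtx
   holding the old value of `i', while `++i' (POST == 0) returns the
   incremented value.  */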
5345 expand_increment (exp, post)
5349 register rtx op0, op1;
5350 register rtx temp, value;
5351 register tree incremented = TREE_OPERAND (exp, 0);
5352 optab this_optab = add_optab;
5354 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5355 int op0_is_copy = 0;
5357 /* Stabilize any component ref that might need to be
5358 evaluated more than once below. */
5359 if (TREE_CODE (incremented) == BIT_FIELD_REF
5360 || (TREE_CODE (incremented) == COMPONENT_REF
5361 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5362 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5363 incremented = stabilize_reference (incremented);
5365 /* Compute the operands as RTX.
5366 Note whether OP0 is the actual lvalue or a copy of it:
5367 I believe it is a copy iff it is a register and insns were
5368 generated in computing it or if it is a SUBREG (generated when
5369 the low-order field in a register was referenced). */
5370 temp = get_last_insn ();
5371 op0 = expand_expr (incremented, 0, VOIDmode, 0);
5372 op0_is_copy = (GET_CODE (op0) == SUBREG
5373 || (GET_CODE (op0) == REG && temp != get_last_insn ()));
5374 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5376 /* Decide whether incrementing or decrementing. */
5377 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5378 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5379 this_optab = sub_optab;
5381 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5382 then we cannot just increment OP0. We must
5383 therefore contrive to increment the original value.
5384 Then we can return OP0 since it is a copy of the old value. */
5387 /* This is the easiest way to increment the value wherever it is.
5388 Problems with multiple evaluation of INCREMENTED
5389 are prevented because either (1) it is a component_ref,
5390 in which case it was stabilized above, or (2) it is an array_ref
5391 with constant index in an array in a register, which is
5392 safe to reevaluate. */
5393 tree newexp = build ((this_optab == add_optab
5394 ? PLUS_EXPR : MINUS_EXPR),
5397 TREE_OPERAND (exp, 1));
5398 temp = expand_assignment (incremented, newexp, ! post, 0);
5399 return post ? op0 : temp;
5402 /* Convert decrement by a constant into a negative increment. */
5403 if (this_optab == sub_optab
5404 && GET_CODE (op1) == CONST_INT)
5406 op1 = gen_rtx (CONST_INT, VOIDmode, - INTVAL (op1));
5407 this_optab = add_optab;
5412 /* We have a true reference to the value in OP0.
5413 If there is an insn to add or subtract in this mode, queue it. */
5415 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
5416 op0 = stabilize (op0);
5419 icode = (int) this_optab->handlers[(int) mode].insn_code;
5420 if (icode != (int) CODE_FOR_nothing
5421 /* Make sure that OP0 is valid for operands 0 and 1
5422 of the insn we want to queue. */
5423 && (*insn_operand_predicate[icode][0]) (op0, mode)
5424 && (*insn_operand_predicate[icode][1]) (op0, mode))
5426 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5427 op1 = force_reg (mode, op1);
5429 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5433 /* Preincrement, or we can't increment with one simple insn. */
5435 /* Save a copy of the value before inc or dec, to return it later. */
5436 temp = value = copy_to_reg (op0);
5438 /* Arrange to return the incremented value. */
5439 /* Copy the rtx because expand_binop will protect from the queue,
5440 and the results of that would be invalid for us to return
5441 if our caller does emit_queue before using our result. */
5442 temp = copy_rtx (value = op0);
5444 /* Increment however we can. */
5445 op1 = expand_binop (mode, this_optab, value, op1, op0,
5446 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5447 /* Make sure the value is stored into OP0. */
5449 emit_move_insn (op0, op1);
5454 /* Expand all function calls contained within EXP, innermost ones first.
5455 But don't look within expressions that have sequence points.
5456 For each CALL_EXPR, record the rtx for its value
5457 in the CALL_EXPR_RTL field. */
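/* E.g. (illustrative): in `f (g (x) + 1)', the inner call to `g' is
   expanded first and its rtx recorded in CALL_EXPR_RTL, so that no
   partially computed argument of `f' is live across that call.  */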
5460 preexpand_calls (exp)
5463 register int nops, i;
5464 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5466 if (! do_preexpand_calls)
5469 /* Only expressions and references can contain calls. */
5471 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5474 switch (TREE_CODE (exp))
5477 /* Do nothing if already expanded. */
5478 if (CALL_EXPR_RTL (exp) != 0)
5481 /* Do nothing to built-in functions. */
5482 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5483 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5484 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5485 CALL_EXPR_RTL (exp) = expand_call (exp, 0, 0, 0);
5490 case TRUTH_ANDIF_EXPR:
5491 case TRUTH_ORIF_EXPR:
5492 /* If we find one of these, then we can be sure
5493 the adjust will be done for it (since it makes jumps).
5494 Do it now, so that if this is inside an argument
5495 of a function, we don't get the stack adjustment
5496 after some other args have already been pushed. */
5497 do_pending_stack_adjust ();
5502 case WITH_CLEANUP_EXPR:
5506 if (SAVE_EXPR_RTL (exp) != 0)
5510 nops = tree_code_length[(int) TREE_CODE (exp)];
5511 for (i = 0; i < nops; i++)
5512 if (TREE_OPERAND (exp, i) != 0)
5514 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5515 if (type == 'e' || type == '<' || type == '1' || type == '2'
5517 preexpand_calls (TREE_OPERAND (exp, i));
5521 /* At the start of a function, record that we have no previously-pushed
5522 arguments waiting to be popped. */
5525 init_pending_stack_adjust ()
5527 pending_stack_adjust = 0;
5530 /* When exiting from function, if safe, clear out any pending stack adjust
5531 so the adjustment won't get done. */
5534 clear_pending_stack_adjust ()
5536 #ifdef EXIT_IGNORE_STACK
5537 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5538 && ! (TREE_INLINE (current_function_decl) && ! flag_no_inline)
5539 && ! flag_inline_functions)
5540 pending_stack_adjust = 0;
5544 /* Pop any previously-pushed arguments that have not been popped yet. */
5547 do_pending_stack_adjust ()
5549 if (inhibit_defer_pop == 0)
5551 if (pending_stack_adjust != 0)
5552 adjust_stack (gen_rtx (CONST_INT, VOIDmode, pending_stack_adjust));
5553 pending_stack_adjust = 0;
5557 /* Expand all cleanups up to OLD_CLEANUPS.
5558 Needed here, and also for language-dependent calls. */
5561 expand_cleanups_to (old_cleanups)
5564 while (cleanups_this_call != old_cleanups)
5566 expand_expr (TREE_VALUE (cleanups_this_call), 0, VOIDmode, 0);
5567 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5571 /* Expand conditional expressions. */
5573 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5574 LABEL is an rtx of code CODE_LABEL, in this function and all the
5578 jumpifnot (exp, label)
5582 do_jump (exp, label, 0);
5585 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5592 do_jump (exp, 0, label);
5595 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5596 the result is zero, or IF_TRUE_LABEL if the result is one.
5597 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5598 meaning fall through in that case.
5600 do_jump always does any pending stack adjust except when it does not
5601 actually perform a jump. An example where there is no jump
5602 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5604 This function is responsible for optimizing cases such as
5605 &&, || and comparison operators in EXP. */
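/* For instance (illustrative): for `if (a && b) ...', with only
   IF_FALSE_LABEL set, do_jump expands the TRUTH_ANDIF_EXPR as
       jump to IF_FALSE_LABEL if A is zero;
       jump to IF_FALSE_LABEL if B is zero;
   so short-circuit evaluation never computes a truth value in a
   register.  */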
5608 do_jump (exp, if_false_label, if_true_label)
5610 rtx if_false_label, if_true_label;
5612 register enum tree_code code = TREE_CODE (exp);
5613 /* Some cases need to create a label to jump to
5614 in order to properly fall through.
5615 These cases set DROP_THROUGH_LABEL nonzero. */
5616 rtx drop_through_label = 0;
5630 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5636 /* This is not true with #pragma weak */
5638 /* The address of something can never be zero. */
5640 emit_jump (if_true_label);
5645 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
5646 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
5647 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
5650 /* If we are narrowing the operand, we have to do the compare in the
5651 narrower mode. */
5652 if ((TYPE_PRECISION (TREE_TYPE (exp))
5653 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5655 case NON_LVALUE_EXPR:
5656 case REFERENCE_EXPR:
5661 /* These cannot change zero->non-zero or vice versa. */
5662 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5666 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
5667 a test and can be longer if the test is eliminated. */
5669 /* Reduce to minus. */
5670 exp = build (MINUS_EXPR, TREE_TYPE (exp),
5671 TREE_OPERAND (exp, 0),
5672 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
5673 TREE_OPERAND (exp, 1))));
5674 /* Process as MINUS. */
5678 /* Non-zero iff operands of minus differ. */
5679 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
5680 TREE_OPERAND (exp, 0),
5681 TREE_OPERAND (exp, 1)),
5686 /* If we are AND'ing with a small constant, do this comparison in the
5687 smallest type that fits. If the machine doesn't have comparisons
5688 that small, it will be converted back to the wider comparison.
5689 This helps if we are testing the sign bit of a narrower object.
5690 combine can't do this for us because it can't know whether a
5691 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
5693 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5694 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_INT
5695 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
5696 && (type = type_for_size (i + 1, 1)) != 0
5697 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)))
5699 do_jump (convert (type, exp), if_false_label, if_true_label);
5704 case TRUTH_NOT_EXPR:
5705 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5708 case TRUTH_ANDIF_EXPR:
5709 if (if_false_label == 0)
5710 if_false_label = drop_through_label = gen_label_rtx ();
5711 do_jump (TREE_OPERAND (exp, 0), if_false_label, 0);
5712 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5715 case TRUTH_ORIF_EXPR:
5716 if (if_true_label == 0)
5717 if_true_label = drop_through_label = gen_label_rtx ();
5718 do_jump (TREE_OPERAND (exp, 0), 0, if_true_label);
5719 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5723 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5726 do_pending_stack_adjust ();
5727 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5734 int bitsize, bitpos, unsignedp;
5735 enum machine_mode mode;
5739 /* Get description of this reference. We don't actually care
5740 about the underlying object here. */
5741 get_inner_reference (exp, &bitsize, &bitpos, &mode, &unsignedp,
5744 type = type_for_size (bitsize, unsignedp);
5745 if (type != 0 && bitsize >= 0
5746 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)))
5748 do_jump (convert (type, exp), if_false_label, if_true_label);
5755 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
5756 if (integer_onep (TREE_OPERAND (exp, 1))
5757 && integer_zerop (TREE_OPERAND (exp, 2)))
5758 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5760 else if (integer_zerop (TREE_OPERAND (exp, 1))
5761 && integer_onep (TREE_OPERAND (exp, 2)))
5762 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5766 register rtx label1 = gen_label_rtx ();
5767 drop_through_label = gen_label_rtx ();
5768 do_jump (TREE_OPERAND (exp, 0), label1, 0);
5769 /* Now the THEN-expression. */
5770 do_jump (TREE_OPERAND (exp, 1),
5771 if_false_label ? if_false_label : drop_through_label,
5772 if_true_label ? if_true_label : drop_through_label);
5773 /* In case the do_jump just above never jumps. */
5774 do_pending_stack_adjust ();
5775 emit_label (label1);
5776 /* Now the ELSE-expression. */
5777 do_jump (TREE_OPERAND (exp, 2),
5778 if_false_label ? if_false_label : drop_through_label,
5779 if_true_label ? if_true_label : drop_through_label);
5784 if (integer_zerop (TREE_OPERAND (exp, 1)))
5785 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5786 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5789 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5790 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
5792 comparison = compare (exp, EQ, EQ);
5796 if (integer_zerop (TREE_OPERAND (exp, 1)))
5797 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5798 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5801 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5802 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
5804 comparison = compare (exp, NE, NE);
5808 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5810 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5811 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
5813 comparison = compare (exp, LT, LTU);
5817 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5819 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5820 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
5822 comparison = compare (exp, LE, LEU);
5826 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5828 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5829 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
5831 comparison = compare (exp, GT, GTU);
5835 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5837 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5838 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
5840 comparison = compare (exp, GE, GEU);
5845 temp = expand_expr (exp, 0, VOIDmode, 0);
5847 /* This is not needed any more and causes poor code since it causes
5848 comparisons and tests from non-SI objects to have different code
5849 sequences. */
5850 /* Copy to register to avoid generating bad insns by cse
5851 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
5852 if (!cse_not_expected && GET_CODE (temp) == MEM)
5853 temp = copy_to_reg (temp);
5855 do_pending_stack_adjust ();
5856 if (GET_CODE (temp) == CONST_INT)
5857 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
5858 else if (GET_CODE (temp) == LABEL_REF)
5859 comparison = const_true_rtx;
5860 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5861 && !can_compare_p (GET_MODE (temp)))
5862 /* Note swapping the labels gives us not-equal. */
5863 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
5864 else if (GET_MODE (temp) != VOIDmode)
5865 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
5866 NE, 1, GET_MODE (temp), 0, 0);
5871 /* Do any postincrements in the expression that was tested. */
5874 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
5875 straight into a conditional jump instruction as the jump condition.
5876 Otherwise, all the work has been done already. */
5878 if (comparison == const_true_rtx)
5881 emit_jump (if_true_label);
5883 else if (comparison == const0_rtx)
5886 emit_jump (if_false_label);
5888 else if (comparison)
5889 do_jump_for_compare (comparison, if_false_label, if_true_label);
5893 if (drop_through_label)
5895 /* If do_jump produces code that might be jumped around,
5896 do any stack adjusts from that code, before the place
5897 where control merges in. */
5898 do_pending_stack_adjust ();
5899 emit_label (drop_through_label);
5903 /* Given a comparison expression EXP for values too wide to be compared
5904 with one insn, test the comparison and jump to the appropriate label.
5905 The code of EXP is ignored; we always test GT if SWAP is 0,
5906 and LT if SWAP is 1. */
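/* Sketch of the expansion (illustrative): comparing two double-word
   values A and B for GT on a 32-bit machine goes high-order word first:
       if (A.high > B.high)  goto if_true_label;
       if (A.high != B.high) goto if_false_label;
       if (A.low > B.low)    goto if_true_label;   (unsigned compare)
       goto if_false_label;
   Every word below the most significant one is compared unsigned.  */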
5909 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
5912 rtx if_false_label, if_true_label;
5914 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), 0, VOIDmode, 0);
5915 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), 0, VOIDmode, 0);
5916 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5917 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
5918 rtx drop_through_label = 0;
5919 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
5922 if (! if_true_label || ! if_false_label)
5923 drop_through_label = gen_label_rtx ();
5924 if (! if_true_label)
5925 if_true_label = drop_through_label;
5926 if (! if_false_label)
5927 if_false_label = drop_through_label;
5929 /* Compare a word at a time, high order first. */
5930 for (i = 0; i < nwords; i++)
5933 rtx op0_word, op1_word;
5935 if (WORDS_BIG_ENDIAN)
5937 op0_word = operand_subword_force (op0, i, mode);
5938 op1_word = operand_subword_force (op1, i, mode);
5942 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
5943 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
5946 /* All but high-order word must be compared as unsigned. */
5947 comp = compare_from_rtx (op0_word, op1_word,
5948 (unsignedp || i > 0) ? GTU : GT,
5949 unsignedp, word_mode, 0, 0);
5950 if (comp == const_true_rtx)
5951 emit_jump (if_true_label);
5952 else if (comp != const0_rtx)
5953 do_jump_for_compare (comp, 0, if_true_label);
5955 /* Consider lower words only if these are equal. */
5956 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
5958 if (comp == const_true_rtx)
5959 emit_jump (if_false_label);
5960 else if (comp != const0_rtx)
5961 do_jump_for_compare (comp, 0, if_false_label);
5965 emit_jump (if_false_label);
5966 if (drop_through_label)
5967 emit_label (drop_through_label);
5970 /* Given an EQ_EXPR expression EXP for values too wide to be compared
5971 with one insn, test the comparison and jump to the appropriate label. */
5974 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
5976 rtx if_false_label, if_true_label;
5978 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5979 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5980 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5981 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
5983 rtx drop_through_label = 0;
5985 if (! if_false_label)
5986 drop_through_label = if_false_label = gen_label_rtx ();
5988 for (i = 0; i < nwords; i++)
5990 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
5991 operand_subword_force (op1, i, mode),
5992 EQ, 0, word_mode, 0, 0);
5993 if (comp == const_true_rtx)
5994 emit_jump (if_false_label);
5995 else if (comp != const0_rtx)
5996 do_jump_for_compare (comp, if_false_label, 0);
6000 emit_jump (if_true_label);
6001 if (drop_through_label)
6002 emit_label (drop_through_label);
6005 /* Jump according to whether OP0 is 0.
6006 We assume that OP0 has an integer mode that is too wide
6007 for the available compare insns. */
6010 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6012 rtx if_false_label, if_true_label;
6014 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6016 rtx drop_through_label = 0;
6018 if (! if_false_label)
6019 drop_through_label = if_false_label = gen_label_rtx ();
6021 for (i = 0; i < nwords; i++)
6023 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6025 const0_rtx, EQ, 0, word_mode, 0, 0);
6026 if (comp == const_true_rtx)
6027 emit_jump (if_false_label);
6028 else if (comp != const0_rtx)
6029 do_jump_for_compare (comp, if_false_label, 0);
6033 emit_jump (if_true_label);
6034 if (drop_through_label)
6035 emit_label (drop_through_label);
6038 /* Given a comparison expression in rtl form, output conditional branches to
6039 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6042 do_jump_for_compare (comparison, if_false_label, if_true_label)
6043 rtx comparison, if_false_label, if_true_label;
6047 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6048 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6053 emit_jump (if_false_label);
6055 else if (if_false_label)
6058 rtx prev = PREV_INSN (get_last_insn ());
6061 /* Output the branch with the opposite condition. Then try to invert
6062 what is generated. If more than one insn is a branch, or if the
6063 branch is not the last insn written, abort. If we can't invert
6064 the branch, make a true label, redirect this jump to that,
6065 emit a jump to the false label and define the true label. */
6067 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6068 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6072 /* Here we get the insn before what was just emitted.
6073 On some machines, emitting the branch can discard
6074 the previous compare insn and emit a replacement. */
6076 /* If there's only one preceding insn... */
6077 insn = get_insns ();
6079 insn = NEXT_INSN (prev);
6081 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6082 if (GET_CODE (insn) == JUMP_INSN)
6089 if (branch != get_last_insn ())
6092 if (! invert_jump (branch, if_false_label))
6094 if_true_label = gen_label_rtx ();
6095 redirect_jump (branch, if_true_label);
6096 emit_jump (if_false_label);
6097 emit_label (if_true_label);
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  register rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : 0),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
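
/* For instance, a caller expanding a less-than comparison would pass LT
   as SIGNED_CODE and LTU as UNSIGNED_CODE; which of the two rtx codes
   is actually used then depends only on TREE_UNSIGNED of the operands'
   type.  */
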
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  /* If one operand is constant, make it the second one.  */

  if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
    return simplify_relational_operation (code, mode, op0, op1);

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  If we
     are comparing against a constant, we must convert it to what it
     would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = gen_rtx (CONST_INT, VOIDmode,
                       INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
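
/* As an example of the equality conversion above: comparing a QImode
   value against (const_int -1) with EQ becomes an unsigned comparison
   against (const_int 255), since -1 masked by GET_MODE_MASK (QImode)
   is 255 and the two QImode bit patterns are identical.  */
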
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  while (TREE_CODE (arg0) == NON_LVALUE_EXPR)
    arg0 = TREE_OPERAND (arg0, 0);

  while (TREE_CODE (arg1) == NON_LVALUE_EXPR)
    arg1 = TREE_OPERAND (arg1, 0);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }
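
  /* For example, the signed test "x < 1" has just been rewritten as
     "x <= 0" and the signed test "x > -1" as "x >= 0", so the
     zero-comparison special cases below can recognize them.  */
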
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_INT)
    {
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
                                                    0, VOIDmode, 0)));

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), target, 1);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, 1);

      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, target);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
                            OPTAB_LIB_WIDEN);

      return op0;
    }
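
  /* Illustration: for "(x & 4) != 0" on a 32-bit int x, bitnum is 2 and
     the result is computed as "(x >> 2) & 1" using a logical shift; for
     "(x & 4) == 0" the same value is additionally xor'ed with 1.  The
     "& 1" step is skipped when the tested bit is the sign bit, since
     the logical shift already leaves only that bit.  */
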
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, 0, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1, operand_mode,
                            unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp, operand_mode, 0, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
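
/* The set/jump/set fallback just emitted computes "target = (a < b)",
   say, as

        target = 1;
        if (a < b) goto L;
        target = 0;
     L:

   with the two constants exchanged when INVERT is set.  */
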
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is ever constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Code below assumes that MODE is Pmode,
     but I think that is a mistake.  Let's see if that is true.  */
  if (mode != Pmode)
    abort ();

  emit_cmp_insn (range, index, LTU, 0, mode, 0, 0);
  emit_jump_insn (gen_bltu (default_label));
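
  /* Note that this single unsigned comparison subsumes both bounds
     checks: an INDEX that was below the table's lowest value is, after
     the subtraction, a negative number whose unsigned interpretation is
     huge, so it too exceeds RANGE and branches to DEFAULT_LABEL.  */
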
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
              gen_rtx (MULT, Pmode, index,
                       gen_rtx (CONST_INT, VOIDmode,
                                GET_MODE_SIZE (CASE_VECTOR_MODE))),
              gen_rtx (LABEL_REF, Pmode, table_label)));
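
  /* For instance, assuming 4-byte table entries (GET_MODE_SIZE of
     CASE_VECTOR_MODE equal to 4) and INDEX 3, the address computed is
     table_label + 3*4, i.e. the fourth entry of the dispatch table.  */
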
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */