1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
40 #ifdef STACK_GROWS_DOWNWARD
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
50 #define STACK_PUSH_CODE PRE_INC
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
87 static rtx saveregs_value;
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
112 /* MOVE_RATIO is the number of move instructions that is better than
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
119 /* A value of around 6 would minimize code size; infinity would minimize
121 #define MOVE_RATIO 15
125 /* This array records the insn_code of insns to perform block moves. */
126 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
128 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
130 #ifndef SLOW_UNALIGNED_ACCESS
131 #define SLOW_UNALIGNED_ACCESS 0
134 /* This is run once per compilation to set up which modes can be used
135 directly in memory and to initialize the block move optab. */
141 enum machine_mode mode;
142 /* Try indexing by frame ptr and try by stack ptr.
143 It is known that on the Convex the stack ptr isn't a valid index.
144 With luck, one or the other is valid on any machine. */
145 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
146 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
149 insn = emit_insn (gen_rtx (SET, 0, 0));
150 pat = PATTERN (insn);
152 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
153 mode = (enum machine_mode) ((int) mode + 1))
159 direct_load[(int) mode] = direct_store[(int) mode] = 0;
160 PUT_MODE (mem, mode);
161 PUT_MODE (mem1, mode);
163 /* See if there is some register that can be used in this mode and
164 directly loaded or stored from memory. */
166 if (mode != VOIDmode && mode != BLKmode)
167 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
168 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
171 if (! HARD_REGNO_MODE_OK (regno, mode))
174 reg = gen_rtx (REG, mode, regno);
177 SET_DEST (pat) = reg;
178 if (recog (pat, insn, &num_clobbers) >= 0)
179 direct_load[(int) mode] = 1;
181 SET_SRC (pat) = mem1;
182 SET_DEST (pat) = reg;
183 if (recog (pat, insn, &num_clobbers) >= 0)
184 direct_load[(int) mode] = 1;
187 SET_DEST (pat) = mem;
188 if (recog (pat, insn, &num_clobbers) >= 0)
189 direct_store[(int) mode] = 1;
192 SET_DEST (pat) = mem1;
193 if (recog (pat, insn, &num_clobbers) >= 0)
194 direct_store[(int) mode] = 1;
197 movstr_optab[(int) mode] = CODE_FOR_nothing;
204 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
208 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
212 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
216 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
220 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
224 /* This is run at the start of compiling a function. */
231 pending_stack_adjust = 0;
232 inhibit_defer_pop = 0;
233 cleanups_this_call = 0;
238 /* Save all variables describing the current status into the structure *P.
239 This is used before starting a nested function. */
245 /* Instead of saving the postincrement queue, empty it. */
248 p->pending_stack_adjust = pending_stack_adjust;
249 p->inhibit_defer_pop = inhibit_defer_pop;
250 p->cleanups_this_call = cleanups_this_call;
251 p->saveregs_value = saveregs_value;
252 p->forced_labels = forced_labels;
254 pending_stack_adjust = 0;
255 inhibit_defer_pop = 0;
256 cleanups_this_call = 0;
261 /* Restore all variables describing the current status from the structure *P.
262 This is used after a nested function. */
265 restore_expr_status (p)
268 pending_stack_adjust = p->pending_stack_adjust;
269 inhibit_defer_pop = p->inhibit_defer_pop;
270 cleanups_this_call = p->cleanups_this_call;
271 saveregs_value = p->saveregs_value;
272 forced_labels = p->forced_labels;
275 /* Manage the queue of increment instructions to be output
276 for POSTINCREMENT_EXPR expressions, etc. */
278 static rtx pending_chain;
280 /* Queue up to increment (or change) VAR later. BODY says how:
281 BODY should be the same thing you would pass to emit_insn
282 to increment right away. It will go to emit_insn later on.
284 The value is a QUEUED expression to be used in place of VAR
285 where you want to guarantee the pre-incrementation value of VAR. */
288 enqueue_insn (var, body)
291 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
292 var, NULL_RTX, NULL_RTX, body, pending_chain);
293 return pending_chain;
296 /* Use protect_from_queue to convert a QUEUED expression
297 into something that you can put immediately into an instruction.
298 If the queued incrementation has not happened yet,
299 protect_from_queue returns the variable itself.
300 If the incrementation has happened, protect_from_queue returns a temp
301 that contains a copy of the old value of the variable.
303 Any time an rtx which might possibly be a QUEUED is to be put
304 into an instruction, it must be passed through protect_from_queue first.
305 QUEUED expressions are not meaningful in instructions.
307 Do not pass a value through protect_from_queue and then hold
308 on to it for a while before putting it in an instruction!
309 If the queue is flushed in between, incorrect code will result. */
312 protect_from_queue (x, modify)
316 register RTX_CODE code = GET_CODE (x);
318 #if 0 /* A QUEUED can hang around after the queue is forced out. */
319 /* Shortcut for most common case. */
320 if (pending_chain == 0)
326 /* A special hack for read access to (MEM (QUEUED ...))
327 to facilitate use of autoincrement.
328 Make a copy of the contents of the memory location
329 rather than a copy of the address, but not
330 if the value is of mode BLKmode. */
331 if (code == MEM && GET_MODE (x) != BLKmode
332 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
334 register rtx y = XEXP (x, 0);
335 XEXP (x, 0) = QUEUED_VAR (y);
338 register rtx temp = gen_reg_rtx (GET_MODE (x));
339 emit_insn_before (gen_move_insn (temp, x),
345 /* Otherwise, recursively protect the subexpressions of all
346 the kinds of rtx's that can contain a QUEUED. */
348 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
349 else if (code == PLUS || code == MULT)
351 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
352 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
356 /* If the increment has not happened, use the variable itself. */
357 if (QUEUED_INSN (x) == 0)
358 return QUEUED_VAR (x);
359 /* If the increment has happened and a pre-increment copy exists,
361 if (QUEUED_COPY (x) != 0)
362 return QUEUED_COPY (x);
363 /* The increment has happened but we haven't set up a pre-increment copy.
364 Set one up now, and use it. */
365 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
366 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
368 return QUEUED_COPY (x);
371 /* Return nonzero if X contains a QUEUED expression:
372 if it contains anything that will be altered by a queued increment.
373 We handle only combinations of MEM, PLUS, MINUS and MULT operators
374 since memory addresses generally contain only those. */
380 register enum rtx_code code = GET_CODE (x);
386 return queued_subexp_p (XEXP (x, 0));
390 return queued_subexp_p (XEXP (x, 0))
391 || queued_subexp_p (XEXP (x, 1));
396 /* Perform all the pending incrementations. */
402 while (p = pending_chain)
404 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
405 pending_chain = QUEUED_NEXT (p);
416 /* Copy data from FROM to TO, where the machine modes are not the same.
417 Both modes may be integer, or both may be floating.
418 UNSIGNEDP should be nonzero if FROM is an unsigned type.
419 This causes zero-extension instead of sign-extension. */
422 convert_move (to, from, unsignedp)
423 register rtx to, from;
426 enum machine_mode to_mode = GET_MODE (to);
427 enum machine_mode from_mode = GET_MODE (from);
428 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
429 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
433 /* rtx code for making an equivalent value. */
434 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
436 to = protect_from_queue (to, 1);
437 from = protect_from_queue (from, 0);
439 if (to_real != from_real)
442 /* If FROM is a SUBREG that indicates that we have already done at least
443 the required extension, strip it. We don't handle such SUBREGs as
446 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
447 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
448 >= GET_MODE_SIZE (to_mode))
449 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
450 from = gen_lowpart (to_mode, from), from_mode = to_mode;
452 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
455 if (to_mode == from_mode
456 || (from_mode == VOIDmode && CONSTANT_P (from)))
458 emit_move_insn (to, from);
464 #ifdef HAVE_extendqfhf2
465 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
467 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
471 #ifdef HAVE_extendqfsf2
472 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
474 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
478 #ifdef HAVE_extendqfdf2
479 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
481 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
485 #ifdef HAVE_extendqfxf2
486 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
488 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
492 #ifdef HAVE_extendqftf2
493 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
495 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
500 #ifdef HAVE_extendhfsf2
501 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
503 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
507 #ifdef HAVE_extendhfdf2
508 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
510 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
514 #ifdef HAVE_extendhfxf2
515 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
517 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
521 #ifdef HAVE_extendhftf2
522 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
524 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
529 #ifdef HAVE_extendsfdf2
530 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
532 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
536 #ifdef HAVE_extendsfxf2
537 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
539 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
543 #ifdef HAVE_extendsftf2
544 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
546 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
550 #ifdef HAVE_extenddfxf2
551 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
553 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
557 #ifdef HAVE_extenddftf2
558 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
560 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
572 #ifdef HAVE_truncsfqf2
573 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
575 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
579 #ifdef HAVE_truncdfqf2
580 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
586 #ifdef HAVE_truncxfqf2
587 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
593 #ifdef HAVE_trunctfqf2
594 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
600 #ifdef HAVE_truncsfhf2
601 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
603 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
607 #ifdef HAVE_truncdfhf2
608 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
610 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
614 #ifdef HAVE_truncxfhf2
615 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
617 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
621 #ifdef HAVE_trunctfhf2
622 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
624 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
628 #ifdef HAVE_truncdfsf2
629 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
631 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
635 #ifdef HAVE_truncxfsf2
636 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
638 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
642 #ifdef HAVE_trunctfsf2
643 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
645 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
649 #ifdef HAVE_truncxfdf2
650 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
652 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
656 #ifdef HAVE_trunctfdf2
657 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
659 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
671 libcall = extendsfdf2_libfunc;
675 libcall = extendsfxf2_libfunc;
679 libcall = extendsftf2_libfunc;
688 libcall = truncdfsf2_libfunc;
692 libcall = extenddfxf2_libfunc;
696 libcall = extenddftf2_libfunc;
705 libcall = truncxfsf2_libfunc;
709 libcall = truncxfdf2_libfunc;
718 libcall = trunctfsf2_libfunc;
722 libcall = trunctfdf2_libfunc;
728 if (libcall == (rtx) 0)
729 /* This conversion is not implemented yet. */
732 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
733 emit_move_insn (to, hard_libcall_value (to_mode));
737 /* Now both modes are integers. */
739 /* Handle expanding beyond a word. */
740 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
741 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
748 enum machine_mode lowpart_mode;
749 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
751 /* Try converting directly if the insn is supported. */
752 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
755 /* If FROM is a SUBREG, put it into a register. Do this
756 so that we always generate the same set of insns for
757 better cse'ing; if an intermediate assignment occurred,
758 we won't be doing the operation directly on the SUBREG. */
759 if (optimize > 0 && GET_CODE (from) == SUBREG)
760 from = force_reg (from_mode, from);
761 emit_unop_insn (code, to, from, equiv_code);
764 /* Next, try converting via full word. */
765 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
766 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
767 != CODE_FOR_nothing))
769 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
770 emit_unop_insn (code, to,
771 gen_lowpart (word_mode, to), equiv_code);
775 /* No special multiword conversion insn; do it by hand. */
778 /* Get a copy of FROM widened to a word, if necessary. */
779 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
780 lowpart_mode = word_mode;
782 lowpart_mode = from_mode;
784 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
786 lowpart = gen_lowpart (lowpart_mode, to);
787 emit_move_insn (lowpart, lowfrom);
789 /* Compute the value to put in each remaining word. */
791 fill_value = const0_rtx;
796 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
797 && STORE_FLAG_VALUE == -1)
799 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
801 fill_value = gen_reg_rtx (word_mode);
802 emit_insn (gen_slt (fill_value));
808 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
809 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
811 fill_value = convert_to_mode (word_mode, fill_value, 1);
815 /* Fill the remaining words. */
816 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
818 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
819 rtx subword = operand_subword (to, index, 1, to_mode);
824 if (fill_value != subword)
825 emit_move_insn (subword, fill_value);
828 insns = get_insns ();
831 emit_no_conflict_block (insns, to, from, NULL_RTX,
832 gen_rtx (equiv_code, to_mode, from));
836 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
838 convert_move (to, gen_lowpart (word_mode, from), 0);
842 /* Handle pointer conversion */ /* SPEE 900220 */
843 if (to_mode == PSImode)
845 if (from_mode != SImode)
846 from = convert_to_mode (SImode, from, unsignedp);
848 #ifdef HAVE_truncsipsi
851 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
854 #endif /* HAVE_truncsipsi */
858 if (from_mode == PSImode)
860 if (to_mode != SImode)
862 from = convert_to_mode (SImode, from, unsignedp);
867 #ifdef HAVE_extendpsisi
868 if (HAVE_extendpsisi)
870 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
873 #endif /* HAVE_extendpsisi */
878 /* Now follow all the conversions between integers
879 no more than a word long. */
881 /* For truncation, usually we can just refer to FROM in a narrower mode. */
882 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
883 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
884 GET_MODE_BITSIZE (from_mode))
885 && ((GET_CODE (from) == MEM
886 && ! MEM_VOLATILE_P (from)
887 && direct_load[(int) to_mode]
888 && ! mode_dependent_address_p (XEXP (from, 0)))
889 || GET_CODE (from) == REG
890 || GET_CODE (from) == SUBREG))
892 emit_move_insn (to, gen_lowpart (to_mode, from));
896 /* For truncation, usually we can just refer to FROM in a narrower mode. */
897 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
899 /* Convert directly if that works. */
900 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
903 /* If FROM is a SUBREG, put it into a register. Do this
904 so that we always generate the same set of insns for
905 better cse'ing; if an intermediate assignment occurred,
906 we won't be doing the operation directly on the SUBREG. */
907 if (optimize > 0 && GET_CODE (from) == SUBREG)
908 from = force_reg (from_mode, from);
909 emit_unop_insn (code, to, from, equiv_code);
914 enum machine_mode intermediate;
916 /* Search for a mode to convert via. */
917 for (intermediate = from_mode; intermediate != VOIDmode;
918 intermediate = GET_MODE_WIDER_MODE (intermediate))
919 if ((can_extend_p (to_mode, intermediate, unsignedp)
921 && (can_extend_p (intermediate, from_mode, unsignedp)
922 != CODE_FOR_nothing))
924 convert_move (to, convert_to_mode (intermediate, from,
925 unsignedp), unsignedp);
929 /* No suitable intermediate mode. */
934 /* Support special truncate insns for certain modes. */
936 if (from_mode == DImode && to_mode == SImode)
938 #ifdef HAVE_truncdisi2
941 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
945 convert_move (to, force_reg (from_mode, from), unsignedp);
949 if (from_mode == DImode && to_mode == HImode)
951 #ifdef HAVE_truncdihi2
954 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
958 convert_move (to, force_reg (from_mode, from), unsignedp);
962 if (from_mode == DImode && to_mode == QImode)
964 #ifdef HAVE_truncdiqi2
967 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
971 convert_move (to, force_reg (from_mode, from), unsignedp);
975 if (from_mode == SImode && to_mode == HImode)
977 #ifdef HAVE_truncsihi2
980 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
984 convert_move (to, force_reg (from_mode, from), unsignedp);
988 if (from_mode == SImode && to_mode == QImode)
990 #ifdef HAVE_truncsiqi2
993 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
997 convert_move (to, force_reg (from_mode, from), unsignedp);
1001 if (from_mode == HImode && to_mode == QImode)
1003 #ifdef HAVE_trunchiqi2
1004 if (HAVE_trunchiqi2)
1006 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1010 convert_move (to, force_reg (from_mode, from), unsignedp);
1014 /* Handle truncation of volatile memrefs, and so on;
1015 the things that couldn't be truncated directly,
1016 and for which there was no special instruction. */
1017 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1019 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1020 emit_move_insn (to, temp);
1024 /* Mode combination is not recognized. */
1028 /* Return an rtx for a value that would result
1029 from converting X to mode MODE.
1030 Both X and MODE may be floating, or both integer.
1031 UNSIGNEDP is nonzero if X is an unsigned value.
1032 This can be done by referring to a part of X in place
1033 or by copying to a new temporary with conversion.
1035 This function *must not* call protect_from_queue
1036 except when putting X into an insn (in which case convert_move does it). */
1039 convert_to_mode (mode, x, unsignedp)
1040 enum machine_mode mode;
1046 /* If FROM is a SUBREG that indicates that we have already done at least
1047 the required extension, strip it. */
1049 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1050 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1051 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1052 x = gen_lowpart (mode, x);
1054 if (mode == GET_MODE (x))
1057 /* There is one case that we must handle specially: If we are converting
1058 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1059 we are to interpret the constant as unsigned, gen_lowpart will do
1060 the wrong if the constant appears negative. What we want to do is
1061 make the high-order word of the constant zero, not all ones. */
1063 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1064 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1065 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1066 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1068 /* We can do this with a gen_lowpart if both desired and current modes
1069 are integer, and this is either a constant integer, a register, or a
1070 non-volatile MEM. Except for the constant case, we must be narrowing
1073 if (GET_CODE (x) == CONST_INT
1074 || (GET_MODE_CLASS (mode) == MODE_INT
1075 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1076 && (GET_CODE (x) == CONST_DOUBLE
1077 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1078 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1079 && direct_load[(int) mode]
1080 || GET_CODE (x) == REG)))))
1081 return gen_lowpart (mode, x);
1083 temp = gen_reg_rtx (mode);
1084 convert_move (temp, x, unsignedp);
1088 /* Generate several move instructions to copy LEN bytes
1089 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1090 The caller must pass FROM and TO
1091 through protect_from_queue before calling.
1092 ALIGN (in bytes) is maximum alignment we can assume. */
1094 struct move_by_pieces
1099 int explicit_inc_to;
1103 int explicit_inc_from;
1109 static void move_by_pieces_1 ();
1110 static int move_by_pieces_ninsns ();
1113 move_by_pieces (to, from, len, align)
1117 struct move_by_pieces data;
1118 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1119 int max_size = MOVE_MAX + 1;
1122 data.to_addr = to_addr;
1123 data.from_addr = from_addr;
1127 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1128 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1130 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1131 || GET_CODE (from_addr) == POST_INC
1132 || GET_CODE (from_addr) == POST_DEC);
1134 data.explicit_inc_from = 0;
1135 data.explicit_inc_to = 0;
1137 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1138 if (data.reverse) data.offset = len;
1141 /* If copying requires more than two move insns,
1142 copy addresses to registers (to make displacements shorter)
1143 and use post-increment if available. */
1144 if (!(data.autinc_from && data.autinc_to)
1145 && move_by_pieces_ninsns (len, align) > 2)
1147 #ifdef HAVE_PRE_DECREMENT
1148 if (data.reverse && ! data.autinc_from)
1150 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1151 data.autinc_from = 1;
1152 data.explicit_inc_from = -1;
1155 #ifdef HAVE_POST_INCREMENT
1156 if (! data.autinc_from)
1158 data.from_addr = copy_addr_to_reg (from_addr);
1159 data.autinc_from = 1;
1160 data.explicit_inc_from = 1;
1163 if (!data.autinc_from && CONSTANT_P (from_addr))
1164 data.from_addr = copy_addr_to_reg (from_addr);
1165 #ifdef HAVE_PRE_DECREMENT
1166 if (data.reverse && ! data.autinc_to)
1168 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1170 data.explicit_inc_to = -1;
1173 #ifdef HAVE_POST_INCREMENT
1174 if (! data.reverse && ! data.autinc_to)
1176 data.to_addr = copy_addr_to_reg (to_addr);
1178 data.explicit_inc_to = 1;
1181 if (!data.autinc_to && CONSTANT_P (to_addr))
1182 data.to_addr = copy_addr_to_reg (to_addr);
1185 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1186 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1189 /* First move what we can in the largest integer mode, then go to
1190 successively smaller modes. */
1192 while (max_size > 1)
1194 enum machine_mode mode = VOIDmode, tmode;
1195 enum insn_code icode;
1197 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1198 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1199 if (GET_MODE_SIZE (tmode) < max_size)
1202 if (mode == VOIDmode)
1205 icode = mov_optab->handlers[(int) mode].insn_code;
1206 if (icode != CODE_FOR_nothing
1207 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1208 GET_MODE_SIZE (mode)))
1209 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1211 max_size = GET_MODE_SIZE (mode);
1214 /* The code above should have handled everything. */
1219 /* Return number of insns required to move L bytes by pieces.
1220 ALIGN (in bytes) is maximum alignment we can assume. */
1223 move_by_pieces_ninsns (l, align)
1227 register int n_insns = 0;
1228 int max_size = MOVE_MAX + 1;
1230 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1231 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1234 while (max_size > 1)
1236 enum machine_mode mode = VOIDmode, tmode;
1237 enum insn_code icode;
1239 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1240 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1241 if (GET_MODE_SIZE (tmode) < max_size)
1244 if (mode == VOIDmode)
1247 icode = mov_optab->handlers[(int) mode].insn_code;
1248 if (icode != CODE_FOR_nothing
1249 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1250 GET_MODE_SIZE (mode)))
1251 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1253 max_size = GET_MODE_SIZE (mode);
1259 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1260 with move instructions for mode MODE. GENFUN is the gen_... function
1261 to make a move insn for that mode. DATA has all the other info. */
1264 move_by_pieces_1 (genfun, mode, data)
1266 enum machine_mode mode;
1267 struct move_by_pieces *data;
1269 register int size = GET_MODE_SIZE (mode);
1270 register rtx to1, from1;
1272 while (data->len >= size)
1274 if (data->reverse) data->offset -= size;
1276 to1 = (data->autinc_to
1277 ? gen_rtx (MEM, mode, data->to_addr)
1278 : change_address (data->to, mode,
1279 plus_constant (data->to_addr, data->offset)));
1282 ? gen_rtx (MEM, mode, data->from_addr)
1283 : change_address (data->from, mode,
1284 plus_constant (data->from_addr, data->offset)));
1286 #ifdef HAVE_PRE_DECREMENT
1287 if (data->explicit_inc_to < 0)
1288 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1289 if (data->explicit_inc_from < 0)
1290 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1293 emit_insn ((*genfun) (to1, from1));
1294 #ifdef HAVE_POST_INCREMENT
1295 if (data->explicit_inc_to > 0)
1296 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1297 if (data->explicit_inc_from > 0)
1298 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1301 if (! data->reverse) data->offset += size;
1307 /* Emit code to move a block Y to a block X.
1308 This may be done with string-move instructions,
1309 with multiple scalar move instructions, or with a library call.
1311 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1313 SIZE is an rtx that says how long they are.
1314 ALIGN is the maximum alignment we can assume they have,
1315 measured in bytes. */
1318 emit_block_move (x, y, size, align)
1323 if (GET_MODE (x) != BLKmode)
1326 if (GET_MODE (y) != BLKmode)
1329 x = protect_from_queue (x, 1);
1330 y = protect_from_queue (y, 0);
1331 size = protect_from_queue (size, 0);
1333 if (GET_CODE (x) != MEM)
1335 if (GET_CODE (y) != MEM)
1340 if (GET_CODE (size) == CONST_INT
1341 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1342 move_by_pieces (x, y, INTVAL (size), align);
1345 /* Try the most limited insn first, because there's no point
1346 including more than one in the machine description unless
1347 the more limited one has some advantage. */
1349 rtx opalign = GEN_INT (align);
1350 enum machine_mode mode;
1352 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1353 mode = GET_MODE_WIDER_MODE (mode))
1355 enum insn_code code = movstr_optab[(int) mode];
1357 if (code != CODE_FOR_nothing
1358 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1359 here because if SIZE is less than the mode mask, as it is
1360 returned by the macro, it will definitely be less than the
1361 actual mode mask. */
1362 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1363 && (insn_operand_predicate[(int) code][0] == 0
1364 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1365 && (insn_operand_predicate[(int) code][1] == 0
1366 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1367 && (insn_operand_predicate[(int) code][3] == 0
1368 || (*insn_operand_predicate[(int) code][3]) (opalign,
1372 rtx last = get_last_insn ();
1375 op2 = convert_to_mode (mode, size, 1);
1376 if (insn_operand_predicate[(int) code][2] != 0
1377 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1378 op2 = copy_to_mode_reg (mode, op2);
1380 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1387 delete_insns_since (last);
1391 #ifdef TARGET_MEM_FUNCTIONS
1392 emit_library_call (memcpy_libfunc, 0,
1393 VOIDmode, 3, XEXP (x, 0), Pmode,
1395 convert_to_mode (Pmode, size, 1), Pmode);
1397 emit_library_call (bcopy_libfunc, 0,
1398 VOIDmode, 3, XEXP (y, 0), Pmode,
1400 convert_to_mode (Pmode, size, 1), Pmode);
1405 /* Copy all or part of a value X into registers starting at REGNO.
1406 The number of registers to be filled is NREGS. */
1409 move_block_to_reg (regno, x, nregs, mode)
1413 enum machine_mode mode;
1418 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1419 x = validize_mem (force_const_mem (mode, x));
1421 /* See if the machine can do this with a load multiple insn. */
1422 #ifdef HAVE_load_multiple
1423 last = get_last_insn ();
1424 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1432 delete_insns_since (last);
1435 for (i = 0; i < nregs; i++)
1436 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1437 operand_subword_force (x, i, mode));
1440 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1441 The number of registers to be filled is NREGS. */
1444 move_block_from_reg (regno, x, nregs)
1452 /* See if the machine can do this with a store multiple insn. */
1453 #ifdef HAVE_store_multiple
1454 last = get_last_insn ();
1455 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1463 delete_insns_since (last);
1466 for (i = 0; i < nregs; i++)
1468 rtx tem = operand_subword (x, i, 1, BLKmode);
1473 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1477 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1480 use_regs (regno, nregs)
1486 for (i = 0; i < nregs; i++)
1487 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1490 /* Mark the instructions since PREV as a libcall block.
1491 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1500 /* Find the instructions to mark */
1502 insn_first = NEXT_INSN (prev);
1504 insn_first = get_insns ();
1506 insn_last = get_last_insn ();
1508 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1509 REG_NOTES (insn_last));
1511 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1512 REG_NOTES (insn_first));
1515 /* Write zeros through the storage of OBJECT.
1516 If OBJECT has BLKmode, SIZE is its length in bytes. */
1519 clear_storage (object, size)
1523 if (GET_MODE (object) == BLKmode)
1525 #ifdef TARGET_MEM_FUNCTIONS
1526 emit_library_call (memset_libfunc, 0,
1528 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1529 GEN_INT (size), Pmode);
1531 emit_library_call (bzero_libfunc, 0,
1533 XEXP (object, 0), Pmode,
1534 GEN_INT (size), Pmode);
1538 emit_move_insn (object, const0_rtx);
1541 /* Generate code to copy Y into X.
1542 Both Y and X must have the same mode, except that
1543 Y can be a constant with VOIDmode.
1544 This mode cannot be BLKmode; use emit_block_move for that.
1546 Return the last instruction emitted. */
1549 emit_move_insn (x, y)
1552 enum machine_mode mode = GET_MODE (x);
1553 enum machine_mode submode;
1554 enum mode_class class = GET_MODE_CLASS (mode);
1557 x = protect_from_queue (x, 1);
1558 y = protect_from_queue (y, 0);
1560 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1563 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1564 y = force_const_mem (mode, y);
1566 /* If X or Y are memory references, verify that their addresses are valid
1568 if (GET_CODE (x) == MEM
1569 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1570 && ! push_operand (x, GET_MODE (x)))
1572 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1573 x = change_address (x, VOIDmode, XEXP (x, 0));
1575 if (GET_CODE (y) == MEM
1576 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1578 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1579 y = change_address (y, VOIDmode, XEXP (y, 0));
1581 if (mode == BLKmode)
1584 return emit_move_insn_1 (x, y);
1587 /* Low level part of emit_move_insn.
1588 Called just like emit_move_insn, but assumes X and Y
1589 are basically valid. */
1592 emit_move_insn_1 (x, y)
1595 enum machine_mode mode = GET_MODE (x);
1596 enum machine_mode submode;
1597 enum mode_class class = GET_MODE_CLASS (mode);
1600 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1601 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1602 (class == MODE_COMPLEX_INT
1603 ? MODE_INT : MODE_FLOAT),
1606 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1608 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1610 /* Expand complex moves by moving real part and imag part, if possible. */
1611 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1612 && submode != BLKmode
1613 && (mov_optab->handlers[(int) submode].insn_code
1614 != CODE_FOR_nothing))
1616 /* Don't split destination if it is a stack push. */
1617 int stack = push_operand (x, GET_MODE (x));
1618 rtx prev = get_last_insn ();
1620 /* Tell flow that the whole of the destination is being set. */
1621 if (GET_CODE (x) == REG)
1622 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1624 /* If this is a stack, push the highpart first, so it
1625 will be in the argument order.
1627 In that case, change_address is used only to convert
1628 the mode, not to change the address. */
1629 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1630 ((stack ? change_address (x, submode, (rtx) 0)
1631 : gen_highpart (submode, x)),
1632 gen_highpart (submode, y)));
1633 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1634 ((stack ? change_address (x, submode, (rtx) 0)
1635 : gen_lowpart (submode, x)),
1636 gen_lowpart (submode, y)));
1640 return get_last_insn ();
1643 /* This will handle any multi-word mode that lacks a move_insn pattern.
1644 However, you will get better code if you define such patterns,
1645 even if they must turn into multiple assembler instructions. */
1646 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1649 rtx prev_insn = get_last_insn ();
1652 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1655 rtx xpart = operand_subword (x, i, 1, mode);
1656 rtx ypart = operand_subword (y, i, 1, mode);
1658 /* If we can't get a part of Y, put Y into memory if it is a
1659 constant. Otherwise, force it into a register. If we still
1660 can't get a part of Y, abort. */
1661 if (ypart == 0 && CONSTANT_P (y))
1663 y = force_const_mem (mode, y);
1664 ypart = operand_subword (y, i, 1, mode);
1666 else if (ypart == 0)
1667 ypart = operand_subword_force (y, i, mode);
1669 if (xpart == 0 || ypart == 0)
1672 last_insn = emit_move_insn (xpart, ypart);
1674 /* Mark these insns as a libcall block. */
1675 group_insns (prev_insn);
1683 /* Pushing data onto the stack. */
1685 /* Push a block of length SIZE (perhaps variable)
1686 and return an rtx to address the beginning of the block.
1687 Note that it is not possible for the value returned to be a QUEUED.
1688 The value may be virtual_outgoing_args_rtx.
1690 EXTRA is the number of bytes of padding to push in addition to SIZE.
1691 BELOW nonzero means this padding comes at low addresses;
1692 otherwise, the padding comes at high addresses. */
1695 push_block (size, extra, below)
1700 if (CONSTANT_P (size))
1701 anti_adjust_stack (plus_constant (size, extra));
1702 else if (GET_CODE (size) == REG && extra == 0)
1703 anti_adjust_stack (size);
1706 rtx temp = copy_to_mode_reg (Pmode, size);
1708 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1709 temp, 0, OPTAB_LIB_WIDEN);
1710 anti_adjust_stack (temp);
1713 #ifdef STACK_GROWS_DOWNWARD
1714 temp = virtual_outgoing_args_rtx;
1715 if (extra != 0 && below)
1716 temp = plus_constant (temp, extra);
1718 if (GET_CODE (size) == CONST_INT)
1719 temp = plus_constant (virtual_outgoing_args_rtx,
1720 - INTVAL (size) - (below ? 0 : extra));
1721 else if (extra != 0 && !below)
1722 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1723 negate_rtx (Pmode, plus_constant (size, extra)));
1725 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1726 negate_rtx (Pmode, size));
1729 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1735 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1738 /* Generate code to push X onto the stack, assuming it has mode MODE and
1740 MODE is redundant except when X is a CONST_INT (since they don't
1742 SIZE is an rtx for the size of data to be copied (in bytes),
1743 needed only if X is BLKmode.
1745 ALIGN (in bytes) is maximum alignment we can assume.
1747 If PARTIAL and REG are both nonzero, then copy that many of the first
1748 words of X into registers starting with REG, and push the rest of X.
1749 The amount of space pushed is decreased by PARTIAL words,
1750 rounded *down* to a multiple of PARM_BOUNDARY.
1751 REG must be a hard register in this case.
1752 If REG is zero but PARTIAL is not, take any all others actions for an
1753 argument partially in registers, but do not actually load any
1756 EXTRA is the amount in bytes of extra space to leave next to this arg.
1757 This is ignored if an argument block has already been allocated.
1759 On a machine that lacks real push insns, ARGS_ADDR is the address of
1760 the bottom of the argument block for this call. We use indexing off there
1761 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1762 argument block has not been preallocated.
1764 ARGS_SO_FAR is the size of args previously pushed for this call. */
1767 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1768 args_addr, args_so_far)
1770 enum machine_mode mode;
1781 enum direction stack_direction
1782 #ifdef STACK_GROWS_DOWNWARD
1788 /* Decide where to pad the argument: `downward' for below,
1789 `upward' for above, or `none' for don't pad it.
1790 Default is below for small data on big-endian machines; else above. */
1791 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1793 /* Invert direction if stack is post-update. */
1794 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1795 if (where_pad != none)
1796 where_pad = (where_pad == downward ? upward : downward);
1798 xinner = x = protect_from_queue (x, 0);
1800 if (mode == BLKmode)
1802 /* Copy a block into the stack, entirely or partially. */
1805 int used = partial * UNITS_PER_WORD;
1806 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1814 /* USED is now the # of bytes we need not copy to the stack
1815 because registers will take care of them. */
1818 xinner = change_address (xinner, BLKmode,
1819 plus_constant (XEXP (xinner, 0), used));
1821 /* If the partial register-part of the arg counts in its stack size,
1822 skip the part of stack space corresponding to the registers.
1823 Otherwise, start copying to the beginning of the stack space,
1824 by setting SKIP to 0. */
1825 #ifndef REG_PARM_STACK_SPACE
1831 #ifdef PUSH_ROUNDING
1832 /* Do it with several push insns if that doesn't take lots of insns
1833 and if there is no difficulty with push insns that skip bytes
1834 on the stack for alignment purposes. */
1836 && GET_CODE (size) == CONST_INT
1838 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1840 /* Here we avoid the case of a structure whose weak alignment
1841 forces many pushes of a small amount of data,
1842 and such small pushes do rounding that causes trouble. */
1843 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1844 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1845 || PUSH_ROUNDING (align) == align)
1846 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1848 /* Push padding now if padding above and stack grows down,
1849 or if padding below and stack grows up.
1850 But if space already allocated, this has already been done. */
1851 if (extra && args_addr == 0
1852 && where_pad != none && where_pad != stack_direction)
1853 anti_adjust_stack (GEN_INT (extra));
1855 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1856 INTVAL (size) - used, align);
1859 #endif /* PUSH_ROUNDING */
1861 /* Otherwise make space on the stack and copy the data
1862 to the address of that space. */
1864 /* Deduct words put into registers from the size we must copy. */
1867 if (GET_CODE (size) == CONST_INT)
1868 size = GEN_INT (INTVAL (size) - used);
1870 size = expand_binop (GET_MODE (size), sub_optab, size,
1871 GEN_INT (used), NULL_RTX, 0,
1875 /* Get the address of the stack space.
1876 In this case, we do not deal with EXTRA separately.
1877 A single stack adjust will do. */
1880 temp = push_block (size, extra, where_pad == downward);
1883 else if (GET_CODE (args_so_far) == CONST_INT)
1884 temp = memory_address (BLKmode,
1885 plus_constant (args_addr,
1886 skip + INTVAL (args_so_far)));
1888 temp = memory_address (BLKmode,
1889 plus_constant (gen_rtx (PLUS, Pmode,
1890 args_addr, args_so_far),
1893 /* TEMP is the address of the block. Copy the data there. */
1894 if (GET_CODE (size) == CONST_INT
1895 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1898 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1899 INTVAL (size), align);
1902 /* Try the most limited insn first, because there's no point
1903 including more than one in the machine description unless
1904 the more limited one has some advantage. */
1905 #ifdef HAVE_movstrqi
1907 && GET_CODE (size) == CONST_INT
1908 && ((unsigned) INTVAL (size)
1909 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1911 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1912 xinner, size, GEN_INT (align)));
1916 #ifdef HAVE_movstrhi
1918 && GET_CODE (size) == CONST_INT
1919 && ((unsigned) INTVAL (size)
1920 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1922 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1923 xinner, size, GEN_INT (align)));
1927 #ifdef HAVE_movstrsi
1930 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1931 xinner, size, GEN_INT (align)));
1935 #ifdef HAVE_movstrdi
1938 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1939 xinner, size, GEN_INT (align)));
1944 #ifndef ACCUMULATE_OUTGOING_ARGS
1945 /* If the source is referenced relative to the stack pointer,
1946 copy it to another register to stabilize it. We do not need
1947 to do this if we know that we won't be changing sp. */
1949 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1950 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1951 temp = copy_to_reg (temp);
1954 /* Make inhibit_defer_pop nonzero around the library call
1955 to force it to pop the bcopy-arguments right away. */
1957 #ifdef TARGET_MEM_FUNCTIONS
1958 emit_library_call (memcpy_libfunc, 0,
1959 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1962 emit_library_call (bcopy_libfunc, 0,
1963 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1969 else if (partial > 0)
1971 /* Scalar partly in registers. */
1973 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1976 /* # words of start of argument
1977 that we must make space for but need not store. */
1978 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1979 int args_offset = INTVAL (args_so_far);
1982 /* Push padding now if padding above and stack grows down,
1983 or if padding below and stack grows up.
1984 But if space already allocated, this has already been done. */
1985 if (extra && args_addr == 0
1986 && where_pad != none && where_pad != stack_direction)
1987 anti_adjust_stack (GEN_INT (extra));
1989 /* If we make space by pushing it, we might as well push
1990 the real data. Otherwise, we can leave OFFSET nonzero
1991 and leave the space uninitialized. */
1995 /* Now NOT_STACK gets the number of words that we don't need to
1996 allocate on the stack. */
1997 not_stack = partial - offset;
1999 /* If the partial register-part of the arg counts in its stack size,
2000 skip the part of stack space corresponding to the registers.
2001 Otherwise, start copying to the beginning of the stack space,
2002 by setting SKIP to 0. */
2003 #ifndef REG_PARM_STACK_SPACE
2009 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2010 x = validize_mem (force_const_mem (mode, x));
2012 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2013 SUBREGs of such registers are not allowed. */
2014 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2015 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2016 x = copy_to_reg (x);
2018 /* Loop over all the words allocated on the stack for this arg. */
2019 /* We can do it by words, because any scalar bigger than a word
2020 has a size a multiple of a word. */
2021 #ifndef PUSH_ARGS_REVERSED
2022 for (i = not_stack; i < size; i++)
2024 for (i = size - 1; i >= not_stack; i--)
2026 if (i >= not_stack + offset)
2027 emit_push_insn (operand_subword_force (x, i, mode),
2028 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2030 GEN_INT (args_offset + ((i - not_stack + skip)
2031 * UNITS_PER_WORD)));
2037 /* Push padding now if padding above and stack grows down,
2038 or if padding below and stack grows up.
2039 But if space already allocated, this has already been done. */
2040 if (extra && args_addr == 0
2041 && where_pad != none && where_pad != stack_direction)
2042 anti_adjust_stack (GEN_INT (extra));
2044 #ifdef PUSH_ROUNDING
2046 addr = gen_push_operand ();
2049 if (GET_CODE (args_so_far) == CONST_INT)
2051 = memory_address (mode,
2052 plus_constant (args_addr, INTVAL (args_so_far)));
2054 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2057 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2061 /* If part should go in registers, copy that part
2062 into the appropriate registers. Do this now, at the end,
2063 since mem-to-mem copies above may do function calls. */
2064 if (partial > 0 && reg != 0)
2065 move_block_to_reg (REGNO (reg), x, partial, mode);
2067 if (extra && args_addr == 0 && where_pad == stack_direction)
2068 anti_adjust_stack (GEN_INT (extra));
2071 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2072 (emitting the queue unless NO_QUEUE is nonzero),
2073 for a value of mode OUTMODE,
2074 with NARGS different arguments, passed as alternating rtx values
2075 and machine_modes to convert them to.
2076 The rtx values should have been passed through protect_from_queue already.
2078 NO_QUEUE will be true if and only if the library call is a `const' call
2079 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2080 to the variable is_const in expand_call.
2082 NO_QUEUE must be true for const calls, because if it isn't, then
2083 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2084 and will be lost if the libcall sequence is optimized away.
2086 NO_QUEUE must be false for non-const calls, because if it isn't, the
2087 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2088 optimized. For instance, the instruction scheduler may incorrectly
2089 move memory references across the non-const call. */
2092 emit_library_call (va_alist)
2096 /* Total size in bytes of all the stack-parms scanned so far. */
2097 struct args_size args_size;
2098 /* Size of arguments before any adjustments (such as rounding). */
2099 struct args_size original_args_size;
2100 register int argnum;
2101 enum machine_mode outmode;
2108 CUMULATIVE_ARGS args_so_far;
2109 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2110 struct args_size offset; struct args_size size; };
2112 int old_inhibit_defer_pop = inhibit_defer_pop;
2117 orgfun = fun = va_arg (p, rtx);
2118 no_queue = va_arg (p, int);
2119 outmode = va_arg (p, enum machine_mode);
2120 nargs = va_arg (p, int);
2122 /* Copy all the libcall-arguments out of the varargs data
2123 and into a vector ARGVEC.
2125 Compute how to pass each argument. We only support a very small subset
2126 of the full argument passing conventions to limit complexity here since
2127 library functions shouldn't have many args. */
2129 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2131 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2133 args_size.constant = 0;
2136 for (count = 0; count < nargs; count++)
2138 rtx val = va_arg (p, rtx);
2139 enum machine_mode mode = va_arg (p, enum machine_mode);
2141 /* We cannot convert the arg value to the mode the library wants here;
2142 must do it earlier where we know the signedness of the arg. */
2144 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2147 /* On some machines, there's no way to pass a float to a library fcn.
2148 Pass it as a double instead. */
2149 #ifdef LIBGCC_NEEDS_DOUBLE
2150 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2151 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2154 /* There's no need to call protect_from_queue, because
2155 either emit_move_insn or emit_push_insn will do that. */
2157 /* Make sure it is a reasonable operand for a move or push insn. */
2158 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2159 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2160 val = force_operand (val, NULL_RTX);
2162 argvec[count].value = val;
2163 argvec[count].mode = mode;
2165 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2166 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2170 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2171 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2173 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2174 argvec[count].partial
2175 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2177 argvec[count].partial = 0;
2180 locate_and_pad_parm (mode, NULL_TREE,
2181 argvec[count].reg && argvec[count].partial == 0,
2182 NULL_TREE, &args_size, &argvec[count].offset,
2183 &argvec[count].size);
2185 if (argvec[count].size.var)
2188 #ifndef REG_PARM_STACK_SPACE
2189 if (argvec[count].partial)
2190 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2193 if (argvec[count].reg == 0 || argvec[count].partial != 0
2194 #ifdef REG_PARM_STACK_SPACE
2198 args_size.constant += argvec[count].size.constant;
2200 #ifdef ACCUMULATE_OUTGOING_ARGS
2201 /* If this arg is actually passed on the stack, it might be
2202 clobbering something we already put there (this library call might
2203 be inside the evaluation of an argument to a function whose call
2204 requires the stack). This will only occur when the library call
2205 has sufficient args to run out of argument registers. Abort in
2206 this case; if this ever occurs, code must be added to save and
2207 restore the arg slot. */
2209 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2213 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2217 /* If this machine requires an external definition for library
2218 functions, write one out. */
2219 assemble_external_libcall (fun);
2221 original_args_size = args_size;
2222 #ifdef STACK_BOUNDARY
2223 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2224 / STACK_BYTES) * STACK_BYTES);
2227 #ifdef REG_PARM_STACK_SPACE
2228 args_size.constant = MAX (args_size.constant,
2229 REG_PARM_STACK_SPACE (NULL_TREE));
2230 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2231 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2235 #ifdef ACCUMULATE_OUTGOING_ARGS
2236 if (args_size.constant > current_function_outgoing_args_size)
2237 current_function_outgoing_args_size = args_size.constant;
2238 args_size.constant = 0;
2241 #ifndef PUSH_ROUNDING
2242 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2245 #ifdef PUSH_ARGS_REVERSED
2246 #ifdef STACK_BOUNDARY
2247 /* If we push args individually in reverse order, perform stack alignment
2248 before the first push (the last arg). */
2250 anti_adjust_stack (GEN_INT (args_size.constant
2251 - original_args_size.constant));
2255 #ifdef PUSH_ARGS_REVERSED
2263 /* Push the args that need to be pushed. */
2265 for (count = 0; count < nargs; count++, argnum += inc)
2267 register enum machine_mode mode = argvec[argnum].mode;
2268 register rtx val = argvec[argnum].value;
2269 rtx reg = argvec[argnum].reg;
2270 int partial = argvec[argnum].partial;
2272 if (! (reg != 0 && partial == 0))
2273 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2274 argblock, GEN_INT (argvec[count].offset.constant));
2278 #ifndef PUSH_ARGS_REVERSED
2279 #ifdef STACK_BOUNDARY
2280 /* If we pushed args in forward order, perform stack alignment
2281 after pushing the last arg. */
2283 anti_adjust_stack (GEN_INT (args_size.constant
2284 - original_args_size.constant));
2288 #ifdef PUSH_ARGS_REVERSED
2294 /* Now load any reg parms into their regs. */
2296 for (count = 0; count < nargs; count++, argnum += inc)
2298 register enum machine_mode mode = argvec[argnum].mode;
2299 register rtx val = argvec[argnum].value;
2300 rtx reg = argvec[argnum].reg;
2301 int partial = argvec[argnum].partial;
2303 if (reg != 0 && partial == 0)
2304 emit_move_insn (reg, val);
2308 /* For version 1.37, try deleting this entirely. */
2312 /* Any regs containing parms remain in use through the call. */
2314 for (count = 0; count < nargs; count++)
2315 if (argvec[count].reg != 0)
2316 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2318 use_insns = get_insns ();
2321 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2323 /* Don't allow popping to be deferred, since then
2324 cse'ing of library calls could delete a call and leave the pop. */
2327 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2328 will set inhibit_defer_pop to that value. */
2330 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2331 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2332 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2333 old_inhibit_defer_pop + 1, use_insns, no_queue);
2335 /* Now restore inhibit_defer_pop to its actual original value. */
2339 /* Like emit_library_call except that an extra argument, VALUE,
2340 comes second and says where to store the result. */
2343 emit_library_call_value (va_alist)
2347 /* Total size in bytes of all the stack-parms scanned so far. */
2348 struct args_size args_size;
2349 /* Size of arguments before any adjustments (such as rounding). */
2350 struct args_size original_args_size;
2351 register int argnum;
2352 enum machine_mode outmode;
2359 CUMULATIVE_ARGS args_so_far;
2360 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2361 struct args_size offset; struct args_size size; };
2363 int old_inhibit_defer_pop = inhibit_defer_pop;
2370 orgfun = fun = va_arg (p, rtx);
2371 value = va_arg (p, rtx);
2372 no_queue = va_arg (p, int);
2373 outmode = va_arg (p, enum machine_mode);
2374 nargs = va_arg (p, int);
2376 /* If this kind of value comes back in memory,
2377 decide where in memory it should come back. */
2378 if (RETURN_IN_MEMORY (type_for_mode (outmode, 0)))
2380 if (GET_CODE (value) == MEM)
2383 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2386 /* ??? Unfinished: must pass the memory address as an argument. */
2388 /* Copy all the libcall-arguments out of the varargs data
2389 and into a vector ARGVEC.
2391 Compute how to pass each argument. We only support a very small subset
2392 of the full argument passing conventions to limit complexity here since
2393 library functions shouldn't have many args. */
2395 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2397 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2399 args_size.constant = 0;
2402 for (count = 0; count < nargs; count++)
2404 rtx val = va_arg (p, rtx);
2405 enum machine_mode mode = va_arg (p, enum machine_mode);
2407 /* We cannot convert the arg value to the mode the library wants here;
2408 must do it earlier where we know the signedness of the arg. */
2410 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2413 /* On some machines, there's no way to pass a float to a library fcn.
2414 Pass it as a double instead. */
2415 #ifdef LIBGCC_NEEDS_DOUBLE
2416 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2417 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2420 /* There's no need to call protect_from_queue, because
2421 either emit_move_insn or emit_push_insn will do that. */
2423 /* Make sure it is a reasonable operand for a move or push insn. */
2424 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2425 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2426 val = force_operand (val, NULL_RTX);
2428 argvec[count].value = val;
2429 argvec[count].mode = mode;
2431 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2432 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2436 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2437 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2439 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2440 argvec[count].partial
2441 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2443 argvec[count].partial = 0;
2446 locate_and_pad_parm (mode, NULL_TREE,
2447 argvec[count].reg && argvec[count].partial == 0,
2448 NULL_TREE, &args_size, &argvec[count].offset,
2449 &argvec[count].size);
2451 if (argvec[count].size.var)
2454 #ifndef REG_PARM_STACK_SPACE
2455 if (argvec[count].partial)
2456 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2459 if (argvec[count].reg == 0 || argvec[count].partial != 0
2460 #ifdef REG_PARM_STACK_SPACE
2464 args_size.constant += argvec[count].size.constant;
2466 #ifdef ACCUMULATE_OUTGOING_ARGS
2467 /* If this arg is actually passed on the stack, it might be
2468 clobbering something we already put there (this library call might
2469 be inside the evaluation of an argument to a function whose call
2470 requires the stack). This will only occur when the library call
2471 has sufficient args to run out of argument registers. Abort in
2472 this case; if this ever occurs, code must be added to save and
2473 restore the arg slot. */
2475 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2479 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2483 /* If this machine requires an external definition for library
2484 functions, write one out. */
2485 assemble_external_libcall (fun);
2487 original_args_size = args_size;
2488 #ifdef STACK_BOUNDARY
2489 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2490 / STACK_BYTES) * STACK_BYTES);
2493 #ifdef REG_PARM_STACK_SPACE
2494 args_size.constant = MAX (args_size.constant,
2495 REG_PARM_STACK_SPACE (NULL_TREE));
2496 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2497 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2501 #ifdef ACCUMULATE_OUTGOING_ARGS
2502 if (args_size.constant > current_function_outgoing_args_size)
2503 current_function_outgoing_args_size = args_size.constant;
2504 args_size.constant = 0;
2507 #ifndef PUSH_ROUNDING
2508 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2511 #ifdef PUSH_ARGS_REVERSED
2512 #ifdef STACK_BOUNDARY
2513 /* If we push args individually in reverse order, perform stack alignment
2514 before the first push (the last arg). */
2516 anti_adjust_stack (GEN_INT (args_size.constant
2517 - original_args_size.constant));
2521 #ifdef PUSH_ARGS_REVERSED
2529 /* Push the args that need to be pushed. */
2531 for (count = 0; count < nargs; count++, argnum += inc)
2533 register enum machine_mode mode = argvec[argnum].mode;
2534 register rtx val = argvec[argnum].value;
2535 rtx reg = argvec[argnum].reg;
2536 int partial = argvec[argnum].partial;
2538 if (! (reg != 0 && partial == 0))
2539 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2540 argblock, GEN_INT (argvec[count].offset.constant));
2544 #ifndef PUSH_ARGS_REVERSED
2545 #ifdef STACK_BOUNDARY
2546 /* If we pushed args in forward order, perform stack alignment
2547 after pushing the last arg. */
2549 anti_adjust_stack (GEN_INT (args_size.constant
2550 - original_args_size.constant));
2554 #ifdef PUSH_ARGS_REVERSED
2560 /* Now load any reg parms into their regs. */
2562 for (count = 0; count < nargs; count++, argnum += inc)
2564 register enum machine_mode mode = argvec[argnum].mode;
2565 register rtx val = argvec[argnum].value;
2566 rtx reg = argvec[argnum].reg;
2567 int partial = argvec[argnum].partial;
2569 if (reg != 0 && partial == 0)
2570 emit_move_insn (reg, val);
2574 /* For version 1.37, try deleting this entirely. */
2578 /* Any regs containing parms remain in use through the call. */
2580 for (count = 0; count < nargs; count++)
2581 if (argvec[count].reg != 0)
2582 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2584 use_insns = get_insns ();
2587 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2589 /* Don't allow popping to be deferred, since then
2590 cse'ing of library calls could delete a call and leave the pop. */
2593 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2594 will set inhibit_defer_pop to that value. */
2596 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2597 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2598 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2599 old_inhibit_defer_pop + 1, use_insns, no_queue);
2601 /* Now restore inhibit_defer_pop to its actual original value. */
2604 /* Copy the value to the right place. */
2607 if (value != mem_value)
2608 emit_move_insn (value, mem_value);
2611 emit_move_insn (value, hard_libcall_value (outmode));
2614 /* Expand an assignment that stores the value of FROM into TO.
2615 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2616 (This may contain a QUEUED rtx.)
2617 Otherwise, the returned value is not meaningful.
2619 SUGGEST_REG is no longer actually used.
2620 It used to mean, copy the value through a register
2621 and return that register, if that is possible.
2622 But now we do this if WANT_VALUE.
2624 If the value stored is a constant, we return the constant. */
2627 expand_assignment (to, from, want_value, suggest_reg)
2632 register rtx to_rtx = 0;
2635 /* Don't crash if the lhs of the assignment was erroneous. */
2637 if (TREE_CODE (to) == ERROR_MARK)
2638 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2640 /* Assignment of a structure component needs special treatment
2641 if the structure component's rtx is not simply a MEM.
2642 Assignment of an array element at a constant index
2643 has the same problem. */
2645 if (TREE_CODE (to) == COMPONENT_REF
2646 || TREE_CODE (to) == BIT_FIELD_REF
2647 || (TREE_CODE (to) == ARRAY_REF
2648 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2649 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2651 enum machine_mode mode1;
2657 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2658 &mode1, &unsignedp, &volatilep);
2660 /* If we are going to use store_bit_field and extract_bit_field,
2661 make sure to_rtx will be safe for multiple use. */
2663 if (mode1 == VOIDmode && want_value)
2664 tem = stabilize_reference (tem);
2666 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2669 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2671 if (GET_CODE (to_rtx) != MEM)
2673 to_rtx = change_address (to_rtx, VOIDmode,
2674 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2675 force_reg (Pmode, offset_rtx)));
2679 if (GET_CODE (to_rtx) == MEM)
2680 MEM_VOLATILE_P (to_rtx) = 1;
2681 #if 0 /* This was turned off because, when a field is volatile
2682 in an object which is not volatile, the object may be in a register,
2683 and then we would abort over here. */
2689 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2691 /* Spurious cast makes HPUX compiler happy. */
2692 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2695 /* Required alignment of containing datum. */
2696 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2697 int_size_in_bytes (TREE_TYPE (tem)));
2698 preserve_temp_slots (result);
2704 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2705 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2708 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2710 /* In case we are returning the contents of an object which overlaps
2711 the place the value is being stored, use a safe function when copying
2712 a value through a pointer into a structure value return block. */
2713 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2714 && current_function_returns_struct
2715 && !current_function_returns_pcc_struct)
2717 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2718 rtx size = expr_size (from);
2720 #ifdef TARGET_MEM_FUNCTIONS
2721 emit_library_call (memcpy_libfunc, 0,
2722 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2723 XEXP (from_rtx, 0), Pmode,
2726 emit_library_call (bcopy_libfunc, 0,
2727 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2728 XEXP (to_rtx, 0), Pmode,
2732 preserve_temp_slots (to_rtx);
2737 /* Compute FROM and store the value in the rtx we got. */
2739 result = store_expr (from, to_rtx, want_value);
2740 preserve_temp_slots (result);
2745 /* Generate code for computing expression EXP,
2746 and storing the value into TARGET.
2747 Returns TARGET or an equivalent value.
2748 TARGET may contain a QUEUED rtx.
2750 If SUGGEST_REG is nonzero, copy the value through a register
2751 and return that register, if that is possible.
2753 If the value stored is a constant, we return the constant. */
2756 store_expr (exp, target, suggest_reg)
2758 register rtx target;
2762 int dont_return_target = 0;
2764 if (TREE_CODE (exp) == COMPOUND_EXPR)
2766 /* Perform first part of compound expression, then assign from second
2768 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2770 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2772 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2774 /* For conditional expression, get safe form of the target. Then
2775 test the condition, doing the appropriate assignment on either
2776 side. This avoids the creation of unnecessary temporaries.
2777 For non-BLKmode, it is more efficient not to do this. */
2779 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2782 target = protect_from_queue (target, 1);
2785 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2786 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2788 emit_jump_insn (gen_jump (lab2));
2791 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2797 else if (suggest_reg && GET_CODE (target) == MEM
2798 && GET_MODE (target) != BLKmode)
2799 /* If target is in memory and caller wants value in a register instead,
2800 arrange that. Pass TARGET as target for expand_expr so that,
2801 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2802 We know expand_expr will not use the target in that case. */
2804 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2805 GET_MODE (target), 0);
2806 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2807 temp = copy_to_reg (temp);
2808 dont_return_target = 1;
2810 else if (queued_subexp_p (target))
2811 /* If target contains a postincrement, it is not safe
2812 to use as the returned value. It would access the wrong
2813 place by the time the queued increment gets output.
2814 So copy the value through a temporary and use that temp
2817 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2819 /* Expand EXP into a new pseudo. */
2820 temp = gen_reg_rtx (GET_MODE (target));
2821 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2824 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2825 dont_return_target = 1;
2827 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2828 /* If this is an scalar in a register that is stored in a wider mode
2829 than the declared mode, compute the result into its declared mode
2830 and then convert to the wider mode. Our value is the computed
2833 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2834 convert_move (SUBREG_REG (target), temp,
2835 SUBREG_PROMOTED_UNSIGNED_P (target));
2840 temp = expand_expr (exp, target, GET_MODE (target), 0);
2841 /* DO return TARGET if it's a specified hardware register.
2842 expand_return relies on this. */
2843 if (!(target && GET_CODE (target) == REG
2844 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2845 && CONSTANT_P (temp))
2846 dont_return_target = 1;
2849 /* If value was not generated in the target, store it there.
2850 Convert the value to TARGET's type first if nec. */
2852 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2854 target = protect_from_queue (target, 1);
2855 if (GET_MODE (temp) != GET_MODE (target)
2856 && GET_MODE (temp) != VOIDmode)
2858 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2859 if (dont_return_target)
2861 /* In this case, we will return TEMP,
2862 so make sure it has the proper mode.
2863 But don't forget to store the value into TARGET. */
2864 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2865 emit_move_insn (target, temp);
2868 convert_move (target, temp, unsignedp);
2871 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2873 /* Handle copying a string constant into an array.
2874 The string constant may be shorter than the array.
2875 So copy just the string's actual length, and clear the rest. */
2878 /* Get the size of the data type of the string,
2879 which is actually the size of the target. */
2880 size = expr_size (exp);
2881 if (GET_CODE (size) == CONST_INT
2882 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2883 emit_block_move (target, temp, size,
2884 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2887 /* Compute the size of the data to copy from the string. */
2889 = fold (build (MIN_EXPR, sizetype,
2890 size_binop (CEIL_DIV_EXPR,
2891 TYPE_SIZE (TREE_TYPE (exp)),
2892 size_int (BITS_PER_UNIT)),
2894 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2895 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2899 /* Copy that much. */
2900 emit_block_move (target, temp, copy_size_rtx,
2901 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2903 /* Figure out how much is left in TARGET
2904 that we have to clear. */
2905 if (GET_CODE (copy_size_rtx) == CONST_INT)
2907 temp = plus_constant (XEXP (target, 0),
2908 TREE_STRING_LENGTH (exp));
2909 size = plus_constant (size,
2910 - TREE_STRING_LENGTH (exp));
2914 enum machine_mode size_mode = Pmode;
2916 temp = force_reg (Pmode, XEXP (target, 0));
2917 temp = expand_binop (size_mode, add_optab, temp,
2918 copy_size_rtx, NULL_RTX, 0,
2921 size = expand_binop (size_mode, sub_optab, size,
2922 copy_size_rtx, NULL_RTX, 0,
2925 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2926 GET_MODE (size), 0, 0);
2927 label = gen_label_rtx ();
2928 emit_jump_insn (gen_blt (label));
2931 if (size != const0_rtx)
2933 #ifdef TARGET_MEM_FUNCTIONS
2934 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2935 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2937 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2938 temp, Pmode, size, Pmode);
2945 else if (GET_MODE (temp) == BLKmode)
2946 emit_block_move (target, temp, expr_size (exp),
2947 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2949 emit_move_insn (target, temp);
2951 if (dont_return_target)
2956 /* Store the value of constructor EXP into the rtx TARGET.
2957 TARGET is either a REG or a MEM. */
2960 store_constructor (exp, target)
2964 tree type = TREE_TYPE (exp);
2966 /* We know our target cannot conflict, since safe_from_p has been called. */
2968 /* Don't try copying piece by piece into a hard register
2969 since that is vulnerable to being clobbered by EXP.
2970 Instead, construct in a pseudo register and then copy it all. */
2971 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2973 rtx temp = gen_reg_rtx (GET_MODE (target));
2974 store_constructor (exp, temp);
2975 emit_move_insn (target, temp);
2980 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2984 /* Inform later passes that the whole union value is dead. */
2985 if (TREE_CODE (type) == UNION_TYPE)
2986 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2988 /* If we are building a static constructor into a register,
2989 set the initial value as zero so we can fold the value into
2991 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2992 emit_move_insn (target, const0_rtx);
2994 /* If the constructor has fewer fields than the structure,
2995 clear the whole structure first. */
2996 else if (list_length (CONSTRUCTOR_ELTS (exp))
2997 != list_length (TYPE_FIELDS (type)))
2998 clear_storage (target, int_size_in_bytes (type));
3000 /* Inform later passes that the old value is dead. */
3001 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3003 /* Store each element of the constructor into
3004 the corresponding field of TARGET. */
3006 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3008 register tree field = TREE_PURPOSE (elt);
3009 register enum machine_mode mode;
3014 /* Just ignore missing fields.
3015 We cleared the whole structure, above,
3016 if any fields are missing. */
3020 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3021 unsignedp = TREE_UNSIGNED (field);
3022 mode = DECL_MODE (field);
3023 if (DECL_BIT_FIELD (field))
3026 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
3027 /* ??? This case remains to be written. */
3030 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
3032 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3033 /* The alignment of TARGET is
3034 at least what its type requires. */
3036 TYPE_ALIGN (type) / BITS_PER_UNIT,
3037 int_size_in_bytes (type));
3040 else if (TREE_CODE (type) == ARRAY_TYPE)
3044 tree domain = TYPE_DOMAIN (type);
3045 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3046 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3047 tree elttype = TREE_TYPE (type);
3049 /* If the constructor has fewer fields than the structure,
3050 clear the whole structure first. Similarly if this this is
3051 static constructor of a non-BLKmode object. */
3053 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3054 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3055 clear_storage (target, maxelt - minelt + 1);
3057 /* Inform later passes that the old value is dead. */
3058 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3060 /* Store each element of the constructor into
3061 the corresponding element of TARGET, determined
3062 by counting the elements. */
3063 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3065 elt = TREE_CHAIN (elt), i++)
3067 register enum machine_mode mode;
3072 mode = TYPE_MODE (elttype);
3073 bitsize = GET_MODE_BITSIZE (mode);
3074 unsignedp = TREE_UNSIGNED (elttype);
3076 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3078 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3079 /* The alignment of TARGET is
3080 at least what its type requires. */
3082 TYPE_ALIGN (type) / BITS_PER_UNIT,
3083 int_size_in_bytes (type));
3091 /* Store the value of EXP (an expression tree)
3092 into a subfield of TARGET which has mode MODE and occupies
3093 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3094 If MODE is VOIDmode, it means that we are storing into a bit-field.
3096 If VALUE_MODE is VOIDmode, return nothing in particular.
3097 UNSIGNEDP is not used in this case.
3099 Otherwise, return an rtx for the value stored. This rtx
3100 has mode VALUE_MODE if that is convenient to do.
3101 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3103 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3104 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3107 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3108 unsignedp, align, total_size)
3110 int bitsize, bitpos;
3111 enum machine_mode mode;
3113 enum machine_mode value_mode;
3118 HOST_WIDE_INT width_mask = 0;
3120 if (bitsize < HOST_BITS_PER_WIDE_INT)
3121 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3123 /* If we are storing into an unaligned field of an aligned union that is
3124 in a register, we may have the mode of TARGET being an integer mode but
3125 MODE == BLKmode. In that case, get an aligned object whose size and
3126 alignment are the same as TARGET and store TARGET into it (we can avoid
3127 the store if the field being stored is the entire width of TARGET). Then
3128 call ourselves recursively to store the field into a BLKmode version of
3129 that object. Finally, load from the object into TARGET. This is not
3130 very efficient in general, but should only be slightly more expensive
3131 than the otherwise-required unaligned accesses. Perhaps this can be
3132 cleaned up later. */
3135 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3137 rtx object = assign_stack_temp (GET_MODE (target),
3138 GET_MODE_SIZE (GET_MODE (target)), 0);
3139 rtx blk_object = copy_rtx (object);
3141 PUT_MODE (blk_object, BLKmode);
3143 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3144 emit_move_insn (object, target);
3146 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3149 emit_move_insn (target, object);
3154 /* If the structure is in a register or if the component
3155 is a bit field, we cannot use addressing to access it.
3156 Use bit-field techniques or SUBREG to store in it. */
3158 if (mode == VOIDmode
3159 || (mode != BLKmode && ! direct_store[(int) mode])
3160 || GET_CODE (target) == REG
3161 || GET_CODE (target) == SUBREG)
3163 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3164 /* Store the value in the bitfield. */
3165 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3166 if (value_mode != VOIDmode)
3168 /* The caller wants an rtx for the value. */
3169 /* If possible, avoid refetching from the bitfield itself. */
3171 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3174 enum machine_mode tmode;
3177 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3178 tmode = GET_MODE (temp);
3179 if (tmode == VOIDmode)
3181 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3182 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3183 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3185 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3186 NULL_RTX, value_mode, 0, align,
3193 rtx addr = XEXP (target, 0);
3196 /* If a value is wanted, it must be the lhs;
3197 so make the address stable for multiple use. */
3199 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3200 && ! CONSTANT_ADDRESS_P (addr)
3201 /* A frame-pointer reference is already stable. */
3202 && ! (GET_CODE (addr) == PLUS
3203 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3204 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3205 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3206 addr = copy_to_reg (addr);
3208 /* Now build a reference to just the desired component. */
3210 to_rtx = change_address (target, mode,
3211 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3212 MEM_IN_STRUCT_P (to_rtx) = 1;
3214 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3218 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3219 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3220 ARRAY_REFs at constant positions and find the ultimate containing object,
3223 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3224 bit position, and *PUNSIGNEDP to the signedness of the field.
3225 If the position of the field is variable, we store a tree
3226 giving the variable offset (in units) in *POFFSET.
3227 This offset is in addition to the bit position.
3228 If the position is not variable, we store 0 in *POFFSET.
3230 If any of the extraction expressions is volatile,
3231 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3233 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3234 is a mode that can be used to access the field. In that case, *PBITSIZE
3237 If the field describes a variable-sized object, *PMODE is set to
3238 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3239 this case, but the address of the object can be found. */
3242 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
3247 enum machine_mode *pmode;
3252 enum machine_mode mode = VOIDmode;
3255 if (TREE_CODE (exp) == COMPONENT_REF)
3257 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3258 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3259 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3260 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3262 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3264 size_tree = TREE_OPERAND (exp, 1);
3265 *punsignedp = TREE_UNSIGNED (exp);
3269 mode = TYPE_MODE (TREE_TYPE (exp));
3270 *pbitsize = GET_MODE_BITSIZE (mode);
3271 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3276 if (TREE_CODE (size_tree) != INTEGER_CST)
3277 mode = BLKmode, *pbitsize = -1;
3279 *pbitsize = TREE_INT_CST_LOW (size_tree);
3282 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3283 and find the ultimate containing object. */
3289 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3291 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3292 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3293 : TREE_OPERAND (exp, 2));
3295 if (TREE_CODE (pos) == PLUS_EXPR)
3298 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3300 constant = TREE_OPERAND (pos, 0);
3301 var = TREE_OPERAND (pos, 1);
3303 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3305 constant = TREE_OPERAND (pos, 1);
3306 var = TREE_OPERAND (pos, 0);
3310 *pbitpos += TREE_INT_CST_LOW (constant);
3312 offset = size_binop (PLUS_EXPR, offset,
3313 size_binop (FLOOR_DIV_EXPR, var,
3314 size_int (BITS_PER_UNIT)));
3316 offset = size_binop (FLOOR_DIV_EXPR, var,
3317 size_int (BITS_PER_UNIT));
3319 else if (TREE_CODE (pos) == INTEGER_CST)
3320 *pbitpos += TREE_INT_CST_LOW (pos);
3323 /* Assume here that the offset is a multiple of a unit.
3324 If not, there should be an explicitly added constant. */
3326 offset = size_binop (PLUS_EXPR, offset,
3327 size_binop (FLOOR_DIV_EXPR, pos,
3328 size_int (BITS_PER_UNIT)));
3330 offset = size_binop (FLOOR_DIV_EXPR, pos,
3331 size_int (BITS_PER_UNIT));
3335 else if (TREE_CODE (exp) == ARRAY_REF
3336 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3337 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
3339 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3340 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
3342 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3343 && ! ((TREE_CODE (exp) == NOP_EXPR
3344 || TREE_CODE (exp) == CONVERT_EXPR)
3345 && (TYPE_MODE (TREE_TYPE (exp))
3346 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3349 /* If any reference in the chain is volatile, the effect is volatile. */
3350 if (TREE_THIS_VOLATILE (exp))
3352 exp = TREE_OPERAND (exp, 0);
3355 /* If this was a bit-field, see if there is a mode that allows direct
3356 access in case EXP is in memory. */
3357 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
3359 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3360 if (mode == BLKmode)
3367 /* We aren't finished fixing the callers to really handle nonzero offset. */
3375 /* Given an rtx VALUE that may contain additions and multiplications,
3376 return an equivalent value that just refers to a register or memory.
3377 This is done by generating instructions to perform the arithmetic
3378 and returning a pseudo-register containing the value.
3380 The returned value may be a REG, SUBREG, MEM or constant. */
3383 force_operand (value, target)
3386 register optab binoptab = 0;
3387 /* Use a temporary to force order of execution of calls to
3391 /* Use subtarget as the target for operand 0 of a binary operation. */
3392 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3394 if (GET_CODE (value) == PLUS)
3395 binoptab = add_optab;
3396 else if (GET_CODE (value) == MINUS)
3397 binoptab = sub_optab;
3398 else if (GET_CODE (value) == MULT)
3400 op2 = XEXP (value, 1);
3401 if (!CONSTANT_P (op2)
3402 && !(GET_CODE (op2) == REG && op2 != subtarget))
3404 tmp = force_operand (XEXP (value, 0), subtarget);
3405 return expand_mult (GET_MODE (value), tmp,
3406 force_operand (op2, NULL_RTX),
3412 op2 = XEXP (value, 1);
3413 if (!CONSTANT_P (op2)
3414 && !(GET_CODE (op2) == REG && op2 != subtarget))
3416 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3418 binoptab = add_optab;
3419 op2 = negate_rtx (GET_MODE (value), op2);
3422 /* Check for an addition with OP2 a constant integer and our first
3423 operand a PLUS of a virtual register and something else. In that
3424 case, we want to emit the sum of the virtual register and the
3425 constant first and then add the other value. This allows virtual
3426 register instantiation to simply modify the constant rather than
3427 creating another one around this addition. */
3428 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3429 && GET_CODE (XEXP (value, 0)) == PLUS
3430 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3431 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3432 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3434 rtx temp = expand_binop (GET_MODE (value), binoptab,
3435 XEXP (XEXP (value, 0), 0), op2,
3436 subtarget, 0, OPTAB_LIB_WIDEN);
3437 return expand_binop (GET_MODE (value), binoptab, temp,
3438 force_operand (XEXP (XEXP (value, 0), 1), 0),
3439 target, 0, OPTAB_LIB_WIDEN);
3442 tmp = force_operand (XEXP (value, 0), subtarget);
3443 return expand_binop (GET_MODE (value), binoptab, tmp,
3444 force_operand (op2, NULL_RTX),
3445 target, 0, OPTAB_LIB_WIDEN);
3446 /* We give UNSIGNEDP = 0 to expand_binop
3447 because the only operations we are expanding here are signed ones. */
3452 /* Subroutine of expand_expr:
3453 save the non-copied parts (LIST) of an expr (LHS), and return a list
3454 which can restore these values to their previous values,
3455 should something modify their storage. */
3458 save_noncopied_parts (lhs, list)
3465 for (tail = list; tail; tail = TREE_CHAIN (tail))
3466 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3467 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3470 tree part = TREE_VALUE (tail);
3471 tree part_type = TREE_TYPE (part);
3472 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3473 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3474 int_size_in_bytes (part_type), 0);
3475 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3476 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3477 parts = tree_cons (to_be_saved,
3478 build (RTL_EXPR, part_type, NULL_TREE,
3481 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3486 /* Subroutine of expand_expr:
3487 record the non-copied parts (LIST) of an expr (LHS), and return a list
3488 which specifies the initial values of these parts. */
3491 init_noncopied_parts (lhs, list)
3498 for (tail = list; tail; tail = TREE_CHAIN (tail))
3499 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3500 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3503 tree part = TREE_VALUE (tail);
3504 tree part_type = TREE_TYPE (part);
3505 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3506 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3511 /* Subroutine of expand_expr: return nonzero iff there is no way that
3512 EXP can reference X, which is being modified. */
3515 safe_from_p (x, exp)
3525 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3526 find the underlying pseudo. */
3527 if (GET_CODE (x) == SUBREG)
3530 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3534 /* If X is a location in the outgoing argument area, it is always safe. */
3535 if (GET_CODE (x) == MEM
3536 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3537 || (GET_CODE (XEXP (x, 0)) == PLUS
3538 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3541 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3544 exp_rtl = DECL_RTL (exp);
3551 if (TREE_CODE (exp) == TREE_LIST)
3552 return ((TREE_VALUE (exp) == 0
3553 || safe_from_p (x, TREE_VALUE (exp)))
3554 && (TREE_CHAIN (exp) == 0
3555 || safe_from_p (x, TREE_CHAIN (exp))));
3560 return safe_from_p (x, TREE_OPERAND (exp, 0));
3564 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3565 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3569 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3570 the expression. If it is set, we conflict iff we are that rtx or
3571 both are in memory. Otherwise, we check all operands of the
3572 expression recursively. */
3574 switch (TREE_CODE (exp))
3577 return staticp (TREE_OPERAND (exp, 0));
3580 if (GET_CODE (x) == MEM)
3585 exp_rtl = CALL_EXPR_RTL (exp);
3588 /* Assume that the call will clobber all hard registers and
3590 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3591 || GET_CODE (x) == MEM)
3598 exp_rtl = RTL_EXPR_RTL (exp);
3600 /* We don't know what this can modify. */
3605 case WITH_CLEANUP_EXPR:
3606 exp_rtl = RTL_EXPR_RTL (exp);
3610 exp_rtl = SAVE_EXPR_RTL (exp);
3614 /* The only operand we look at is operand 1. The rest aren't
3615 part of the expression. */
3616 return safe_from_p (x, TREE_OPERAND (exp, 1));
3618 case METHOD_CALL_EXPR:
3619 /* This takes a rtx argument, but shouldn't appear here. */
3623 /* If we have an rtx, we do not need to scan our operands. */
3627 nops = tree_code_length[(int) TREE_CODE (exp)];
3628 for (i = 0; i < nops; i++)
3629 if (TREE_OPERAND (exp, i) != 0
3630 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3634 /* If we have an rtl, find any enclosed object. Then see if we conflict
3638 if (GET_CODE (exp_rtl) == SUBREG)
3640 exp_rtl = SUBREG_REG (exp_rtl);
3641 if (GET_CODE (exp_rtl) == REG
3642 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3646 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3647 are memory and EXP is not readonly. */
3648 return ! (rtx_equal_p (x, exp_rtl)
3649 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3650 && ! TREE_READONLY (exp)));
3653 /* If we reach here, it is safe. */
3657 /* Subroutine of expand_expr: return nonzero iff EXP is an
3658 expression whose type is statically determinable. */
3664 if (TREE_CODE (exp) == PARM_DECL
3665 || TREE_CODE (exp) == VAR_DECL
3666 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3667 || TREE_CODE (exp) == COMPONENT_REF
3668 || TREE_CODE (exp) == ARRAY_REF)
3673 /* expand_expr: generate code for computing expression EXP.
3674 An rtx for the computed value is returned. The value is never null.
3675 In the case of a void EXP, const0_rtx is returned.
3677 The value may be stored in TARGET if TARGET is nonzero.
3678 TARGET is just a suggestion; callers must assume that
3679 the rtx returned may not be the same as TARGET.
3681 If TARGET is CONST0_RTX, it means that the value will be ignored.
3683 If TMODE is not VOIDmode, it suggests generating the
3684 result in mode TMODE. But this is done only when convenient.
3685 Otherwise, TMODE is ignored and the value generated in its natural mode.
3686 TMODE is just a suggestion; callers must assume that
3687 the rtx returned may not have mode TMODE.
3689 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3690 with a constant address even if that address is not normally legitimate.
3691 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3693 If MODIFIER is EXPAND_SUM then when EXP is an addition
3694 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3695 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3696 products as above, or REG or MEM, or constant.
3697 Ordinarily in such cases we would output mul or add instructions
3698 and then return a pseudo reg containing the sum.
3700 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3701 it also marks a label as absolutely required (it can't be dead).
3702 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3703 This is used for outputting expressions used in initializers. */
3706 expand_expr (exp, target, tmode, modifier)
3709 enum machine_mode tmode;
3710 enum expand_modifier modifier;
3712 register rtx op0, op1, temp;
3713 tree type = TREE_TYPE (exp);
3714 int unsignedp = TREE_UNSIGNED (type);
3715 register enum machine_mode mode = TYPE_MODE (type);
3716 register enum tree_code code = TREE_CODE (exp);
3718 /* Use subtarget as the target for operand 0 of a binary operation. */
3719 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3720 rtx original_target = target;
3721 int ignore = target == const0_rtx;
3724 /* Don't use hard regs as subtargets, because the combiner
3725 can only handle pseudo regs. */
3726 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3728 /* Avoid subtargets inside loops,
3729 since they hide some invariant expressions. */
3730 if (preserve_subexpressions_p ())
3733 if (ignore) target = 0, original_target = 0;
3735 /* If will do cse, generate all results into pseudo registers
3736 since 1) that allows cse to find more things
3737 and 2) otherwise cse could produce an insn the machine
3740 if (! cse_not_expected && mode != BLKmode && target
3741 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3744 /* Ensure we reference a volatile object even if value is ignored. */
3745 if (ignore && TREE_THIS_VOLATILE (exp)
3746 && mode != VOIDmode && mode != BLKmode)
3748 target = gen_reg_rtx (mode);
3749 temp = expand_expr (exp, target, VOIDmode, modifier);
3751 emit_move_insn (target, temp);
3759 tree function = decl_function_context (exp);
3760 /* Handle using a label in a containing function. */
3761 if (function != current_function_decl && function != 0)
3763 struct function *p = find_function_data (function);
3764 /* Allocate in the memory associated with the function
3765 that the label is in. */
3766 push_obstacks (p->function_obstack,
3767 p->function_maybepermanent_obstack);
3769 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3770 label_rtx (exp), p->forced_labels);
3773 else if (modifier == EXPAND_INITIALIZER)
3774 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3775 label_rtx (exp), forced_labels);
3776 temp = gen_rtx (MEM, FUNCTION_MODE,
3777 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3778 if (function != current_function_decl && function != 0)
3779 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3784 if (DECL_RTL (exp) == 0)
3786 error_with_decl (exp, "prior parameter's size depends on `%s'");
3787 return CONST0_RTX (mode);
3793 if (DECL_RTL (exp) == 0)
3795 /* Ensure variable marked as used
3796 even if it doesn't go through a parser. */
3797 TREE_USED (exp) = 1;
3798 /* Handle variables inherited from containing functions. */
3799 context = decl_function_context (exp);
3801 /* We treat inline_function_decl as an alias for the current function
3802 because that is the inline function whose vars, types, etc.
3803 are being merged into the current function.
3804 See expand_inline_function. */
3805 if (context != 0 && context != current_function_decl
3806 && context != inline_function_decl
3807 /* If var is static, we don't need a static chain to access it. */
3808 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3809 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3813 /* Mark as non-local and addressable. */
3814 DECL_NONLOCAL (exp) = 1;
3815 mark_addressable (exp);
3816 if (GET_CODE (DECL_RTL (exp)) != MEM)
3818 addr = XEXP (DECL_RTL (exp), 0);
3819 if (GET_CODE (addr) == MEM)
3820 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3822 addr = fix_lexical_addr (addr, exp);
3823 return change_address (DECL_RTL (exp), mode, addr);
3826 /* This is the case of an array whose size is to be determined
3827 from its initializer, while the initializer is still being parsed.
3829 if (GET_CODE (DECL_RTL (exp)) == MEM
3830 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3831 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3832 XEXP (DECL_RTL (exp), 0));
3833 if (GET_CODE (DECL_RTL (exp)) == MEM
3834 && modifier != EXPAND_CONST_ADDRESS
3835 && modifier != EXPAND_SUM
3836 && modifier != EXPAND_INITIALIZER)
3838 /* DECL_RTL probably contains a constant address.
3839 On RISC machines where a constant address isn't valid,
3840 make some insns to get that address into a register. */
3841 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3843 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3844 return change_address (DECL_RTL (exp), VOIDmode,
3845 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3848 /* If the mode of DECL_RTL does not match that of the decl, it
3849 must be a promoted value. We return a SUBREG of the wanted mode,
3850 but mark it so that we know that it was already extended. */
3852 if (GET_CODE (DECL_RTL (exp)) == REG
3853 && GET_MODE (DECL_RTL (exp)) != mode)
3855 enum machine_mode decl_mode = DECL_MODE (exp);
3857 /* Get the signedness used for this variable. Ensure we get the
3858 same mode we got when the variable was declared. */
3860 PROMOTE_MODE (decl_mode, unsignedp, type);
3862 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3865 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3866 SUBREG_PROMOTED_VAR_P (temp) = 1;
3867 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3871 return DECL_RTL (exp);
3874 return immed_double_const (TREE_INT_CST_LOW (exp),
3875 TREE_INT_CST_HIGH (exp),
3879 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3882 /* If optimized, generate immediate CONST_DOUBLE
3883 which will be turned into memory by reload if necessary.
3885 We used to force a register so that loop.c could see it. But
3886 this does not allow gen_* patterns to perform optimizations with
3887 the constants. It also produces two insns in cases like "x = 1.0;".
3888 On most machines, floating-point constants are not permitted in
3889 many insns, so we'd end up copying it to a register in any case.
3891 Now, we do the copying in expand_binop, if appropriate. */
3892 return immed_real_const (exp);
3896 if (! TREE_CST_RTL (exp))
3897 output_constant_def (exp);
3899 /* TREE_CST_RTL probably contains a constant address.
3900 On RISC machines where a constant address isn't valid,
3901 make some insns to get that address into a register. */
3902 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3903 && modifier != EXPAND_CONST_ADDRESS
3904 && modifier != EXPAND_INITIALIZER
3905 && modifier != EXPAND_SUM
3906 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3907 return change_address (TREE_CST_RTL (exp), VOIDmode,
3908 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3909 return TREE_CST_RTL (exp);
3912 context = decl_function_context (exp);
3913 /* We treat inline_function_decl as an alias for the current function
3914 because that is the inline function whose vars, types, etc.
3915 are being merged into the current function.
3916 See expand_inline_function. */
3917 if (context == current_function_decl || context == inline_function_decl)
3920 /* If this is non-local, handle it. */
3923 temp = SAVE_EXPR_RTL (exp);
3924 if (temp && GET_CODE (temp) == REG)
3926 put_var_into_stack (exp);
3927 temp = SAVE_EXPR_RTL (exp);
3929 if (temp == 0 || GET_CODE (temp) != MEM)
3931 return change_address (temp, mode,
3932 fix_lexical_addr (XEXP (temp, 0), exp));
3934 if (SAVE_EXPR_RTL (exp) == 0)
3936 if (mode == BLKmode)
3938 = assign_stack_temp (mode,
3939 int_size_in_bytes (TREE_TYPE (exp)), 0);
3942 enum machine_mode var_mode = mode;
3944 if (TREE_CODE (type) == INTEGER_TYPE
3945 || TREE_CODE (type) == ENUMERAL_TYPE
3946 || TREE_CODE (type) == BOOLEAN_TYPE
3947 || TREE_CODE (type) == CHAR_TYPE
3948 || TREE_CODE (type) == REAL_TYPE
3949 || TREE_CODE (type) == POINTER_TYPE
3950 || TREE_CODE (type) == OFFSET_TYPE)
3952 PROMOTE_MODE (var_mode, unsignedp, type);
3955 temp = gen_reg_rtx (var_mode);
3958 SAVE_EXPR_RTL (exp) = temp;
3959 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3960 if (!optimize && GET_CODE (temp) == REG)
3961 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3965 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3966 must be a promoted value. We return a SUBREG of the wanted mode,
3967 but mark it so that we know that it was already extended. Note
3968 that `unsignedp' was modified above in this case. */
3970 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3971 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3973 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3974 SUBREG_PROMOTED_VAR_P (temp) = 1;
3975 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3979 return SAVE_EXPR_RTL (exp);
3982 /* Exit the current loop if the body-expression is true. */
3984 rtx label = gen_label_rtx ();
3985 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3986 expand_exit_loop (NULL_PTR);
3992 expand_start_loop (1);
3993 expand_expr_stmt (TREE_OPERAND (exp, 0));
4000 tree vars = TREE_OPERAND (exp, 0);
4001 int vars_need_expansion = 0;
4003 /* Need to open a binding contour here because
4004 if there are any cleanups they most be contained here. */
4005 expand_start_bindings (0);
4007 /* Mark the corresponding BLOCK for output in its proper place. */
4008 if (TREE_OPERAND (exp, 2) != 0
4009 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4010 insert_block (TREE_OPERAND (exp, 2));
4012 /* If VARS have not yet been expanded, expand them now. */
4015 if (DECL_RTL (vars) == 0)
4017 vars_need_expansion = 1;
4020 expand_decl_init (vars);
4021 vars = TREE_CHAIN (vars);
4024 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4026 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4032 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4034 emit_insns (RTL_EXPR_SEQUENCE (exp));
4035 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4036 return RTL_EXPR_RTL (exp);
4039 /* All elts simple constants => refer to a constant in memory. But
4040 if this is a non-BLKmode mode, let it store a field at a time
4041 since that should make a CONST_INT or CONST_DOUBLE when we
4043 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
4045 rtx constructor = output_constant_def (exp);
4046 if (modifier != EXPAND_CONST_ADDRESS
4047 && modifier != EXPAND_INITIALIZER
4048 && modifier != EXPAND_SUM
4049 && !memory_address_p (GET_MODE (constructor),
4050 XEXP (constructor, 0)))
4051 constructor = change_address (constructor, VOIDmode,
4052 XEXP (constructor, 0));
4059 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4060 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4065 if (target == 0 || ! safe_from_p (target, exp))
4067 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4068 target = gen_reg_rtx (mode);
4071 enum tree_code c = TREE_CODE (type);
4073 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4074 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
4075 MEM_IN_STRUCT_P (target) = 1;
4078 store_constructor (exp, target);
4084 tree exp1 = TREE_OPERAND (exp, 0);
4087 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4088 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4089 This code has the same general effect as simply doing
4090 expand_expr on the save expr, except that the expression PTR
4091 is computed for use as a memory address. This means different
4092 code, suitable for indexing, may be generated. */
4093 if (TREE_CODE (exp1) == SAVE_EXPR
4094 && SAVE_EXPR_RTL (exp1) == 0
4095 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4096 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4097 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4099 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4100 VOIDmode, EXPAND_SUM);
4101 op0 = memory_address (mode, temp);
4102 op0 = copy_all_regs (op0);
4103 SAVE_EXPR_RTL (exp1) = op0;
4107 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4108 op0 = memory_address (mode, op0);
4111 temp = gen_rtx (MEM, mode, op0);
4112 /* If address was computed by addition,
4113 mark this as an element of an aggregate. */
4114 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4115 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4116 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4117 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4118 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4119 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4120 || (TREE_CODE (exp1) == ADDR_EXPR
4121 && (exp2 = TREE_OPERAND (exp1, 0))
4122 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4123 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4124 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
4125 MEM_IN_STRUCT_P (temp) = 1;
4126 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4127 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4128 a location is accessed through a pointer to const does not mean
4129 that the value there can never change. */
4130 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4136 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
4137 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4139 /* Nonconstant array index or nonconstant element size.
4140 Generate the tree for *(&array+index) and expand that,
4141 except do it in a language-independent way
4142 and don't complain about non-lvalue arrays.
4143 `mark_addressable' should already have been called
4144 for any array for which this case will be reached. */
4146 /* Don't forget the const or volatile flag from the array element. */
4147 tree variant_type = build_type_variant (type,
4148 TREE_READONLY (exp),
4149 TREE_THIS_VOLATILE (exp));
4150 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
4151 TREE_OPERAND (exp, 0));
4152 tree index = TREE_OPERAND (exp, 1);
4155 /* Convert the integer argument to a type the same size as a pointer
4156 so the multiply won't overflow spuriously. */
4157 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
4158 index = convert (type_for_size (POINTER_SIZE, 0), index);
4160 /* Don't think the address has side effects
4161 just because the array does.
4162 (In some cases the address might have side effects,
4163 and we fail to record that fact here. However, it should not
4164 matter, since expand_expr should not care.) */
4165 TREE_SIDE_EFFECTS (array_adr) = 0;
4167 elt = build1 (INDIRECT_REF, type,
4168 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
4170 fold (build (MULT_EXPR,
4171 TYPE_POINTER_TO (variant_type),
4172 index, size_in_bytes (type))))));
4174 /* Volatility, etc., of new expression is same as old expression. */
4175 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4176 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4177 TREE_READONLY (elt) = TREE_READONLY (exp);
4179 return expand_expr (elt, target, tmode, modifier);
4182 /* Fold an expression like: "foo"[2].
4183 This is not done in fold so it won't happen inside &. */
4186 tree arg0 = TREE_OPERAND (exp, 0);
4187 tree arg1 = TREE_OPERAND (exp, 1);
4189 if (TREE_CODE (arg0) == STRING_CST
4190 && TREE_CODE (arg1) == INTEGER_CST
4191 && !TREE_INT_CST_HIGH (arg1)
4192 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4194 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
4196 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
4197 TREE_TYPE (exp) = integer_type_node;
4198 return expand_expr (exp, target, tmode, modifier);
4200 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
4202 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4203 TREE_TYPE (exp) = integer_type_node;
4204 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
4209 /* If this is a constant index into a constant array,
4210 just get the value from the array. Handle both the cases when
4211 we have an explicit constructor and when our operand is a variable
4212 that was declared const. */
4214 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4215 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4217 tree index = fold (TREE_OPERAND (exp, 1));
4218 if (TREE_CODE (index) == INTEGER_CST
4219 && TREE_INT_CST_HIGH (index) == 0)
4221 int i = TREE_INT_CST_LOW (index);
4222 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4225 elem = TREE_CHAIN (elem);
4227 return expand_expr (fold (TREE_VALUE (elem)), target,
4232 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
4233 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4234 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
4235 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4236 && DECL_INITIAL (TREE_OPERAND (exp, 0))
4238 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
4241 tree index = fold (TREE_OPERAND (exp, 1));
4242 if (TREE_CODE (index) == INTEGER_CST
4243 && TREE_INT_CST_HIGH (index) == 0)
4245 int i = TREE_INT_CST_LOW (index);
4246 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
4248 if (TREE_CODE (init) == CONSTRUCTOR)
4250 tree elem = CONSTRUCTOR_ELTS (init);
4253 elem = TREE_CHAIN (elem);
4255 return expand_expr (fold (TREE_VALUE (elem)), target,
4258 else if (TREE_CODE (init) == STRING_CST
4259 && i < TREE_STRING_LENGTH (init))
4261 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4262 return convert_to_mode (mode, temp, 0);
4266 /* Treat array-ref with constant index as a component-ref. */
4270 /* If the operand is a CONSTRUCTOR, we can just extract the
4271 appropriate field if it is present. */
4272 if (code != ARRAY_REF
4273 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4277 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4278 elt = TREE_CHAIN (elt))
4279 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4280 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4284 enum machine_mode mode1;
4289 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4290 &mode1, &unsignedp, &volatilep);
4292 /* In some cases, we will be offsetting OP0's address by a constant.
4293 So get it as a sum, if possible. If we will be using it
4294 directly in an insn, we validate it. */
4295 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4297 /* If this is a constant, put it into a register if it is a
4298 legitimate constant and memory if it isn't. */
4299 if (CONSTANT_P (op0))
4301 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4302 if (LEGITIMATE_CONSTANT_P (op0))
4303 op0 = force_reg (mode, op0);
4305 op0 = validize_mem (force_const_mem (mode, op0));
4310 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4312 if (GET_CODE (op0) != MEM)
4314 op0 = change_address (op0, VOIDmode,
4315 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4316 force_reg (Pmode, offset_rtx)));
4319 /* Don't forget about volatility even if this is a bitfield. */
4320 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4322 op0 = copy_rtx (op0);
4323 MEM_VOLATILE_P (op0) = 1;
4326 if (mode1 == VOIDmode
4327 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4328 && modifier != EXPAND_CONST_ADDRESS
4329 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4330 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
4332 /* In cases where an aligned union has an unaligned object
4333 as a field, we might be extracting a BLKmode value from
4334 an integer-mode (e.g., SImode) object. Handle this case
4335 by doing the extract into an object as wide as the field
4336 (which we know to be the width of a basic mode), then
4337 storing into memory, and changing the mode to BLKmode. */
4338 enum machine_mode ext_mode = mode;
4340 if (ext_mode == BLKmode)
4341 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4343 if (ext_mode == BLKmode)
4346 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4347 unsignedp, target, ext_mode, ext_mode,
4348 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4349 int_size_in_bytes (TREE_TYPE (tem)));
4350 if (mode == BLKmode)
4352 rtx new = assign_stack_temp (ext_mode,
4353 bitsize / BITS_PER_UNIT, 0);
4355 emit_move_insn (new, op0);
4356 op0 = copy_rtx (new);
4357 PUT_MODE (op0, BLKmode);
4363 /* Get a reference to just this component. */
4364 if (modifier == EXPAND_CONST_ADDRESS
4365 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4366 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4367 (bitpos / BITS_PER_UNIT)));
4369 op0 = change_address (op0, mode1,
4370 plus_constant (XEXP (op0, 0),
4371 (bitpos / BITS_PER_UNIT)));
4372 MEM_IN_STRUCT_P (op0) = 1;
4373 MEM_VOLATILE_P (op0) |= volatilep;
4374 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4377 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4378 convert_move (target, op0, unsignedp);
4384 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
4385 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4386 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4387 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4388 MEM_IN_STRUCT_P (temp) = 1;
4389 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4390 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4391 a location is accessed through a pointer to const does not mean
4392 that the value there can never change. */
4393 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4398 /* Intended for a reference to a buffer of a file-object in Pascal.
4399 But it's not certain that a special tree code will really be
4400 necessary for these. INDIRECT_REF might work for them. */
4404 /* IN_EXPR: Inlined pascal set IN expression.
4407 rlo = set_low - (set_low%bits_per_word);
4408 the_word = set [ (index - rlo)/bits_per_word ];
4409 bit_index = index % bits_per_word;
4410 bitmask = 1 << bit_index;
4411 return !!(the_word & bitmask); */
4413 preexpand_calls (exp);
4415 tree set = TREE_OPERAND (exp, 0);
4416 tree index = TREE_OPERAND (exp, 1);
4417 tree set_type = TREE_TYPE (set);
4419 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4420 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4426 rtx diff, quo, rem, addr, bit, result;
4427 rtx setval, setaddr;
4428 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4431 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4433 /* If domain is empty, answer is no. */
4434 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4437 index_val = expand_expr (index, 0, VOIDmode, 0);
4438 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4439 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4440 setval = expand_expr (set, 0, VOIDmode, 0);
4441 setaddr = XEXP (setval, 0);
4443 /* Compare index against bounds, if they are constant. */
4444 if (GET_CODE (index_val) == CONST_INT
4445 && GET_CODE (lo_r) == CONST_INT)
4447 if (INTVAL (index_val) < INTVAL (lo_r))
4451 if (GET_CODE (index_val) == CONST_INT
4452 && GET_CODE (hi_r) == CONST_INT)
4454 if (INTVAL (hi_r) < INTVAL (index_val))
4458 /* If we get here, we have to generate the code for both cases
4459 (in range and out of range). */
4461 op0 = gen_label_rtx ();
4462 op1 = gen_label_rtx ();
4464 if (! (GET_CODE (index_val) == CONST_INT
4465 && GET_CODE (lo_r) == CONST_INT))
4467 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
4468 emit_jump_insn (gen_blt (op1));
4471 if (! (GET_CODE (index_val) == CONST_INT
4472 && GET_CODE (hi_r) == CONST_INT))
4474 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
4475 emit_jump_insn (gen_bgt (op1));
4478 /* Calculate the element number of bit zero in the first word
4480 if (GET_CODE (lo_r) == CONST_INT)
4481 rlow = gen_rtx (CONST_INT, VOIDmode,
4482 INTVAL (lo_r) & ~ (1 << BITS_PER_UNIT));
4484 rlow = expand_binop (index_mode, and_optab,
4485 lo_r, gen_rtx (CONST_INT, VOIDmode,
4486 ~ (1 << BITS_PER_UNIT)),
4487 0, 0, OPTAB_LIB_WIDEN);
4489 diff = expand_binop (index_mode, sub_optab,
4490 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4492 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4493 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4495 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4496 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4498 addr = memory_address (byte_mode,
4499 expand_binop (index_mode, add_optab,
4501 /* Extract the bit we want to examine */
4502 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4503 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4504 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4505 1, OPTAB_LIB_WIDEN);
4506 emit_move_insn (target, result);
4508 /* Output the code to handle the out-of-range case. */
4511 emit_move_insn (target, const0_rtx);
4516 case WITH_CLEANUP_EXPR:
4517 if (RTL_EXPR_RTL (exp) == 0)
4520 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4522 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4523 /* That's it for this cleanup. */
4524 TREE_OPERAND (exp, 2) = 0;
4526 return RTL_EXPR_RTL (exp);
4529 /* Check for a built-in function. */
4530 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4531 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4532 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4533 return expand_builtin (exp, target, subtarget, tmode, ignore);
4534 /* If this call was expanded already by preexpand_calls,
4535 just return the result we got. */
4536 if (CALL_EXPR_RTL (exp) != 0)
4537 return CALL_EXPR_RTL (exp);
4538 return expand_call (exp, target, ignore);
4540 case NON_LVALUE_EXPR:
4543 case REFERENCE_EXPR:
4544 if (TREE_CODE (type) == VOID_TYPE || ignore)
4546 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4549 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4550 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4551 if (TREE_CODE (type) == UNION_TYPE)
4553 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4556 if (mode == BLKmode)
4558 if (TYPE_SIZE (type) == 0
4559 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4561 target = assign_stack_temp (BLKmode,
4562 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4563 + BITS_PER_UNIT - 1)
4564 / BITS_PER_UNIT, 0);
4567 target = gen_reg_rtx (mode);
4569 if (GET_CODE (target) == MEM)
4570 /* Store data into beginning of memory target. */
4571 store_expr (TREE_OPERAND (exp, 0),
4572 change_address (target, TYPE_MODE (valtype), 0), 0);
4574 else if (GET_CODE (target) == REG)
4575 /* Store this field into a union of the proper type. */
4576 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4577 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4579 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4583 /* Return the entire union. */
4586 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4587 if (GET_MODE (op0) == mode)
4589 /* If arg is a constant integer being extended from a narrower mode,
4590 we must really truncate to get the extended bits right. Otherwise
4591 (unsigned long) (unsigned char) ("\377"[0])
4592 would come out as ffffffff. */
4593 if (GET_MODE (op0) == VOIDmode
4594 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4595 < GET_MODE_BITSIZE (mode)))
4597 /* MODE must be narrower than HOST_BITS_PER_INT. */
4598 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4600 if (width < HOST_BITS_PER_WIDE_INT)
4602 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4603 : CONST_DOUBLE_LOW (op0));
4604 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4605 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4606 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4608 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4610 op0 = GEN_INT (val);
4614 op0 = (simplify_unary_operation
4615 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4616 ? ZERO_EXTEND : SIGN_EXTEND),
4618 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4623 if (GET_MODE (op0) == VOIDmode)
4625 if (modifier == EXPAND_INITIALIZER)
4626 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4627 if (flag_force_mem && GET_CODE (op0) == MEM)
4628 op0 = copy_to_reg (op0);
4631 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4633 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4637 /* We come here from MINUS_EXPR when the second operand is a constant. */
4639 this_optab = add_optab;
4641 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4642 something else, make sure we add the register to the constant and
4643 then to the other thing. This case can occur during strength
4644 reduction and doing it this way will produce better code if the
4645 frame pointer or argument pointer is eliminated.
4647 fold-const.c will ensure that the constant is always in the inner
4648 PLUS_EXPR, so the only case we need to do anything about is if
4649 sp, ap, or fp is our second argument, in which case we must swap
4650 the innermost first argument and our second argument. */
4652 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4653 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4654 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4655 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4656 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4657 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4659 tree t = TREE_OPERAND (exp, 1);
4661 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4662 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4665 /* If the result is to be Pmode and we are adding an integer to
4666 something, we might be forming a constant. So try to use
4667 plus_constant. If it produces a sum and we can't accept it,
4668 use force_operand. This allows P = &ARR[const] to generate
4669 efficient code on machines where a SYMBOL_REF is not a valid
4672 If this is an EXPAND_SUM call, always return the sum. */
4673 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4674 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4675 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4678 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4680 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4681 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4682 op1 = force_operand (op1, target);
4686 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4687 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4688 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4691 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4693 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4694 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4695 op0 = force_operand (op0, target);
4699 /* No sense saving up arithmetic to be done
4700 if it's all in the wrong mode to form part of an address.
4701 And force_operand won't know whether to sign-extend or
4703 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4704 || mode != Pmode) goto binop;
4706 preexpand_calls (exp);
4707 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4710 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4711 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4713 /* Make sure any term that's a sum with a constant comes last. */
4714 if (GET_CODE (op0) == PLUS
4715 && CONSTANT_P (XEXP (op0, 1)))
4721 /* If adding to a sum including a constant,
4722 associate it to put the constant outside. */
4723 if (GET_CODE (op1) == PLUS
4724 && CONSTANT_P (XEXP (op1, 1)))
4726 rtx constant_term = const0_rtx;
4728 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4731 /* Ensure that MULT comes first if there is one. */
4732 else if (GET_CODE (op0) == MULT)
4733 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4735 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4737 /* Let's also eliminate constants from op0 if possible. */
4738 op0 = eliminate_constant_term (op0, &constant_term);
4740 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4741 their sum should be a constant. Form it into OP1, since the
4742 result we want will then be OP0 + OP1. */
4744 temp = simplify_binary_operation (PLUS, mode, constant_term,
4749 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4752 /* Put a constant term last and put a multiplication first. */
4753 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4754 temp = op1, op1 = op0, op0 = temp;
4756 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4757 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4760 /* Handle difference of two symbolic constants,
4761 for the sake of an initializer. */
4762 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4763 && really_constant_p (TREE_OPERAND (exp, 0))
4764 && really_constant_p (TREE_OPERAND (exp, 1)))
4766 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4767 VOIDmode, modifier);
4768 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4769 VOIDmode, modifier);
4770 return gen_rtx (MINUS, mode, op0, op1);
4772 /* Convert A - const to A + (-const). */
4773 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4775 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4776 fold (build1 (NEGATE_EXPR, type,
4777 TREE_OPERAND (exp, 1))));
4780 this_optab = sub_optab;
4784 preexpand_calls (exp);
4785 /* If first operand is constant, swap them.
4786 Thus the following special case checks need only
4787 check the second operand. */
4788 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4790 register tree t1 = TREE_OPERAND (exp, 0);
4791 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4792 TREE_OPERAND (exp, 1) = t1;
4795 /* Attempt to return something suitable for generating an
4796 indexed address, for machines that support that. */
4798 if (modifier == EXPAND_SUM && mode == Pmode
4799 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4800 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4802 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4804 /* Apply distributive law if OP0 is x+c. */
4805 if (GET_CODE (op0) == PLUS
4806 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4807 return gen_rtx (PLUS, mode,
4808 gen_rtx (MULT, mode, XEXP (op0, 0),
4809 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4810 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4811 * INTVAL (XEXP (op0, 1))));
4813 if (GET_CODE (op0) != REG)
4814 op0 = force_operand (op0, NULL_RTX);
4815 if (GET_CODE (op0) != REG)
4816 op0 = copy_to_mode_reg (mode, op0);
4818 return gen_rtx (MULT, mode, op0,
4819 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4822 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4825 /* Check for multiplying things that have been extended
4826 from a narrower type. If this machine supports multiplying
4827 in that narrower type with a result in the desired type,
4828 do it that way, and avoid the explicit type-conversion. */
4829 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4830 && TREE_CODE (type) == INTEGER_TYPE
4831 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4832 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4833 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4834 && int_fits_type_p (TREE_OPERAND (exp, 1),
4835 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4836 /* Don't use a widening multiply if a shift will do. */
4837 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4838 > HOST_BITS_PER_WIDE_INT)
4839 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4841 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4842 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4844 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4845 /* If both operands are extended, they must either both
4846 be zero-extended or both be sign-extended. */
4847 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4849 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4851 enum machine_mode innermode
4852 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4853 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4854 ? umul_widen_optab : smul_widen_optab);
4855 if (mode == GET_MODE_WIDER_MODE (innermode)
4856 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4858 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4859 NULL_RTX, VOIDmode, 0);
4860 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4861 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4864 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4865 NULL_RTX, VOIDmode, 0);
4869 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4870 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4871 return expand_mult (mode, op0, op1, target, unsignedp);
4873 case TRUNC_DIV_EXPR:
4874 case FLOOR_DIV_EXPR:
4876 case ROUND_DIV_EXPR:
4877 case EXACT_DIV_EXPR:
4878 preexpand_calls (exp);
4879 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4881 /* Possible optimization: compute the dividend with EXPAND_SUM
4882 then if the divisor is constant can optimize the case
4883 where some terms of the dividend have coeffs divisible by it. */
4884 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4885 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4886 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4889 this_optab = flodiv_optab;
4892 case TRUNC_MOD_EXPR:
4893 case FLOOR_MOD_EXPR:
4895 case ROUND_MOD_EXPR:
4896 preexpand_calls (exp);
4897 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4899 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4900 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4901 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4903 case FIX_ROUND_EXPR:
4904 case FIX_FLOOR_EXPR:
4906 abort (); /* Not used for C. */
4908 case FIX_TRUNC_EXPR:
4909 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4911 target = gen_reg_rtx (mode);
4912 expand_fix (target, op0, unsignedp);
4916 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4918 target = gen_reg_rtx (mode);
4919 /* expand_float can't figure out what to do if FROM has VOIDmode.
4920 So give it the correct mode. With -O, cse will optimize this. */
4921 if (GET_MODE (op0) == VOIDmode)
4922 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4924 expand_float (target, op0,
4925 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4929 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4930 temp = expand_unop (mode, neg_optab, op0, target, 0);
4936 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4938 /* Handle complex values specially. */
4940 enum machine_mode opmode
4941 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4943 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4944 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4945 return expand_complex_abs (opmode, op0, target, unsignedp);
4948 /* Unsigned abs is simply the operand. Testing here means we don't
4949 risk generating incorrect code below. */
4950 if (TREE_UNSIGNED (type))
4953 /* First try to do it with a special abs instruction. */
4954 temp = expand_unop (mode, abs_optab, op0, target, 0);
4958 /* If this machine has expensive jumps, we can do integer absolute
4959 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4960 where W is the width of MODE. */
4962 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4964 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4965 size_int (GET_MODE_BITSIZE (mode) - 1),
4968 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4971 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4978 /* If that does not win, use conditional jump and negate. */
4979 target = original_target;
4980 temp = gen_label_rtx ();
4981 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4982 || (GET_CODE (target) == REG
4983 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4984 target = gen_reg_rtx (mode);
4985 emit_move_insn (target, op0);
4986 emit_cmp_insn (target,
4987 expand_expr (convert (type, integer_zero_node),
4988 NULL_RTX, VOIDmode, 0),
4989 GE, NULL_RTX, mode, 0, 0);
4991 emit_jump_insn (gen_bge (temp));
4992 op0 = expand_unop (mode, neg_optab, target, target, 0);
4994 emit_move_insn (target, op0);
5001 target = original_target;
5002 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5003 || (GET_CODE (target) == REG
5004 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5005 target = gen_reg_rtx (mode);
5006 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5007 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5009 /* First try to do it with a special MIN or MAX instruction.
5010 If that does not win, use a conditional jump to select the proper
5012 this_optab = (TREE_UNSIGNED (type)
5013 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5014 : (code == MIN_EXPR ? smin_optab : smax_optab));
5016 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5022 emit_move_insn (target, op0);
5023 op0 = gen_label_rtx ();
5024 if (code == MAX_EXPR)
5025 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5026 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5027 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5029 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5030 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5031 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5032 if (temp == const0_rtx)
5033 emit_move_insn (target, op1);
5034 else if (temp != const_true_rtx)
5036 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5037 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5040 emit_move_insn (target, op1);
5045 /* ??? Can optimize when the operand of this is a bitwise operation,
5046 by using a different bitwise operation. */
5048 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5049 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5055 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5056 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5061 /* ??? Can optimize bitwise operations with one arg constant.
5062 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5063 and (a bitwise1 b) bitwise2 b (etc)
5064 but that is probably not worth while. */
5066 /* BIT_AND_EXPR is for bitwise anding.
5067 TRUTH_AND_EXPR is for anding two boolean values
5068 when we want in all cases to compute both of them.
5069 In general it is fastest to do TRUTH_AND_EXPR by
5070 computing both operands as actual zero-or-1 values
5071 and then bitwise anding. In cases where there cannot
5072 be any side effects, better code would be made by
5073 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5074 but the question is how to recognize those cases. */
5076 case TRUTH_AND_EXPR:
5078 this_optab = and_optab;
5081 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5084 this_optab = ior_optab;
5087 case TRUTH_XOR_EXPR:
5089 this_optab = xor_optab;
5096 preexpand_calls (exp);
5097 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5099 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5100 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5103 /* Could determine the answer when only additive constants differ.
5104 Also, the addition of one can be handled by changing the condition. */
5111 preexpand_calls (exp);
5112 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5115 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5116 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5118 && GET_CODE (original_target) == REG
5119 && (GET_MODE (original_target)
5120 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5122 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5123 if (temp != original_target)
5124 temp = copy_to_reg (temp);
5125 op1 = gen_label_rtx ();
5126 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5127 GET_MODE (temp), unsignedp, 0);
5128 emit_jump_insn (gen_beq (op1));
5129 emit_move_insn (temp, const1_rtx);
5133 /* If no set-flag instruction, must generate a conditional
5134 store into a temporary variable. Drop through
5135 and handle this like && and ||. */
5137 case TRUTH_ANDIF_EXPR:
5138 case TRUTH_ORIF_EXPR:
5139 if (target == 0 || ! safe_from_p (target, exp)
5140 /* Make sure we don't have a hard reg (such as function's return
5141 value) live across basic blocks, if not optimizing. */
5142 || (!optimize && GET_CODE (target) == REG
5143 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5144 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5145 emit_clr_insn (target);
5146 op1 = gen_label_rtx ();
5147 jumpifnot (exp, op1);
5148 emit_0_to_1_insn (target);
5152 case TRUTH_NOT_EXPR:
5153 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5154 /* The parser is careful to generate TRUTH_NOT_EXPR
5155 only with operands that are always zero or one. */
5156 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5157 target, 1, OPTAB_LIB_WIDEN);
5163 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5165 return expand_expr (TREE_OPERAND (exp, 1),
5166 (ignore ? const0_rtx : target),
5171 /* Note that COND_EXPRs whose type is a structure or union
5172 are required to be constructed to contain assignments of
5173 a temporary variable, so that we can evaluate them here
5174 for side effect only. If type is void, we must do likewise. */
5176 /* If an arm of the branch requires a cleanup,
5177 only that cleanup is performed. */
5180 tree binary_op = 0, unary_op = 0;
5181 tree old_cleanups = cleanups_this_call;
5182 cleanups_this_call = 0;
5184 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5185 convert it to our mode, if necessary. */
5186 if (integer_onep (TREE_OPERAND (exp, 1))
5187 && integer_zerop (TREE_OPERAND (exp, 2))
5188 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5190 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5191 if (GET_MODE (op0) == mode)
5194 target = gen_reg_rtx (mode);
5195 convert_move (target, op0, unsignedp);
5199 /* If we are not to produce a result, we have no target. Otherwise,
5200 if a target was specified use it; it will not be used as an
5201 intermediate target unless it is safe. If no target, use a
5204 if (mode == VOIDmode || ignore)
5206 else if (original_target
5207 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5208 temp = original_target;
5209 else if (mode == BLKmode)
5211 if (TYPE_SIZE (type) == 0
5212 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5214 temp = assign_stack_temp (BLKmode,
5215 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5216 + BITS_PER_UNIT - 1)
5217 / BITS_PER_UNIT, 0);
5220 temp = gen_reg_rtx (mode);
5222 /* Check for X ? A + B : A. If we have this, we can copy
5223 A to the output and conditionally add B. Similarly for unary
5224 operations. Don't do this if X has side-effects because
5225 those side effects might affect A or B and the "?" operation is
5226 a sequence point in ANSI. (We test for side effects later.) */
5228 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5229 && operand_equal_p (TREE_OPERAND (exp, 2),
5230 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5231 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5232 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5233 && operand_equal_p (TREE_OPERAND (exp, 1),
5234 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5235 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5236 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5237 && operand_equal_p (TREE_OPERAND (exp, 2),
5238 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5239 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5240 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5241 && operand_equal_p (TREE_OPERAND (exp, 1),
5242 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5243 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5245 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5246 operation, do this as A + (X != 0). Similarly for other simple
5247 binary operators. */
5248 if (singleton && binary_op
5249 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5250 && (TREE_CODE (binary_op) == PLUS_EXPR
5251 || TREE_CODE (binary_op) == MINUS_EXPR
5252 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5253 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5254 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5255 && integer_onep (TREE_OPERAND (binary_op, 1))
5256 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5259 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5260 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5261 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5262 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5265 /* If we had X ? A : A + 1, do this as A + (X == 0).
5267 We have to invert the truth value here and then put it
5268 back later if do_store_flag fails. We cannot simply copy
5269 TREE_OPERAND (exp, 0) to another variable and modify that
5270 because invert_truthvalue can modify the tree pointed to
5272 if (singleton == TREE_OPERAND (exp, 1))
5273 TREE_OPERAND (exp, 0)
5274 = invert_truthvalue (TREE_OPERAND (exp, 0));
5276 result = do_store_flag (TREE_OPERAND (exp, 0),
5277 (safe_from_p (temp, singleton)
5279 mode, BRANCH_COST <= 1);
5283 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5284 return expand_binop (mode, boptab, op1, result, temp,
5285 unsignedp, OPTAB_LIB_WIDEN);
5287 else if (singleton == TREE_OPERAND (exp, 1))
5288 TREE_OPERAND (exp, 0)
5289 = invert_truthvalue (TREE_OPERAND (exp, 0));
5293 op0 = gen_label_rtx ();
5295 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5299 /* If the target conflicts with the other operand of the
5300 binary op, we can't use it. Also, we can't use the target
5301 if it is a hard register, because evaluating the condition
5302 might clobber it. */
5304 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5305 || (GET_CODE (temp) == REG
5306 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5307 temp = gen_reg_rtx (mode);
5308 store_expr (singleton, temp, 0);
5311 expand_expr (singleton,
5312 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
5313 if (cleanups_this_call)
5315 sorry ("aggregate value in COND_EXPR");
5316 cleanups_this_call = 0;
5318 if (singleton == TREE_OPERAND (exp, 1))
5319 jumpif (TREE_OPERAND (exp, 0), op0);
5321 jumpifnot (TREE_OPERAND (exp, 0), op0);
5323 if (binary_op && temp == 0)
5324 /* Just touch the other operand. */
5325 expand_expr (TREE_OPERAND (binary_op, 1),
5326 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5328 store_expr (build (TREE_CODE (binary_op), type,
5329 make_tree (type, temp),
5330 TREE_OPERAND (binary_op, 1)),
5333 store_expr (build1 (TREE_CODE (unary_op), type,
5334 make_tree (type, temp)),
5339 /* This is now done in jump.c and is better done there because it
5340 produces shorter register lifetimes. */
5342 /* Check for both possibilities either constants or variables
5343 in registers (but not the same as the target!). If so, can
5344 save branches by assigning one, branching, and assigning the
5346 else if (temp && GET_MODE (temp) != BLKmode
5347 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5348 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5349 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5350 && DECL_RTL (TREE_OPERAND (exp, 1))
5351 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5352 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5353 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5354 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5355 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5356 && DECL_RTL (TREE_OPERAND (exp, 2))
5357 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5358 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5360 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5361 temp = gen_reg_rtx (mode);
5362 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5363 jumpifnot (TREE_OPERAND (exp, 0), op0);
5364 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5368 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5369 comparison operator. If we have one of these cases, set the
5370 output to A, branch on A (cse will merge these two references),
5371 then set the output to FOO. */
5373 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5374 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5375 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5376 TREE_OPERAND (exp, 1), 0)
5377 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5378 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5380 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5381 temp = gen_reg_rtx (mode);
5382 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5383 jumpif (TREE_OPERAND (exp, 0), op0);
5384 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5388 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5389 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5390 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5391 TREE_OPERAND (exp, 2), 0)
5392 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5393 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5395 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5396 temp = gen_reg_rtx (mode);
5397 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5398 jumpifnot (TREE_OPERAND (exp, 0), op0);
5399 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5404 op1 = gen_label_rtx ();
5405 jumpifnot (TREE_OPERAND (exp, 0), op0);
5407 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5409 expand_expr (TREE_OPERAND (exp, 1),
5410 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5411 if (cleanups_this_call)
5413 sorry ("aggregate value in COND_EXPR");
5414 cleanups_this_call = 0;
5418 emit_jump_insn (gen_jump (op1));
5422 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5424 expand_expr (TREE_OPERAND (exp, 2),
5425 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5428 if (cleanups_this_call)
5430 sorry ("aggregate value in COND_EXPR");
5431 cleanups_this_call = 0;
5437 cleanups_this_call = old_cleanups;
5443 /* Something needs to be initialized, but we didn't know
5444 where that thing was when building the tree. For example,
5445 it could be the return value of a function, or a parameter
5446 to a function which lays down in the stack, or a temporary
5447 variable which must be passed by reference.
5449 We guarantee that the expression will either be constructed
5450 or copied into our original target. */
5452 tree slot = TREE_OPERAND (exp, 0);
5455 if (TREE_CODE (slot) != VAR_DECL)
5460 if (DECL_RTL (slot) != 0)
5462 target = DECL_RTL (slot);
5463 /* If we have already expanded the slot, so don't do
5465 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5470 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5471 /* All temp slots at this level must not conflict. */
5472 preserve_temp_slots (target);
5473 DECL_RTL (slot) = target;
5477 /* I bet this needs to be done, and I bet that it needs to
5478 be above, inside the else clause. The reason is
5479 simple, how else is it going to get cleaned up? (mrs)
5481 The reason is probably did not work before, and was
5482 commented out is because this was re-expanding already
5483 expanded target_exprs (target == 0 and DECL_RTL (slot)
5484 != 0) also cleaning them up many times as well. :-( */
5486 /* Since SLOT is not known to the called function
5487 to belong to its stack frame, we must build an explicit
5488 cleanup. This case occurs when we must build up a reference
5489 to pass the reference as an argument. In this case,
5490 it is very likely that such a reference need not be
5493 if (TREE_OPERAND (exp, 2) == 0)
5494 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5495 if (TREE_OPERAND (exp, 2))
5496 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5497 cleanups_this_call);
5502 /* This case does occur, when expanding a parameter which
5503 needs to be constructed on the stack. The target
5504 is the actual stack address that we want to initialize.
5505 The function we call will perform the cleanup in this case. */
5507 DECL_RTL (slot) = target;
5510 exp1 = TREE_OPERAND (exp, 1);
5511 /* Mark it as expanded. */
5512 TREE_OPERAND (exp, 1) = NULL_TREE;
5514 return expand_expr (exp1, target, tmode, modifier);
5519 tree lhs = TREE_OPERAND (exp, 0);
5520 tree rhs = TREE_OPERAND (exp, 1);
5521 tree noncopied_parts = 0;
5522 tree lhs_type = TREE_TYPE (lhs);
5524 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5525 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5526 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5527 TYPE_NONCOPIED_PARTS (lhs_type));
5528 while (noncopied_parts != 0)
5530 expand_assignment (TREE_VALUE (noncopied_parts),
5531 TREE_PURPOSE (noncopied_parts), 0, 0);
5532 noncopied_parts = TREE_CHAIN (noncopied_parts);
5539 /* If lhs is complex, expand calls in rhs before computing it.
5540 That's so we don't compute a pointer and save it over a call.
5541 If lhs is simple, compute it first so we can give it as a
5542 target if the rhs is just a call. This avoids an extra temp and copy
5543 and that prevents a partial-subsumption which makes bad code.
5544 Actually we could treat component_ref's of vars like vars. */
5546 tree lhs = TREE_OPERAND (exp, 0);
5547 tree rhs = TREE_OPERAND (exp, 1);
5548 tree noncopied_parts = 0;
5549 tree lhs_type = TREE_TYPE (lhs);
5553 if (TREE_CODE (lhs) != VAR_DECL
5554 && TREE_CODE (lhs) != RESULT_DECL
5555 && TREE_CODE (lhs) != PARM_DECL)
5556 preexpand_calls (exp);
5558 /* Check for |= or &= of a bitfield of size one into another bitfield
5559 of size 1. In this case, (unless we need the result of the
5560 assignment) we can do this more efficiently with a
5561 test followed by an assignment, if necessary.
5563 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5564 things change so we do, this code should be enhanced to
5567 && TREE_CODE (lhs) == COMPONENT_REF
5568 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5569 || TREE_CODE (rhs) == BIT_AND_EXPR)
5570 && TREE_OPERAND (rhs, 0) == lhs
5571 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5572 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5573 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5575 rtx label = gen_label_rtx ();
5577 do_jump (TREE_OPERAND (rhs, 1),
5578 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5579 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5580 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5581 (TREE_CODE (rhs) == BIT_IOR_EXPR
5583 : integer_zero_node)),
5585 do_pending_stack_adjust ();
5590 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5591 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5592 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5593 TYPE_NONCOPIED_PARTS (lhs_type));
5595 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5596 while (noncopied_parts != 0)
5598 expand_assignment (TREE_PURPOSE (noncopied_parts),
5599 TREE_VALUE (noncopied_parts), 0, 0);
5600 noncopied_parts = TREE_CHAIN (noncopied_parts);
5605 case PREINCREMENT_EXPR:
5606 case PREDECREMENT_EXPR:
5607 return expand_increment (exp, 0);
5609 case POSTINCREMENT_EXPR:
5610 case POSTDECREMENT_EXPR:
5611 /* Faster to treat as pre-increment if result is not used. */
5612 return expand_increment (exp, ! ignore);
5615 /* Are we taking the address of a nested function? */
5616 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5617 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5619 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5620 op0 = force_operand (op0, target);
5624 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5625 (modifier == EXPAND_INITIALIZER
5626 ? modifier : EXPAND_CONST_ADDRESS));
5627 if (GET_CODE (op0) != MEM)
5630 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5631 return XEXP (op0, 0);
5632 op0 = force_operand (XEXP (op0, 0), target);
5634 if (flag_force_addr && GET_CODE (op0) != REG)
5635 return force_reg (Pmode, op0);
5638 case ENTRY_VALUE_EXPR:
5641 /* COMPLEX type for Extended Pascal & Fortran */
5644 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5648 /* Get the rtx code of the operands. */
5649 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5650 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5653 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5655 prev = get_last_insn ();
5657 /* Tell flow that the whole of the destination is being set. */
5658 if (GET_CODE (target) == REG)
5659 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5661 /* Move the real (op0) and imaginary (op1) parts to their location. */
5662 emit_move_insn (gen_realpart (mode, target), op0);
5663 emit_move_insn (gen_imagpart (mode, target), op1);
5665 /* Complex construction should appear as a single unit. */
5672 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5673 return gen_realpart (mode, op0);
5676 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5677 return gen_imagpart (mode, op0);
5681 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5685 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5688 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5690 prev = get_last_insn ();
5692 /* Tell flow that the whole of the destination is being set. */
5693 if (GET_CODE (target) == REG)
5694 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5696 /* Store the realpart and the negated imagpart to target. */
5697 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5699 imag_t = gen_imagpart (mode, target);
5700 temp = expand_unop (mode, neg_optab,
5701 gen_imagpart (mode, op0), imag_t, 0);
5703 emit_move_insn (imag_t, temp);
5705 /* Conjugate should appear as a single unit */
5715 return (*lang_expand_expr) (exp, target, tmode, modifier);
5718 /* Here to do an ordinary binary operator, generating an instruction
5719 from the optab already placed in `this_optab'. */
5721 preexpand_calls (exp);
5722 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5724 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5725 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5727 temp = expand_binop (mode, this_optab, op0, op1, target,
5728 unsignedp, OPTAB_LIB_WIDEN);
5734 /* Return the alignment in bits of EXP, a pointer valued expression.
5735 But don't return more than MAX_ALIGN no matter what.
5736 The alignment returned is, by default, the alignment of the thing that
5737 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5739 Otherwise, look at the expression to see if we can do better, i.e., if the
5740 expression is actually pointing at an object whose alignment is tighter. */
5743 get_pointer_alignment (exp, max_align)
5747 unsigned align, inner;
5749 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5752 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5753 align = MIN (align, max_align);
5757 switch (TREE_CODE (exp))
5761 case NON_LVALUE_EXPR:
5762 exp = TREE_OPERAND (exp, 0);
5763 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5765 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5766 inner = MIN (inner, max_align);
5767 align = MAX (align, inner);
5771 /* If sum of pointer + int, restrict our maximum alignment to that
5772 imposed by the integer. If not, we can't do any better than
5774 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5777 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5782 exp = TREE_OPERAND (exp, 0);
5786 /* See what we are pointing at and look at its alignment. */
5787 exp = TREE_OPERAND (exp, 0);
5788 if (TREE_CODE (exp) == FUNCTION_DECL)
5789 align = MAX (align, FUNCTION_BOUNDARY);
5790 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5791 align = MAX (align, DECL_ALIGN (exp));
5792 #ifdef CONSTANT_ALIGNMENT
5793 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5794 align = CONSTANT_ALIGNMENT (exp, align);
5796 return MIN (align, max_align);
5804 /* Return the tree node and offset if a given argument corresponds to
5805 a string constant. */
5808 string_constant (arg, ptr_offset)
5814 if (TREE_CODE (arg) == ADDR_EXPR
5815 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5817 *ptr_offset = integer_zero_node;
5818 return TREE_OPERAND (arg, 0);
5820 else if (TREE_CODE (arg) == PLUS_EXPR)
5822 tree arg0 = TREE_OPERAND (arg, 0);
5823 tree arg1 = TREE_OPERAND (arg, 1);
5828 if (TREE_CODE (arg0) == ADDR_EXPR
5829 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5832 return TREE_OPERAND (arg0, 0);
5834 else if (TREE_CODE (arg1) == ADDR_EXPR
5835 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5838 return TREE_OPERAND (arg1, 0);
5845 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5846 way, because it could contain a zero byte in the middle.
5847 TREE_STRING_LENGTH is the size of the character array, not the string.
5849 Unfortunately, string_constant can't access the values of const char
5850 arrays with initializers, so neither can we do so here. */
5860 src = string_constant (src, &offset_node);
5863 max = TREE_STRING_LENGTH (src);
5864 ptr = TREE_STRING_POINTER (src);
5865 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5867 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5868 compute the offset to the following null if we don't know where to
5869 start searching for it. */
5871 for (i = 0; i < max; i++)
5874 /* We don't know the starting offset, but we do know that the string
5875 has no internal zero bytes. We can assume that the offset falls
5876 within the bounds of the string; otherwise, the programmer deserves
5877 what he gets. Subtract the offset from the length of the string,
5879 /* This would perhaps not be valid if we were dealing with named
5880 arrays in addition to literal string constants. */
5881 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5884 /* We have a known offset into the string. Start searching there for
5885 a null character. */
5886 if (offset_node == 0)
5890 /* Did we get a long long offset? If so, punt. */
5891 if (TREE_INT_CST_HIGH (offset_node) != 0)
5893 offset = TREE_INT_CST_LOW (offset_node);
5895 /* If the offset is known to be out of bounds, warn, and call strlen at
5897 if (offset < 0 || offset > max)
5899 warning ("offset outside bounds of constant string");
5902 /* Use strlen to search for the first zero byte. Since any strings
5903 constructed with build_string will have nulls appended, we win even
5904 if we get handed something like (char[4])"abcd".
5906 Since OFFSET is our starting index into the string, no further
5907 calculation is needed. */
5908 return size_int (strlen (ptr + offset));
5911 /* Expand an expression EXP that calls a built-in function,
5912 with result going to TARGET if that's convenient
5913 (and in mode MODE if that's convenient).
5914 SUBTARGET may be used as the target for computing one of EXP's operands.
5915 IGNORE is nonzero if the value is to be ignored. */
5918 expand_builtin (exp, target, subtarget, mode, ignore)
5922 enum machine_mode mode;
5925 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5926 tree arglist = TREE_OPERAND (exp, 1);
5929 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5930 optab builtin_optab;
5932 switch (DECL_FUNCTION_CODE (fndecl))
5937 /* build_function_call changes these into ABS_EXPR. */
5942 case BUILT_IN_FSQRT:
5943 /* If not optimizing, call the library function. */
5948 /* Arg could be wrong type if user redeclared this fcn wrong. */
5949 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5950 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5952 /* Stabilize and compute the argument. */
5953 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5954 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5956 exp = copy_node (exp);
5957 arglist = copy_node (arglist);
5958 TREE_OPERAND (exp, 1) = arglist;
5959 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5961 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5963 /* Make a suitable register to place result in. */
5964 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5969 switch (DECL_FUNCTION_CODE (fndecl))
5972 builtin_optab = sin_optab; break;
5974 builtin_optab = cos_optab; break;
5975 case BUILT_IN_FSQRT:
5976 builtin_optab = sqrt_optab; break;
5981 /* Compute into TARGET.
5982 Set TARGET to wherever the result comes back. */
5983 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5984 builtin_optab, op0, target, 0);
5986 /* If we were unable to expand via the builtin, stop the
5987 sequence (without outputting the insns) and break, causing
5988 a call the the library function. */
5995 /* Check the results by default. But if flag_fast_math is turned on,
5996 then assume sqrt will always be called with valid arguments. */
5998 if (! flag_fast_math)
6000 /* Don't define the builtin FP instructions
6001 if your machine is not IEEE. */
6002 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6005 lab1 = gen_label_rtx ();
6007 /* Test the result; if it is NaN, set errno=EDOM because
6008 the argument was not in the domain. */
6009 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6010 emit_jump_insn (gen_beq (lab1));
6014 #ifdef GEN_ERRNO_RTX
6015 rtx errno_rtx = GEN_ERRNO_RTX;
6018 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6021 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6024 /* We can't set errno=EDOM directly; let the library call do it.
6025 Pop the arguments right away in case the call gets deleted. */
6027 expand_call (exp, target, 0);
6034 /* Output the entire sequence. */
6035 insns = get_insns ();
6041 case BUILT_IN_SAVEREGS:
6042 /* Don't do __builtin_saveregs more than once in a function.
6043 Save the result of the first call and reuse it. */
6044 if (saveregs_value != 0)
6045 return saveregs_value;
6047 /* When this function is called, it means that registers must be
6048 saved on entry to this function. So we migrate the
6049 call to the first insn of this function. */
6052 rtx valreg, saved_valreg;
6054 /* Now really call the function. `expand_call' does not call
6055 expand_builtin, so there is no danger of infinite recursion here. */
6058 #ifdef EXPAND_BUILTIN_SAVEREGS
6059 /* Do whatever the machine needs done in this case. */
6060 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6062 /* The register where the function returns its value
6063 is likely to have something else in it, such as an argument.
6064 So preserve that register around the call. */
6065 if (value_mode != VOIDmode)
6067 valreg = hard_libcall_value (value_mode);
6068 saved_valreg = gen_reg_rtx (value_mode);
6069 emit_move_insn (saved_valreg, valreg);
6072 /* Generate the call, putting the value in a pseudo. */
6073 temp = expand_call (exp, target, ignore);
6075 if (value_mode != VOIDmode)
6076 emit_move_insn (valreg, saved_valreg);
6082 saveregs_value = temp;
6084 /* This won't work inside a SEQUENCE--it really has to be
6085 at the start of the function. */
6086 if (in_sequence_p ())
6088 /* Better to do this than to crash. */
6089 error ("`va_start' used within `({...})'");
6093 /* Put the sequence after the NOTE that starts the function. */
6094 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6098 /* __builtin_args_info (N) returns word N of the arg space info
6099 for the current function. The number and meanings of words
6100 is controlled by the definition of CUMULATIVE_ARGS. */
6101 case BUILT_IN_ARGS_INFO:
6103 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6105 int *word_ptr = (int *) ¤t_function_args_info;
6106 tree type, elts, result;
6108 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6109 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6110 __FILE__, __LINE__);
6114 tree arg = TREE_VALUE (arglist);
6115 if (TREE_CODE (arg) != INTEGER_CST)
6116 error ("argument of `__builtin_args_info' must be constant");
6119 int wordnum = TREE_INT_CST_LOW (arg);
6121 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6122 error ("argument of `__builtin_args_info' out of range");
6124 return GEN_INT (word_ptr[wordnum]);
6128 error ("missing argument in `__builtin_args_info'");
6133 for (i = 0; i < nwords; i++)
6134 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6136 type = build_array_type (integer_type_node,
6137 build_index_type (build_int_2 (nwords, 0)));
6138 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6139 TREE_CONSTANT (result) = 1;
6140 TREE_STATIC (result) = 1;
6141 result = build (INDIRECT_REF, build_pointer_type (type), result);
6142 TREE_CONSTANT (result) = 1;
6143 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6147 /* Return the address of the first anonymous stack arg. */
6148 case BUILT_IN_NEXT_ARG:
6150 tree fntype = TREE_TYPE (current_function_decl);
6151 if (!(TYPE_ARG_TYPES (fntype) != 0
6152 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6153 != void_type_node)))
6155 error ("`va_start' used in function with fixed args");
6160 return expand_binop (Pmode, add_optab,
6161 current_function_internal_arg_pointer,
6162 current_function_arg_offset_rtx,
6163 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6165 case BUILT_IN_CLASSIFY_TYPE:
6168 tree type = TREE_TYPE (TREE_VALUE (arglist));
6169 enum tree_code code = TREE_CODE (type);
6170 if (code == VOID_TYPE)
6171 return GEN_INT (void_type_class);
6172 if (code == INTEGER_TYPE)
6173 return GEN_INT (integer_type_class);
6174 if (code == CHAR_TYPE)
6175 return GEN_INT (char_type_class);
6176 if (code == ENUMERAL_TYPE)
6177 return GEN_INT (enumeral_type_class);
6178 if (code == BOOLEAN_TYPE)
6179 return GEN_INT (boolean_type_class);
6180 if (code == POINTER_TYPE)
6181 return GEN_INT (pointer_type_class);
6182 if (code == REFERENCE_TYPE)
6183 return GEN_INT (reference_type_class);
6184 if (code == OFFSET_TYPE)
6185 return GEN_INT (offset_type_class);
6186 if (code == REAL_TYPE)
6187 return GEN_INT (real_type_class);
6188 if (code == COMPLEX_TYPE)
6189 return GEN_INT (complex_type_class);
6190 if (code == FUNCTION_TYPE)
6191 return GEN_INT (function_type_class);
6192 if (code == METHOD_TYPE)
6193 return GEN_INT (method_type_class);
6194 if (code == RECORD_TYPE)
6195 return GEN_INT (record_type_class);
6196 if (code == UNION_TYPE)
6197 return GEN_INT (union_type_class);
6198 if (code == ARRAY_TYPE)
6199 return GEN_INT (array_type_class);
6200 if (code == STRING_TYPE)
6201 return GEN_INT (string_type_class);
6202 if (code == SET_TYPE)
6203 return GEN_INT (set_type_class);
6204 if (code == FILE_TYPE)
6205 return GEN_INT (file_type_class);
6206 if (code == LANG_TYPE)
6207 return GEN_INT (lang_type_class);
6209 return GEN_INT (no_type_class);
6211 case BUILT_IN_CONSTANT_P:
6215 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6216 ? const1_rtx : const0_rtx);
6218 case BUILT_IN_FRAME_ADDRESS:
6219 /* The argument must be a nonnegative integer constant.
6220 It counts the number of frames to scan up the stack.
6221 The value is the address of that frame. */
6222 case BUILT_IN_RETURN_ADDRESS:
6223 /* The argument must be a nonnegative integer constant.
6224 It counts the number of frames to scan up the stack.
6225 The value is the return address saved in that frame. */
6227 /* Warning about missing arg was already issued. */
6229 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6231 error ("invalid arg to `__builtin_return_address'");
6234 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6236 error ("invalid arg to `__builtin_return_address'");
6241 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6242 rtx tem = frame_pointer_rtx;
6245 /* Scan back COUNT frames to the specified frame. */
6246 for (i = 0; i < count; i++)
6248 /* Assume the dynamic chain pointer is in the word that
6249 the frame address points to, unless otherwise specified. */
6250 #ifdef DYNAMIC_CHAIN_ADDRESS
6251 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6253 tem = memory_address (Pmode, tem);
6254 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6257 /* For __builtin_frame_address, return what we've got. */
6258 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6261 /* For __builtin_return_address,
6262 Get the return address from that frame. */
6263 #ifdef RETURN_ADDR_RTX
6264 return RETURN_ADDR_RTX (count, tem);
6266 tem = memory_address (Pmode,
6267 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6268 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6272 case BUILT_IN_ALLOCA:
6274 /* Arg could be non-integer if user redeclared this fcn wrong. */
6275 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6277 current_function_calls_alloca = 1;
6278 /* Compute the argument. */
6279 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6281 /* Allocate the desired space. */
6282 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6284 /* Record the new stack level for nonlocal gotos. */
6285 if (nonlocal_goto_handler_slot != 0)
6286 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6290 /* If not optimizing, call the library function. */
6295 /* Arg could be non-integer if user redeclared this fcn wrong. */
6296 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6299 /* Compute the argument. */
6300 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6301 /* Compute ffs, into TARGET if possible.
6302 Set TARGET to wherever the result comes back. */
6303 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6304 ffs_optab, op0, target, 1);
6309 case BUILT_IN_STRLEN:
6310 /* If not optimizing, call the library function. */
6315 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6316 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6320 tree src = TREE_VALUE (arglist);
6321 tree len = c_strlen (src);
6324 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6326 rtx result, src_rtx, char_rtx;
6327 enum machine_mode insn_mode = value_mode, char_mode;
6328 enum insn_code icode;
6330 /* If the length is known, just return it. */
6332 return expand_expr (len, target, mode, 0);
6334 /* If SRC is not a pointer type, don't do this operation inline. */
6338 /* Call a function if we can't compute strlen in the right mode. */
6340 while (insn_mode != VOIDmode)
6342 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6343 if (icode != CODE_FOR_nothing)
6346 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6348 if (insn_mode == VOIDmode)
6351 /* Make a place to write the result of the instruction. */
6354 && GET_CODE (result) == REG
6355 && GET_MODE (result) == insn_mode
6356 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6357 result = gen_reg_rtx (insn_mode);
6359 /* Make sure the operands are acceptable to the predicates. */
6361 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6362 result = gen_reg_rtx (insn_mode);
6364 src_rtx = memory_address (BLKmode,
6365 expand_expr (src, NULL_RTX, Pmode,
6367 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6368 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6370 char_rtx = const0_rtx;
6371 char_mode = insn_operand_mode[(int)icode][2];
6372 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6373 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6375 emit_insn (GEN_FCN (icode) (result,
6376 gen_rtx (MEM, BLKmode, src_rtx),
6377 char_rtx, GEN_INT (align)));
6379 /* Return the value in the proper mode for this function. */
6380 if (GET_MODE (result) == value_mode)
6382 else if (target != 0)
6384 convert_move (target, result, 0);
6388 return convert_to_mode (value_mode, result, 0);
6391 case BUILT_IN_STRCPY:
6392 /* If not optimizing, call the library function. */
6397 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6398 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6399 || TREE_CHAIN (arglist) == 0
6400 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6404 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6409 len = size_binop (PLUS_EXPR, len, integer_one_node);
6411 chainon (arglist, build_tree_list (NULL_TREE, len));
6415 case BUILT_IN_MEMCPY:
6416 /* If not optimizing, call the library function. */
6421 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6422 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6423 || TREE_CHAIN (arglist) == 0
6424 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6425 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6426 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6430 tree dest = TREE_VALUE (arglist);
6431 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6432 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6435 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6437 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6438 rtx dest_rtx, dest_mem, src_mem;
6440 /* If either SRC or DEST is not a pointer type, don't do
6441 this operation in-line. */
6442 if (src_align == 0 || dest_align == 0)
6444 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6445 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6449 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6450 dest_mem = gen_rtx (MEM, BLKmode,
6451 memory_address (BLKmode, dest_rtx));
6452 src_mem = gen_rtx (MEM, BLKmode,
6453 memory_address (BLKmode,
6454 expand_expr (src, NULL_RTX,
6458 /* Copy word part most expediently. */
6459 emit_block_move (dest_mem, src_mem,
6460 expand_expr (len, NULL_RTX, VOIDmode, 0),
6461 MIN (src_align, dest_align));
6465 /* These comparison functions need an instruction that returns an actual
6466 index. An ordinary compare that just sets the condition codes
6468 #ifdef HAVE_cmpstrsi
6469 case BUILT_IN_STRCMP:
6470 /* If not optimizing, call the library function. */
6475 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6476 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6477 || TREE_CHAIN (arglist) == 0
6478 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6480 else if (!HAVE_cmpstrsi)
6483 tree arg1 = TREE_VALUE (arglist);
6484 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6488 len = c_strlen (arg1);
6490 len = size_binop (PLUS_EXPR, integer_one_node, len);
6491 len2 = c_strlen (arg2);
6493 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6495 /* If we don't have a constant length for the first, use the length
6496 of the second, if we know it. We don't require a constant for
6497 this case; some cost analysis could be done if both are available
6498 but neither is constant. For now, assume they're equally cheap.
6500 If both strings have constant lengths, use the smaller. This
6501 could arise if optimization results in strcpy being called with
6502 two fixed strings, or if the code was machine-generated. We should
6503 add some code to the `memcmp' handler below to deal with such
6504 situations, someday. */
6505 if (!len || TREE_CODE (len) != INTEGER_CST)
6512 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6514 if (tree_int_cst_lt (len2, len))
6518 chainon (arglist, build_tree_list (NULL_TREE, len));
6522 case BUILT_IN_MEMCMP:
6523 /* If not optimizing, call the library function. */
6528 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6529 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6530 || TREE_CHAIN (arglist) == 0
6531 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6532 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6533 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6535 else if (!HAVE_cmpstrsi)
6538 tree arg1 = TREE_VALUE (arglist);
6539 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6540 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6544 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6546 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6547 enum machine_mode insn_mode
6548 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6550 /* If we don't have POINTER_TYPE, call the function. */
6551 if (arg1_align == 0 || arg2_align == 0)
6553 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6554 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6558 /* Make a place to write the result of the instruction. */
6561 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6562 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6563 result = gen_reg_rtx (insn_mode);
6565 emit_insn (gen_cmpstrsi (result,
6566 gen_rtx (MEM, BLKmode,
6567 expand_expr (arg1, NULL_RTX, Pmode,
6569 gen_rtx (MEM, BLKmode,
6570 expand_expr (arg2, NULL_RTX, Pmode,
6572 expand_expr (len, NULL_RTX, VOIDmode, 0),
6573 GEN_INT (MIN (arg1_align, arg2_align))));
6575 /* Return the value in the proper mode for this function. */
6576 mode = TYPE_MODE (TREE_TYPE (exp));
6577 if (GET_MODE (result) == mode)
6579 else if (target != 0)
6581 convert_move (target, result, 0);
6585 return convert_to_mode (mode, result, 0);
6588 case BUILT_IN_STRCMP:
6589 case BUILT_IN_MEMCMP:
6593 default: /* just do library call, if unknown builtin */
6594 error ("built-in function `%s' not currently supported",
6595 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6598 /* The switch statement above can drop through to cause the function
6599 to be called normally. */
6601 return expand_call (exp, target, ignore);
6604 /* Expand code for a post- or pre- increment or decrement
6605 and return the RTX for the result.
6606 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6609 expand_increment (exp, post)
6613 register rtx op0, op1;
6614 register rtx temp, value;
6615 register tree incremented = TREE_OPERAND (exp, 0);
6616 optab this_optab = add_optab;
6618 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6619 int op0_is_copy = 0;
6621 /* Stabilize any component ref that might need to be
6622 evaluated more than once below. */
6624 || TREE_CODE (incremented) == BIT_FIELD_REF
6625 || (TREE_CODE (incremented) == COMPONENT_REF
6626 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6627 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6628 incremented = stabilize_reference (incremented);
6630 /* Compute the operands as RTX.
6631 Note whether OP0 is the actual lvalue or a copy of it:
6632 I believe it is a copy iff it is a register or subreg
6633 and insns were generated in computing it. */
6635 temp = get_last_insn ();
6636 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6638 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6639 in place but intead must do sign- or zero-extension during assignment,
6640 so we copy it into a new register and let the code below use it as
6643 Note that we can safely modify this SUBREG since it is know not to be
6644 shared (it was made by the expand_expr call above). */
6646 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6647 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6649 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6650 && temp != get_last_insn ());
6651 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6653 /* Decide whether incrementing or decrementing. */
6654 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6655 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6656 this_optab = sub_optab;
6658 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6659 then we cannot just increment OP0. We must therefore contrive to
6660 increment the original value. Then, for postincrement, we can return
6661 OP0 since it is a copy of the old value. For preincrement, we want
6662 to always expand here, since this generates better or equivalent code. */
6663 if (!post || op0_is_copy)
6665 /* This is the easiest way to increment the value wherever it is.
6666 Problems with multiple evaluation of INCREMENTED are prevented
6667 because either (1) it is a component_ref or preincrement,
6668 in which case it was stabilized above, or (2) it is an array_ref
6669 with constant index in an array in a register, which is
6670 safe to reevaluate. */
6671 tree newexp = build ((this_optab == add_optab
6672 ? PLUS_EXPR : MINUS_EXPR),
6675 TREE_OPERAND (exp, 1));
6676 temp = expand_assignment (incremented, newexp, ! post, 0);
6677 return post ? op0 : temp;
6680 /* Convert decrement by a constant into a negative increment. */
6681 if (this_optab == sub_optab
6682 && GET_CODE (op1) == CONST_INT)
6684 op1 = GEN_INT (- INTVAL (op1));
6685 this_optab = add_optab;
6690 /* We have a true reference to the value in OP0.
6691 If there is an insn to add or subtract in this mode, queue it. */
6693 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6694 op0 = stabilize (op0);
6697 icode = (int) this_optab->handlers[(int) mode].insn_code;
6698 if (icode != (int) CODE_FOR_nothing
6699 /* Make sure that OP0 is valid for operands 0 and 1
6700 of the insn we want to queue. */
6701 && (*insn_operand_predicate[icode][0]) (op0, mode)
6702 && (*insn_operand_predicate[icode][1]) (op0, mode))
6704 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6705 op1 = force_reg (mode, op1);
6707 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6711 /* Preincrement, or we can't increment with one simple insn. */
6713 /* Save a copy of the value before inc or dec, to return it later. */
6714 temp = value = copy_to_reg (op0);
6716 /* Arrange to return the incremented value. */
6717 /* Copy the rtx because expand_binop will protect from the queue,
6718 and the results of that would be invalid for us to return
6719 if our caller does emit_queue before using our result. */
6720 temp = copy_rtx (value = op0);
6722 /* Increment however we can. */
6723 op1 = expand_binop (mode, this_optab, value, op1, op0,
6724 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6725 /* Make sure the value is stored into OP0. */
6727 emit_move_insn (op0, op1);
6732 /* Expand all function calls contained within EXP, innermost ones first.
6733 But don't look within expressions that have sequence points.
6734 For each CALL_EXPR, record the rtx for its value
6735 in the CALL_EXPR_RTL field. */
6738 preexpand_calls (exp)
6741 register int nops, i;
6742 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6744 if (! do_preexpand_calls)
6747 /* Only expressions and references can contain calls. */
6749 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6752 switch (TREE_CODE (exp))
6755 /* Do nothing if already expanded. */
6756 if (CALL_EXPR_RTL (exp) != 0)
6759 /* Do nothing to built-in functions. */
6760 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6761 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6762 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6763 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6768 case TRUTH_ANDIF_EXPR:
6769 case TRUTH_ORIF_EXPR:
6770 /* If we find one of these, then we can be sure
6771 the adjust will be done for it (since it makes jumps).
6772 Do it now, so that if this is inside an argument
6773 of a function, we don't get the stack adjustment
6774 after some other args have already been pushed. */
6775 do_pending_stack_adjust ();
6780 case WITH_CLEANUP_EXPR:
6784 if (SAVE_EXPR_RTL (exp) != 0)
6788 nops = tree_code_length[(int) TREE_CODE (exp)];
6789 for (i = 0; i < nops; i++)
6790 if (TREE_OPERAND (exp, i) != 0)
6792 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6793 if (type == 'e' || type == '<' || type == '1' || type == '2'
6795 preexpand_calls (TREE_OPERAND (exp, i));
6799 /* At the start of a function, record that we have no previously-pushed
6800 arguments waiting to be popped. */
6803 init_pending_stack_adjust ()
6805 pending_stack_adjust = 0;
6808 /* When exiting from function, if safe, clear out any pending stack adjust
6809 so the adjustment won't get done. */
6812 clear_pending_stack_adjust ()
6814 #ifdef EXIT_IGNORE_STACK
6815 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6816 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6817 && ! flag_inline_functions)
6818 pending_stack_adjust = 0;
6822 /* Pop any previously-pushed arguments that have not been popped yet. */
6825 do_pending_stack_adjust ()
6827 if (inhibit_defer_pop == 0)
6829 if (pending_stack_adjust != 0)
6830 adjust_stack (GEN_INT (pending_stack_adjust));
6831 pending_stack_adjust = 0;
6835 /* Expand all cleanups up to OLD_CLEANUPS.
6836 Needed here, and also for language-dependent calls. */
6839 expand_cleanups_to (old_cleanups)
6842 while (cleanups_this_call != old_cleanups)
6844 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6845 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6849 /* Expand conditional expressions. */
6851 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6852 LABEL is an rtx of code CODE_LABEL, in this function and all the
6856 jumpifnot (exp, label)
6860 do_jump (exp, label, NULL_RTX);
6863 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6870 do_jump (exp, NULL_RTX, label);
6873 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6874 the result is zero, or IF_TRUE_LABEL if the result is one.
6875 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6876 meaning fall through in that case.
6878 do_jump always does any pending stack adjust except when it does not
6879 actually perform a jump. An example where there is no jump
6880 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6882 This function is responsible for optimizing cases such as
6883 &&, || and comparison operators in EXP. */
6886 do_jump (exp, if_false_label, if_true_label)
6888 rtx if_false_label, if_true_label;
6890 register enum tree_code code = TREE_CODE (exp);
6891 /* Some cases need to create a label to jump to
6892 in order to properly fall through.
6893 These cases set DROP_THROUGH_LABEL nonzero. */
6894 rtx drop_through_label = 0;
6908 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6914 /* This is not true with #pragma weak */
6916 /* The address of something can never be zero. */
6918 emit_jump (if_true_label);
6923 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6924 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6925 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6928 /* If we are narrowing the operand, we have to do the compare in the
6930 if ((TYPE_PRECISION (TREE_TYPE (exp))
6931 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6933 case NON_LVALUE_EXPR:
6934 case REFERENCE_EXPR:
6939 /* These cannot change zero->non-zero or vice versa. */
6940 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6944 /* This is never less insns than evaluating the PLUS_EXPR followed by
6945 a test and can be longer if the test is eliminated. */
6947 /* Reduce to minus. */
6948 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6949 TREE_OPERAND (exp, 0),
6950 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6951 TREE_OPERAND (exp, 1))));
6952 /* Process as MINUS. */
6956 /* Non-zero iff operands of minus differ. */
6957 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6958 TREE_OPERAND (exp, 0),
6959 TREE_OPERAND (exp, 1)),
6964 /* If we are AND'ing with a small constant, do this comparison in the
6965 smallest type that fits. If the machine doesn't have comparisons
6966 that small, it will be converted back to the wider comparison.
6967 This helps if we are testing the sign bit of a narrower object.
6968 combine can't do this for us because it can't know whether a
6969 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6971 if (! SLOW_BYTE_ACCESS
6972 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6973 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6974 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6975 && (type = type_for_size (i + 1, 1)) != 0
6976 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6977 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6978 != CODE_FOR_nothing))
6980 do_jump (convert (type, exp), if_false_label, if_true_label);
6985 case TRUTH_NOT_EXPR:
6986 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6989 case TRUTH_ANDIF_EXPR:
6990 if (if_false_label == 0)
6991 if_false_label = drop_through_label = gen_label_rtx ();
6992 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6993 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6996 case TRUTH_ORIF_EXPR:
6997 if (if_true_label == 0)
6998 if_true_label = drop_through_label = gen_label_rtx ();
6999 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7000 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7004 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7007 do_pending_stack_adjust ();
7008 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7015 int bitsize, bitpos, unsignedp;
7016 enum machine_mode mode;
7021 /* Get description of this reference. We don't actually care
7022 about the underlying object here. */
7023 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7024 &mode, &unsignedp, &volatilep);
7026 type = type_for_size (bitsize, unsignedp);
7027 if (! SLOW_BYTE_ACCESS
7028 && type != 0 && bitsize >= 0
7029 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7030 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7031 != CODE_FOR_nothing))
7033 do_jump (convert (type, exp), if_false_label, if_true_label);
7040 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7041 if (integer_onep (TREE_OPERAND (exp, 1))
7042 && integer_zerop (TREE_OPERAND (exp, 2)))
7043 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7045 else if (integer_zerop (TREE_OPERAND (exp, 1))
7046 && integer_onep (TREE_OPERAND (exp, 2)))
7047 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7051 register rtx label1 = gen_label_rtx ();
7052 drop_through_label = gen_label_rtx ();
7053 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7054 /* Now the THEN-expression. */
7055 do_jump (TREE_OPERAND (exp, 1),
7056 if_false_label ? if_false_label : drop_through_label,
7057 if_true_label ? if_true_label : drop_through_label);
7058 /* In case the do_jump just above never jumps. */
7059 do_pending_stack_adjust ();
7060 emit_label (label1);
7061 /* Now the ELSE-expression. */
7062 do_jump (TREE_OPERAND (exp, 2),
7063 if_false_label ? if_false_label : drop_through_label,
7064 if_true_label ? if_true_label : drop_through_label);
7069 if (integer_zerop (TREE_OPERAND (exp, 1)))
7070 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7071 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7074 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7075 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7077 comparison = compare (exp, EQ, EQ);
7081 if (integer_zerop (TREE_OPERAND (exp, 1)))
7082 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7083 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7086 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7087 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7089 comparison = compare (exp, NE, NE);
7093 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7095 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7096 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7098 comparison = compare (exp, LT, LTU);
7102 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7104 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7105 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7107 comparison = compare (exp, LE, LEU);
7111 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7113 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7114 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7116 comparison = compare (exp, GT, GTU);
7120 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7122 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7123 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7125 comparison = compare (exp, GE, GEU);
7130 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7132 /* This is not needed any more and causes poor code since it causes
7133 comparisons and tests from non-SI objects to have different code
7135 /* Copy to register to avoid generating bad insns by cse
7136 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7137 if (!cse_not_expected && GET_CODE (temp) == MEM)
7138 temp = copy_to_reg (temp);
7140 do_pending_stack_adjust ();
7141 if (GET_CODE (temp) == CONST_INT)
7142 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7143 else if (GET_CODE (temp) == LABEL_REF)
7144 comparison = const_true_rtx;
7145 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7146 && !can_compare_p (GET_MODE (temp)))
7147 /* Note swapping the labels gives us not-equal. */
7148 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7149 else if (GET_MODE (temp) != VOIDmode)
7150 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7151 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7152 GET_MODE (temp), NULL_RTX, 0);
7157 /* Do any postincrements in the expression that was tested. */
7160 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7161 straight into a conditional jump instruction as the jump condition.
7162 Otherwise, all the work has been done already. */
7164 if (comparison == const_true_rtx)
7167 emit_jump (if_true_label);
7169 else if (comparison == const0_rtx)
7172 emit_jump (if_false_label);
7174 else if (comparison)
7175 do_jump_for_compare (comparison, if_false_label, if_true_label);
7179 if (drop_through_label)
7181 /* If do_jump produces code that might be jumped around,
7182 do any stack adjusts from that code, before the place
7183 where control merges in. */
7184 do_pending_stack_adjust ();
7185 emit_label (drop_through_label);
7189 /* Given a comparison expression EXP for values too wide to be compared
7190 with one insn, test the comparison and jump to the appropriate label.
7191 The code of EXP is ignored; we always test GT if SWAP is 0,
7192 and LT if SWAP is 1. */
7195 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7198 rtx if_false_label, if_true_label;
7200 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7201 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7202 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7203 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7204 rtx drop_through_label = 0;
7205 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7208 if (! if_true_label || ! if_false_label)
7209 drop_through_label = gen_label_rtx ();
7210 if (! if_true_label)
7211 if_true_label = drop_through_label;
7212 if (! if_false_label)
7213 if_false_label = drop_through_label;
7215 /* Compare a word at a time, high order first. */
7216 for (i = 0; i < nwords; i++)
7219 rtx op0_word, op1_word;
7221 if (WORDS_BIG_ENDIAN)
7223 op0_word = operand_subword_force (op0, i, mode);
7224 op1_word = operand_subword_force (op1, i, mode);
7228 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7229 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7232 /* All but high-order word must be compared as unsigned. */
7233 comp = compare_from_rtx (op0_word, op1_word,
7234 (unsignedp || i > 0) ? GTU : GT,
7235 unsignedp, word_mode, NULL_RTX, 0);
7236 if (comp == const_true_rtx)
7237 emit_jump (if_true_label);
7238 else if (comp != const0_rtx)
7239 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7241 /* Consider lower words only if these are equal. */
7242 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7244 if (comp == const_true_rtx)
7245 emit_jump (if_false_label);
7246 else if (comp != const0_rtx)
7247 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7251 emit_jump (if_false_label);
7252 if (drop_through_label)
7253 emit_label (drop_through_label);
7256 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7257 with one insn, test the comparison and jump to the appropriate label. */
7260 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7262 rtx if_false_label, if_true_label;
7264 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7265 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7266 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7267 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7269 rtx drop_through_label = 0;
7271 if (! if_false_label)
7272 drop_through_label = if_false_label = gen_label_rtx ();
7274 for (i = 0; i < nwords; i++)
7276 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7277 operand_subword_force (op1, i, mode),
7278 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7279 word_mode, NULL_RTX, 0);
7280 if (comp == const_true_rtx)
7281 emit_jump (if_false_label);
7282 else if (comp != const0_rtx)
7283 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7287 emit_jump (if_true_label);
7288 if (drop_through_label)
7289 emit_label (drop_through_label);
7292 /* Jump according to whether OP0 is 0.
7293 We assume that OP0 has an integer mode that is too wide
7294 for the available compare insns. */
7297 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7299 rtx if_false_label, if_true_label;
7301 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7303 rtx drop_through_label = 0;
7305 if (! if_false_label)
7306 drop_through_label = if_false_label = gen_label_rtx ();
7308 for (i = 0; i < nwords; i++)
7310 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7312 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7313 if (comp == const_true_rtx)
7314 emit_jump (if_false_label);
7315 else if (comp != const0_rtx)
7316 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7320 emit_jump (if_true_label);
7321 if (drop_through_label)
7322 emit_label (drop_through_label);
7325 /* Given a comparison expression in rtl form, output conditional branches to
7326 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7329 do_jump_for_compare (comparison, if_false_label, if_true_label)
7330 rtx comparison, if_false_label, if_true_label;
7334 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7335 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7340 emit_jump (if_false_label);
7342 else if (if_false_label)
7345 rtx prev = PREV_INSN (get_last_insn ());
7348 /* Output the branch with the opposite condition. Then try to invert
7349 what is generated. If more than one insn is a branch, or if the
7350 branch is not the last insn written, abort. If we can't invert
7351 the branch, emit make a true label, redirect this jump to that,
7352 emit a jump to the false label and define the true label. */
7354 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7355 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7359 /* Here we get the insn before what was just emitted.
7360 On some machines, emitting the branch can discard
7361 the previous compare insn and emit a replacement. */
7363 /* If there's only one preceding insn... */
7364 insn = get_insns ();
7366 insn = NEXT_INSN (prev);
7368 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7369 if (GET_CODE (insn) == JUMP_INSN)
7376 if (branch != get_last_insn ())
7379 if (! invert_jump (branch, if_false_label))
7381 if_true_label = gen_label_rtx ();
7382 redirect_jump (branch, if_true_label);
7383 emit_jump (if_false_label);
7384 emit_label (if_true_label);
7389 /* Generate code for a comparison expression EXP
7390 (including code to compute the values to be compared)
7391 and set (CC0) according to the result.
7392 SIGNED_CODE should be the rtx operation for this comparison for
7393 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7395 We force a stack adjustment unless there are currently
7396 things pushed on the stack that aren't yet used. */
7399 compare (exp, signed_code, unsigned_code)
7401 enum rtx_code signed_code, unsigned_code;
7404 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7406 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7407 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7408 register enum machine_mode mode = TYPE_MODE (type);
7409 int unsignedp = TREE_UNSIGNED (type);
7410 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7412 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7414 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7415 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7418 /* Like compare but expects the values to compare as two rtx's.
7419 The decision as to signed or unsigned comparison must be made by the caller.
7421 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7424 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7425 size of MODE should be used. */
7428 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7429 register rtx op0, op1;
7432 enum machine_mode mode;
7438 /* If one operand is constant, make it the second one. Only do this
7439 if the other operand is not constant as well. */
7441 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7442 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7447 code = swap_condition (code);
7452 op0 = force_not_mem (op0);
7453 op1 = force_not_mem (op1);
7456 do_pending_stack_adjust ();
7458 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7459 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7463 /* There's no need to do this now that combine.c can eliminate lots of
7464 sign extensions. This can be less efficient in certain cases on other
7467 /* If this is a signed equality comparison, we can do it as an
7468 unsigned comparison since zero-extension is cheaper than sign
7469 extension and comparisons with zero are done as unsigned. This is
7470 the case even on machines that can do fast sign extension, since
7471 zero-extension is easier to combine with other operations than
7472 sign-extension is. If we are comparing against a constant, we must
7473 convert it to what it would look like unsigned. */
7474 if ((code == EQ || code == NE) && ! unsignedp
7475 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7477 if (GET_CODE (op1) == CONST_INT
7478 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7479 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7484 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7486 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7489 /* Generate code to calculate EXP using a store-flag instruction
7490 and return an rtx for the result. EXP is either a comparison
7491 or a TRUTH_NOT_EXPR whose operand is a comparison.
7493 If TARGET is nonzero, store the result there if convenient.
7495 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7498 Return zero if there is no suitable set-flag instruction
7499 available on this machine.
7501 Once expand_expr has been called on the arguments of the comparison,
7502 we are committed to doing the store flag, since it is not safe to
7503 re-evaluate the expression. We emit the store-flag insn by calling
7504 emit_store_flag, but only expand the arguments if we have a reason
7505 to believe that emit_store_flag will be successful. If we think that
7506 it will, but it isn't, we have to simulate the store-flag with a
7507 set/jump/set sequence. */
7510 do_store_flag (exp, target, mode, only_cheap)
7513 enum machine_mode mode;
7517 tree arg0, arg1, type;
7519 enum machine_mode operand_mode;
7523 enum insn_code icode;
7524 rtx subtarget = target;
7525 rtx result, label, pattern, jump_pat;
7527 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7528 result at the end. We can't simply invert the test since it would
7529 have already been inverted if it were valid. This case occurs for
7530 some floating-point comparisons. */
7532 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7533 invert = 1, exp = TREE_OPERAND (exp, 0);
7535 arg0 = TREE_OPERAND (exp, 0);
7536 arg1 = TREE_OPERAND (exp, 1);
7537 type = TREE_TYPE (arg0);
7538 operand_mode = TYPE_MODE (type);
7539 unsignedp = TREE_UNSIGNED (type);
7541 /* We won't bother with BLKmode store-flag operations because it would mean
7542 passing a lot of information to emit_store_flag. */
7543 if (operand_mode == BLKmode)
7549 /* Get the rtx comparison code to use. We know that EXP is a comparison
7550 operation of some type. Some comparisons against 1 and -1 can be
7551 converted to comparisons with zero. Do so here so that the tests
7552 below will be aware that we have a comparison with zero. These
7553 tests will not catch constants in the first operand, but constants
7554 are rarely passed as the first operand. */
7556 switch (TREE_CODE (exp))
7565 if (integer_onep (arg1))
7566 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7568 code = unsignedp ? LTU : LT;
7571 if (integer_all_onesp (arg1))
7572 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7574 code = unsignedp ? LEU : LE;
7577 if (integer_all_onesp (arg1))
7578 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7580 code = unsignedp ? GTU : GT;
7583 if (integer_onep (arg1))
7584 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7586 code = unsignedp ? GEU : GE;
7592 /* Put a constant second. */
7593 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7595 tem = arg0; arg0 = arg1; arg1 = tem;
7596 code = swap_condition (code);
7599 /* If this is an equality or inequality test of a single bit, we can
7600 do this by shifting the bit being tested to the low-order bit and
7601 masking the result with the constant 1. If the condition was EQ,
7602 we xor it with 1. This does not require an scc insn and is faster
7603 than an scc insn even if we have it. */
7605 if ((code == NE || code == EQ)
7606 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7607 && integer_pow2p (TREE_OPERAND (arg0, 1))
7608 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7610 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7611 NULL_RTX, VOIDmode, 0)));
7613 if (subtarget == 0 || GET_CODE (subtarget) != REG
7614 || GET_MODE (subtarget) != operand_mode
7615 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7618 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7621 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7622 size_int (bitnum), target, 1);
7624 if (GET_MODE (op0) != mode)
7625 op0 = convert_to_mode (mode, op0, 1);
7627 if (bitnum != TYPE_PRECISION (type) - 1)
7628 op0 = expand_and (op0, const1_rtx, target);
7630 if ((code == EQ && ! invert) || (code == NE && invert))
7631 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7637 /* Now see if we are likely to be able to do this. Return if not. */
7638 if (! can_compare_p (operand_mode))
7640 icode = setcc_gen_code[(int) code];
7641 if (icode == CODE_FOR_nothing
7642 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7644 /* We can only do this if it is one of the special cases that
7645 can be handled without an scc insn. */
7646 if ((code == LT && integer_zerop (arg1))
7647 || (! only_cheap && code == GE && integer_zerop (arg1)))
7649 else if (BRANCH_COST >= 0
7650 && ! only_cheap && (code == NE || code == EQ)
7651 && TREE_CODE (type) != REAL_TYPE
7652 && ((abs_optab->handlers[(int) operand_mode].insn_code
7653 != CODE_FOR_nothing)
7654 || (ffs_optab->handlers[(int) operand_mode].insn_code
7655 != CODE_FOR_nothing)))
7661 preexpand_calls (exp);
7662 if (subtarget == 0 || GET_CODE (subtarget) != REG
7663 || GET_MODE (subtarget) != operand_mode
7664 || ! safe_from_p (subtarget, arg1))
7667 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7668 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7671 target = gen_reg_rtx (mode);
7673 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7674 because, if the emit_store_flag does anything it will succeed and
7675 OP0 and OP1 will not be used subsequently. */
7677 result = emit_store_flag (target, code,
7678 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7679 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7680 operand_mode, unsignedp, 1);
7685 result = expand_binop (mode, xor_optab, result, const1_rtx,
7686 result, 0, OPTAB_LIB_WIDEN);
7690 /* If this failed, we have to do this with set/compare/jump/set code. */
7691 if (target == 0 || GET_CODE (target) != REG
7692 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7693 target = gen_reg_rtx (GET_MODE (target));
7695 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7696 result = compare_from_rtx (op0, op1, code, unsignedp,
7697 operand_mode, NULL_RTX, 0);
7698 if (GET_CODE (result) == CONST_INT)
7699 return (((result == const0_rtx && ! invert)
7700 || (result != const0_rtx && invert))
7701 ? const0_rtx : const1_rtx);
7703 label = gen_label_rtx ();
7704 if (bcc_gen_fctn[(int) code] == 0)
7707 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7708 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7714 /* Generate a tablejump instruction (used for switch statements). */
7716 #ifdef HAVE_tablejump
7718 /* INDEX is the value being switched on, with the lowest value
7719 in the table already subtracted.
7720 MODE is its expected mode (needed if INDEX is constant).
7721 RANGE is the length of the jump table.
7722 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7724 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7725 index value is out of range. */
7728 do_tablejump (index, mode, range, table_label, default_label)
7729 rtx index, range, table_label, default_label;
7730 enum machine_mode mode;
7732 register rtx temp, vector;
7734 /* Do an unsigned comparison (in the proper mode) between the index
7735 expression and the value which represents the length of the range.
7736 Since we just finished subtracting the lower bound of the range
7737 from the index expression, this comparison allows us to simultaneously
7738 check that the original index expression value is both greater than
7739 or equal to the minimum value of the range and less than or equal to
7740 the maximum value of the range. */
7742 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
7743 emit_jump_insn (gen_bltu (default_label));
7745 /* If index is in range, it must fit in Pmode.
7746 Convert to Pmode so we can index with it. */
7748 index = convert_to_mode (Pmode, index, 1);
7750 /* If flag_force_addr were to affect this address
7751 it could interfere with the tricky assumptions made
7752 about addresses that contain label-refs,
7753 which may be valid only very near the tablejump itself. */
7754 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7755 GET_MODE_SIZE, because this indicates how large insns are. The other
7756 uses should all be Pmode, because they are addresses. This code
7757 could fail if addresses and insns are not the same size. */
7758 index = memory_address_noforce
7760 gen_rtx (PLUS, Pmode,
7761 gen_rtx (MULT, Pmode, index,
7762 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7763 gen_rtx (LABEL_REF, Pmode, table_label)));
7764 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7765 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7766 RTX_UNCHANGING_P (vector) = 1;
7767 convert_move (temp, vector, 0);
7769 emit_jump_insn (gen_tablejump (temp, table_label));
7771 #ifndef CASE_VECTOR_PC_RELATIVE
7772 /* If we are generating PIC code or if the table is PC-relative, the
7773 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7779 #endif /* HAVE_tablejump */