1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
40 #ifdef STACK_GROWS_DOWNWARD
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
50 #define STACK_PUSH_CODE PRE_INC
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
/* NOTE(review): the flag this comment describes (cse_not_expected in stock
   expr.c) is not visible in this sampled listing — confirm against the
   complete file.  */
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
87 static rtx saveregs_value;
/* Old-style (K&R) forward declarations — no parameter prototypes — for the
   static helpers defined later in this file.  */
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
112 /* MOVE_RATIO is the number of move instructions that is better than
   a block move; the remainder of this comment (and an alternative
   MOVE_RATIO definition) is elided in this sampled listing.  */
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
119 /* A value of around 6 would minimize code size; infinity would minimize
   execution time.  (Comment truncated in this listing.)  */
121 #define MOVE_RATIO 15
125 /* This array records the insn_code of insns to perform block moves. */
126 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
128 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
/* NOTE(review): the matching #endif for the #ifndef below is not visible
   in this listing.  */
130 #ifndef SLOW_UNALIGNED_ACCESS
131 #define SLOW_UNALIGNED_ACCESS 0
134 /* This is run once per compilation to set up which modes can be used
135 directly in memory and to initialize the block move optab. */
/* NOTE(review): this is the body of init_expr_once (); the function header,
   local declarations (insn, pat, reg, regno, num_clobbers in stock expr.c),
   braces, and several interior lines are elided in this sampled listing.  */
141 enum machine_mode mode;
142 /* Try indexing by frame ptr and try by stack ptr.
143 It is known that on the Convex the stack ptr isn't a valid index.
144 With luck, one or the other is valid on any machine. */
145 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
146 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
/* Build a scratch (set 0 0) insn whose operands are patched below and fed
   to recog () to probe which reg<->mem moves the target recognizes.  */
149 insn = emit_insn (gen_rtx (SET, 0, 0));
150 pat = PATTERN (insn);
152 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
153 mode = (enum machine_mode) ((int) mode + 1))
159 direct_load[(int) mode] = direct_store[(int) mode] = 0;
160 PUT_MODE (mem, mode);
161 PUT_MODE (mem1, mode);
163 /* See if there is some register that can be used in this mode and
164 directly loaded or stored from memory. */
166 if (mode != VOIDmode && mode != BLKmode)
167 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
168 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
171 if (! HARD_REGNO_MODE_OK (regno, mode))
174 reg = gen_rtx (REG, mode, regno);
/* Probe load from (mem (stack_pointer)); the SET_SRC (pat) = mem line is
   elided in this listing.  */
177 SET_DEST (pat) = reg;
178 if (recog (pat, insn, &num_clobbers) >= 0)
179 direct_load[(int) mode] = 1;
/* Probe load from (mem (frame_pointer)).  */
181 SET_SRC (pat) = mem1;
182 SET_DEST (pat) = reg;
183 if (recog (pat, insn, &num_clobbers) >= 0)
184 direct_load[(int) mode] = 1;
/* Probe stores to both memory forms.  */
187 SET_DEST (pat) = mem;
188 if (recog (pat, insn, &num_clobbers) >= 0)
189 direct_store[(int) mode] = 1;
192 SET_DEST (pat) = mem1;
193 if (recog (pat, insn, &num_clobbers) >= 0)
194 direct_store[(int) mode] = 1;
/* Default every mode to "no block-move insn", then record the ones the
   target provides.  (Each assignment below is presumably guarded by the
   corresponding HAVE_movstr* #ifdef — those lines are elided here.)  */
197 movstr_optab[(int) mode] = CODE_FOR_nothing;
204 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
208 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
212 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
216 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
220 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
224 /* This is run at the start of compiling a function. */
/* NOTE(review): body of init_expr (); the function header, braces, and any
   additional per-function resets are elided in this sampled listing.
   Resets the deferred-stack-pop and cleanup state for a fresh function.  */
231 pending_stack_adjust = 0;
232 inhibit_defer_pop = 0;
233 cleanups_this_call = 0;
238 /* Save all variables describing the current status into the structure *P.
239 This is used before starting a nested function. */
/* NOTE(review): body of save_expr_status (p); the function header, the
   parameter declaration, and braces are elided in this sampled listing.  */
245 /* Instead of saving the postincrement queue, empty it. */
248 p->pending_stack_adjust = pending_stack_adjust;
249 p->inhibit_defer_pop = inhibit_defer_pop;
250 p->cleanups_this_call = cleanups_this_call;
251 p->saveregs_value = saveregs_value;
252 p->forced_labels = forced_labels;
/* Reset per-function state for the nested function about to be compiled;
   restore_expr_status undoes this afterwards.  */
254 pending_stack_adjust = 0;
255 inhibit_defer_pop = 0;
256 cleanups_this_call = 0;
261 /* Restore all variables describing the current status from the structure *P.
262 This is used after a nested function. */
/* K&R-style definition; the return-type line, the declaration of the
   parameter P, and braces are elided in this sampled listing.  Mirrors
   save_expr_status field for field.  */
265 restore_expr_status (p)
268 pending_stack_adjust = p->pending_stack_adjust;
269 inhibit_defer_pop = p->inhibit_defer_pop;
270 cleanups_this_call = p->cleanups_this_call;
271 saveregs_value = p->saveregs_value;
272 forced_labels = p->forced_labels;
275 /* Manage the queue of increment instructions to be output
276 for POSTINCREMENT_EXPR expressions, etc. */
/* Head of the chain of QUEUED rtx's awaiting emission; consumed by
   emit_queue and inspected by protect_from_queue.  */
278 static rtx pending_chain;
280 /* Queue up to increment (or change) VAR later. BODY says how:
281 BODY should be the same thing you would pass to emit_insn
282 to increment right away. It will go to emit_insn later on.
284 The value is a QUEUED expression to be used in place of VAR
285 where you want to guarantee the pre-incrementation value of VAR. */
/* K&R definition; return type, parameter declarations, and braces are
   elided in this sampled listing.  */
288 enqueue_insn (var, body)
/* Push a new QUEUED node on pending_chain.  Operand order matches the
   QUEUED_VAR / QUEUED_INSN / QUEUED_COPY / QUEUED_BODY / QUEUED_NEXT
   accessors used below: var, insn (NULL until emitted), copy (NULL until
   needed), body, next.  */
291 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
292 var, NULL_RTX, NULL_RTX, body, pending_chain);
293 return pending_chain;
296 /* Use protect_from_queue to convert a QUEUED expression
297 into something that you can put immediately into an instruction.
298 If the queued incrementation has not happened yet,
299 protect_from_queue returns the variable itself.
300 If the incrementation has happened, protect_from_queue returns a temp
301 that contains a copy of the old value of the variable.
303 Any time an rtx which might possibly be a QUEUED is to be put
304 into an instruction, it must be passed through protect_from_queue first.
305 QUEUED expressions are not meaningful in instructions.
307 Do not pass a value through protect_from_queue and then hold
308 on to it for a while before putting it in an instruction!
309 If the queue is flushed in between, incorrect code will result. */
/* K&R definition; the return type, the declarations of X and MODIFY, the
   braces, and several interior lines (including the dispatch on `code')
   are elided in this sampled listing.  */
312 protect_from_queue (x, modify)
316 register RTX_CODE code = GET_CODE (x);
318 #if 0 /* A QUEUED can hang around after the queue is forced out. */
319 /* Shortcut for most common case. */
320 if (pending_chain == 0)
326 /* A special hack for read access to (MEM (QUEUED ...))
327 to facilitate use of autoincrement.
328 Make a copy of the contents of the memory location
329 rather than a copy of the address, but not
330 if the value is of mode BLKmode. */
331 if (code == MEM && GET_MODE (x) != BLKmode
332 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
334 register rtx y = XEXP (x, 0);
335 XEXP (x, 0) = QUEUED_VAR (y);
/* If the queued increment has already been emitted, snapshot the memory
   contents into a fresh pseudo ahead of that insn.  */
338 register rtx temp = gen_reg_rtx (GET_MODE (x));
339 emit_insn_before (gen_move_insn (temp, x),
345 /* Otherwise, recursively protect the subexpressions of all
346 the kinds of rtx's that can contain a QUEUED. */
/* (The test guarding the single-operand case — code == MEM in stock
   expr.c — is elided here.)  */
348 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
349 else if (code == PLUS || code == MULT)
351 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
352 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
/* From here on, X is a QUEUED rtx itself.  */
356 /* If the increment has not happened, use the variable itself. */
357 if (QUEUED_INSN (x) == 0)
358 return QUEUED_VAR (x);
359 /* If the increment has happened and a pre-increment copy exists,
   use that copy.  (Closing of this comment elided in the listing.)  */
361 if (QUEUED_COPY (x) != 0)
362 return QUEUED_COPY (x);
363 /* The increment has happened but we haven't set up a pre-increment copy.
364 Set one up now, and use it. */
365 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
366 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
368 return QUEUED_COPY (x);
371 /* Return nonzero if X contains a QUEUED expression:
372 if it contains anything that will be altered by a queued increment.
373 We handle only combinations of MEM, PLUS, MINUS and MULT operators
374 since memory addresses generally contain only those. */
/* NOTE(review): body of queued_subexp_p (x); the function header and the
   dispatch structure on `code' (switch/case in stock expr.c, including a
   "return 1" for QUEUED itself) are elided in this sampled listing.  */
380 register enum rtx_code code = GET_CODE (x);
/* Single-operand case — presumably MEM: examine the address.  */
386 return queued_subexp_p (XEXP (x, 0));
/* Binary-operator case (PLUS, MINUS, MULT): examine both operands.  */
390 return queued_subexp_p (XEXP (x, 0))
391 || queued_subexp_p (XEXP (x, 1));
396 /* Perform all the pending incrementations. */
/* NOTE(review): body of emit_queue (); the header, braces, and the local
   declaration of `p' are elided in this sampled listing.  Pops each QUEUED
   node, emits its saved body, and records the emitted insn so
   protect_from_queue can insert pre-increment copies before it.  */
402 while (p = pending_chain)
404 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
405 pending_chain = QUEUED_NEXT (p);
416 /* Copy data from FROM to TO, where the machine modes are not the same.
417 Both modes may be integer, or both may be floating.
418 UNSIGNEDP should be nonzero if FROM is an unsigned type.
419 This causes zero-extension instead of sign-extension. */
/* K&R definition; the return type, remaining parameter declarations,
   braces, abort() calls, and many #else/#endif/return lines are elided in
   this sampled listing.  */
422 convert_move (to, from, unsignedp)
423 register rtx to, from;
426 enum machine_mode to_mode = GET_MODE (to);
427 enum machine_mode from_mode = GET_MODE (from);
428 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
429 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
433 /* rtx code for making an equivalent value. */
434 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
436 to = protect_from_queue (to, 1);
437 from = protect_from_queue (from, 0);
/* Mixing float and integer modes here is a caller error (the abort that
   follows this test is elided in the listing).  */
439 if (to_real != from_real)
442 /* If FROM is a SUBREG that indicates that we have already done at least
443 the required extension, strip it. We don't handle such SUBREGs as
   no-ops here.  (Remainder of this comment elided in the listing.)  */
446 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
447 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
448 >= GET_MODE_SIZE (to_mode))
449 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
450 from = gen_lowpart (to_mode, from), from_mode = to_mode;
/* A promoted SUBREG destination is not handled (abort elided).  */
452 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
455 if (to_mode == from_mode
456 || (from_mode == VOIDmode && CONSTANT_P (from)))
458 emit_move_insn (to, from);
/* Float-to-float widening via target insns.  Each branch sits inside its
   HAVE_extend*2 #ifdef; the matching return/#endif lines are elided here.
   NOTE(review): the QFmode -> HFmode case below tests HAVE_extendqfsf2 and
   emits CODE_FOR_extendqfsf2 under #ifdef HAVE_extendqfhf2 — this looks
   like a copy-paste bug (should be extendqfhf2, as in later GCC releases).
   Confirm against the full source before changing.  */
464 #ifdef HAVE_extendqfhf2
465 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
467 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
471 #ifdef HAVE_extendqfsf2
472 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
474 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
478 #ifdef HAVE_extendqfdf2
479 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
481 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
485 #ifdef HAVE_extendqfxf2
486 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
488 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
492 #ifdef HAVE_extendqftf2
493 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
495 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
500 #ifdef HAVE_extendhfsf2
501 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
503 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
507 #ifdef HAVE_extendhfdf2
508 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
510 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
514 #ifdef HAVE_extendhfxf2
515 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
517 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
521 #ifdef HAVE_extendhftf2
522 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
524 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
529 #ifdef HAVE_extendsfdf2
530 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
532 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
536 #ifdef HAVE_extendsfxf2
537 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
539 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
543 #ifdef HAVE_extendsftf2
544 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
546 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
550 #ifdef HAVE_extenddfxf2
551 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
553 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
557 #ifdef HAVE_extenddftf2
558 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
560 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
/* Float-to-float truncation via target insns, same pattern as above.  */
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
572 #ifdef HAVE_truncsfqf2
573 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
575 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
579 #ifdef HAVE_truncdfqf2
580 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
586 #ifdef HAVE_truncxfqf2
587 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
593 #ifdef HAVE_trunctfqf2
594 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
600 #ifdef HAVE_truncsfhf2
601 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
603 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
607 #ifdef HAVE_truncdfhf2
608 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
610 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
614 #ifdef HAVE_truncxfhf2
615 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
617 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
621 #ifdef HAVE_trunctfhf2
622 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
624 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
628 #ifdef HAVE_truncdfsf2
629 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
631 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
635 #ifdef HAVE_truncxfsf2
636 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
638 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
642 #ifdef HAVE_trunctfsf2
643 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
645 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
649 #ifdef HAVE_truncxfdf2
650 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
652 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
656 #ifdef HAVE_trunctfdf2
657 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
659 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* No target insn applied: fall back to a library routine.  The switch on
   from_mode/to_mode that selects `libcall' is largely elided in this
   listing; only the assignments survive.  */
671 libcall = extendsfdf2_libfunc;
675 libcall = extendsfxf2_libfunc;
679 libcall = extendsftf2_libfunc;
688 libcall = truncdfsf2_libfunc;
692 libcall = extenddfxf2_libfunc;
696 libcall = extenddftf2_libfunc;
705 libcall = truncxfsf2_libfunc;
709 libcall = truncxfdf2_libfunc;
718 libcall = trunctfsf2_libfunc;
722 libcall = trunctfdf2_libfunc;
728 if (libcall == (rtx) 0)
729 /* This conversion is not implemented yet. */
732 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
733 emit_move_insn (to, hard_libcall_value (to_mode));
737 /* Now both modes are integers. */
738 /* (The locals used below — code, lowpart, lowfrom, fill_value, insns,
   i — are declared on lines elided from this listing.)  */
739 /* Handle expanding beyond a word. */
740 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
741 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
748 enum machine_mode lowpart_mode;
749 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
751 /* Try converting directly if the insn is supported. */
752 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
755 /* If FROM is a SUBREG, put it into a register. Do this
756 so that we always generate the same set of insns for
757 better cse'ing; if an intermediate assignment occurred,
758 we won't be doing the operation directly on the SUBREG. */
759 if (optimize > 0 && GET_CODE (from) == SUBREG)
760 from = force_reg (from_mode, from);
761 emit_unop_insn (code, to, from, equiv_code);
764 /* Next, try converting via full word. */
765 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
766 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
767 != CODE_FOR_nothing))
769 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
770 emit_unop_insn (code, to,
771 gen_lowpart (word_mode, to), equiv_code);
775 /* No special multiword conversion insn; do it by hand. */
778 /* Get a copy of FROM widened to a word, if necessary. */
779 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
780 lowpart_mode = word_mode;
782 lowpart_mode = from_mode;
784 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
786 lowpart = gen_lowpart (lowpart_mode, to);
787 emit_move_insn (lowpart, lowfrom);
789 /* Compute the value to put in each remaining word. */
/* Zero-extension: high words are simply zero.  The unsignedp test and
   the slt-availability test guarding the branch below are elided.  */
791 fill_value = const0_rtx;
796 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
797 && STORE_FLAG_VALUE == -1)
799 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
801 fill_value = gen_reg_rtx (word_mode);
802 emit_insn (gen_slt (fill_value));
/* Otherwise sign-extend by arithmetic-shifting the low part right by
   its width minus one, replicating the sign bit.  */
808 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
809 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
811 fill_value = convert_to_mode (word_mode, fill_value, 1);
815 /* Fill the remaining words. */
816 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
818 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
819 rtx subword = operand_subword (to, index, 1, to_mode);
824 if (fill_value != subword)
825 emit_move_insn (subword, fill_value);
828 insns = get_insns ();
831 emit_no_conflict_block (insns, to, from, NULL_RTX,
832 gen_rtx (equiv_code, to_mode, from));
/* Truncating a multiword value down to one word: just move the low part. */
836 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
838 convert_move (to, gen_lowpart (word_mode, from), 0);
842 /* Handle pointer conversion */ /* SPEE 900220 */
843 if (to_mode == PSImode)
845 if (from_mode != SImode)
846 from = convert_to_mode (SImode, from, unsignedp);
848 #ifdef HAVE_truncsipsi
851 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
854 #endif /* HAVE_truncsipsi */
858 if (from_mode == PSImode)
860 if (to_mode != SImode)
862 from = convert_to_mode (SImode, from, unsignedp);
867 #ifdef HAVE_extendpsisi
868 if (HAVE_extendpsisi)
870 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
873 #endif /* HAVE_extendpsisi */
878 /* Now follow all the conversions between integers
879 no more than a word long. */
881 /* For truncation, usually we can just refer to FROM in a narrower mode. */
882 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
883 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
884 GET_MODE_BITSIZE (from_mode))
885 && ((GET_CODE (from) == MEM
886 && ! MEM_VOLATILE_P (from)
887 && direct_load[(int) to_mode]
888 && ! mode_dependent_address_p (XEXP (from, 0)))
889 || GET_CODE (from) == REG
890 || GET_CODE (from) == SUBREG))
892 emit_move_insn (to, gen_lowpart (to_mode, from));
896 /* For truncation, usually we can just refer to FROM in a narrower mode. */
897 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
899 /* Convert directly if that works. */
900 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
903 /* If FROM is a SUBREG, put it into a register. Do this
904 so that we always generate the same set of insns for
905 better cse'ing; if an intermediate assignment occurred,
906 we won't be doing the operation directly on the SUBREG. */
907 if (optimize > 0 && GET_CODE (from) == SUBREG)
908 from = force_reg (from_mode, from);
909 emit_unop_insn (code, to, from, equiv_code);
914 enum machine_mode intermediate;
916 /* Search for a mode to convert via. */
917 for (intermediate = from_mode; intermediate != VOIDmode;
918 intermediate = GET_MODE_WIDER_MODE (intermediate))
919 if ((can_extend_p (to_mode, intermediate, unsignedp)
921 && (can_extend_p (intermediate, from_mode, unsignedp)
922 != CODE_FOR_nothing))
924 convert_move (to, convert_to_mode (intermediate, from,
925 unsignedp), unsignedp);
929 /* No suitable intermediate mode. */
934 /* Support special truncate insns for certain modes. */
936 if (from_mode == DImode && to_mode == SImode)
938 #ifdef HAVE_truncdisi2
941 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
/* Fallback when the truncate insn is absent (#else arm; marker elided):
   force FROM into a register and retry the generic path.  */
945 convert_move (to, force_reg (from_mode, from), unsignedp);
949 if (from_mode == DImode && to_mode == HImode)
951 #ifdef HAVE_truncdihi2
954 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
958 convert_move (to, force_reg (from_mode, from), unsignedp);
962 if (from_mode == DImode && to_mode == QImode)
964 #ifdef HAVE_truncdiqi2
967 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
971 convert_move (to, force_reg (from_mode, from), unsignedp);
975 if (from_mode == SImode && to_mode == HImode)
977 #ifdef HAVE_truncsihi2
980 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
984 convert_move (to, force_reg (from_mode, from), unsignedp);
988 if (from_mode == SImode && to_mode == QImode)
990 #ifdef HAVE_truncsiqi2
993 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
997 convert_move (to, force_reg (from_mode, from), unsignedp);
1001 if (from_mode == HImode && to_mode == QImode)
1003 #ifdef HAVE_trunchiqi2
1004 if (HAVE_trunchiqi2)
1006 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1010 convert_move (to, force_reg (from_mode, from), unsignedp);
1014 /* Handle truncation of volatile memrefs, and so on;
1015 the things that couldn't be truncated directly,
1016 and for which there was no special instruction. */
1017 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1019 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1020 emit_move_insn (to, temp);
1024 /* Mode combination is not recognized. */
1028 /* Return an rtx for a value that would result
1029 from converting X to mode MODE.
1030 Both X and MODE may be floating, or both integer.
1031 UNSIGNEDP is nonzero if X is an unsigned value.
1032 This can be done by referring to a part of X in place
1033 or by copying to a new temporary with conversion.
1035 This function *must not* call protect_from_queue
1036 except when putting X into an insn (in which case convert_move does it). */
/* K&R definition; the return type, the remaining parameter declarations,
   the local `temp', braces, and a few interior lines (including the final
   `return temp;') are elided in this sampled listing.  */
1039 convert_to_mode (mode, x, unsignedp)
1040 enum machine_mode mode;
1046 /* If FROM is a SUBREG that indicates that we have already done at least
1047 the required extension, strip it. */
1049 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1050 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1051 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1052 x = gen_lowpart (mode, x);
/* Already in the right mode: return X unchanged (the `return x;' is
   elided in this listing).  */
1054 if (mode == GET_MODE (x))
1057 /* There is one case that we must handle specially: If we are converting
1058 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1059 we are to interpret the constant as unsigned, gen_lowpart will do
1060 the wrong if the constant appears negative. What we want to do is
1061 make the high-order word of the constant zero, not all ones. */
1063 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1064 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1065 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1066 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1068 /* We can do this with a gen_lowpart if both desired and current modes
1069 are integer, and this is either a constant integer, a register, or a
1070 non-volatile MEM. Except for the constant case, we must be narrowing
   the operand.  (Closing of this comment elided in the listing.)  */
1073 if (GET_CODE (x) == CONST_INT
1074 || (GET_MODE_CLASS (mode) == MODE_INT
1075 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1076 && (GET_CODE (x) == CONST_DOUBLE
1077 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1078 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1079 && direct_load[(int) mode]
1080 || GET_CODE (x) == REG)))))
1081 return gen_lowpart (mode, x);
/* General case: copy into a fresh pseudo with an explicit conversion.  */
1083 temp = gen_reg_rtx (mode);
1084 convert_move (temp, x, unsignedp);
1088 /* Generate several move instructions to copy LEN bytes
1089 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1090 The caller must pass FROM and TO
1091 through protect_from_queue before calling.
1092 ALIGN (in bytes) is maximum alignment we can assume. */
/* Bookkeeping for a by-pieces block copy.  NOTE(review): most of the
   fields (to, to_addr, autinc_to, reverse, from, from_addr, autinc_from,
   len, offset — judging from the uses in move_by_pieces below) are elided
   in this sampled listing; only the explicit_inc_* members survive.
   explicit_inc_to/from: -1 = emit explicit pre-decrement adds, +1 = emit
   explicit post-increment adds, 0 = address mode handles it or plain
   offsets are used.  */
1094 struct move_by_pieces
1099 int explicit_inc_to;
1103 int explicit_inc_from;
/* Forward declarations for the by-pieces helpers defined below.  */
1109 static void move_by_pieces_1 ();
1110 static int move_by_pieces_ninsns ();
/* K&R definition; the return type, parameter declarations, braces, and a
   number of interior lines (field names on the left of several `='
   continuation lines, the final abort check) are elided in this sampled
   listing.  Copies LEN bytes from FROM to TO with individual move insns,
   largest usable mode first.  */
1113 move_by_pieces (to, from, len, align)
1117 struct move_by_pieces data;
1118 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1119 int max_size = MOVE_MAX + 1;
1122 data.to_addr = to_addr;
1123 data.from_addr = from_addr;
/* data.autinc_to = ... (left-hand side elided in the listing): whether
   the destination address already auto-increments/decrements.  */
1127 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1128 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
/* data.autinc_from = ... (left-hand side elided): same for the source.  */
1130 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1131 || GET_CODE (from_addr) == POST_INC
1132 || GET_CODE (from_addr) == POST_DEC);
1134 data.explicit_inc_from = 0;
1135 data.explicit_inc_to = 0;
/* data.reverse = ... (left-hand side elided): copy back-to-front when the
   destination address decrements.  */
1137 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1138 if (data.reverse) data.offset = len;
1141 /* If copying requires more than two move insns,
1142 copy addresses to registers (to make displacements shorter)
1143 and use post-increment if available. */
1144 if (!(data.autinc_from && data.autinc_to)
1145 && move_by_pieces_ninsns (len, align) > 2)
1147 #ifdef HAVE_PRE_DECREMENT
1148 if (data.reverse && ! data.autinc_from)
1150 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1151 data.autinc_from = 1;
1152 data.explicit_inc_from = -1;
1155 #ifdef HAVE_POST_INCREMENT
1156 if (! data.autinc_from)
1158 data.from_addr = copy_addr_to_reg (from_addr);
1159 data.autinc_from = 1;
1160 data.explicit_inc_from = 1;
1163 if (!data.autinc_from && CONSTANT_P (from_addr))
1164 data.from_addr = copy_addr_to_reg (from_addr);
1165 #ifdef HAVE_PRE_DECREMENT
1166 if (data.reverse && ! data.autinc_to)
1168 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len))&#59;
1170 data.explicit_inc_to = -1;
1173 #ifdef HAVE_POST_INCREMENT
1174 if (! data.reverse && ! data.autinc_to)
1176 data.to_addr = copy_addr_to_reg (to_addr);
1178 data.explicit_inc_to = 1;
1181 if (!data.autinc_to && CONSTANT_P (to_addr))
1182 data.to_addr = copy_addr_to_reg (to_addr);
/* If unaligned access is cheap, pretend we are fully aligned (the
   `align = MOVE_MAX;' body of this if is elided in the listing).  */
1185 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1186 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1189 /* First move what we can in the largest integer mode, then go to
1190 successively smaller modes. */
1192 while (max_size > 1)
1194 enum machine_mode mode = VOIDmode, tmode;
1195 enum insn_code icode;
/* Find the widest integer mode strictly narrower than max_size (the
   `mode = tmode;' body and the VOIDmode break are elided here).  */
1197 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1198 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1199 if (GET_MODE_SIZE (tmode) < max_size)
1202 if (mode == VOIDmode)
1205 icode = mov_optab->handlers[(int) mode].insn_code;
1206 if (icode != CODE_FOR_nothing
1207 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1208 GET_MODE_SIZE (mode)))
1209 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1211 max_size = GET_MODE_SIZE (mode);
1214 /* The code above should have handled everything. */
1219 /* Return number of insns required to move L bytes by pieces.
1220 ALIGN (in bytes) is maximum alignment we can assume. */
/* K&R definition; parameter declarations, braces, the `mode = tmode;' /
   `break;' bodies, and the final `return n_insns;' are elided in this
   sampled listing.  Mirrors the mode-selection loop of move_by_pieces so
   the two stay in agreement.  */
1223 move_by_pieces_ninsns (l, align)
1227 register int n_insns = 0;
1228 int max_size = MOVE_MAX + 1;
/* If unaligned access is cheap, pretend we are fully aligned (the
   `align = MOVE_MAX;' body is elided).  */
1230 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1231 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1234 while (max_size > 1)
1236 enum machine_mode mode = VOIDmode, tmode;
1237 enum insn_code icode;
1239 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1240 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1241 if (GET_MODE_SIZE (tmode) < max_size)
1244 if (mode == VOIDmode)
1247 icode = mov_optab->handlers[(int) mode].insn_code;
1248 if (icode != CODE_FOR_nothing
1249 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1250 GET_MODE_SIZE (mode)))
/* Count whole-mode moves at this width; the remainder falls through to
   narrower modes on later iterations.  */
1251 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1253 max_size = GET_MODE_SIZE (mode);
1259 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1260 with move instructions for mode MODE. GENFUN is the gen_... function
1261 to make a move insn for that mode. DATA has all the other info. */
/* K&R definition; the declaration of GENFUN, braces, and the trailing
   `data->len -= size;' are elided in this sampled listing.  */
1264 move_by_pieces_1 (genfun, mode, data)
1266 enum machine_mode mode;
1267 struct move_by_pieces *data;
1269 register int size = GET_MODE_SIZE (mode);
1270 register rtx to1, from1;
1272 while (data->len >= size)
1274 if (data->reverse) data->offset -= size;
/* With auto-increment addressing, reuse the address register directly;
   otherwise address via a constant offset from the block base.  */
1276 to1 = (data->autinc_to
1277 ? gen_rtx (MEM, mode, data->to_addr)
1278 : change_address (data->to, mode,
1279 plus_constant (data->to_addr, data->offset)));
/* from1 = (data->autinc_from ... — the first line of this parallel
   conditional is elided in the listing.  */
1282 ? gen_rtx (MEM, mode, data->from_addr)
1283 : change_address (data->from, mode,
1284 plus_constant (data->from_addr, data->offset)));
/* Emit explicit pre-decrements before the move when requested.  */
1286 #ifdef HAVE_PRE_DECREMENT
1287 if (data->explicit_inc_to < 0)
1288 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1289 if (data->explicit_inc_from < 0)
1290 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1293 emit_insn ((*genfun) (to1, from1));
/* Emit explicit post-increments after the move when requested.  */
1294 #ifdef HAVE_POST_INCREMENT
1295 if (data->explicit_inc_to > 0)
1296 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1297 if (data->explicit_inc_from > 0)
1298 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1301 if (! data->reverse) data->offset += size;
1307 /* Emit code to move a block Y to a block X.
1308 This may be done with string-move instructions,
1309 with multiple scalar move instructions, or with a library call.
1311 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1313 SIZE is an rtx that says how long they are.
1314 ALIGN is the maximum alignment we can assume they have,
1315 measured in bytes. */
/* K&R definition; the parameter declarations, braces, abort() calls after
   the sanity checks, and several interior lines (pat/op2 declarations,
   the emit/return after a successful movstr pattern) are elided in this
   sampled listing.  */
1318 emit_block_move (x, y, size, align)
/* Sanity checks: both operands must be BLKmode MEMs (aborts elided).  */
1323 if (GET_MODE (x) != BLKmode)
1326 if (GET_MODE (y) != BLKmode)
1329 x = protect_from_queue (x, 1);
1330 y = protect_from_queue (y, 0);
1331 size = protect_from_queue (size, 0);
1333 if (GET_CODE (x) != MEM)
1335 if (GET_CODE (y) != MEM)
/* Small constant-size copies: cheaper as individual move insns.  */
1340 if (GET_CODE (size) == CONST_INT
1341 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1342 move_by_pieces (x, y, INTVAL (size), align);
1345 /* Try the most limited insn first, because there's no point
1346 including more than one in the machine description unless
1347 the more limited one has some advantage. */
1349 rtx opalign = GEN_INT (align);
1350 enum machine_mode mode;
1352 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1353 mode = GET_MODE_WIDER_MODE (mode))
1355 enum insn_code code = movstr_optab[(int) mode];
1357 if (code != CODE_FOR_nothing
1358 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1359 here because if SIZE is less than the mode mask, as it is
1360 returned by the macro, it will definitely be less than the
1361 actual mode mask. */
1362 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1363 && (insn_operand_predicate[(int) code][0] == 0
1364 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1365 && (insn_operand_predicate[(int) code][1] == 0
1366 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1367 && (insn_operand_predicate[(int) code][3] == 0
1368 || (*insn_operand_predicate[(int) code][3]) (opalign,
/* Remember where we are so a failed expansion can be deleted.  */
1372 rtx last = get_last_insn ();
1375 op2 = convert_to_mode (mode, size, 1);
1376 if (insn_operand_predicate[(int) code][2] != 0
1377 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1378 op2 = copy_to_mode_reg (mode, op2);
1380 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* Pattern failed to expand: discard any insns it emitted and try the
   next wider mode.  */
1387 delete_insns_since (last);
/* No movstr pattern worked: fall back to a library call — memcpy on
   targets with the ANSI memory functions, bcopy (args swapped) elsewhere.
   The second argument of each call is elided in this listing.  */
1391 #ifdef TARGET_MEM_FUNCTIONS
1392 emit_library_call (memcpy_libfunc, 0,
1393 VOIDmode, 3, XEXP (x, 0), Pmode,
1395 convert_to_mode (Pmode, size, 1), Pmode);
1397 emit_library_call (bcopy_libfunc, 0,
1398 VOIDmode, 3, XEXP (y, 0), Pmode,
1400 convert_to_mode (Pmode, size, 1), Pmode);
1405 /* Copy all or part of a value X into registers starting at REGNO.
1406 The number of registers to be filled is NREGS. */
/* Copy NREGS words of X (mode MODE) into hard registers starting at
   register number REGNO.  Tries a load_multiple insn when the target
   provides one, otherwise falls back to one word move per register.
   NOTE(review): some declarations and braces are elided in this excerpt.  */
1409 move_block_to_reg (regno, x, nregs, mode)
1413 enum machine_mode mode;
     /* An illegitimate constant cannot be moved directly; spill it to
        the constant pool and load from memory instead.  */
1418 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1419 x = validize_mem (force_const_mem (mode, x));
1421 /* See if the machine can do this with a load multiple insn. */
1422 #ifdef HAVE_load_multiple
1423 last = get_last_insn ();
1424 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
     /* load_multiple failed: remove any partial expansion.  */
1432 delete_insns_since (last);
     /* Fallback: move each word individually into consecutive regs.  */
1435 for (i = 0; i < nregs; i++)
1436 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1437 operand_subword_force (x, i, mode));
1440 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1441 The number of registers to be filled is NREGS. */
/* Copy NREGS words out of hard registers starting at REGNO into the
   BLKmode value X.  Mirror of move_block_to_reg: tries store_multiple,
   then falls back to per-word moves.
   NOTE(review): parameter declarations and braces are elided here.  */
1444 move_block_from_reg (regno, x, nregs)
1452 /* See if the machine can do this with a store multiple insn. */
1453 #ifdef HAVE_store_multiple
1454 last = get_last_insn ();
1455 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
     /* store_multiple failed: remove any partial expansion.  */
1463 delete_insns_since (last);
1466 for (i = 0; i < nregs; i++)
     /* Last arg 1: operand_subword may emit insns to form the subword.  */
1468 rtx tem = operand_subword (x, i, 1, BLKmode);
1473 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1477 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
/* Emit a USE insn for each of the NREGS consecutive hard registers
   starting at REGNO, marking them live (e.g. across a call) so later
   passes do not consider them dead.  */
1480 use_regs (regno, nregs)
1486 for (i = 0; i < nregs; i++)
1487 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1490 /* Mark the instructions since PREV as a libcall block.
1491 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
/* Body of group_insns (header elided in this excerpt): bracket the
   insns emitted since PREV as one libcall block by attaching a
   REG_LIBCALL note to the first insn and a REG_RETVAL note to the
   last, each pointing at the other end of the block.  */
1500 /* Find the instructions to mark */
1502 insn_first = NEXT_INSN (prev);
     /* PREV == 0 (elided branch, presumably): the block starts at the
        very first insn of the sequence — TODO confirm against full source.  */
1504 insn_first = get_insns ();
1506 insn_last = get_last_insn ();
1508 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1509 REG_NOTES (insn_last));
1511 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1512 REG_NOTES (insn_first));
1515 /* Write zeros through the storage of OBJECT.
1516 If OBJECT has BLKmode, SIZE is its length in bytes. */
/* Store zeros through OBJECT.  For a BLKmode OBJECT, SIZE is its length
   in bytes and the work is done by a library call (memset or bzero);
   otherwise a single move of const0_rtx suffices.  */
1519 clear_storage (object, size)
1523 if (GET_MODE (object) == BLKmode)
1525 #ifdef TARGET_MEM_FUNCTIONS
     /* memset (addr, 0, size).  */
1526 emit_library_call (memset_libfunc, 0,
1528 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1529 GEN_INT (size), Pmode);
     /* bzero (addr, size) on targets without the mem* functions.  */
1531 emit_library_call (bzero_libfunc, 0,
1533 XEXP (object, 0), Pmode,
1534 GEN_INT (size), Pmode);
     /* Scalar case: just move a zero of the object's mode.  */
1538 emit_move_insn (object, const0_rtx);
1541 /* Generate code to copy Y into X.
1542 Both Y and X must have the same mode, except that
1543 Y can be a constant with VOIDmode.
1544 This mode cannot be BLKmode; use emit_block_move for that.
1546 Return the last instruction emitted. */
/* Generate code to copy Y into X and return the last insn emitted.
   X and Y must have the same mode (Y may also be a VOIDmode constant);
   BLKmode is not handled here — use emit_block_move.  Handles, in order:
   a direct mov pattern, splitting complex values into real/imaginary
   parts, and word-by-word moves for multiword modes with no pattern.
   NOTE(review): several original lines (aborts, else-arms, braces) are
   elided in this excerpt.  */
1549 emit_move_insn (x, y)
1552 enum machine_mode mode = GET_MODE (x);
1553 enum machine_mode submode;
1554 enum mode_class class = GET_MODE_CLASS (mode);
     /* Resolve queued autoincrements: X is written, Y is only read.  */
1557 x = protect_from_queue (x, 1);
1558 y = protect_from_queue (y, 0);
1560 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
     /* Constants the target cannot encode directly go to the constant pool.  */
1563 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1564 y = force_const_mem (mode, y);
1566 /* If X or Y are memory references, verify that their addresses are valid
1568 if (GET_CODE (x) == MEM
1569 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1570 && ! push_operand (x, GET_MODE (x)))
1572 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1573 x = change_address (x, VOIDmode, XEXP (x, 0));
1575 if (GET_CODE (y) == MEM
1576 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1578 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1579 y = change_address (y, VOIDmode, XEXP (y, 0));
1581 if (mode == BLKmode)
     /* For complex modes, compute the mode of one component part, to be
        used if we must split the move below.  */
1584 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1585 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1586 (class == MODE_COMPLEX_INT
1587 ? MODE_INT : MODE_FLOAT),
     /* Preferred case: the target has a mov pattern for this mode.  */
1590 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1592 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1594 /* Expand complex moves by moving real part and imag part, if possible. */
1595 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1596 && submode != BLKmode
1597 && (mov_optab->handlers[(int) submode].insn_code
1598 != CODE_FOR_nothing))
1600 /* Don't split destination if it is a stack push. */
1601 int stack = push_operand (x, GET_MODE (x));
1602 rtx prev = get_last_insn ();
1604 /* Tell flow that the whole of the destination is being set. */
1605 if (GET_CODE (x) == REG)
1606 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1608 /* If this is a stack, push the highpart first, so it
1609 will be in the argument order.
1611 In that case, change_address is used only to convert
1612 the mode, not to change the address. */
1613 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1614 ((stack ? change_address (x, submode, (rtx) 0)
1615 : gen_highpart (submode, x)),
1616 gen_highpart (submode, y)));
1617 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1618 ((stack ? change_address (x, submode, (rtx) 0)
1619 : gen_lowpart (submode, x)),
1620 gen_lowpart (submode, y)));
1624 return get_last_insn ();
1627 /* This will handle any multi-word mode that lacks a move_insn pattern.
1628 However, you will get better code if you define such patterns,
1629 even if they must turn into multiple assembler instructions. */
1630 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1633 rtx prev_insn = get_last_insn ();
     /* Move CEIL(size / UNITS_PER_WORD) words, one at a time.  */
1636 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1639 rtx xpart = operand_subword (x, i, 1, mode);
1640 rtx ypart = operand_subword (y, i, 1, mode);
1642 /* If we can't get a part of Y, put Y into memory if it is a
1643 constant. Otherwise, force it into a register. If we still
1644 can't get a part of Y, abort. */
1645 if (ypart == 0 && CONSTANT_P (y))
1647 y = force_const_mem (mode, y);
1648 ypart = operand_subword (y, i, 1, mode);
1650 else if (ypart == 0)
1651 ypart = operand_subword_force (y, i, mode);
1653 if (xpart == 0 || ypart == 0)
1656 last_insn = emit_move_insn (xpart, ypart);
1658 /* Mark these insns as a libcall block. */
1659 group_insns (prev_insn);
1667 /* Pushing data onto the stack. */
1669 /* Push a block of length SIZE (perhaps variable)
1670 and return an rtx to address the beginning of the block.
1671 Note that it is not possible for the value returned to be a QUEUED.
1672 The value may be virtual_outgoing_args_rtx.
1674 EXTRA is the number of bytes of padding to push in addition to SIZE.
1675 BELOW nonzero means this padding comes at low addresses;
1676 otherwise, the padding comes at high addresses. */
/* Push a block of length SIZE bytes (an rtx, perhaps variable) plus
   EXTRA bytes of padding, and return an rtx addressing the beginning of
   the block.  BELOW nonzero means the padding lies at low addresses,
   otherwise at high addresses.  The returned value is never a QUEUED
   and may be virtual_outgoing_args_rtx.
   NOTE(review): some lines (braces, an #else for the non-downward-stack
   case) are elided in this excerpt.  */
1679 push_block (size, extra, below)
     /* First adjust the stack pointer by SIZE + EXTRA.  */
1684 if (CONSTANT_P (size))
1685 anti_adjust_stack (plus_constant (size, extra));
1686 else if (GET_CODE (size) == REG && extra == 0)
1687 anti_adjust_stack (size);
     /* Variable size with nonzero EXTRA: compute SIZE + EXTRA into a reg.  */
1690 rtx temp = copy_to_mode_reg (Pmode, size);
1692 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1693 temp, 0, OPTAB_LIB_WIDEN);
1694 anti_adjust_stack (temp);
     /* Now compute the address of the block's start.  */
1697 #ifdef STACK_GROWS_DOWNWARD
1698 temp = virtual_outgoing_args_rtx;
1699 if (extra != 0 && below)
1700 temp = plus_constant (temp, extra);
     /* Upward-growing stack (elided #else branch, presumably): the block
        starts SIZE (+ padding) below the outgoing-args pointer.  */
1702 if (GET_CODE (size) == CONST_INT
1703 temp = plus_constant (virtual_outgoing_args_rtx,
1704 - INTVAL (size) - (below ? 0 : extra));
1705 else if (extra != 0 && !below)
1706 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1707 negate_rtx (Pmode, plus_constant (size, extra)));
1709 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1710 negate_rtx (Pmode, size));
     /* Legitimize the address before returning it.  */
1713 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1719 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1722 /* Generate code to push X onto the stack, assuming it has mode MODE and
1724 MODE is redundant except when X is a CONST_INT (since they don't
1726 SIZE is an rtx for the size of data to be copied (in bytes),
1727 needed only if X is BLKmode.
1729 ALIGN (in bytes) is maximum alignment we can assume.
1731 If PARTIAL is nonzero, then copy that many of the first words
1732 of X into registers starting with REG, and push the rest of X.
1733 The amount of space pushed is decreased by PARTIAL words,
1734 rounded *down* to a multiple of PARM_BOUNDARY.
1735 REG must be a hard register in this case.
1737 EXTRA is the amount in bytes of extra space to leave next to this arg.
1738 This is ignored if an argument block has already been allocated.
1740 On a machine that lacks real push insns, ARGS_ADDR is the address of
1741 the bottom of the argument block for this call. We use indexing off there
1742 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1743 argument block has not been preallocated.
1745 ARGS_SO_FAR is the size of args previously pushed for this call. */
/* Generate code to push X (mode MODE, tree type TYPE) onto the stack,
   honoring PARTIAL words passed in registers starting at REG, EXTRA
   bytes of padding, and — on machines without real push insns — an
   explicit argument-block address ARGS_ADDR offset by ARGS_SO_FAR.
   SIZE (an rtx) and ALIGN (bytes) describe BLKmode data.  See the long
   comment above this function for the full contract.  Three main cases:
   BLKmode block push, scalar partly in registers, and plain scalar push.
   NOTE(review): many original lines (declarations, braces, #else arms)
   are elided in this excerpt.  */
1748 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1749 args_addr, args_so_far)
1751 enum machine_mode mode;
1762 enum direction stack_direction
1763 #ifdef STACK_GROWS_DOWNWARD
1769 /* Decide where to pad the argument: `downward' for below,
1770 `upward' for above, or `none' for don't pad it.
1771 Default is below for small data on big-endian machines; else above. */
1772 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1774 /* Invert direction if stack is post-update. */
1775 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1776 if (where_pad != none)
1777 where_pad = (where_pad == downward ? upward : downward);
1779 xinner = x = protect_from_queue (x, 0);
1781 if (mode == BLKmode)
1783 /* Copy a block into the stack, entirely or partially. */
1786 int used = partial * UNITS_PER_WORD;
1787 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1795 /* USED is now the # of bytes we need not copy to the stack
1796 because registers will take care of them. */
     /* Advance XINNER past the register-passed prefix.  */
1799 xinner = change_address (xinner, BLKmode,
1800 plus_constant (XEXP (xinner, 0), used));
1802 /* If the partial register-part of the arg counts in its stack size,
1803 skip the part of stack space corresponding to the registers.
1804 Otherwise, start copying to the beginning of the stack space,
1805 by setting SKIP to 0. */
1806 #ifndef REG_PARM_STACK_SPACE
1812 #ifdef PUSH_ROUNDING
1813 /* Do it with several push insns if that doesn't take lots of insns
1814 and if there is no difficulty with push insns that skip bytes
1815 on the stack for alignment purposes. */
1817 && GET_CODE (size) == CONST_INT
1819 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1821 /* Here we avoid the case of a structure whose weak alignment
1822 forces many pushes of a small amount of data,
1823 and such small pushes do rounding that causes trouble. */
1824 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1825 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1826 || PUSH_ROUNDING (align) == align)
1827 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1829 /* Push padding now if padding above and stack grows down,
1830 or if padding below and stack grows up.
1831 But if space already allocated, this has already been done. */
1832 if (extra && args_addr == 0
1833 && where_pad != none && where_pad != stack_direction)
1834 anti_adjust_stack (GEN_INT (extra));
     /* Push the remaining bytes via a sequence of push-mode moves.  */
1836 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1837 INTVAL (size) - used, align);
1840 #endif /* PUSH_ROUNDING */
1842 /* Otherwise make space on the stack and copy the data
1843 to the address of that space. */
1845 /* Deduct words put into registers from the size we must copy. */
1848 if (GET_CODE (size) == CONST_INT)
1849 size = GEN_INT (INTVAL (size) - used);
1851 size = expand_binop (GET_MODE (size), sub_optab, size,
1852 GEN_INT (used), NULL_RTX, 0,
1856 /* Get the address of the stack space.
1857 In this case, we do not deal with EXTRA separately.
1858 A single stack adjust will do. */
1861 temp = push_block (size, extra, where_pad == downward);
1864 else if (GET_CODE (args_so_far) == CONST_INT)
1865 temp = memory_address (BLKmode,
1866 plus_constant (args_addr,
1867 skip + INTVAL (args_so_far)));
1869 temp = memory_address (BLKmode,
1870 plus_constant (gen_rtx (PLUS, Pmode,
1871 args_addr, args_so_far),
1874 /* TEMP is the address of the block. Copy the data there. */
1875 if (GET_CODE (size) == CONST_INT
1876 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1879 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1880 INTVAL (size), align);
1883 /* Try the most limited insn first, because there's no point
1884 including more than one in the machine description unless
1885 the more limited one has some advantage. */
1886 #ifdef HAVE_movstrqi
1888 && GET_CODE (size) == CONST_INT
1889 && ((unsigned) INTVAL (size)
1890 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1892 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1893 xinner, size, GEN_INT (align)));
1897 #ifdef HAVE_movstrhi
1899 && GET_CODE (size) == CONST_INT
1900 && ((unsigned) INTVAL (size)
1901 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1903 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1904 xinner, size, GEN_INT (align)));
1908 #ifdef HAVE_movstrsi
1911 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1912 xinner, size, GEN_INT (align)));
1916 #ifdef HAVE_movstrdi
1919 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1920 xinner, size, GEN_INT (align)));
1925 #ifndef ACCUMULATE_OUTGOING_ARGS
1926 /* If the source is referenced relative to the stack pointer,
1927 copy it to another register to stabilize it. We do not need
1928 to do this if we know that we won't be changing sp. */
1930 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1931 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1932 temp = copy_to_reg (temp);
1935 /* Make inhibit_defer_pop nonzero around the library call
1936 to force it to pop the bcopy-arguments right away. */
1938 #ifdef TARGET_MEM_FUNCTIONS
1939 emit_library_call (memcpy_libfunc, 0,
1940 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1943 emit_library_call (bcopy_libfunc, 0,
1944 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1950 else if (partial > 0)
1952 /* Scalar partly in registers. */
1954 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1957 /* # words of start of argument
1958 that we must make space for but need not store. */
1959 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1960 int args_offset = INTVAL (args_so_far);
1963 /* Push padding now if padding above and stack grows down,
1964 or if padding below and stack grows up.
1965 But if space already allocated, this has already been done. */
1966 if (extra && args_addr == 0
1967 && where_pad != none && where_pad != stack_direction)
1968 anti_adjust_stack (GEN_INT (extra));
1970 /* If we make space by pushing it, we might as well push
1971 the real data. Otherwise, we can leave OFFSET nonzero
1972 and leave the space uninitialized. */
1976 /* Now NOT_STACK gets the number of words that we don't need to
1977 allocate on the stack. */
1978 not_stack = partial - offset;
1980 /* If the partial register-part of the arg counts in its stack size,
1981 skip the part of stack space corresponding to the registers.
1982 Otherwise, start copying to the beginning of the stack space,
1983 by setting SKIP to 0. */
1984 #ifndef REG_PARM_STACK_SPACE
1990 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1991 x = validize_mem (force_const_mem (mode, x));
1993 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1994 SUBREGs of such registers are not allowed. */
1995 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1996 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1997 x = copy_to_reg (x);
1999 /* Loop over all the words allocated on the stack for this arg. */
2000 /* We can do it by words, because any scalar bigger than a word
2001 has a size a multiple of a word. */
2002 #ifndef PUSH_ARGS_REVERSED
2003 for (i = not_stack; i < size; i++)
2005 for (i = size - 1; i >= not_stack; i--)
2007 if (i >= not_stack + offset)
     /* Recurse: each word is pushed as an ordinary word_mode scalar.  */
2008 emit_push_insn (operand_subword_force (x, i, mode),
2009 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2011 GEN_INT (args_offset + ((i - not_stack + skip)
2012 * UNITS_PER_WORD)));
     /* Plain scalar case (elided else, presumably): push or store X whole.  */
2018 /* Push padding now if padding above and stack grows down,
2019 or if padding below and stack grows up.
2020 But if space already allocated, this has already been done. */
2021 if (extra && args_addr == 0
2022 && where_pad != none && where_pad != stack_direction)
2023 anti_adjust_stack (GEN_INT (extra));
2025 #ifdef PUSH_ROUNDING
2027 addr = gen_push_operand ();
2030 if (GET_CODE (args_so_far) == CONST_INT)
2032 = memory_address (mode,
2033 plus_constant (args_addr, INTVAL (args_so_far)));
2035 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2038 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2042 /* If part should go in registers, copy that part
2043 into the appropriate registers. Do this now, at the end,
2044 since mem-to-mem copies above may do function calls. */
2046 move_block_to_reg (REGNO (reg), x, partial, mode);
     /* Padding on the same side the stack grows: a trailing adjust.  */
2048 if (extra && args_addr == 0 && where_pad == stack_direction)
2049 anti_adjust_stack (GEN_INT (extra));
2052 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2053 (emitting the queue unless NO_QUEUE is nonzero),
2054 for a value of mode OUTMODE,
2055 with NARGS different arguments, passed as alternating rtx values
2056 and machine_modes to convert them to.
2057 The rtx values should have been passed through protect_from_queue already.
2059 NO_QUEUE will be true if and only if the library call is a `const' call
2060 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2061 to the variable is_const in expand_call.
2063 NO_QUEUE must be true for const calls, because if it isn't, then
2064 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2065 and will be lost if the libcall sequence is optimized away.
2067 NO_QUEUE must be false for non-const calls, because if it isn't, the
2068 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2069 optimized. For instance, the instruction scheduler may incorrectly
2070 move memory references across the non-const call. */
/* Emit a call to the library function FUN (varargs: rtx FUN, int
   NO_QUEUE, enum machine_mode OUTMODE, int NARGS, then NARGS pairs of
   rtx value / machine_mode).  Computes argument passing locations,
   pushes stack args, loads register args, and issues the call via
   emit_call_1.  See the long comment above for NO_QUEUE semantics.
   NOTE(review): many original lines (va_start, declarations, braces,
   aborts) are elided in this excerpt.  */
2073 emit_library_call (va_alist)
2077 struct args_size args_size;
2078 register int argnum;
2079 enum machine_mode outmode;
2086 CUMULATIVE_ARGS args_so_far;
2087 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2088 struct args_size offset; struct args_size size; };
2090 int old_inhibit_defer_pop = inhibit_defer_pop;
     /* Pull the fixed leading arguments off the va_list.  */
2095 orgfun = fun = va_arg (p, rtx);
2096 no_queue = va_arg (p, int);
2097 outmode = va_arg (p, enum machine_mode);
2098 nargs = va_arg (p, int);
2100 /* Copy all the libcall-arguments out of the varargs data
2101 and into a vector ARGVEC.
2103 Compute how to pass each argument. We only support a very small subset
2104 of the full argument passing conventions to limit complexity here since
2105 library functions shouldn't have many args. */
2107 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2109 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2111 args_size.constant = 0;
2114 for (count = 0; count < nargs; count++)
2116 rtx val = va_arg (p, rtx);
2117 enum machine_mode mode = va_arg (p, enum machine_mode);
2119 /* We cannot convert the arg value to the mode the library wants here;
2120 must do it earlier where we know the signedness of the arg. */
2122 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2125 /* On some machines, there's no way to pass a float to a library fcn.
2126 Pass it as a double instead. */
2127 #ifdef LIBGCC_NEEDS_DOUBLE
2128 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2129 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2132 /* There's no need to call protect_from_queue, because
2133 either emit_move_insn or emit_push_insn will do that. */
2135 /* Make sure it is a reasonable operand for a move or push insn. */
2136 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2137 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2138 val = force_operand (val, NULL_RTX);
2140 argvec[count].value = val;
2141 argvec[count].mode = mode;
2143 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2144 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
     /* Ask the target where this argument goes (reg rtx, or 0 = stack).  */
2148 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2149 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2151 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2152 argvec[count].partial
2153 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2155 argvec[count].partial = 0;
2158 locate_and_pad_parm (mode, NULL_TREE,
2159 argvec[count].reg && argvec[count].partial == 0,
2160 NULL_TREE, &args_size, &argvec[count].offset,
2161 &argvec[count].size);
     /* Variable-sized libcall arguments are not supported.  */
2163 if (argvec[count].size.var)
2166 #ifndef REG_PARM_STACK_SPACE
2167 if (argvec[count].partial)
2168 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2171 if (argvec[count].reg == 0 || argvec[count].partial != 0
2172 #ifdef REG_PARM_STACK_SPACE
2176 args_size.constant += argvec[count].size.constant;
2178 #ifdef ACCUMULATE_OUTGOING_ARGS
2179 /* If this arg is actually passed on the stack, it might be
2180 clobbering something we already put there (this library call might
2181 be inside the evaluation of an argument to a function whose call
2182 requires the stack). This will only occur when the library call
2183 has sufficient args to run out of argument registers. Abort in
2184 this case; if this ever occurs, code must be added to save and
2185 restore the arg slot. */
2187 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2191 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2195 /* If this machine requires an external definition for library
2196 functions, write one out. */
2197 assemble_external_libcall (fun);
2199 #ifdef STACK_BOUNDARY
     /* Round the total stack-arg size up to the stack boundary.  */
2200 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2201 / STACK_BYTES) * STACK_BYTES);
2204 #ifdef REG_PARM_STACK_SPACE
2205 args_size.constant = MAX (args_size.constant,
2206 REG_PARM_STACK_SPACE ((tree) 0));
2209 #ifdef ACCUMULATE_OUTGOING_ARGS
2210 if (args_size.constant > current_function_outgoing_args_size)
2211 current_function_outgoing_args_size = args_size.constant;
2212 args_size.constant = 0;
2215 #ifndef PUSH_ROUNDING
2216 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2219 #ifdef PUSH_ARGS_REVERSED
2227 /* Push the args that need to be pushed. */
2229 for (count = 0; count < nargs; count++, argnum += inc)
2231 register enum machine_mode mode = argvec[argnum].mode;
2232 register rtx val = argvec[argnum].value;
2233 rtx reg = argvec[argnum].reg;
2234 int partial = argvec[argnum].partial;
2236 if (! (reg != 0 && partial == 0))
2237 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
     /* NOTE(review): this indexes argvec with COUNT while every other use
        in this loop indexes with ARGNUM; when PUSH_ARGS_REVERSED makes
        them differ this looks like a wrong-offset bug — confirm against
        the full source before changing.  */
2238 argblock, GEN_INT (argvec[count].offset.constant));
2242 #ifdef PUSH_ARGS_REVERSED
2248 /* Now load any reg parms into their regs. */
2250 for (count = 0; count < nargs; count++, argnum += inc)
2252 register enum machine_mode mode = argvec[argnum].mode;
2253 register rtx val = argvec[argnum].value;
2254 rtx reg = argvec[argnum].reg;
2255 int partial = argvec[argnum].partial;
2257 if (reg != 0 && partial == 0)
2258 emit_move_insn (reg, val);
2262 /* For version 1.37, try deleting this entirely. */
2266 /* Any regs containing parms remain in use through the call. */
2268 for (count = 0; count < nargs; count++)
2269 if (argvec[count].reg != 0)
2270 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2272 use_insns = get_insns ();
2275 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2277 /* Don't allow popping to be deferred, since then
2278 cse'ing of library calls could delete a call and leave the pop. */
2281 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2282 will set inhibit_defer_pop to that value. */
2284 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2285 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2286 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2287 old_inhibit_defer_pop + 1, use_insns, no_queue);
2289 /* Now restore inhibit_defer_pop to its actual original value. */
2293 /* Expand an assignment that stores the value of FROM into TO.
2294 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2295 (This may contain a QUEUED rtx.)
2296 Otherwise, the returned value is not meaningful.
2298 SUGGEST_REG is no longer actually used.
2299 It used to mean, copy the value through a register
2300 and return that register, if that is possible.
2301 But now we do this if WANT_VALUE.
2303 If the value stored is a constant, we return the constant. */
/* Expand an assignment storing FROM into TO.  If WANT_VALUE is nonzero,
   return an rtx for the value of TO (possibly a QUEUED); otherwise the
   return value is not meaningful.  SUGGEST_REG is historical (see the
   comment above).  Handles three paths: erroneous lhs, component /
   bit-field / constant-index array stores via store_field, and the
   ordinary case via store_expr.
   NOTE(review): several original lines (declarations, braces, the
   want_value returns) are elided in this excerpt.  */
2306 expand_assignment (to, from, want_value, suggest_reg)
2311 register rtx to_rtx = 0;
2314 /* Don't crash if the lhs of the assignment was erroneous. */
2316 if (TREE_CODE (to) == ERROR_MARK)
2317 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2319 /* Assignment of a structure component needs special treatment
2320 if the structure component's rtx is not simply a MEM.
2321 Assignment of an array element at a constant index
2322 has the same problem. */
2324 if (TREE_CODE (to) == COMPONENT_REF
2325 || TREE_CODE (to) == BIT_FIELD_REF
2326 || (TREE_CODE (to) == ARRAY_REF
2327 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2328 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2330 enum machine_mode mode1;
     /* Decompose the reference into its base object plus bit offset,
        size, mode, signedness and volatility.  */
2336 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2337 &mode1, &unsignedp, &volatilep);
2339 /* If we are going to use store_bit_field and extract_bit_field,
2340 make sure to_rtx will be safe for multiple use. */
2342 if (mode1 == VOIDmode && want_value)
2343 tem = stabilize_reference (tem);
2345 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
     /* Nonconstant offset: add it to the base address at run time.  */
2348 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2350 if (GET_CODE (to_rtx) != MEM)
2352 to_rtx = change_address (to_rtx, VOIDmode,
2353 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2354 force_reg (Pmode, offset_rtx)));
2358 if (GET_CODE (to_rtx) == MEM)
2359 MEM_VOLATILE_P (to_rtx) = 1;
2360 #if 0 /* This was turned off because, when a field is volatile
2361 in an object which is not volatile, the object may be in a register,
2362 and then we would abort over here. */
2368 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2370 /* Spurious cast makes HPUX compiler happy. */
2371 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2374 /* Required alignment of containing datum. */
2375 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2376 int_size_in_bytes (TREE_TYPE (tem)));
2377 preserve_temp_slots (result);
2383 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2384 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2387 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2389 /* In case we are returning the contents of an object which overlaps
2390 the place the value is being stored, use a safe function when copying
2391 a value through a pointer into a structure value return block. */
2392 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2393 && current_function_returns_struct
2394 && !current_function_returns_pcc_struct)
2396 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2397 rtx size = expr_size (from);
2399 #ifdef TARGET_MEM_FUNCTIONS
2400 emit_library_call (memcpy_libfunc, 0,
2401 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2402 XEXP (from_rtx, 0), Pmode,
2405 emit_library_call (bcopy_libfunc, 0,
2406 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2407 XEXP (to_rtx, 0), Pmode,
2411 preserve_temp_slots (to_rtx);
2416 /* Compute FROM and store the value in the rtx we got. */
2418 result = store_expr (from, to_rtx, want_value);
2419 preserve_temp_slots (result);
2424 /* Generate code for computing expression EXP,
2425 and storing the value into TARGET.
2426 Returns TARGET or an equivalent value.
2427 TARGET may contain a QUEUED rtx.
2429 If SUGGEST_REG is nonzero, copy the value through a register
2430 and return that register, if that is possible.
2432 If the value stored is a constant, we return the constant. */
2435 store_expr (exp, target, suggest_reg)
2437 register rtx target;
2441 int dont_return_target = 0;
2443 if (TREE_CODE (exp) == COMPOUND_EXPR)
2445 /* Perform first part of compound expression, then assign from second
2447 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2449 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2451 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2453 /* For conditional expression, get safe form of the target. Then
2454 test the condition, doing the appropriate assignment on either
2455 side. This avoids the creation of unnecessary temporaries.
2456 For non-BLKmode, it is more efficient not to do this. */
2458 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2461 target = protect_from_queue (target, 1);
2464 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2465 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2467 emit_jump_insn (gen_jump (lab2));
2470 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2476 else if (suggest_reg && GET_CODE (target) == MEM
2477 && GET_MODE (target) != BLKmode)
2478 /* If target is in memory and caller wants value in a register instead,
2479 arrange that. Pass TARGET as target for expand_expr so that,
2480 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2481 We know expand_expr will not use the target in that case. */
2483 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2484 GET_MODE (target), 0);
2485 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2486 temp = copy_to_reg (temp);
2487 dont_return_target = 1;
2489 else if (queued_subexp_p (target))
2490 /* If target contains a postincrement, it is not safe
2491 to use as the returned value. It would access the wrong
2492 place by the time the queued increment gets output.
2493 So copy the value through a temporary and use that temp
2496 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2498 /* Expand EXP into a new pseudo. */
2499 temp = gen_reg_rtx (GET_MODE (target));
2500 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2503 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2504 dont_return_target = 1;
2506 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2507 /* If this is an scalar in a register that is stored in a wider mode
2508 than the declared mode, compute the result into its declared mode
2509 and then convert to the wider mode. Our value is the computed
2512 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2513 convert_move (SUBREG_REG (target), temp,
2514 SUBREG_PROMOTED_UNSIGNED_P (target));
2519 temp = expand_expr (exp, target, GET_MODE (target), 0);
2520 /* DO return TARGET if it's a specified hardware register.
2521 expand_return relies on this. */
2522 if (!(target && GET_CODE (target) == REG
2523 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2524 && CONSTANT_P (temp))
2525 dont_return_target = 1;
2528 /* If value was not generated in the target, store it there.
2529 Convert the value to TARGET's type first if nec. */
2531 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2533 target = protect_from_queue (target, 1);
2534 if (GET_MODE (temp) != GET_MODE (target)
2535 && GET_MODE (temp) != VOIDmode)
2537 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2538 if (dont_return_target)
2540 /* In this case, we will return TEMP,
2541 so make sure it has the proper mode.
2542 But don't forget to store the value into TARGET. */
2543 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2544 emit_move_insn (target, temp);
2547 convert_move (target, temp, unsignedp);
2550 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2552 /* Handle copying a string constant into an array.
2553 The string constant may be shorter than the array.
2554 So copy just the string's actual length, and clear the rest. */
2557 /* Get the size of the data type of the string,
2558 which is actually the size of the target. */
2559 size = expr_size (exp);
2560 if (GET_CODE (size) == CONST_INT
2561 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2562 emit_block_move (target, temp, size,
2563 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2566 /* Compute the size of the data to copy from the string. */
2568 = fold (build (MIN_EXPR, sizetype,
2569 size_binop (CEIL_DIV_EXPR,
2570 TYPE_SIZE (TREE_TYPE (exp)),
2571 size_int (BITS_PER_UNIT)),
2573 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2574 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2578 /* Copy that much. */
2579 emit_block_move (target, temp, copy_size_rtx,
2580 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2582 /* Figure out how much is left in TARGET
2583 that we have to clear. */
2584 if (GET_CODE (copy_size_rtx) == CONST_INT)
2586 temp = plus_constant (XEXP (target, 0),
2587 TREE_STRING_LENGTH (exp));
2588 size = plus_constant (size,
2589 - TREE_STRING_LENGTH (exp));
2593 enum machine_mode size_mode = Pmode;
2595 temp = force_reg (Pmode, XEXP (target, 0));
2596 temp = expand_binop (size_mode, add_optab, temp,
2597 copy_size_rtx, NULL_RTX, 0,
2600 size = expand_binop (size_mode, sub_optab, size,
2601 copy_size_rtx, NULL_RTX, 0,
2604 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2605 GET_MODE (size), 0, 0);
2606 label = gen_label_rtx ();
2607 emit_jump_insn (gen_blt (label));
2610 if (size != const0_rtx)
2612 #ifdef TARGET_MEM_FUNCTIONS
2613 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2614 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2616 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2617 temp, Pmode, size, Pmode);
2624 else if (GET_MODE (temp) == BLKmode)
2625 emit_block_move (target, temp, expr_size (exp),
2626 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2628 emit_move_insn (target, temp);
2630 if (dont_return_target)
2635 /* Store the value of constructor EXP into the rtx TARGET.
2636 TARGET is either a REG or a MEM. */
/* NOTE(review): this excerpt is elided -- declarations, braces and some
   statements between the numbered lines are not visible here; confirm the
   control structure against the complete file before changing code.  */
2639 store_constructor (exp, target)
2643 tree type = TREE_TYPE (exp);
2645 /* We know our target cannot conflict, since safe_from_p has been called. */
2647 /* Don't try copying piece by piece into a hard register
2648 since that is vulnerable to being clobbered by EXP.
2649 Instead, construct in a pseudo register and then copy it all. */
2650 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
/* Build the value in a fresh pseudo, then move the finished value into
   the hard register with a single move.  */
2652 rtx temp = gen_reg_rtx (GET_MODE (target));
2653 store_constructor (exp, temp);
2654 emit_move_insn (target, temp);
/* Aggregate case: struct or union.  Each constructor element is stored
   into its corresponding field with store_field below.  */
2659 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2663 /* Inform later passes that the whole union value is dead. */
2664 if (TREE_CODE (type) == UNION_TYPE)
2665 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2667 /* If we are building a static constructor into a register,
2668 set the initial value as zero so we can fold the value into
2670 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2671 emit_move_insn (target, const0_rtx);
2673 /* If the constructor has fewer fields than the structure,
2674 clear the whole structure first. */
2675 else if (list_length (CONSTRUCTOR_ELTS (exp))
2676 != list_length (TYPE_FIELDS (type)))
2677 clear_storage (target, int_size_in_bytes (type));
2679 /* Inform later passes that the old value is dead. */
2680 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2682 /* Store each element of the constructor into
2683 the corresponding field of TARGET. */
2685 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2687 register tree field = TREE_PURPOSE (elt);
2688 register enum machine_mode mode;
2693 /* Just ignore missing fields.
2694 We cleared the whole structure, above,
2695 if any fields are missing. */
/* Field size is in bits; mode and signedness come from the FIELD_DECL.  */
2699 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2700 unsignedp = TREE_UNSIGNED (field);
2701 mode = DECL_MODE (field);
2702 if (DECL_BIT_FIELD (field))
/* A bit-field whose position is not a compile-time constant cannot be
   handled yet.  */
2705 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2706 /* ??? This case remains to be written. */
2709 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2711 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2712 /* The alignment of TARGET is
2713 at least what its type requires. */
2715 TYPE_ALIGN (type) / BITS_PER_UNIT,
2716 int_size_in_bytes (type));
/* Array case: elements are stored in order, indexed from the domain's
   minimum value.  */
2719 else if (TREE_CODE (type) == ARRAY_TYPE)
2723 tree domain = TYPE_DOMAIN (type);
2724 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2725 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2726 tree elttype = TREE_TYPE (type);
2728 /* If the constructor has fewer fields than the structure,
2729 clear the whole structure first. Similarly if this is a
2730 static constructor of a non-BLKmode object. */
2732 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2733 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2734 clear_storage (target, maxelt - minelt + 1);
2736 /* Inform later passes that the old value is dead. */
2737 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2739 /* Store each element of the constructor into
2740 the corresponding element of TARGET, determined
2741 by counting the elements. */
2742 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2744 elt = TREE_CHAIN (elt), i++)
2746 register enum machine_mode mode;
/* For arrays the per-element size/mode/signedness all come from the
   element type, unlike the struct case where they come from the field.  */
2751 mode = TYPE_MODE (elttype);
2752 bitsize = GET_MODE_BITSIZE (mode);
2753 unsignedp = TREE_UNSIGNED (elttype);
2755 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2757 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2758 /* The alignment of TARGET is
2759 at least what its type requires. */
2761 TYPE_ALIGN (type) / BITS_PER_UNIT,
2762 int_size_in_bytes (type));
2770 /* Store the value of EXP (an expression tree)
2771 into a subfield of TARGET which has mode MODE and occupies
2772 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2773 If MODE is VOIDmode, it means that we are storing into a bit-field.
2775 If VALUE_MODE is VOIDmode, return nothing in particular.
2776 UNSIGNEDP is not used in this case.
2778 Otherwise, return an rtx for the value stored. This rtx
2779 has mode VALUE_MODE if that is convenient to do.
2780 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2782 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2783 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
/* NOTE(review): this excerpt is elided -- some declarations and conditions
   between the numbered lines are missing; verify against the full file.  */
2786 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2787 unsignedp, align, total_size)
2789 int bitsize, bitpos;
2790 enum machine_mode mode;
2792 enum machine_mode value_mode;
/* Mask covering the low BITSIZE bits; used below to re-derive the stored
   value without refetching the bit-field.  Left zero when BITSIZE fills
   (or exceeds) a host word, since the shift would be undefined.  */
2797 HOST_WIDE_INT width_mask = 0;
2799 if (bitsize < HOST_BITS_PER_WIDE_INT)
2800 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2802 /* If we are storing into an unaligned field of an aligned union that is
2803 in a register, we may have the mode of TARGET being an integer mode but
2804 MODE == BLKmode. In that case, get an aligned object whose size and
2805 alignment are the same as TARGET and store TARGET into it (we can avoid
2806 the store if the field being stored is the entire width of TARGET). Then
2807 call ourselves recursively to store the field into a BLKmode version of
2808 that object. Finally, load from the object into TARGET. This is not
2809 very efficient in general, but should only be slightly more expensive
2810 than the otherwise-required unaligned accesses. Perhaps this can be
2811 cleaned up later. */
2814 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2816 rtx object = assign_stack_temp (GET_MODE (target),
2817 GET_MODE_SIZE (GET_MODE (target)), 0);
/* BLK_OBJECT aliases OBJECT's memory but is viewed in BLKmode so the
   recursive call takes the bit-field path.  */
2818 rtx blk_object = copy_rtx (object);
2820 PUT_MODE (blk_object, BLKmode);
2822 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2823 emit_move_insn (object, target);
2825 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2828 emit_move_insn (target, object);
2833 /* If the structure is in a register or if the component
2834 is a bit field, we cannot use addressing to access it.
2835 Use bit-field techniques or SUBREG to store in it. */
2837 if (mode == VOIDmode
2838 || (mode != BLKmode && ! direct_store[(int) mode])
2839 || GET_CODE (target) == REG
2840 || GET_CODE (target) == SUBREG)
2842 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2843 /* Store the value in the bitfield. */
2844 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2845 if (value_mode != VOIDmode)
2847 /* The caller wants an rtx for the value. */
2848 /* If possible, avoid refetching from the bitfield itself. */
2850 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2853 enum machine_mode tmode;
/* Unsigned result: mask TEMP down to the low BITSIZE bits.  */
2856 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2857 tmode = GET_MODE (temp);
2858 if (tmode == VOIDmode)
/* Signed result: shift the value to the top of TMODE and back down;
   the final arithmetic right shift (UNSIGNEDP argument is 0)
   sign-extends from BITSIZE bits.  */
2860 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2861 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2862 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
/* Volatile MEM or no cheap rederivation: read the value back out of
   the bit-field itself.  */
2864 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2865 NULL_RTX, value_mode, 0, align,
2872 rtx addr = XEXP (target, 0);
2875 /* If a value is wanted, it must be the lhs;
2876 so make the address stable for multiple use. */
2878 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2879 && ! CONSTANT_ADDRESS_P (addr)
2880 /* A frame-pointer reference is already stable. */
2881 && ! (GET_CODE (addr) == PLUS
2882 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2883 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2884 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2885 addr = copy_to_reg (addr);
2887 /* Now build a reference to just the desired component. */
2889 to_rtx = change_address (target, mode,
2890 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2891 MEM_IN_STRUCT_P (to_rtx) = 1;
/* Delegate the actual store; the third argument asks store_expr for a
   register copy of the value exactly when the caller wants one.  */
2893 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2897 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2898 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2899 ARRAY_REFs at constant positions and find the ultimate containing object,
2902 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2903 bit position, and *PUNSIGNEDP to the signedness of the field.
2904 If the position of the field is variable, we store a tree
2905 giving the variable offset (in units) in *POFFSET.
2906 This offset is in addition to the bit position.
2907 If the position is not variable, we store 0 in *POFFSET.
2909 If any of the extraction expressions is volatile,
2910 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2912 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2913 is a mode that can be used to access the field. In that case, *PBITSIZE
2916 If the field describes a variable-sized object, *PMODE is set to
2917 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2918 this case, but the address of the object can be found. */
/* NOTE(review): elided excerpt -- the loop header and several braces/returns
   between the numbered lines are missing; verify against the full file.  */
2921 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2926 enum machine_mode *pmode;
2931 enum machine_mode mode = VOIDmode;
/* First determine the size (and, when not a bit-field, the mode and
   signedness) of the outermost reference.  */
2934 if (TREE_CODE (exp) == COMPONENT_REF)
2936 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2937 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2938 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2939 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2941 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2943 size_tree = TREE_OPERAND (exp, 1);
2944 *punsignedp = TREE_UNSIGNED (exp);
2948 mode = TYPE_MODE (TREE_TYPE (exp));
2949 *pbitsize = GET_MODE_BITSIZE (mode);
2950 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
/* A non-constant size means a variable-sized object: report BLKmode and
   bitsize -1 per the contract above.  */
2955 if (TREE_CODE (size_tree) != INTEGER_CST)
2956 mode = BLKmode, *pbitsize = -1;
2958 *pbitsize = TREE_INT_CST_LOW (size_tree);
2961 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2962 and find the ultimate containing object. */
2968 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2970 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2971 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2972 : TREE_OPERAND (exp, 2));
/* The position may be a sum of a constant and a variable part; the
   constant part accumulates into *PBITPOS (bits), the variable part
   into the OFFSET tree (units).  */
2974 if (TREE_CODE (pos) == PLUS_EXPR)
2977 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2979 constant = TREE_OPERAND (pos, 0);
2980 var = TREE_OPERAND (pos, 1);
2982 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2984 constant = TREE_OPERAND (pos, 1);
2985 var = TREE_OPERAND (pos, 0);
2989 *pbitpos += TREE_INT_CST_LOW (constant);
2991 offset = size_binop (PLUS_EXPR, offset,
2992 size_binop (FLOOR_DIV_EXPR, var,
2993 size_int (BITS_PER_UNIT)));
2995 offset = size_binop (FLOOR_DIV_EXPR, var,
2996 size_int (BITS_PER_UNIT));
2998 else if (TREE_CODE (pos) == INTEGER_CST)
2999 *pbitpos += TREE_INT_CST_LOW (pos);
3002 /* Assume here that the offset is a multiple of a unit.
3003 If not, there should be an explicitly added constant. */
3005 offset = size_binop (PLUS_EXPR, offset,
3006 size_binop (FLOOR_DIV_EXPR, pos,
3007 size_int (BITS_PER_UNIT)));
3009 offset = size_binop (FLOOR_DIV_EXPR, pos,
3010 size_int (BITS_PER_UNIT));
/* Array reference at a compile-time-constant index with constant element
   size: fold the whole displacement into *PBITPOS.  */
3014 else if (TREE_CODE (exp) == ARRAY_REF
3015 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3016 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
3018 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3019 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
/* Anything else ends the walk, except NON_LVALUE_EXPRs and
   mode-preserving NOP/CONVERT wrappers, which are looked through.  */
3021 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3022 && ! ((TREE_CODE (exp) == NOP_EXPR
3023 || TREE_CODE (exp) == CONVERT_EXPR)
3024 && (TYPE_MODE (TREE_TYPE (exp))
3025 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3028 /* If any reference in the chain is volatile, the effect is volatile. */
3029 if (TREE_THIS_VOLATILE (exp))
3031 exp = TREE_OPERAND (exp, 0);
3034 /* If this was a bit-field, see if there is a mode that allows direct
3035 access in case EXP is in memory. */
3036 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
3038 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3039 if (mode == BLKmode)
3046 /* We aren't finished fixing the callers to really handle nonzero offset. */
3054 /* Given an rtx VALUE that may contain additions and multiplications,
3055 return an equivalent value that just refers to a register or memory.
3056 This is done by generating instructions to perform the arithmetic
3057 and returning a pseudo-register containing the value.
3059 The returned value may be a REG, SUBREG, MEM or constant. */
/* NOTE(review): elided excerpt -- several declarations, braces and the
   non-arithmetic fallthrough path are missing from this view.  */
3062 force_operand (value, target)
3065 register optab binoptab = 0;
3066 /* Use a temporary to force order of execution of calls to
3070 /* Use subtarget as the target for operand 0 of a binary operation. */
3071 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
/* Classify the top-level arithmetic so the common binary-op code below
   can handle PLUS and MINUS uniformly; MULT is expanded directly.  */
3073 if (GET_CODE (value) == PLUS)
3074 binoptab = add_optab;
3075 else if (GET_CODE (value) == MINUS)
3076 binoptab = sub_optab;
3077 else if (GET_CODE (value) == MULT)
3079 op2 = XEXP (value, 1);
3080 if (!CONSTANT_P (op2)
3081 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Force the first operand before the second so their side effects are
   emitted in a deterministic order.  */
3083 tmp = force_operand (XEXP (value, 0), subtarget);
3084 return expand_mult (GET_MODE (value), tmp,
3085 force_operand (op2, NULL_RTX),
3091 op2 = XEXP (value, 1);
3092 if (!CONSTANT_P (op2)
3093 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Prefer "add of negated constant" over subtraction of a constant;
   additions combine better with address arithmetic below.  */
3095 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3097 binoptab = add_optab;
3098 op2 = negate_rtx (GET_MODE (value), op2);
3101 /* Check for an addition with OP2 a constant integer and our first
3102 operand a PLUS of a virtual register and something else. In that
3103 case, we want to emit the sum of the virtual register and the
3104 constant first and then add the other value. This allows virtual
3105 register instantiation to simply modify the constant rather than
3106 creating another one around this addition. */
3107 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3108 && GET_CODE (XEXP (value, 0)) == PLUS
3109 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3110 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3111 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3113 rtx temp = expand_binop (GET_MODE (value), binoptab,
3114 XEXP (XEXP (value, 0), 0), op2,
3115 subtarget, 0, OPTAB_LIB_WIDEN);
3116 return expand_binop (GET_MODE (value), binoptab, temp,
3117 force_operand (XEXP (XEXP (value, 0), 1), 0),
3118 target, 0, OPTAB_LIB_WIDEN);
/* General binary case: force operand 0 first, then combine.  */
3121 tmp = force_operand (XEXP (value, 0), subtarget);
3122 return expand_binop (GET_MODE (value), binoptab, tmp,
3123 force_operand (op2, NULL_RTX),
3124 target, 0, OPTAB_LIB_WIDEN);
3125 /* We give UNSIGNEDP = 0 to expand_binop
3126 because the only operations we are expanding here are signed ones. */
3131 /* Subroutine of expand_expr:
3132 save the non-copied parts (LIST) of an expr (LHS), and return a list
3133 which can restore these values to their previous values,
3134 should something modify their storage. */
/* NOTE(review): elided excerpt -- local declarations and the final return
   are not visible here; confirm against the full file.  */
3137 save_noncopied_parts (lhs, list)
/* Nested TREE_LISTs are flattened by recursing and chaining the results.  */
3144 for (tail = list; tail; tail = TREE_CHAIN (tail))
3145 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3146 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3149 tree part = TREE_VALUE (tail);
3150 tree part_type = TREE_TYPE (part);
/* Save the current value of LHS.PART into a stack temporary, recorded
   as an RTL_EXPR so it can be restored later.  */
3151 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3152 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3153 int_size_in_bytes (part_type), 0);
/* Some machines cannot address arbitrary stack slots directly; fall back
   to an address register in that case.  */
3154 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3155 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3156 parts = tree_cons (to_be_saved,
3157 build (RTL_EXPR, part_type, NULL_TREE,
3160 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3165 /* Subroutine of expand_expr:
3166 record the non-copied parts (LIST) of an expr (LHS), and return a list
3167 which specifies the initial values of these parts. */
/* NOTE(review): elided excerpt -- local declarations and the final return
   are not visible here.  Unlike save_noncopied_parts, this emits no code;
   it only builds COMPONENT_REF trees pairing each part with its value.  */
3170 init_noncopied_parts (lhs, list)
/* Nested TREE_LISTs are flattened by recursing and chaining the results.  */
3177 for (tail = list; tail; tail = TREE_CHAIN (tail))
3178 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3179 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3182 tree part = TREE_VALUE (tail);
3183 tree part_type = TREE_TYPE (part);
3184 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3185 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3190 /* Subroutine of expand_expr: return nonzero iff there is no way that
3191 EXP can reference X, which is being modified. */
/* NOTE(review): elided excerpt -- several case labels, returns and braces
   between the numbered lines are missing; verify against the full file.  */
3194 safe_from_p (x, exp)
3204 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3205 find the underlying pseudo. */
3206 if (GET_CODE (x) == SUBREG)
3209 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3213 /* If X is a location in the outgoing argument area, it is always safe. */
3214 if (GET_CODE (x) == MEM
3215 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3216 || (GET_CODE (XEXP (x, 0)) == PLUS
3217 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
/* First dispatch on the tree-code CLASS (declaration, list, unary,
   binary, ...); specific tree codes are handled further below.  */
3220 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3223 exp_rtl = DECL_RTL (exp);
/* A TREE_LIST is safe iff both its value and its chain are safe;
   either may be null.  */
3230 if (TREE_CODE (exp) == TREE_LIST)
3231 return ((TREE_VALUE (exp) == 0
3232 || safe_from_p (x, TREE_VALUE (exp)))
3233 && (TREE_CHAIN (exp) == 0
3234 || safe_from_p (x, TREE_CHAIN (exp))));
3239 return safe_from_p (x, TREE_OPERAND (exp, 0));
3243 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3244 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3248 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3249 the expression. If it is set, we conflict iff we are that rtx or
3250 both are in memory. Otherwise, we check all operands of the
3251 expression recursively. */
3253 switch (TREE_CODE (exp))
3256 return staticp (TREE_OPERAND (exp, 0));
3259 if (GET_CODE (x) == MEM)
3264 exp_rtl = CALL_EXPR_RTL (exp);
3267 /* Assume that the call will clobber all hard registers and
3269 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3270 || GET_CODE (x) == MEM)
3277 exp_rtl = RTL_EXPR_RTL (exp);
3279 /* We don't know what this can modify. */
3284 case WITH_CLEANUP_EXPR:
3285 exp_rtl = RTL_EXPR_RTL (exp);
3289 exp_rtl = SAVE_EXPR_RTL (exp);
3293 /* The only operand we look at is operand 1. The rest aren't
3294 part of the expression. */
3295 return safe_from_p (x, TREE_OPERAND (exp, 1));
3297 case METHOD_CALL_EXPR:
3298 /* This takes a rtx argument, but shouldn't appear here. */
3302 /* If we have an rtx, we do not need to scan our operands. */
/* Otherwise recurse over every operand; any unsafe operand makes the
   whole expression unsafe.  */
3306 nops = tree_code_length[(int) TREE_CODE (exp)];
3307 for (i = 0; i < nops; i++)
3308 if (TREE_OPERAND (exp, i) != 0
3309 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3313 /* If we have an rtl, find any enclosed object. Then see if we conflict
3317 if (GET_CODE (exp_rtl) == SUBREG)
3319 exp_rtl = SUBREG_REG (exp_rtl);
3320 if (GET_CODE (exp_rtl) == REG
3321 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3325 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3326 are memory and EXP is not readonly. */
3327 return ! (rtx_equal_p (x, exp_rtl)
3328 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3329 && ! TREE_READONLY (exp)));
3332 /* If we reach here, it is safe. */
3336 /* Subroutine of expand_expr: return nonzero iff EXP is an
3337 expression whose type is statically determinable. */
/* NOTE(review): the function header line itself is elided from this
   excerpt; only the predicate's condition is visible.  Confirm the
   function name and return statements against the full file.  */
3343   if (TREE_CODE (exp) == PARM_DECL
3344       || TREE_CODE (exp) == VAR_DECL
3345       || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3346       || TREE_CODE (exp) == COMPONENT_REF
3347       || TREE_CODE (exp) == ARRAY_REF)
3352 /* expand_expr: generate code for computing expression EXP.
3353 An rtx for the computed value is returned. The value is never null.
3354 In the case of a void EXP, const0_rtx is returned.
3356 The value may be stored in TARGET if TARGET is nonzero.
3357 TARGET is just a suggestion; callers must assume that
3358 the rtx returned may not be the same as TARGET.
3360 If TARGET is CONST0_RTX, it means that the value will be ignored.
3362 If TMODE is not VOIDmode, it suggests generating the
3363 result in mode TMODE. But this is done only when convenient.
3364 Otherwise, TMODE is ignored and the value generated in its natural mode.
3365 TMODE is just a suggestion; callers must assume that
3366 the rtx returned may not have mode TMODE.
3368 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3369 with a constant address even if that address is not normally legitimate.
3370 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3372 If MODIFIER is EXPAND_SUM then when EXP is an addition
3373 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3374 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3375 products as above, or REG or MEM, or constant.
3376 Ordinarily in such cases we would output mul or add instructions
3377 and then return a pseudo reg containing the sum.
3379 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3380 it also marks a label as absolutely required (it can't be dead).
3381 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3382 This is used for outputting expressions used in initializers. */
3385 expand_expr (exp, target, tmode, modifier)
3388 enum machine_mode tmode;
3389 enum expand_modifier modifier;
3391 register rtx op0, op1, temp;
3392 tree type = TREE_TYPE (exp);
3393 int unsignedp = TREE_UNSIGNED (type);
3394 register enum machine_mode mode = TYPE_MODE (type);
3395 register enum tree_code code = TREE_CODE (exp);
3397 /* Use subtarget as the target for operand 0 of a binary operation. */
3398 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3399 rtx original_target = target;
3400 int ignore = target == const0_rtx;
3403 /* Don't use hard regs as subtargets, because the combiner
3404 can only handle pseudo regs. */
3405 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3407 /* Avoid subtargets inside loops,
3408 since they hide some invariant expressions. */
3409 if (preserve_subexpressions_p ())
3412 if (ignore) target = 0, original_target = 0;
3414 /* If will do cse, generate all results into pseudo registers
3415 since 1) that allows cse to find more things
3416 and 2) otherwise cse could produce an insn the machine
3419 if (! cse_not_expected && mode != BLKmode && target
3420 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3423 /* Ensure we reference a volatile object even if value is ignored. */
3424 if (ignore && TREE_THIS_VOLATILE (exp)
3425 && mode != VOIDmode && mode != BLKmode)
3427 target = gen_reg_rtx (mode);
3428 temp = expand_expr (exp, target, VOIDmode, modifier);
3430 emit_move_insn (target, temp);
3438 tree function = decl_function_context (exp);
3439 /* Handle using a label in a containing function. */
3440 if (function != current_function_decl && function != 0)
3442 struct function *p = find_function_data (function);
3443 /* Allocate in the memory associated with the function
3444 that the label is in. */
3445 push_obstacks (p->function_obstack,
3446 p->function_maybepermanent_obstack);
3448 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3449 label_rtx (exp), p->forced_labels);
3452 else if (modifier == EXPAND_INITIALIZER)
3453 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3454 label_rtx (exp), forced_labels);
3455 temp = gen_rtx (MEM, FUNCTION_MODE,
3456 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3457 if (function != current_function_decl && function != 0)
3458 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3463 if (DECL_RTL (exp) == 0)
3465 error_with_decl (exp, "prior parameter's size depends on `%s'");
3466 return CONST0_RTX (mode);
3472 if (DECL_RTL (exp) == 0)
3474 /* Ensure variable marked as used
3475 even if it doesn't go through a parser. */
3476 TREE_USED (exp) = 1;
3477 /* Handle variables inherited from containing functions. */
3478 context = decl_function_context (exp);
3480 /* We treat inline_function_decl as an alias for the current function
3481 because that is the inline function whose vars, types, etc.
3482 are being merged into the current function.
3483 See expand_inline_function. */
3484 if (context != 0 && context != current_function_decl
3485 && context != inline_function_decl
3486 /* If var is static, we don't need a static chain to access it. */
3487 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3488 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3492 /* Mark as non-local and addressable. */
3493 DECL_NONLOCAL (exp) = 1;
3494 mark_addressable (exp);
3495 if (GET_CODE (DECL_RTL (exp)) != MEM)
3497 addr = XEXP (DECL_RTL (exp), 0);
3498 if (GET_CODE (addr) == MEM)
3499 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3501 addr = fix_lexical_addr (addr, exp);
3502 return change_address (DECL_RTL (exp), mode, addr);
3505 /* This is the case of an array whose size is to be determined
3506 from its initializer, while the initializer is still being parsed.
3508 if (GET_CODE (DECL_RTL (exp)) == MEM
3509 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3510 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3511 XEXP (DECL_RTL (exp), 0));
3512 if (GET_CODE (DECL_RTL (exp)) == MEM
3513 && modifier != EXPAND_CONST_ADDRESS
3514 && modifier != EXPAND_SUM
3515 && modifier != EXPAND_INITIALIZER)
3517 /* DECL_RTL probably contains a constant address.
3518 On RISC machines where a constant address isn't valid,
3519 make some insns to get that address into a register. */
3520 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3522 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3523 return change_address (DECL_RTL (exp), VOIDmode,
3524 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3527 /* If the mode of DECL_RTL does not match that of the decl, it
3528 must be a promoted value. We return a SUBREG of the wanted mode,
3529 but mark it so that we know that it was already extended. */
3531 if (GET_CODE (DECL_RTL (exp)) == REG
3532 && GET_MODE (DECL_RTL (exp)) != mode)
3534 enum machine_mode decl_mode = DECL_MODE (exp);
3536 /* Get the signedness used for this variable. Ensure we get the
3537 same mode we got when the variable was declared. */
3539 PROMOTE_MODE (decl_mode, unsignedp, type);
3541 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3544 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3545 SUBREG_PROMOTED_VAR_P (temp) = 1;
3546 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3550 return DECL_RTL (exp);
3553 return immed_double_const (TREE_INT_CST_LOW (exp),
3554 TREE_INT_CST_HIGH (exp),
3558 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3561 /* If optimized, generate immediate CONST_DOUBLE
3562 which will be turned into memory by reload if necessary.
3564 We used to force a register so that loop.c could see it. But
3565 this does not allow gen_* patterns to perform optimizations with
3566 the constants. It also produces two insns in cases like "x = 1.0;".
3567 On most machines, floating-point constants are not permitted in
3568 many insns, so we'd end up copying it to a register in any case.
3570 Now, we do the copying in expand_binop, if appropriate. */
3571 return immed_real_const (exp);
3575 if (! TREE_CST_RTL (exp))
3576 output_constant_def (exp);
3578 /* TREE_CST_RTL probably contains a constant address.
3579 On RISC machines where a constant address isn't valid,
3580 make some insns to get that address into a register. */
3581 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3582 && modifier != EXPAND_CONST_ADDRESS
3583 && modifier != EXPAND_INITIALIZER
3584 && modifier != EXPAND_SUM
3585 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3586 return change_address (TREE_CST_RTL (exp), VOIDmode,
3587 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3588 return TREE_CST_RTL (exp);
3591 context = decl_function_context (exp);
3592 /* We treat inline_function_decl as an alias for the current function
3593 because that is the inline function whose vars, types, etc.
3594 are being merged into the current function.
3595 See expand_inline_function. */
3596 if (context == current_function_decl || context == inline_function_decl)
3599 /* If this is non-local, handle it. */
3602 temp = SAVE_EXPR_RTL (exp);
3603 if (temp && GET_CODE (temp) == REG)
3605 put_var_into_stack (exp);
3606 temp = SAVE_EXPR_RTL (exp);
3608 if (temp == 0 || GET_CODE (temp) != MEM)
3610 return change_address (temp, mode,
3611 fix_lexical_addr (XEXP (temp, 0), exp));
3613 if (SAVE_EXPR_RTL (exp) == 0)
3615 if (mode == BLKmode)
3617 = assign_stack_temp (mode,
3618 int_size_in_bytes (TREE_TYPE (exp)), 0);
3621 enum machine_mode var_mode = mode;
3623 if (TREE_CODE (type) == INTEGER_TYPE
3624 || TREE_CODE (type) == ENUMERAL_TYPE
3625 || TREE_CODE (type) == BOOLEAN_TYPE
3626 || TREE_CODE (type) == CHAR_TYPE
3627 || TREE_CODE (type) == REAL_TYPE
3628 || TREE_CODE (type) == POINTER_TYPE
3629 || TREE_CODE (type) == OFFSET_TYPE)
3631 PROMOTE_MODE (var_mode, unsignedp, type);
3634 temp = gen_reg_rtx (var_mode);
3637 SAVE_EXPR_RTL (exp) = temp;
3638 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3639 if (!optimize && GET_CODE (temp) == REG)
3640 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3644 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3645 must be a promoted value. We return a SUBREG of the wanted mode,
3646 but mark it so that we know that it was already extended. Note
3647 that `unsignedp' was modified above in this case. */
3649 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3650 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3652 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3653 SUBREG_PROMOTED_VAR_P (temp) = 1;
3654 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3658 return SAVE_EXPR_RTL (exp);
3661 /* Exit the current loop if the body-expression is true. */
3663 rtx label = gen_label_rtx ();
3664 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3665 expand_exit_loop (NULL_PTR);
3671 expand_start_loop (1);
3672 expand_expr_stmt (TREE_OPERAND (exp, 0));
3679 tree vars = TREE_OPERAND (exp, 0);
3680 int vars_need_expansion = 0;
3682 /* Need to open a binding contour here because
3683 if there are any cleanups they must be contained here. */
3684 expand_start_bindings (0);
3686 /* Mark the corresponding BLOCK for output in its proper place. */
3687 if (TREE_OPERAND (exp, 2) != 0
3688 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3689 insert_block (TREE_OPERAND (exp, 2));
3691 /* If VARS have not yet been expanded, expand them now. */
3694 if (DECL_RTL (vars) == 0)
3696 vars_need_expansion = 1;
3699 expand_decl_init (vars);
3700 vars = TREE_CHAIN (vars);
3703 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3705 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3711 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3713 emit_insns (RTL_EXPR_SEQUENCE (exp));
3714 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3715 return RTL_EXPR_RTL (exp);
3718 /* All elts simple constants => refer to a constant in memory. But
3719 if this is a non-BLKmode mode, let it store a field at a time
3720 since that should make a CONST_INT or CONST_DOUBLE when we
3722 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3724 rtx constructor = output_constant_def (exp);
3725 if (modifier != EXPAND_CONST_ADDRESS
3726 && modifier != EXPAND_INITIALIZER
3727 && modifier != EXPAND_SUM
3728 && !memory_address_p (GET_MODE (constructor),
3729 XEXP (constructor, 0)))
3730 constructor = change_address (constructor, VOIDmode,
3731 XEXP (constructor, 0));
3738 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3739 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3744 if (target == 0 || ! safe_from_p (target, exp))
3746 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3747 target = gen_reg_rtx (mode);
3750 enum tree_code c = TREE_CODE (type);
3752 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3753 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
3754 MEM_IN_STRUCT_P (target) = 1;
3757 store_constructor (exp, target);
3763 tree exp1 = TREE_OPERAND (exp, 0);
3766 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3767 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3768 This code has the same general effect as simply doing
3769 expand_expr on the save expr, except that the expression PTR
3770 is computed for use as a memory address. This means different
3771 code, suitable for indexing, may be generated. */
3772 if (TREE_CODE (exp1) == SAVE_EXPR
3773 && SAVE_EXPR_RTL (exp1) == 0
3774 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3775 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3776 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3778 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3779 VOIDmode, EXPAND_SUM);
3780 op0 = memory_address (mode, temp);
3781 op0 = copy_all_regs (op0);
3782 SAVE_EXPR_RTL (exp1) = op0;
3786 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3787 op0 = memory_address (mode, op0);
3790 temp = gen_rtx (MEM, mode, op0);
3791 /* If address was computed by addition,
3792 mark this as an element of an aggregate. */
3793 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3794 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3795 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3796 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3797 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3798 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3799 || (TREE_CODE (exp1) == ADDR_EXPR
3800 && (exp2 = TREE_OPERAND (exp1, 0))
3801 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3802 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3803 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3804 MEM_IN_STRUCT_P (temp) = 1;
3805 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3806 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3807 a location is accessed through a pointer to const does not mean
3808 that the value there can never change. */
3809 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3815 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3816 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3818 /* Nonconstant array index or nonconstant element size.
3819 Generate the tree for *(&array+index) and expand that,
3820 except do it in a language-independent way
3821 and don't complain about non-lvalue arrays.
3822 `mark_addressable' should already have been called
3823 for any array for which this case will be reached. */
3825 /* Don't forget the const or volatile flag from the array element. */
3826 tree variant_type = build_type_variant (type,
3827 TREE_READONLY (exp),
3828 TREE_THIS_VOLATILE (exp));
3829 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3830 TREE_OPERAND (exp, 0));
3831 tree index = TREE_OPERAND (exp, 1);
3834 /* Convert the integer argument to a type the same size as a pointer
3835 so the multiply won't overflow spuriously. */
3836 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3837 index = convert (type_for_size (POINTER_SIZE, 0), index);
3839 /* Don't think the address has side effects
3840 just because the array does.
3841 (In some cases the address might have side effects,
3842 and we fail to record that fact here. However, it should not
3843 matter, since expand_expr should not care.) */
3844 TREE_SIDE_EFFECTS (array_adr) = 0;
3846 elt = build1 (INDIRECT_REF, type,
3847 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3849 fold (build (MULT_EXPR,
3850 TYPE_POINTER_TO (variant_type),
3851 index, size_in_bytes (type))))));
3853 /* Volatility, etc., of new expression is same as old expression. */
3854 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3855 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3856 TREE_READONLY (elt) = TREE_READONLY (exp);
3858 return expand_expr (elt, target, tmode, modifier);
3861 /* Fold an expression like: "foo"[2].
3862 This is not done in fold so it won't happen inside &. */
3865 tree arg0 = TREE_OPERAND (exp, 0);
3866 tree arg1 = TREE_OPERAND (exp, 1);
3868 if (TREE_CODE (arg0) == STRING_CST
3869 && TREE_CODE (arg1) == INTEGER_CST
3870 && !TREE_INT_CST_HIGH (arg1)
3871 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3873 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3875 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3876 TREE_TYPE (exp) = integer_type_node;
3877 return expand_expr (exp, target, tmode, modifier);
3879 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3881 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3882 TREE_TYPE (exp) = integer_type_node;
3883 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3888 /* If this is a constant index into a constant array,
3889 just get the value from the array. Handle both the cases when
3890 we have an explicit constructor and when our operand is a variable
3891 that was declared const. */
3893 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3894 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3896 tree index = fold (TREE_OPERAND (exp, 1));
3897 if (TREE_CODE (index) == INTEGER_CST
3898 && TREE_INT_CST_HIGH (index) == 0)
3900 int i = TREE_INT_CST_LOW (index);
3901 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3904 elem = TREE_CHAIN (elem);
3906 return expand_expr (fold (TREE_VALUE (elem)), target,
3911 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3912 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3913 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3914 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3915 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3917 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3920 tree index = fold (TREE_OPERAND (exp, 1));
3921 if (TREE_CODE (index) == INTEGER_CST
3922 && TREE_INT_CST_HIGH (index) == 0)
3924 int i = TREE_INT_CST_LOW (index);
3925 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3927 if (TREE_CODE (init) == CONSTRUCTOR)
3929 tree elem = CONSTRUCTOR_ELTS (init);
3932 elem = TREE_CHAIN (elem);
3934 return expand_expr (fold (TREE_VALUE (elem)), target,
3937 else if (TREE_CODE (init) == STRING_CST
3938 && i < TREE_STRING_LENGTH (init))
3940 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3941 return convert_to_mode (mode, temp, 0);
3945 /* Treat array-ref with constant index as a component-ref. */
3949 /* If the operand is a CONSTRUCTOR, we can just extract the
3950 appropriate field if it is present. */
3951 if (code != ARRAY_REF
3952 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3956 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3957 elt = TREE_CHAIN (elt))
3958 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3959 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3963 enum machine_mode mode1;
3968 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3969 &mode1, &unsignedp, &volatilep);
3971 /* In some cases, we will be offsetting OP0's address by a constant.
3972 So get it as a sum, if possible. If we will be using it
3973 directly in an insn, we validate it. */
3974 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3976 /* If this is a constant, put it into a register if it is a
3977 legitimate constant and memory if it isn't. */
3978 if (CONSTANT_P (op0))
3980 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3981 if (LEGITIMATE_CONSTANT_P (op0))
3982 op0 = force_reg (mode, op0);
3984 op0 = validize_mem (force_const_mem (mode, op0));
3989 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3991 if (GET_CODE (op0) != MEM)
3993 op0 = change_address (op0, VOIDmode,
3994 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3995 force_reg (Pmode, offset_rtx)));
3998 /* Don't forget about volatility even if this is a bitfield. */
3999 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4001 op0 = copy_rtx (op0);
4002 MEM_VOLATILE_P (op0) = 1;
4005 if (mode1 == VOIDmode
4006 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4007 && modifier != EXPAND_CONST_ADDRESS
4008 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4009 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
4011 /* In cases where an aligned union has an unaligned object
4012 as a field, we might be extracting a BLKmode value from
4013 an integer-mode (e.g., SImode) object. Handle this case
4014 by doing the extract into an object as wide as the field
4015 (which we know to be the width of a basic mode), then
4016 storing into memory, and changing the mode to BLKmode. */
4017 enum machine_mode ext_mode = mode;
4019 if (ext_mode == BLKmode)
4020 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4022 if (ext_mode == BLKmode)
4025 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4026 unsignedp, target, ext_mode, ext_mode,
4027 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4028 int_size_in_bytes (TREE_TYPE (tem)));
4029 if (mode == BLKmode)
4031 rtx new = assign_stack_temp (ext_mode,
4032 bitsize / BITS_PER_UNIT, 0);
4034 emit_move_insn (new, op0);
4035 op0 = copy_rtx (new);
4036 PUT_MODE (op0, BLKmode);
4042 /* Get a reference to just this component. */
4043 if (modifier == EXPAND_CONST_ADDRESS
4044 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4045 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4046 (bitpos / BITS_PER_UNIT)));
4048 op0 = change_address (op0, mode1,
4049 plus_constant (XEXP (op0, 0),
4050 (bitpos / BITS_PER_UNIT)));
4051 MEM_IN_STRUCT_P (op0) = 1;
4052 MEM_VOLATILE_P (op0) |= volatilep;
4053 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4056 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4057 convert_move (target, op0, unsignedp);
4063 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
4064 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4065 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4066 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4067 MEM_IN_STRUCT_P (temp) = 1;
4068 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4069 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4070 a location is accessed through a pointer to const does not mean
4071 that the value there can never change. */
4072 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4077 /* Intended for a reference to a buffer of a file-object in Pascal.
4078 But it's not certain that a special tree code will really be
4079 necessary for these. INDIRECT_REF might work for them. */
4083 /* IN_EXPR: Inlined pascal set IN expression.
4086 rlo = set_low - (set_low%bits_per_word);
4087 the_word = set [ (index - rlo)/bits_per_word ];
4088 bit_index = index % bits_per_word;
4089 bitmask = 1 << bit_index;
4090 return !!(the_word & bitmask); */
4092 preexpand_calls (exp);
4094 tree set = TREE_OPERAND (exp, 0);
4095 tree index = TREE_OPERAND (exp, 1);
4096 tree set_type = TREE_TYPE (set);
4098 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4099 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4105 rtx diff, quo, rem, addr, bit, result;
4106 rtx setval, setaddr;
4107 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4110 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4112 /* If domain is empty, answer is no. */
4113 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4116 index_val = expand_expr (index, 0, VOIDmode, 0);
4117 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4118 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4119 setval = expand_expr (set, 0, VOIDmode, 0);
4120 setaddr = XEXP (setval, 0);
4122 /* Compare index against bounds, if they are constant. */
4123 if (GET_CODE (index_val) == CONST_INT
4124 && GET_CODE (lo_r) == CONST_INT)
4126 if (INTVAL (index_val) < INTVAL (lo_r))
4130 if (GET_CODE (index_val) == CONST_INT
4131 && GET_CODE (hi_r) == CONST_INT)
4133 if (INTVAL (hi_r) < INTVAL (index_val))
4137 /* If we get here, we have to generate the code for both cases
4138 (in range and out of range). */
4140 op0 = gen_label_rtx ();
4141 op1 = gen_label_rtx ();
4143 if (! (GET_CODE (index_val) == CONST_INT
4144 && GET_CODE (lo_r) == CONST_INT))
4146 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
4147 emit_jump_insn (gen_blt (op1));
4150 if (! (GET_CODE (index_val) == CONST_INT
4151 && GET_CODE (hi_r) == CONST_INT))
4153 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
4154 emit_jump_insn (gen_bgt (op1));
4157 /* Calculate the element number of bit zero in the first word
4159 if (GET_CODE (lo_r) == CONST_INT)
4160 rlow = gen_rtx (CONST_INT, VOIDmode,
4161 INTVAL (lo_r) & ~ (1 << BITS_PER_UNIT));
4163 rlow = expand_binop (index_mode, and_optab,
4164 lo_r, gen_rtx (CONST_INT, VOIDmode,
4165 ~ (1 << BITS_PER_UNIT)),
4166 0, 0, OPTAB_LIB_WIDEN);
4168 diff = expand_binop (index_mode, sub_optab,
4169 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4171 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4172 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4174 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4175 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4177 addr = memory_address (byte_mode,
4178 expand_binop (index_mode, add_optab,
4180 /* Extract the bit we want to examine */
4181 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4182 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4183 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4184 1, OPTAB_LIB_WIDEN);
4185 emit_move_insn (target, result);
4187 /* Output the code to handle the out-of-range case. */
4190 emit_move_insn (target, const0_rtx);
4195 case WITH_CLEANUP_EXPR:
4196 if (RTL_EXPR_RTL (exp) == 0)
4199 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4201 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4202 /* That's it for this cleanup. */
4203 TREE_OPERAND (exp, 2) = 0;
4205 return RTL_EXPR_RTL (exp);
4208 /* Check for a built-in function. */
4209 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4210 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4211 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4212 return expand_builtin (exp, target, subtarget, tmode, ignore);
4213 /* If this call was expanded already by preexpand_calls,
4214 just return the result we got. */
4215 if (CALL_EXPR_RTL (exp) != 0)
4216 return CALL_EXPR_RTL (exp);
4217 return expand_call (exp, target, ignore);
4219 case NON_LVALUE_EXPR:
4222 case REFERENCE_EXPR:
4223 if (TREE_CODE (type) == VOID_TYPE || ignore)
4225 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4228 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4229 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4230 if (TREE_CODE (type) == UNION_TYPE)
4232 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4235 if (mode == BLKmode)
4237 if (TYPE_SIZE (type) == 0
4238 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4240 target = assign_stack_temp (BLKmode,
4241 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4242 + BITS_PER_UNIT - 1)
4243 / BITS_PER_UNIT, 0);
4246 target = gen_reg_rtx (mode);
4248 if (GET_CODE (target) == MEM)
4249 /* Store data into beginning of memory target. */
4250 store_expr (TREE_OPERAND (exp, 0),
4251 change_address (target, TYPE_MODE (valtype), 0), 0);
4253 else if (GET_CODE (target) == REG)
4254 /* Store this field into a union of the proper type. */
4255 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4256 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4258 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4262 /* Return the entire union. */
4265 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4266 if (GET_MODE (op0) == mode)
4268 /* If arg is a constant integer being extended from a narrower mode,
4269 we must really truncate to get the extended bits right. Otherwise
4270 (unsigned long) (unsigned char) ("\377"[0])
4271 would come out as ffffffff. */
4272 if (GET_MODE (op0) == VOIDmode
4273 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4274 < GET_MODE_BITSIZE (mode)))
4276 /* MODE must be narrower than HOST_BITS_PER_INT. */
4277 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4279 if (width < HOST_BITS_PER_WIDE_INT)
4281 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4282 : CONST_DOUBLE_LOW (op0));
4283 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4284 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4285 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4287 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4289 op0 = GEN_INT (val);
4293 op0 = (simplify_unary_operation
4294 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4295 ? ZERO_EXTEND : SIGN_EXTEND),
4297 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4302 if (GET_MODE (op0) == VOIDmode)
4304 if (modifier == EXPAND_INITIALIZER)
4305 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4306 if (flag_force_mem && GET_CODE (op0) == MEM)
4307 op0 = copy_to_reg (op0);
4310 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4312 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4316 /* We come here from MINUS_EXPR when the second operand is a constant. */
4318 this_optab = add_optab;
4320 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4321 something else, make sure we add the register to the constant and
4322 then to the other thing. This case can occur during strength
4323 reduction and doing it this way will produce better code if the
4324 frame pointer or argument pointer is eliminated.
4326 fold-const.c will ensure that the constant is always in the inner
4327 PLUS_EXPR, so the only case we need to do anything about is if
4328 sp, ap, or fp is our second argument, in which case we must swap
4329 the innermost first argument and our second argument. */
4331 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4332 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4333 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4334 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4335 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4336 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4338 tree t = TREE_OPERAND (exp, 1);
4340 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4341 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4344 /* If the result is to be Pmode and we are adding an integer to
4345 something, we might be forming a constant. So try to use
4346 plus_constant. If it produces a sum and we can't accept it,
4347 use force_operand. This allows P = &ARR[const] to generate
4348 efficient code on machines where a SYMBOL_REF is not a valid
4351 If this is an EXPAND_SUM call, always return the sum. */
4352 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4353 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4354 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4357 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4359 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4360 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4361 op1 = force_operand (op1, target);
4365 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4366 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4367 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4370 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4372 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4373 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4374 op0 = force_operand (op0, target);
4378 /* No sense saving up arithmetic to be done
4379 if it's all in the wrong mode to form part of an address.
4380 And force_operand won't know whether to sign-extend or
4382 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4383 || mode != Pmode) goto binop;
4385 preexpand_calls (exp);
4386 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4389 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4390 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4392 /* Make sure any term that's a sum with a constant comes last. */
4393 if (GET_CODE (op0) == PLUS
4394 && CONSTANT_P (XEXP (op0, 1)))
4400 /* If adding to a sum including a constant,
4401 associate it to put the constant outside. */
4402 if (GET_CODE (op1) == PLUS
4403 && CONSTANT_P (XEXP (op1, 1)))
4405 rtx constant_term = const0_rtx;
4407 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4410 /* Ensure that MULT comes first if there is one. */
4411 else if (GET_CODE (op0) == MULT)
4412 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4414 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4416 /* Let's also eliminate constants from op0 if possible. */
4417 op0 = eliminate_constant_term (op0, &constant_term);
4419 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4420 their sum should be a constant. Form it into OP1, since the
4421 result we want will then be OP0 + OP1. */
4423 temp = simplify_binary_operation (PLUS, mode, constant_term,
4428 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4431 /* Put a constant term last and put a multiplication first. */
4432 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4433 temp = op1, op1 = op0, op0 = temp;
4435 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4436 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4439 /* Handle difference of two symbolic constants,
4440 for the sake of an initializer. */
4441 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4442 && really_constant_p (TREE_OPERAND (exp, 0))
4443 && really_constant_p (TREE_OPERAND (exp, 1)))
4445 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4446 VOIDmode, modifier);
4447 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4448 VOIDmode, modifier);
4449 return gen_rtx (MINUS, mode, op0, op1);
4451 /* Convert A - const to A + (-const). */
4452 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4454 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4455 fold (build1 (NEGATE_EXPR, type,
4456 TREE_OPERAND (exp, 1))));
4459 this_optab = sub_optab;
4463 preexpand_calls (exp);
4464 /* If first operand is constant, swap them.
4465 Thus the following special case checks need only
4466 check the second operand. */
4467 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4469 register tree t1 = TREE_OPERAND (exp, 0);
4470 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4471 TREE_OPERAND (exp, 1) = t1;
4474 /* Attempt to return something suitable for generating an
4475 indexed address, for machines that support that. */
4477 if (modifier == EXPAND_SUM && mode == Pmode
4478 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4479 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4483 /* Apply distributive law if OP0 is x+c. */
4484 if (GET_CODE (op0) == PLUS
4485 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4486 return gen_rtx (PLUS, mode,
4487 gen_rtx (MULT, mode, XEXP (op0, 0),
4488 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4489 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4490 * INTVAL (XEXP (op0, 1))));
4492 if (GET_CODE (op0) != REG)
4493 op0 = force_operand (op0, NULL_RTX);
4494 if (GET_CODE (op0) != REG)
4495 op0 = copy_to_mode_reg (mode, op0);
4497 return gen_rtx (MULT, mode, op0,
4498 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4501 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4504 /* Check for multiplying things that have been extended
4505 from a narrower type. If this machine supports multiplying
4506 in that narrower type with a result in the desired type,
4507 do it that way, and avoid the explicit type-conversion. */
4508 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4509 && TREE_CODE (type) == INTEGER_TYPE
4510 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4511 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4512 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4513 && int_fits_type_p (TREE_OPERAND (exp, 1),
4514 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4515 /* Don't use a widening multiply if a shift will do. */
4516 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4517 > HOST_BITS_PER_WIDE_INT)
4518 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4520 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4521 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4523 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4524 /* If both operands are extended, they must either both
4525 be zero-extended or both be sign-extended. */
4526 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4528 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4530 enum machine_mode innermode
4531 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4532 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4533 ? umul_widen_optab : smul_widen_optab);
4534 if (mode == GET_MODE_WIDER_MODE (innermode)
4535 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4537 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4538 NULL_RTX, VOIDmode, 0);
4539 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4540 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4543 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4544 NULL_RTX, VOIDmode, 0);
4548 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4549 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4550 return expand_mult (mode, op0, op1, target, unsignedp);
4552 case TRUNC_DIV_EXPR:
4553 case FLOOR_DIV_EXPR:
4555 case ROUND_DIV_EXPR:
4556 case EXACT_DIV_EXPR:
4557 preexpand_calls (exp);
4558 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4560 /* Possible optimization: compute the dividend with EXPAND_SUM
4561 then if the divisor is constant can optimize the case
4562 where some terms of the dividend have coeffs divisible by it. */
4563 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4564 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4565 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4568 this_optab = flodiv_optab;
4571 case TRUNC_MOD_EXPR:
4572 case FLOOR_MOD_EXPR:
4574 case ROUND_MOD_EXPR:
4575 preexpand_calls (exp);
4576 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4578 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4579 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4580 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4582 case FIX_ROUND_EXPR:
4583 case FIX_FLOOR_EXPR:
4585 abort (); /* Not used for C. */
4587 case FIX_TRUNC_EXPR:
4588 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4590 target = gen_reg_rtx (mode);
4591 expand_fix (target, op0, unsignedp);
4595 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4597 target = gen_reg_rtx (mode);
4598 /* expand_float can't figure out what to do if FROM has VOIDmode.
4599 So give it the correct mode. With -O, cse will optimize this. */
4600 if (GET_MODE (op0) == VOIDmode)
4601 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4603 expand_float (target, op0,
4604 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4608 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4609 temp = expand_unop (mode, neg_optab, op0, target, 0);
4615 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4617 /* Handle complex values specially. */
4619 enum machine_mode opmode
4620 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4622 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4623 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4624 return expand_complex_abs (opmode, op0, target, unsignedp);
4627 /* Unsigned abs is simply the operand. Testing here means we don't
4628 risk generating incorrect code below. */
4629 if (TREE_UNSIGNED (type))
4632 /* First try to do it with a special abs instruction. */
4633 temp = expand_unop (mode, abs_optab, op0, target, 0);
4637 /* If this machine has expensive jumps, we can do integer absolute
4638 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4639 where W is the width of MODE. */
4641 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4643 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4644 size_int (GET_MODE_BITSIZE (mode) - 1),
4647 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4650 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4657 /* If that does not win, use conditional jump and negate. */
4658 target = original_target;
4659 temp = gen_label_rtx ();
4660 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4661 || (GET_CODE (target) == REG
4662 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4663 target = gen_reg_rtx (mode);
4664 emit_move_insn (target, op0);
4665 emit_cmp_insn (target,
4666 expand_expr (convert (type, integer_zero_node),
4667 NULL_RTX, VOIDmode, 0),
4668 GE, NULL_RTX, mode, 0, 0);
4670 emit_jump_insn (gen_bge (temp));
4671 op0 = expand_unop (mode, neg_optab, target, target, 0);
4673 emit_move_insn (target, op0);
4680 target = original_target;
4681 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4682 || (GET_CODE (target) == REG
4683 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4684 target = gen_reg_rtx (mode);
4685 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4686 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4688 /* First try to do it with a special MIN or MAX instruction.
4689 If that does not win, use a conditional jump to select the proper
4691 this_optab = (TREE_UNSIGNED (type)
4692 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4693 : (code == MIN_EXPR ? smin_optab : smax_optab));
4695 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4701 emit_move_insn (target, op0);
4702 op0 = gen_label_rtx ();
4703 if (code == MAX_EXPR)
4704 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4705 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4706 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4708 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4709 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4710 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4711 if (temp == const0_rtx)
4712 emit_move_insn (target, op1);
4713 else if (temp != const_true_rtx)
4715 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4716 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4719 emit_move_insn (target, op1);
4724 /* ??? Can optimize when the operand of this is a bitwise operation,
4725 by using a different bitwise operation. */
4727 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4728 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4734 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4735 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4740 /* ??? Can optimize bitwise operations with one arg constant.
4741 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4742 and (a bitwise1 b) bitwise2 b (etc)
4743 but that is probably not worth while. */
4745 /* BIT_AND_EXPR is for bitwise anding.
4746 TRUTH_AND_EXPR is for anding two boolean values
4747 when we want in all cases to compute both of them.
4748 In general it is fastest to do TRUTH_AND_EXPR by
4749 computing both operands as actual zero-or-1 values
4750 and then bitwise anding. In cases where there cannot
4751 be any side effects, better code would be made by
4752 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4753 but the question is how to recognize those cases. */
4755 case TRUTH_AND_EXPR:
4757 this_optab = and_optab;
4760 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4763 this_optab = ior_optab;
4766 case TRUTH_XOR_EXPR:
4768 this_optab = xor_optab;
4775 preexpand_calls (exp);
4776 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4778 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4779 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4782 /* Could determine the answer when only additive constants differ.
4783 Also, the addition of one can be handled by changing the condition. */
4790 preexpand_calls (exp);
4791 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4794 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4795 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4797 && GET_CODE (original_target) == REG
4798 && (GET_MODE (original_target)
4799 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4801 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4802 if (temp != original_target)
4803 temp = copy_to_reg (temp);
4804 op1 = gen_label_rtx ();
4805 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4806 GET_MODE (temp), unsignedp, 0);
4807 emit_jump_insn (gen_beq (op1));
4808 emit_move_insn (temp, const1_rtx);
4812 /* If no set-flag instruction, must generate a conditional
4813 store into a temporary variable. Drop through
4814 and handle this like && and ||. */
4816 case TRUTH_ANDIF_EXPR:
4817 case TRUTH_ORIF_EXPR:
4818 if (target == 0 || ! safe_from_p (target, exp)
4819 /* Make sure we don't have a hard reg (such as function's return
4820 value) live across basic blocks, if not optimizing. */
4821 || (!optimize && GET_CODE (target) == REG
4822 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4823 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4824 emit_clr_insn (target);
4825 op1 = gen_label_rtx ();
4826 jumpifnot (exp, op1);
4827 emit_0_to_1_insn (target);
4831 case TRUTH_NOT_EXPR:
4832 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4833 /* The parser is careful to generate TRUTH_NOT_EXPR
4834 only with operands that are always zero or one. */
4835 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4836 target, 1, OPTAB_LIB_WIDEN);
4842 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4844 return expand_expr (TREE_OPERAND (exp, 1),
4845 (ignore ? const0_rtx : target),
4850 /* Note that COND_EXPRs whose type is a structure or union
4851 are required to be constructed to contain assignments of
4852 a temporary variable, so that we can evaluate them here
4853 for side effect only. If type is void, we must do likewise. */
4855 /* If an arm of the branch requires a cleanup,
4856 only that cleanup is performed. */
4859 tree binary_op = 0, unary_op = 0;
4860 tree old_cleanups = cleanups_this_call;
4861 cleanups_this_call = 0;
4863 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4864 convert it to our mode, if necessary. */
4865 if (integer_onep (TREE_OPERAND (exp, 1))
4866 && integer_zerop (TREE_OPERAND (exp, 2))
4867 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4869 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4870 if (GET_MODE (op0) == mode)
4873 target = gen_reg_rtx (mode);
4874 convert_move (target, op0, unsignedp);
4878 /* If we are not to produce a result, we have no target. Otherwise,
4879 if a target was specified use it; it will not be used as an
4880 intermediate target unless it is safe. If no target, use a
4883 if (mode == VOIDmode || ignore)
4885 else if (original_target
4886 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4887 temp = original_target;
4888 else if (mode == BLKmode)
4890 if (TYPE_SIZE (type) == 0
4891 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4893 temp = assign_stack_temp (BLKmode,
4894 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4895 + BITS_PER_UNIT - 1)
4896 / BITS_PER_UNIT, 0);
4899 temp = gen_reg_rtx (mode);
4901 /* Check for X ? A + B : A. If we have this, we can copy
4902 A to the output and conditionally add B. Similarly for unary
4903 operations. Don't do this if X has side-effects because
4904 those side effects might affect A or B and the "?" operation is
4905 a sequence point in ANSI. (We test for side effects later.) */
4907 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4908 && operand_equal_p (TREE_OPERAND (exp, 2),
4909 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4910 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4911 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4912 && operand_equal_p (TREE_OPERAND (exp, 1),
4913 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4914 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4915 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4916 && operand_equal_p (TREE_OPERAND (exp, 2),
4917 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4918 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4919 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4920 && operand_equal_p (TREE_OPERAND (exp, 1),
4921 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4922 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4924 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4925 operation, do this as A + (X != 0). Similarly for other simple
4926 binary operators. */
4927 if (singleton && binary_op
4928 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4929 && (TREE_CODE (binary_op) == PLUS_EXPR
4930 || TREE_CODE (binary_op) == MINUS_EXPR
4931 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4932 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4933 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4934 && integer_onep (TREE_OPERAND (binary_op, 1))
4935 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4938 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4939 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4940 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4941 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4944 /* If we had X ? A : A + 1, do this as A + (X == 0).
4946 We have to invert the truth value here and then put it
4947 back later if do_store_flag fails. We cannot simply copy
4948 TREE_OPERAND (exp, 0) to another variable and modify that
4949 because invert_truthvalue can modify the tree pointed to
4951 if (singleton == TREE_OPERAND (exp, 1))
4952 TREE_OPERAND (exp, 0)
4953 = invert_truthvalue (TREE_OPERAND (exp, 0));
4955 result = do_store_flag (TREE_OPERAND (exp, 0),
4956 (safe_from_p (temp, singleton)
4958 mode, BRANCH_COST <= 1);
4962 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4963 return expand_binop (mode, boptab, op1, result, temp,
4964 unsignedp, OPTAB_LIB_WIDEN);
4966 else if (singleton == TREE_OPERAND (exp, 1))
4967 TREE_OPERAND (exp, 0)
4968 = invert_truthvalue (TREE_OPERAND (exp, 0));
4972 op0 = gen_label_rtx ();
4974 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4978 /* If the target conflicts with the other operand of the
4979 binary op, we can't use it. Also, we can't use the target
4980 if it is a hard register, because evaluating the condition
4981 might clobber it. */
4983 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4984 || (GET_CODE (temp) == REG
4985 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4986 temp = gen_reg_rtx (mode);
4987 store_expr (singleton, temp, 0);
4990 expand_expr (singleton,
4991 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4992 if (cleanups_this_call)
4994 sorry ("aggregate value in COND_EXPR");
4995 cleanups_this_call = 0;
4997 if (singleton == TREE_OPERAND (exp, 1))
4998 jumpif (TREE_OPERAND (exp, 0), op0);
5000 jumpifnot (TREE_OPERAND (exp, 0), op0);
5002 if (binary_op && temp == 0)
5003 /* Just touch the other operand. */
5004 expand_expr (TREE_OPERAND (binary_op, 1),
5005 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5007 store_expr (build (TREE_CODE (binary_op), type,
5008 make_tree (type, temp),
5009 TREE_OPERAND (binary_op, 1)),
5012 store_expr (build1 (TREE_CODE (unary_op), type,
5013 make_tree (type, temp)),
5018 /* This is now done in jump.c and is better done there because it
5019 produces shorter register lifetimes. */
5021 /* Check for both possibilities either constants or variables
5022 in registers (but not the same as the target!). If so, can
5023 save branches by assigning one, branching, and assigning the
5025 else if (temp && GET_MODE (temp) != BLKmode
5026 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5027 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5028 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5029 && DECL_RTL (TREE_OPERAND (exp, 1))
5030 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5031 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5032 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5033 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5034 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5035 && DECL_RTL (TREE_OPERAND (exp, 2))
5036 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5037 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5039 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5040 temp = gen_reg_rtx (mode);
5041 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5042 jumpifnot (TREE_OPERAND (exp, 0), op0);
5043 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5047 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5048 comparison operator. If we have one of these cases, set the
5049 output to A, branch on A (cse will merge these two references),
5050 then set the output to FOO. */
5052 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5053 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5054 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5055 TREE_OPERAND (exp, 1), 0)
5056 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5057 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5059 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5060 temp = gen_reg_rtx (mode);
5061 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5062 jumpif (TREE_OPERAND (exp, 0), op0);
5063 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5067 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5068 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5069 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5070 TREE_OPERAND (exp, 2), 0)
5071 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5072 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5074 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5075 temp = gen_reg_rtx (mode);
5076 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5077 jumpifnot (TREE_OPERAND (exp, 0), op0);
5078 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5083 op1 = gen_label_rtx ();
5084 jumpifnot (TREE_OPERAND (exp, 0), op0);
5086 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5088 expand_expr (TREE_OPERAND (exp, 1),
5089 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5090 if (cleanups_this_call)
5092 sorry ("aggregate value in COND_EXPR");
5093 cleanups_this_call = 0;
5097 emit_jump_insn (gen_jump (op1));
5101 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5103 expand_expr (TREE_OPERAND (exp, 2),
5104 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5107 if (cleanups_this_call)
5109 sorry ("aggregate value in COND_EXPR");
5110 cleanups_this_call = 0;
5116 cleanups_this_call = old_cleanups;
5122 /* Something needs to be initialized, but we didn't know
5123 where that thing was when building the tree. For example,
5124 it could be the return value of a function, or a parameter
5125 to a function which lays down in the stack, or a temporary
5126 variable which must be passed by reference.
5128 We guarantee that the expression will either be constructed
5129 or copied into our original target. */
5131 tree slot = TREE_OPERAND (exp, 0);
5134 if (TREE_CODE (slot) != VAR_DECL)
5139 if (DECL_RTL (slot) != 0)
5141 target = DECL_RTL (slot);
5142 /* If we have already expanded the slot, don't do
5144 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5149 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5150 /* All temp slots at this level must not conflict. */
5151 preserve_temp_slots (target);
5152 DECL_RTL (slot) = target;
5156 /* I bet this needs to be done, and I bet that it needs to
5157 be above, inside the else clause. The reason is
5158 simple, how else is it going to get cleaned up? (mrs)
5160 The reason it probably did not work before, and was
5161 commented out is because this was re-expanding already
5162 expanded target_exprs (target == 0 and DECL_RTL (slot)
5163 != 0) also cleaning them up many times as well. :-( */
5165 /* Since SLOT is not known to the called function
5166 to belong to its stack frame, we must build an explicit
5167 cleanup. This case occurs when we must build up a reference
5168 to pass the reference as an argument. In this case,
5169 it is very likely that such a reference need not be
5172 if (TREE_OPERAND (exp, 2) == 0)
5173 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5174 if (TREE_OPERAND (exp, 2))
5175 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5176 cleanups_this_call);
5181 /* This case does occur, when expanding a parameter which
5182 needs to be constructed on the stack. The target
5183 is the actual stack address that we want to initialize.
5184 The function we call will perform the cleanup in this case. */
5186 DECL_RTL (slot) = target;
5189 exp1 = TREE_OPERAND (exp, 1);
5190 /* Mark it as expanded. */
5191 TREE_OPERAND (exp, 1) = NULL_TREE;
5193 return expand_expr (exp1, target, tmode, modifier);
5198 tree lhs = TREE_OPERAND (exp, 0);
5199 tree rhs = TREE_OPERAND (exp, 1);
5200 tree noncopied_parts = 0;
5201 tree lhs_type = TREE_TYPE (lhs);
5203 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5204 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5205 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5206 TYPE_NONCOPIED_PARTS (lhs_type));
5207 while (noncopied_parts != 0)
5209 expand_assignment (TREE_VALUE (noncopied_parts),
5210 TREE_PURPOSE (noncopied_parts), 0, 0);
5211 noncopied_parts = TREE_CHAIN (noncopied_parts);
5218 /* If lhs is complex, expand calls in rhs before computing it.
5219 That's so we don't compute a pointer and save it over a call.
5220 If lhs is simple, compute it first so we can give it as a
5221 target if the rhs is just a call. This avoids an extra temp and copy
5222 and that prevents a partial-subsumption which makes bad code.
5223 Actually we could treat component_ref's of vars like vars. */
5225 tree lhs = TREE_OPERAND (exp, 0);
5226 tree rhs = TREE_OPERAND (exp, 1);
5227 tree noncopied_parts = 0;
5228 tree lhs_type = TREE_TYPE (lhs);
5232 if (TREE_CODE (lhs) != VAR_DECL
5233 && TREE_CODE (lhs) != RESULT_DECL
5234 && TREE_CODE (lhs) != PARM_DECL)
5235 preexpand_calls (exp);
5237 /* Check for |= or &= of a bitfield of size one into another bitfield
5238 of size 1. In this case, (unless we need the result of the
5239 assignment) we can do this more efficiently with a
5240 test followed by an assignment, if necessary.
5242 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5243 things change so we do, this code should be enhanced to
5246 && TREE_CODE (lhs) == COMPONENT_REF
5247 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5248 || TREE_CODE (rhs) == BIT_AND_EXPR)
5249 && TREE_OPERAND (rhs, 0) == lhs
5250 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5251 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5252 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5254 rtx label = gen_label_rtx ();
5256 do_jump (TREE_OPERAND (rhs, 1),
5257 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5258 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5259 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5260 (TREE_CODE (rhs) == BIT_IOR_EXPR
5262 : integer_zero_node)),
5264 do_pending_stack_adjust ();
5269 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5270 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5271 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5272 TYPE_NONCOPIED_PARTS (lhs_type));
5274 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5275 while (noncopied_parts != 0)
5277 expand_assignment (TREE_PURPOSE (noncopied_parts),
5278 TREE_VALUE (noncopied_parts), 0, 0);
5279 noncopied_parts = TREE_CHAIN (noncopied_parts);
5284 case PREINCREMENT_EXPR:
5285 case PREDECREMENT_EXPR:
5286 return expand_increment (exp, 0);
5288 case POSTINCREMENT_EXPR:
5289 case POSTDECREMENT_EXPR:
5290 /* Faster to treat as pre-increment if result is not used. */
5291 return expand_increment (exp, ! ignore);
5294 /* Are we taking the address of a nested function? */
5295 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5296 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5298 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5299 op0 = force_operand (op0, target);
5303 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5304 (modifier == EXPAND_INITIALIZER
5305 ? modifier : EXPAND_CONST_ADDRESS));
5306 if (GET_CODE (op0) != MEM)
5309 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5310 return XEXP (op0, 0);
5311 op0 = force_operand (XEXP (op0, 0), target);
5313 if (flag_force_addr && GET_CODE (op0) != REG)
5314 return force_reg (Pmode, op0);
5317 case ENTRY_VALUE_EXPR:
5320 /* COMPLEX type for Extended Pascal & Fortran */
5323 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5327 /* Get the rtx code of the operands. */
5328 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5329 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5332 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5334 prev = get_last_insn ();
5336 /* Tell flow that the whole of the destination is being set. */
5337 if (GET_CODE (target) == REG)
5338 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5340 /* Move the real (op0) and imaginary (op1) parts to their location. */
5341 emit_move_insn (gen_realpart (mode, target), op0);
5342 emit_move_insn (gen_imagpart (mode, target), op1);
5344 /* Complex construction should appear as a single unit. */
5351 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5352 return gen_realpart (mode, op0);
5355 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5356 return gen_imagpart (mode, op0);
5360 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5364 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5367 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5369 prev = get_last_insn ();
5371 /* Tell flow that the whole of the destination is being set. */
5372 if (GET_CODE (target) == REG)
5373 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5375 /* Store the realpart and the negated imagpart to target. */
5376 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5378 imag_t = gen_imagpart (mode, target);
5379 temp = expand_unop (mode, neg_optab,
5380 gen_imagpart (mode, op0), imag_t, 0);
5382 emit_move_insn (imag_t, temp);
5384 /* Conjugate should appear as a single unit */
5394 return (*lang_expand_expr) (exp, target, tmode, modifier);
5397 /* Here to do an ordinary binary operator, generating an instruction
5398 from the optab already placed in `this_optab'. */
5400 preexpand_calls (exp);
5401 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5403 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5404 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5406 temp = expand_binop (mode, this_optab, op0, op1, target,
5407 unsignedp, OPTAB_LIB_WIDEN);
5413 /* Return the alignment in bits of EXP, a pointer valued expression.
5414 But don't return more than MAX_ALIGN no matter what.
5415 The alignment returned is, by default, the alignment of the thing that
5416 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5418 Otherwise, look at the expression to see if we can do better, i.e., if the
5419 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): this chunk is an elided extract of the original file;
   several lines (the return type, some braces, and the switch's case
   labels before NON_LVALUE_EXPR and the PLUS/ADDR_EXPR arms) are missing
   between the visible lines.  Comments below describe only what the
   visible code shows.  */
5422 get_pointer_alignment (exp, max_align)
/* ALIGN accumulates the best alignment proven so far; INNER is the
   alignment implied by an inner (stripped) expression's pointed-to type. */
5426 unsigned align, inner;
/* Non-pointer expressions carry no alignment information (0 per the
   header comment -- the return itself is in an elided line). */
5428 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Start from the alignment of the pointed-to type, capped at MAX_ALIGN. */
5431 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5432 align = MIN (align, max_align);
/* Peel conversions/offsets to tighten the bound where possible. */
5436 switch (TREE_CODE (exp))
5440 case NON_LVALUE_EXPR:
/* Strip the no-op wrapper and retry with the inner expression's type. */
5441 exp = TREE_OPERAND (exp, 0);
5442 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5444 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5445 inner = MIN (inner, max_align);
/* Keep whichever bound (outer or inner type) is tighter. */
5446 align = MAX (align, inner);
5450 /* If sum of pointer + int, restrict our maximum alignment to that
5451 imposed by the integer. If not, we can't do any better than
5453 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* Reduce the cap while the constant offset (in bits) is not a multiple
   of the current cap -- presumably halving each step; the loop body is
   in an elided line, so confirm against the full source. */
5456 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5461 exp = TREE_OPERAND (exp, 0);
5465 /* See what we are pointing at and look at its alignment. */
5466 exp = TREE_OPERAND (exp, 0);
/* ADDR_EXPR of a known object: use the object's own alignment. */
5467 if (TREE_CODE (exp) == FUNCTION_DECL)
5468 align = MAX (align, FUNCTION_BOUNDARY);
/* 'd' = declaration nodes (VAR_DECL etc.): trust DECL_ALIGN. */
5469 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5470 align = MAX (align, DECL_ALIGN (exp));
5471 #ifdef CONSTANT_ALIGNMENT
/* 'c' = constant nodes: let the target macro round the alignment up. */
5472 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5473 align = CONSTANT_ALIGNMENT (exp, align);
/* Never promise more than the caller asked for. */
5475 return MIN (align, max_align);
5483 /* Return the tree node and offset if a given argument corresponds to
5484 a string constant. */
/* NOTE(review): elided extract -- the K&R parameter declarations, some
   braces, and the final fall-through return are missing between the
   visible lines.  On success the STRING_CST node is returned and
   *PTR_OFFSET is set to the byte offset into it (as a tree). */
5487 string_constant (arg, ptr_offset)
/* Direct case: ARG is &"literal".  Offset is zero. */
5493 if (TREE_CODE (arg) == ADDR_EXPR
5494 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5496 *ptr_offset = integer_zero_node;
5497 return TREE_OPERAND (arg, 0);
/* Sum case: ARG is (&"literal" + offset) in either operand order. */
5499 else if (TREE_CODE (arg) == PLUS_EXPR)
5501 tree arg0 = TREE_OPERAND (arg, 0);
5502 tree arg1 = TREE_OPERAND (arg, 1);
/* String address in operand 0; the *ptr_offset store is in an elided
   line -- presumably set to arg1. */
5507 if (TREE_CODE (arg0) == ADDR_EXPR
5508 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5511 return TREE_OPERAND (arg0, 0);
/* Symmetric: string address in operand 1. */
5513 else if (TREE_CODE (arg1) == ADDR_EXPR
5514 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5517 return TREE_OPERAND (arg1, 0);
5524 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5525 way, because it could contain a zero byte in the middle.
5526 TREE_STRING_LENGTH is the size of the character array, not the string.
5528 Unfortunately, string_constant can't access the values of const char
5529 arrays with initializers, so neither can we do so here. */
/* NOTE(review): the function signature and local declarations fall in an
   elision gap of this extract; from context this is the compile-time
   strlen helper (c_strlen-style).  Returns a tree for the string length,
   or 0 (elided) when the length cannot be determined.  */
/* Resolve SRC to a STRING_CST plus offset; bail out (elided) on failure. */
5539 src = string_constant (src, &offset_node);
/* MAX is the array size (may exceed the string length); PTR is the raw
   character data. */
5542 max = TREE_STRING_LENGTH (src);
5543 ptr = TREE_STRING_POINTER (src);
/* Non-constant offset: we can still answer if the string has no
   embedded NUL, because then length = array_size - offset. */
5544 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5546 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5547 compute the offset to the following null if we don't know where to
5548 start searching for it. */
/* Scan for an embedded NUL; the early-return on finding one is elided. */
5550 for (i = 0; i < max; i++)
5553 /* We don't know the starting offset, but we do know that the string
5554 has no internal zero bytes. We can assume that the offset falls
5555 within the bounds of the string; otherwise, the programmer deserves
5556 what he gets. Subtract the offset from the length of the string,
5558 /* This would perhaps not be valid if we were dealing with named
5559 arrays in addition to literal string constants. */
5560 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5563 /* We have a known offset into the string. Start searching there for
5564 a null character. */
/* No offset supplied at all: treat as offset 0 (assignment elided). */
5565 if (offset_node == 0)
5569 /* Did we get a long long offset? If so, punt. */
5570 if (TREE_INT_CST_HIGH (offset_node) != 0)
5572 offset = TREE_INT_CST_LOW (offset_node);
5574 /* If the offset is known to be out of bounds, warn, and call strlen at
5576 if (offset < 0 || offset > max)
5578 warning ("offset outside bounds of constant string");
5581 /* Use strlen to search for the first zero byte. Since any strings
5582 constructed with build_string will have nulls appended, we win even
5583 if we get handed something like (char[4])"abcd".
5585 Since OFFSET is our starting index into the string, no further
5586 calculation is needed. */
5587 return size_int (strlen (ptr + offset));
5590 /* Expand an expression EXP that calls a built-in function,
5591 with result going to TARGET if that's convenient
5592 (and in mode MODE if that's convenient).
5593 SUBTARGET may be used as the target for computing one of EXP's operands.
5594 IGNORE is nonzero if the value is to be ignored. */
5597 expand_builtin (exp, target, subtarget, mode, ignore)
5601 enum machine_mode mode;
5604 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5605 tree arglist = TREE_OPERAND (exp, 1);
5608 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5609 optab builtin_optab;
5611 switch (DECL_FUNCTION_CODE (fndecl))
5616 /* build_function_call changes these into ABS_EXPR. */
5621 case BUILT_IN_FSQRT:
5622 /* If not optimizing, call the library function. */
5627 /* Arg could be wrong type if user redeclared this fcn wrong. */
5628 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5629 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5631 /* Stabilize and compute the argument. */
5632 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5633 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5635 exp = copy_node (exp);
5636 arglist = copy_node (arglist);
5637 TREE_OPERAND (exp, 1) = arglist;
5638 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5640 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5642 /* Make a suitable register to place result in. */
5643 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5648 switch (DECL_FUNCTION_CODE (fndecl))
5651 builtin_optab = sin_optab; break;
5653 builtin_optab = cos_optab; break;
5654 case BUILT_IN_FSQRT:
5655 builtin_optab = sqrt_optab; break;
5660 /* Compute into TARGET.
5661 Set TARGET to wherever the result comes back. */
5662 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5663 builtin_optab, op0, target, 0);
5665 /* If we were unable to expand via the builtin, stop the
5666 sequence (without outputting the insns) and break, causing
5667 a call to the library function. */
5674 /* Check the results by default. But if flag_fast_math is turned on,
5675 then assume sqrt will always be called with valid arguments. */
5677 if (! flag_fast_math)
5679 /* Don't define the builtin FP instructions
5680 if your machine is not IEEE. */
5681 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5684 lab1 = gen_label_rtx ();
5686 /* Test the result; if it is NaN, set errno=EDOM because
5687 the argument was not in the domain. */
5688 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5689 emit_jump_insn (gen_beq (lab1));
5693 #ifdef GEN_ERRNO_RTX
5694 rtx errno_rtx = GEN_ERRNO_RTX;
5697 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5700 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5703 /* We can't set errno=EDOM directly; let the library call do it.
5704 Pop the arguments right away in case the call gets deleted. */
5706 expand_call (exp, target, 0);
5713 /* Output the entire sequence. */
5714 insns = get_insns ();
5720 case BUILT_IN_SAVEREGS:
5721 /* Don't do __builtin_saveregs more than once in a function.
5722 Save the result of the first call and reuse it. */
5723 if (saveregs_value != 0)
5724 return saveregs_value;
5726 /* When this function is called, it means that registers must be
5727 saved on entry to this function. So we migrate the
5728 call to the first insn of this function. */
5731 rtx valreg, saved_valreg;
5733 /* Now really call the function. `expand_call' does not call
5734 expand_builtin, so there is no danger of infinite recursion here. */
5737 #ifdef EXPAND_BUILTIN_SAVEREGS
5738 /* Do whatever the machine needs done in this case. */
5739 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5741 /* The register where the function returns its value
5742 is likely to have something else in it, such as an argument.
5743 So preserve that register around the call. */
5744 if (value_mode != VOIDmode)
5746 valreg = hard_libcall_value (value_mode);
5747 saved_valreg = gen_reg_rtx (value_mode);
5748 emit_move_insn (saved_valreg, valreg);
5751 /* Generate the call, putting the value in a pseudo. */
5752 temp = expand_call (exp, target, ignore);
5754 if (value_mode != VOIDmode)
5755 emit_move_insn (valreg, saved_valreg);
5761 saveregs_value = temp;
5763 /* This won't work inside a SEQUENCE--it really has to be
5764 at the start of the function. */
5765 if (in_sequence_p ())
5767 /* Better to do this than to crash. */
5768 error ("`va_start' used within `({...})'");
5772 /* Put the sequence after the NOTE that starts the function. */
5773 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5777 /* __builtin_args_info (N) returns word N of the arg space info
5778 for the current function. The number and meanings of words
5779 is controlled by the definition of CUMULATIVE_ARGS. */
5780 case BUILT_IN_ARGS_INFO:
5782 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5784 int *word_ptr = (int *) ¤t_function_args_info;
5785 tree type, elts, result;
5787 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5788 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5789 __FILE__, __LINE__);
5793 tree arg = TREE_VALUE (arglist);
5794 if (TREE_CODE (arg) != INTEGER_CST)
5795 error ("argument of __builtin_args_info must be constant");
5798 int wordnum = TREE_INT_CST_LOW (arg);
5800 if (wordnum < 0 || wordnum >= nwords)
5801 error ("argument of __builtin_args_info out of range");
5803 return GEN_INT (word_ptr[wordnum]);
5807 error ("missing argument in __builtin_args_info");
5812 for (i = 0; i < nwords; i++)
5813 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5815 type = build_array_type (integer_type_node,
5816 build_index_type (build_int_2 (nwords, 0)));
5817 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5818 TREE_CONSTANT (result) = 1;
5819 TREE_STATIC (result) = 1;
5820 result = build (INDIRECT_REF, build_pointer_type (type), result);
5821 TREE_CONSTANT (result) = 1;
5822 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5826 /* Return the address of the first anonymous stack arg. */
5827 case BUILT_IN_NEXT_ARG:
5829 tree fntype = TREE_TYPE (current_function_decl);
5830 if (!(TYPE_ARG_TYPES (fntype) != 0
5831 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5832 != void_type_node)))
5834 error ("`va_start' used in function with fixed args");
5839 return expand_binop (Pmode, add_optab,
5840 current_function_internal_arg_pointer,
5841 current_function_arg_offset_rtx,
5842 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5844 case BUILT_IN_CLASSIFY_TYPE:
5847 tree type = TREE_TYPE (TREE_VALUE (arglist));
5848 enum tree_code code = TREE_CODE (type);
5849 if (code == VOID_TYPE)
5850 return GEN_INT (void_type_class);
5851 if (code == INTEGER_TYPE)
5852 return GEN_INT (integer_type_class);
5853 if (code == CHAR_TYPE)
5854 return GEN_INT (char_type_class);
5855 if (code == ENUMERAL_TYPE)
5856 return GEN_INT (enumeral_type_class);
5857 if (code == BOOLEAN_TYPE)
5858 return GEN_INT (boolean_type_class);
5859 if (code == POINTER_TYPE)
5860 return GEN_INT (pointer_type_class);
5861 if (code == REFERENCE_TYPE)
5862 return GEN_INT (reference_type_class);
5863 if (code == OFFSET_TYPE)
5864 return GEN_INT (offset_type_class);
5865 if (code == REAL_TYPE)
5866 return GEN_INT (real_type_class);
5867 if (code == COMPLEX_TYPE)
5868 return GEN_INT (complex_type_class);
5869 if (code == FUNCTION_TYPE)
5870 return GEN_INT (function_type_class);
5871 if (code == METHOD_TYPE)
5872 return GEN_INT (method_type_class);
5873 if (code == RECORD_TYPE)
5874 return GEN_INT (record_type_class);
5875 if (code == UNION_TYPE)
5876 return GEN_INT (union_type_class);
5877 if (code == ARRAY_TYPE)
5878 return GEN_INT (array_type_class);
5879 if (code == STRING_TYPE)
5880 return GEN_INT (string_type_class);
5881 if (code == SET_TYPE)
5882 return GEN_INT (set_type_class);
5883 if (code == FILE_TYPE)
5884 return GEN_INT (file_type_class);
5885 if (code == LANG_TYPE)
5886 return GEN_INT (lang_type_class);
5888 return GEN_INT (no_type_class);
5890 case BUILT_IN_CONSTANT_P:
5894 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5895 ? const1_rtx : const0_rtx);
5897 case BUILT_IN_FRAME_ADDRESS:
5898 /* The argument must be a nonnegative integer constant.
5899 It counts the number of frames to scan up the stack.
5900 The value is the address of that frame. */
5901 case BUILT_IN_RETURN_ADDRESS:
5902 /* The argument must be a nonnegative integer constant.
5903 It counts the number of frames to scan up the stack.
5904 The value is the return address saved in that frame. */
5906 /* Warning about missing arg was already issued. */
5908 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5910 error ("invalid arg to __builtin_return_address");
5913 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5915 error ("invalid arg to __builtin_return_address");
5920 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5921 rtx tem = frame_pointer_rtx;
5924 /* Scan back COUNT frames to the specified frame. */
5925 for (i = 0; i < count; i++)
5927 /* Assume the dynamic chain pointer is in the word that
5928 the frame address points to, unless otherwise specified. */
5929 #ifdef DYNAMIC_CHAIN_ADDRESS
5930 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5932 tem = memory_address (Pmode, tem);
5933 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5936 /* For __builtin_frame_address, return what we've got. */
5937 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5940 /* For __builtin_return_address,
5941 Get the return address from that frame. */
5942 #ifdef RETURN_ADDR_RTX
5943 return RETURN_ADDR_RTX (count, tem);
5945 tem = memory_address (Pmode,
5946 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5947 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5951 case BUILT_IN_ALLOCA:
5953 /* Arg could be non-integer if user redeclared this fcn wrong. */
5954 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5956 current_function_calls_alloca = 1;
5957 /* Compute the argument. */
5958 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5960 /* Allocate the desired space. */
5961 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5963 /* Record the new stack level for nonlocal gotos. */
5964 if (nonlocal_goto_handler_slot != 0)
5965 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5969 /* If not optimizing, call the library function. */
5974 /* Arg could be non-integer if user redeclared this fcn wrong. */
5975 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5978 /* Compute the argument. */
5979 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5980 /* Compute ffs, into TARGET if possible.
5981 Set TARGET to wherever the result comes back. */
5982 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5983 ffs_optab, op0, target, 1);
5988 case BUILT_IN_STRLEN:
5989 /* If not optimizing, call the library function. */
5994 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5995 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5999 tree src = TREE_VALUE (arglist);
6000 tree len = c_strlen (src);
6003 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6005 rtx result, src_rtx, char_rtx;
6006 enum machine_mode insn_mode = value_mode, char_mode;
6007 enum insn_code icode;
6009 /* If the length is known, just return it. */
6011 return expand_expr (len, target, mode, 0);
6013 /* If SRC is not a pointer type, don't do this operation inline. */
6017 /* Call a function if we can't compute strlen in the right mode. */
6019 while (insn_mode != VOIDmode)
6021 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6022 if (icode != CODE_FOR_nothing)
6025 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6027 if (insn_mode == VOIDmode)
6030 /* Make a place to write the result of the instruction. */
6033 && GET_CODE (result) == REG
6034 && GET_MODE (result) == insn_mode
6035 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6036 result = gen_reg_rtx (insn_mode);
6038 /* Make sure the operands are acceptable to the predicates. */
6040 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6041 result = gen_reg_rtx (insn_mode);
6043 src_rtx = memory_address (BLKmode,
6044 expand_expr (src, NULL_RTX, Pmode,
6046 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6047 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6049 char_rtx = const0_rtx;
6050 char_mode = insn_operand_mode[(int)icode][2];
6051 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6052 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6054 emit_insn (GEN_FCN (icode) (result,
6055 gen_rtx (MEM, BLKmode, src_rtx),
6056 char_rtx, GEN_INT (align)));
6058 /* Return the value in the proper mode for this function. */
6059 if (GET_MODE (result) == value_mode)
6061 else if (target != 0)
6063 convert_move (target, result, 0);
6067 return convert_to_mode (value_mode, result, 0);
6070 case BUILT_IN_STRCPY:
6071 /* If not optimizing, call the library function. */
6076 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6077 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6078 || TREE_CHAIN (arglist) == 0
6079 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6083 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6088 len = size_binop (PLUS_EXPR, len, integer_one_node);
6090 chainon (arglist, build_tree_list (NULL_TREE, len));
6094 case BUILT_IN_MEMCPY:
6095 /* If not optimizing, call the library function. */
6100 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6101 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6102 || TREE_CHAIN (arglist) == 0
6103 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6104 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6105 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6109 tree dest = TREE_VALUE (arglist);
6110 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6111 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6114 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6116 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6119 /* If either SRC or DEST is not a pointer type, don't do
6120 this operation in-line. */
6121 if (src_align == 0 || dest_align == 0)
6123 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6124 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6128 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6130 /* Copy word part most expediently. */
6131 emit_block_move (gen_rtx (MEM, BLKmode,
6132 memory_address (BLKmode, dest_rtx)),
6133 gen_rtx (MEM, BLKmode,
6134 memory_address (BLKmode,
6135 expand_expr (src, NULL_RTX,
6138 expand_expr (len, NULL_RTX, VOIDmode, 0),
6139 MIN (src_align, dest_align));
6143 /* These comparison functions need an instruction that returns an actual
6144 index. An ordinary compare that just sets the condition codes
6146 #ifdef HAVE_cmpstrsi
6147 case BUILT_IN_STRCMP:
6148 /* If not optimizing, call the library function. */
6153 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6154 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6155 || TREE_CHAIN (arglist) == 0
6156 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6158 else if (!HAVE_cmpstrsi)
6161 tree arg1 = TREE_VALUE (arglist);
6162 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6166 len = c_strlen (arg1);
6168 len = size_binop (PLUS_EXPR, integer_one_node, len);
6169 len2 = c_strlen (arg2);
6171 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6173 /* If we don't have a constant length for the first, use the length
6174 of the second, if we know it. We don't require a constant for
6175 this case; some cost analysis could be done if both are available
6176 but neither is constant. For now, assume they're equally cheap.
6178 If both strings have constant lengths, use the smaller. This
6179 could arise if optimization results in strcpy being called with
6180 two fixed strings, or if the code was machine-generated. We should
6181 add some code to the `memcmp' handler below to deal with such
6182 situations, someday. */
6183 if (!len || TREE_CODE (len) != INTEGER_CST)
6190 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6192 if (tree_int_cst_lt (len2, len))
6196 chainon (arglist, build_tree_list (NULL_TREE, len));
6200 case BUILT_IN_MEMCMP:
6201 /* If not optimizing, call the library function. */
6206 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6207 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6208 || TREE_CHAIN (arglist) == 0
6209 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6210 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6211 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6213 else if (!HAVE_cmpstrsi)
6216 tree arg1 = TREE_VALUE (arglist);
6217 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6218 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6222 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6224 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6225 enum machine_mode insn_mode
6226 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6228 /* If we don't have POINTER_TYPE, call the function. */
6229 if (arg1_align == 0 || arg2_align == 0)
6231 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6232 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6236 /* Make a place to write the result of the instruction. */
6239 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6240 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6241 result = gen_reg_rtx (insn_mode);
6243 emit_insn (gen_cmpstrsi (result,
6244 gen_rtx (MEM, BLKmode,
6245 expand_expr (arg1, NULL_RTX, Pmode,
6247 gen_rtx (MEM, BLKmode,
6248 expand_expr (arg2, NULL_RTX, Pmode,
6250 expand_expr (len, NULL_RTX, VOIDmode, 0),
6251 GEN_INT (MIN (arg1_align, arg2_align))));
6253 /* Return the value in the proper mode for this function. */
6254 mode = TYPE_MODE (TREE_TYPE (exp));
6255 if (GET_MODE (result) == mode)
6257 else if (target != 0)
6259 convert_move (target, result, 0);
6263 return convert_to_mode (mode, result, 0);
6266 case BUILT_IN_STRCMP:
6267 case BUILT_IN_MEMCMP:
6271 default: /* just do library call, if unknown builtin */
6272 error ("built-in function %s not currently supported",
6273 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6276 /* The switch statement above can drop through to cause the function
6277 to be called normally. */
6279 return expand_call (exp, target, ignore);
6282 /* Expand code for a post- or pre- increment or decrement
6283 and return the RTX for the result.
6284 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6287 expand_increment (exp, post)
6291 register rtx op0, op1;
6292 register rtx temp, value;
6293 register tree incremented = TREE_OPERAND (exp, 0);
6294 optab this_optab = add_optab;
6296 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6297 int op0_is_copy = 0;
6299 /* Stabilize any component ref that might need to be
6300 evaluated more than once below. */
6301 if (TREE_CODE (incremented) == BIT_FIELD_REF
6302 || (TREE_CODE (incremented) == COMPONENT_REF
6303 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6304 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6305 incremented = stabilize_reference (incremented);
6307 /* Compute the operands as RTX.
6308 Note whether OP0 is the actual lvalue or a copy of it:
6309 I believe it is a copy iff it is a register or subreg
6310 and insns were generated in computing it. */
6312 temp = get_last_insn ();
6313 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6315 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6316 in place but intead must do sign- or zero-extension during assignment,
6317 so we copy it into a new register and let the code below use it as
6320 Note that we can safely modify this SUBREG since it is know not to be
6321 shared (it was made by the expand_expr call above). */
6323 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6324 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6326 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6327 && temp != get_last_insn ());
6328 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6330 /* Decide whether incrementing or decrementing. */
6331 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6332 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6333 this_optab = sub_optab;
6335 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6336 then we cannot just increment OP0. We must
6337 therefore contrive to increment the original value.
6338 Then we can return OP0 since it is a copy of the old value. */
6341 /* This is the easiest way to increment the value wherever it is.
6342 Problems with multiple evaluation of INCREMENTED
6343 are prevented because either (1) it is a component_ref,
6344 in which case it was stabilized above, or (2) it is an array_ref
6345 with constant index in an array in a register, which is
6346 safe to reevaluate. */
6347 tree newexp = build ((this_optab == add_optab
6348 ? PLUS_EXPR : MINUS_EXPR),
6351 TREE_OPERAND (exp, 1));
6352 temp = expand_assignment (incremented, newexp, ! post, 0);
6353 return post ? op0 : temp;
6356 /* Convert decrement by a constant into a negative increment. */
6357 if (this_optab == sub_optab
6358 && GET_CODE (op1) == CONST_INT)
6360 op1 = GEN_INT (- INTVAL (op1));
6361 this_optab = add_optab;
6366 /* We have a true reference to the value in OP0.
6367 If there is an insn to add or subtract in this mode, queue it. */
6369 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6370 op0 = stabilize (op0);
6373 icode = (int) this_optab->handlers[(int) mode].insn_code;
6374 if (icode != (int) CODE_FOR_nothing
6375 /* Make sure that OP0 is valid for operands 0 and 1
6376 of the insn we want to queue. */
6377 && (*insn_operand_predicate[icode][0]) (op0, mode)
6378 && (*insn_operand_predicate[icode][1]) (op0, mode))
6380 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6381 op1 = force_reg (mode, op1);
6383 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6387 /* Preincrement, or we can't increment with one simple insn. */
6389 /* Save a copy of the value before inc or dec, to return it later. */
6390 temp = value = copy_to_reg (op0);
6392 /* Arrange to return the incremented value. */
6393 /* Copy the rtx because expand_binop will protect from the queue,
6394 and the results of that would be invalid for us to return
6395 if our caller does emit_queue before using our result. */
6396 temp = copy_rtx (value = op0);
6398 /* Increment however we can. */
6399 op1 = expand_binop (mode, this_optab, value, op1, op0,
6400 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6401 /* Make sure the value is stored into OP0. */
6403 emit_move_insn (op0, op1);
6408 /* Expand all function calls contained within EXP, innermost ones first.
6409 But don't look within expressions that have sequence points.
6410 For each CALL_EXPR, record the rtx for its value
6411 in the CALL_EXPR_RTL field. */
6414 preexpand_calls (exp)
6417 register int nops, i;
6418 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6420 if (! do_preexpand_calls)
6423 /* Only expressions and references can contain calls. */
6425 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6428 switch (TREE_CODE (exp))
6431 /* Do nothing if already expanded. */
6432 if (CALL_EXPR_RTL (exp) != 0)
6435 /* Do nothing to built-in functions. */
6436 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6437 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6438 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6439 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6444 case TRUTH_ANDIF_EXPR:
6445 case TRUTH_ORIF_EXPR:
6446 /* If we find one of these, then we can be sure
6447 the adjust will be done for it (since it makes jumps).
6448 Do it now, so that if this is inside an argument
6449 of a function, we don't get the stack adjustment
6450 after some other args have already been pushed. */
6451 do_pending_stack_adjust ();
6456 case WITH_CLEANUP_EXPR:
6460 if (SAVE_EXPR_RTL (exp) != 0)
6464 nops = tree_code_length[(int) TREE_CODE (exp)];
6465 for (i = 0; i < nops; i++)
6466 if (TREE_OPERAND (exp, i) != 0)
6468 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6469 if (type == 'e' || type == '<' || type == '1' || type == '2'
6471 preexpand_calls (TREE_OPERAND (exp, i));
6475 /* At the start of a function, record that we have no previously-pushed
6476 arguments waiting to be popped. */
/* Reset the pending-stack-adjustment counter at the start of a function
   (see the comment above).  NOTE(review): the braces of this function
   body are among the lines missing from this gapped listing.  */
6479	init_pending_stack_adjust ()
6481	  pending_stack_adjust = 0;
6484 /* When exiting from function, if safe, clear out any pending stack adjust
6485 so the adjustment won't get done. */
/* Discard any pending stack adjustment at function exit when that is
   safe (see comment above).  NOTE(review): braces and the closing
   `#endif' for EXIT_IGNORE_STACK are among the missing lines.  */
6488	clear_pending_stack_adjust ()
6490	#ifdef EXIT_IGNORE_STACK
	  /* Only safe when the exit code ignores the stack pointer and this
	     function will not be inlined into a caller that does care.  */
6491	  if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6492	      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6493	      && ! flag_inline_functions)
6494	    pending_stack_adjust = 0;
6498 /* Pop any previously-pushed arguments that have not been popped yet. */
/* Emit the deferred stack adjustment (popping pushed arguments) unless
   deferral is currently inhibited; always clears the counter once the
   adjustment is (or would have been) emitted.  NOTE(review): the braces
   of this function are among the lines missing from this listing.  */
6501	do_pending_stack_adjust ()
6503	  if (inhibit_defer_pop == 0)
6505	      if (pending_stack_adjust != 0)
6506	        adjust_stack (GEN_INT (pending_stack_adjust));
6507	      pending_stack_adjust = 0;
6511 /* Expand all cleanups up to OLD_CLEANUPS.
6512 Needed here, and also for language-dependent calls. */
/* Pop and expand every cleanup on `cleanups_this_call' down to (but not
   including) OLD_CLEANUPS -- see the comment above.  NOTE(review): the
   parameter declaration line and braces are missing from this listing;
   OLD_CLEANUPS is presumably a tree list node -- confirm.  */
6515	expand_cleanups_to (old_cleanups)
6518	  while (cleanups_this_call != old_cleanups)
	      /* Expand the cleanup expression for effect; value discarded.  */
6520	      expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6521	      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6525 /* Expand conditional expressions. */
6527 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6528 LABEL is an rtx of code CODE_LABEL, in this function and all the
/* Jump to LABEL if EXP evaluates to zero (see comment above): LABEL is
   passed as do_jump's false-label, with no true-label so a nonzero
   value falls through.  NOTE(review): parameter declarations and braces
   are among the lines missing from this gapped listing.  */
6532	jumpifnot (exp, label)
6536	  do_jump (exp, label, NULL_RTX);
6539 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* NOTE(review): the header of `jumpif' (definition line and parameter
   declarations, original lines ~6542-6545) is missing from this listing;
   only the body statement survives.  Per the comment above, it jumps to
   LABEL when EXP is nonzero -- LABEL is the true-label here, the mirror
   of `jumpifnot'.  */
6546	  do_jump (exp, NULL_RTX, label);
6549 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6550 the result is zero, or IF_TRUE_LABEL if the result is one.
6551 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6552 meaning fall through in that case.
6554 do_jump always does any pending stack adjust except when it does not
6555 actually perform a jump. An example where there is no jump
6556 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6558 This function is responsible for optimizing cases such as
6559 &&, || and comparison operators in EXP. */
/* NOTE(review): heavily gapped listing -- the `void' line, the `tree exp;'
   declaration, local declarations (`comparison', `temp', `i', `type'),
   braces, `break's, and nearly all `case' labels of the big switch are
   missing between the surviving lines.  The per-case annotations below
   are reconstructions from the surviving comments and code; confirm each
   against a complete copy of expr.c before relying on them.
   Contract (from the comment above): evaluate EXP, jump to IF_FALSE_LABEL
   if it is zero, to IF_TRUE_LABEL if nonzero; a zero label means fall
   through in that case.  */
6562	do_jump (exp, if_false_label, if_true_label)
6564	     rtx if_false_label, if_true_label;
6566	  register enum tree_code code = TREE_CODE (exp);
6567	  /* Some cases need to create a label to jump to
6568	     in order to properly fall through.
6569	     These cases set DROP_THROUGH_LABEL nonzero.  */
6570	  rtx drop_through_label = 0;
	  /* Presumably the INTEGER_CST case: a constant condition jumps
	     unconditionally to the matching label (or falls through).  */
6584	      temp = integer_zerop (exp) ? if_false_label : if_true_label;
	  /* Presumably the ADDR_EXPR case follows.  */
6590	      /* This is not true with #pragma weak  */
6592	      /* The address of something can never be zero.  */
6594	        emit_jump (if_true_label);
	  /* Presumably the NOP_EXPR/CONVERT_EXPR cases: strip conversions
	     that cannot change zero-ness, unless they narrow the operand.  */
6599	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6600	          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6601	          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6604	      /* If we are narrowing the operand, we have to do the compare in the
6606	      if ((TYPE_PRECISION (TREE_TYPE (exp))
6607	           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6609	    case NON_LVALUE_EXPR:
6610	    case REFERENCE_EXPR:
6615	      /* These cannot change zero->non-zero or vice versa.  */
6616	      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
	  /* Presumably a disabled MINUS_EXPR-via-PLUS_EXPR region (`#if 0'
	     markers among the missing lines -- confirm).  */
6620	    /* This is never less insns than evaluating the PLUS_EXPR followed by
6621	       a test and can be longer if the test is eliminated.  */
6623	      /* Reduce to minus.  */
6624	      exp = build (MINUS_EXPR, TREE_TYPE (exp),
6625	                   TREE_OPERAND (exp, 0),
6626	                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6627	                                 TREE_OPERAND (exp, 1))));
6628	      /* Process as MINUS.  */
	  /* Presumably the MINUS_EXPR case.  */
6632	      /* Non-zero iff operands of minus differ.  */
6633	      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6634	                                   TREE_OPERAND (exp, 0),
6635	                                   TREE_OPERAND (exp, 1)),
	  /* Presumably the BIT_AND_EXPR case.  */
6640	      /* If we are AND'ing with a small constant, do this comparison in the
6641	         smallest type that fits.  If the machine doesn't have comparisons
6642	         that small, it will be converted back to the wider comparison.
6643	         This helps if we are testing the sign bit of a narrower object.
6644	         combine can't do this for us because it can't know whether a
6645	         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
6647	      if (! SLOW_BYTE_ACCESS
6648	          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6649	          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6650	          && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6651	          && (type = type_for_size (i + 1, 1)) != 0
6652	          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6653	          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6654	              != CODE_FOR_nothing))
6656	          do_jump (convert (type, exp), if_false_label, if_true_label);
6661	    case TRUTH_NOT_EXPR:
	      /* Logical NOT: just swap the two labels.  */
6662	      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6665	    case TRUTH_ANDIF_EXPR:
	      /* Short-circuit &&: first operand false short-circuits.  */
6666	      if (if_false_label == 0)
6667	        if_false_label = drop_through_label = gen_label_rtx ();
6668	      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6669	      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6672	    case TRUTH_ORIF_EXPR:
	      /* Short-circuit ||: first operand true short-circuits.  */
6673	      if (if_true_label == 0)
6674	        if_true_label = drop_through_label = gen_label_rtx ();
6675	      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6676	      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
	  /* Presumably the COMPOUND_EXPR case: evaluate the first operand
	     for effect, then jump on the second.  */
6680	      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6683	      do_pending_stack_adjust ();
6684	      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
	  /* Presumably the COMPONENT_REF/BIT_FIELD_REF/ARRAY_REF cases:
	     compare a narrow field in its own small mode when possible.  */
6691	        int bitsize, bitpos, unsignedp;
6692	        enum machine_mode mode;
6697	        /* Get description of this reference.  We don't actually care
6698	           about the underlying object here.  */
6699	        get_inner_reference (exp, &bitsize, &bitpos, &offset,
6700	                             &mode, &unsignedp, &volatilep);
6702	        type = type_for_size (bitsize, unsignedp);
6703	        if (! SLOW_BYTE_ACCESS
6704	            && type != 0 && bitsize >= 0
6705	            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6706	            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6707	                != CODE_FOR_nothing))
6709	            do_jump (convert (type, exp), if_false_label, if_true_label);
	  /* Presumably the COND_EXPR case.  */
6716	      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
6717	      if (integer_onep (TREE_OPERAND (exp, 1))
6718	          && integer_zerop (TREE_OPERAND (exp, 2)))
6719	        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6721	      else if (integer_zerop (TREE_OPERAND (exp, 1))
6722	               && integer_onep (TREE_OPERAND (exp, 2)))
6723	        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
	      /* General ?: -- jump into whichever arm the condition selects,
	         then jump on that arm's value.  */
6727	          register rtx label1 = gen_label_rtx ();
6728	          drop_through_label = gen_label_rtx ();
6729	          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6730	          /* Now the THEN-expression.  */
6731	          do_jump (TREE_OPERAND (exp, 1),
6732	                   if_false_label ? if_false_label : drop_through_label,
6733	                   if_true_label ? if_true_label : drop_through_label);
6734	          /* In case the do_jump just above never jumps.  */
6735	          do_pending_stack_adjust ();
6736	          emit_label (label1);
6737	          /* Now the ELSE-expression.  */
6738	          do_jump (TREE_OPERAND (exp, 2),
6739	                   if_false_label ? if_false_label : drop_through_label,
6740	                   if_true_label ? if_true_label : drop_through_label);
	  /* Presumably the EQ_EXPR case: == 0 is NOT of the operand; wide
	     integer modes go word-by-word.  */
6745	      if (integer_zerop (TREE_OPERAND (exp, 1)))
6746	        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6747	      else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6750	               !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6751	        do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6753	        comparison = compare (exp, EQ, EQ);
	  /* Presumably the NE_EXPR case (mirror of EQ_EXPR).  */
6757	      if (integer_zerop (TREE_OPERAND (exp, 1)))
6758	        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6759	      else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6762	               !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6763	        do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6765	        comparison = compare (exp, NE, NE);
	  /* Presumably LT_EXPR: signed LT, unsigned LTU.  */
6769	      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6771	          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6772	        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6774	        comparison = compare (exp, LT, LTU);
	  /* Presumably LE_EXPR: a <= b handled as !(b < a) via swapped labels.  */
6778	      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6780	          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6781	        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6783	        comparison = compare (exp, LE, LEU);
	  /* Presumably GT_EXPR.  */
6787	      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6789	          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6790	        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6792	        comparison = compare (exp, GT, GTU);
	  /* Presumably GE_EXPR.  */
6796	      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6798	          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6799	        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6801	        comparison = compare (exp, GE, GEU);
	  /* Presumably the default case: evaluate EXP and compare against 0.  */
6806	      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6808	      /* This is not needed any more and causes poor code since it causes
6809	         comparisons and tests from non-SI objects to have different code
6811	      /* Copy to register to avoid generating bad insns by cse
6812	         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
6813	      if (!cse_not_expected && GET_CODE (temp) == MEM)
6814	        temp = copy_to_reg (temp);
6816	      do_pending_stack_adjust ();
6817	      if (GET_CODE (temp) == CONST_INT)
6818	        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6819	      else if (GET_CODE (temp) == LABEL_REF)
6820	        comparison = const_true_rtx;
6821	      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6822	               && !can_compare_p (GET_MODE (temp)))
6823	        /* Note swapping the labels gives us not-equal.  */
6824	        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6825	      else if (GET_MODE (temp) != VOIDmode)
6826	        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6827	                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6828	                                       GET_MODE (temp), NULL_RTX, 0);
6833	  /* Do any postincrements in the expression that was tested.  */
6836	  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6837	     straight into a conditional jump instruction as the jump condition.
6838	     Otherwise, all the work has been done already.  */
6840	  if (comparison == const_true_rtx)
	      /* Condition statically true: unconditional jump (guards for null
	         labels, original lines 6841-6842/6846-6847, are missing).  */
6843	        emit_jump (if_true_label);
6845	  else if (comparison == const0_rtx)
6848	        emit_jump (if_false_label);
6850	  else if (comparison)
6851	    do_jump_for_compare (comparison, if_false_label, if_true_label);
6855	  if (drop_through_label)
6857	      /* If do_jump produces code that might be jumped around,
6858	         do any stack adjusts from that code, before the place
6859	         where control merges in.  */
6860	      do_pending_stack_adjust ();
6861	      emit_label (drop_through_label);
6865 /* Given a comparison expression EXP for values too wide to be compared
6866 with one insn, test the comparison and jump to the appropriate label.
6867 The code of EXP is ignored; we always test GT if SWAP is 0,
6868 and LT if SWAP is 1. */
/* NOTE(review): gapped listing -- the `static void' line, the `tree exp;
   int swap;' declarations, locals (`i', `comp'), braces, and the tail of
   the compare_from_rtx call at original line 6918 (its NULL_RTX, 0
   arguments) are missing.  Contract (from the comment above): word-wise
   greater-than jump for values too wide for one compare insn; tests GT
   when SWAP is 0 and LT (operands swapped via TREE_OPERAND (exp, swap))
   when SWAP is 1.  */
6871	do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6874	     rtx if_false_label, if_true_label;
	  /* SWAP selects which operand is "first", implementing LT as
	     swapped GT.  */
6876	  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6877	  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6878	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6879	  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6880	  rtx drop_through_label = 0;
6881	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  /* Replace any null label with a fall-through label.  */
6884	  if (! if_true_label || ! if_false_label)
6885	    drop_through_label = gen_label_rtx ();
6886	  if (! if_true_label)
6887	    if_true_label = drop_through_label;
6888	  if (! if_false_label)
6889	    if_false_label = drop_through_label;
6891	  /* Compare a word at a time, high order first.  */
6892	  for (i = 0; i < nwords; i++)
6895	      rtx op0_word, op1_word;
6897	      if (WORDS_BIG_ENDIAN)
	          /* Big-endian: word 0 already is the most significant.  */
6899	          op0_word = operand_subword_force (op0, i, mode);
6900	          op1_word = operand_subword_force (op1, i, mode);
	      /* else branch (little-endian): highest word first.  */
6904	          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6905	          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6908	      /* All but high-order word must be compared as unsigned.  */
6909	      comp = compare_from_rtx (op0_word, op1_word,
6910	                               (unsignedp || i > 0) ? GTU : GT,
6911	                               unsignedp, word_mode, NULL_RTX, 0);
6912	      if (comp == const_true_rtx)
6913	        emit_jump (if_true_label);
6914	      else if (comp != const0_rtx)
6915	        do_jump_for_compare (comp, NULL_RTX, if_true_label);
6917	      /* Consider lower words only if these are equal.  */
6918	      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6920	      if (comp == const_true_rtx)
6921	        emit_jump (if_false_label);
6922	      else if (comp != const0_rtx)
6923	        do_jump_for_compare (comp, NULL_RTX, if_false_label);
	  /* All words equal: the strict "greater" test is false.  */
6927	    emit_jump (if_false_label);
6928	  if (drop_through_label)
6929	    emit_label (drop_through_label);
6932 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6933 with one insn, test the comparison and jump to the appropriate label. */
6936 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6938 rtx if_false_label, if_true_label;
6940 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6941 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6942 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6943 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6945 rtx drop_through_label = 0;
/* With no false label, fall through on inequality via a merge label.  */
6947 if (! if_false_label)
6948 drop_through_label = if_false_label = gen_label_rtx ();
/* Operands are equal only if every corresponding word pair is equal, so
   jump to the false label as soon as one pair differs.  Word order does
   not matter here, unlike the ordered comparison above.  */
6950 for (i = 0; i < nwords; i++)
6952 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6953 operand_subword_force (op1, i, mode),
6954 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6955 word_mode, NULL_RTX, 0);
/* NOTE(review): comp == const_true_rtx means the words are known EQUAL,
   yet this jumps to if_false_label; presumably the (missing) surrounding
   lines invert the sense — confirm against the full source.  */
6956 if (comp == const_true_rtx)
6957 emit_jump (if_false_label);
6958 else if (comp != const0_rtx)
6959 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* No word differed: the values are equal.  */
6963 emit_jump (if_true_label);
6964 if (drop_through_label)
6965 emit_label (drop_through_label);
6968 /* Jump according to whether OP0 is 0.
6969 We assume that OP0 has an integer mode that is too wide
6970 for the available compare insns. */
6973 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6975 rtx if_false_label, if_true_label;
6977 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6979 rtx drop_through_label = 0;
/* With no false label, fall through on nonzero via a merge label.  */
6981 if (! if_false_label)
6982 drop_through_label = if_false_label = gen_label_rtx ();
/* OP0 is zero only if every one of its words is zero; compare each word
   against const0_rtx (always unsigned) and bail out on the first
   nonzero word.  */
6984 for (i = 0; i < nwords; i++)
6986 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6988 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
6989 if (comp == const_true_rtx)
6990 emit_jump (if_false_label)
6991 else if (comp != const0_rtx)
6992 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Every word was zero.  */
6996 emit_jump (if_true_label);
6997 if (drop_through_label)
6998 emit_label (drop_through_label);
7001 /* Given a comparison expression in rtl form, output conditional branches to
7002 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7005 do_jump_for_compare (comparison, if_false_label, if_true_label)
7006 rtx comparison, if_false_label, if_true_label;
/* Easy case: a true label exists, so branch to it on the condition
   using the target's branch generator table, then (if a false label
   also exists) fall through into an unconditional jump to it.  */
7010 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7011 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7016 emit_jump (if_false_label);
7018 else if (if_false_label)
/* Remember the insn before the branch we are about to emit, so we can
   find the branch afterwards even if emission rewrote earlier insns.  */
7021 rtx prev = PREV_INSN (get_last_insn ());
7024 /* Output the branch with the opposite condition. Then try to invert
7025 what is generated. If more than one insn is a branch, or if the
7026 branch is not the last insn written, abort. If we can't invert
7027 the branch, make a true label, redirect this jump to it,
7028 emit a jump to the false label, and define the true label. */
7030 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7031 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7035 /* Here we get the insn before what was just emitted.
7036 On some machines, emitting the branch can discard
7037 the previous compare insn and emit a replacement. */
7039 /* If there's only one preceding insn... */
7040 insn = get_insns ();
7042 insn = NEXT_INSN (prev);
/* Scan the newly-emitted insns for the JUMP_INSN that is the branch.  */
7044 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7045 if (GET_CODE (insn) == JUMP_INSN)
7052 if (branch != get_last_insn ())
7055 if (! invert_jump (branch, if_false_label))
/* Inversion failed: fall back to the redirect-and-double-jump scheme
   described in the comment above.  */
7057 if_true_label = gen_label_rtx ();
7058 redirect_jump (branch, if_true_label);
7059 emit_jump (if_false_label);
7060 emit_label (if_true_label);
7065 /* Generate code for a comparison expression EXP
7066 (including code to compute the values to be compared)
7067 and set (CC0) according to the result.
7068 SIGNED_CODE should be the rtx operation for this comparison for
7069 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7071 We force a stack adjustment unless there are currently
7072 things pushed on the stack that aren't yet used. */
7075 compare (exp, signed_code, unsigned_code)
7077 enum rtx_code signed_code, unsigned_code;
/* Expand both operands of the comparison into rtl.  */
7080 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7082 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7083 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7084 register enum machine_mode mode = TYPE_MODE (type);
7085 int unsignedp = TREE_UNSIGNED (type);
/* The operand type's signedness decides which rtx code to use.  */
7086 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
/* Delegate to the rtx-level routine, supplying a size expression (for
   BLKmode operands) and the type's alignment in bytes.  */
7088 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7090 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7091 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7094 /* Like compare but expects the values to compare as two rtx's.
7095 The decision as to signed or unsigned comparison must be made by the caller.
7097 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7100 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7101 size of MODE should be used. */
7104 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7105 register rtx op0, op1;
7108 enum machine_mode mode;
7112 /* If one operand is constant, make it the second one. */
7114 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
/* Swapping the operands requires swapping the condition too
   (e.g. LT becomes GT).  */
7119 code = swap_condition (code);
7124 op0 = force_not_mem (op0);
7125 op1 = force_not_mem (op1);
/* Flush any deferred stack adjustment before emitting the compare.  */
7128 do_pending_stack_adjust ();
/* Two constants: fold the comparison at compile time.  */
7130 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
7131 return simplify_relational_operation (code, mode, op0, op1);
7134 /* There's no need to do this now that combine.c can eliminate lots of
7135 sign extensions. This can be less efficient in certain cases on other
7138 /* If this is a signed equality comparison, we can do it as an
7139 unsigned comparison since zero-extension is cheaper than sign
7140 extension and comparisons with zero are done as unsigned. This is
7141 the case even on machines that can do fast sign extension, since
7142 zero-extension is easier to combine with other operations than
7143 sign-extension is. If we are comparing against a constant, we must
7144 convert it to what it would look like unsigned. */
7145 if ((code == EQ || code == NE) && ! unsignedp
7146 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
/* Mask the constant down to the operand's mode so the unsigned
   comparison sees the same bit pattern.  */
7148 if (GET_CODE (op1) == CONST_INT
7149 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7150 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
/* Emit the actual compare insn and return a condition rtx that tests
   the condition-code register (cc0) against zero.  */
7155 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7157 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7160 /* Generate code to calculate EXP using a store-flag instruction
7161 and return an rtx for the result. EXP is either a comparison
7162 or a TRUTH_NOT_EXPR whose operand is a comparison.
7164 If TARGET is nonzero, store the result there if convenient.
7166 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7169 Return zero if there is no suitable set-flag instruction
7170 available on this machine.
7172 Once expand_expr has been called on the arguments of the comparison,
7173 we are committed to doing the store flag, since it is not safe to
7174 re-evaluate the expression. We emit the store-flag insn by calling
7175 emit_store_flag, but only expand the arguments if we have a reason
7176 to believe that emit_store_flag will be successful. If we think that
7177 it will, but it isn't, we have to simulate the store-flag with a
7178 set/jump/set sequence. */
7181 do_store_flag (exp, target, mode, only_cheap)
7184 enum machine_mode mode;
7188 tree arg0, arg1, type;
7190 enum machine_mode operand_mode;
7194 enum insn_code icode;
7195 rtx subtarget = target;
7196 rtx result, label, pattern, jump_pat;
7198 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7199 result at the end. We can't simply invert the test since it would
7200 have already been inverted if it were valid. This case occurs for
7201 some floating-point comparisons. */
7203 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7204 invert = 1, exp = TREE_OPERAND (exp, 0);
7206 arg0 = TREE_OPERAND (exp, 0);
7207 arg1 = TREE_OPERAND (exp, 1);
7208 type = TREE_TYPE (arg0);
7209 operand_mode = TYPE_MODE (type);
7210 unsignedp = TREE_UNSIGNED (type);
7212 /* We won't bother with BLKmode store-flag operations because it would mean
7213 passing a lot of information to emit_store_flag. */
7214 if (operand_mode == BLKmode)
7220 /* Get the rtx comparison code to use. We know that EXP is a comparison
7221 operation of some type. Some comparisons against 1 and -1 can be
7222 converted to comparisons with zero. Do so here so that the tests
7223 below will be aware that we have a comparison with zero. These
7224 tests will not catch constants in the first operand, but constants
7225 are rarely passed as the first operand. */
7227 switch (TREE_CODE (exp))
/* x < 1 becomes x <= 0; likewise for the other boundary rewrites
   below (x <= -1 -> x < 0, x > -1 -> x >= 0, x >= 1 -> x > 0).  */
7236 if (integer_onep (arg1))
7237 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7239 code = unsignedp ? LTU : LT;
7242 if (integer_all_onesp (arg1))
7243 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7245 code = unsignedp ? LEU : LE;
7248 if (integer_all_onesp (arg1))
7249 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7251 code = unsignedp ? GTU : GT;
7254 if (integer_onep (arg1))
7255 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7257 code = unsignedp ? GEU : GE;
7263 /* Put a constant second. */
7264 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7266 tem = arg0; arg0 = arg1; arg1 = tem;
7267 code = swap_condition (code);
7270 /* If this is an equality or inequality test of a single bit, we can
7271 do this by shifting the bit being tested to the low-order bit and
7272 masking the result with the constant 1. If the condition was EQ,
7273 we xor it with 1. This does not require an scc insn and is faster
7274 than an scc insn even if we have it. */
7276 if ((code == NE || code == EQ)
7277 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7278 && integer_pow2p (TREE_OPERAND (arg0, 1))
7279 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
/* The mask is a power of two, so its log2 is the bit position.  */
7281 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7282 NULL_RTX, VOIDmode, 0)));
/* SUBTARGET is usable only if it is a register of the right mode that
   the operand's expansion cannot clobber.  */
7284 if (subtarget == 0 || GET_CODE (subtarget) != REG
7285 || GET_MODE (subtarget) != operand_mode
7286 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7289 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0.  */
7292 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7293 size_int (bitnum), target, 1);
7295 if (GET_MODE (op0) != mode)
7296 op0 = convert_to_mode (mode, op0, 1);
/* Unless the bit was already the sign/top bit, mask off the rest.  */
7298 if (bitnum != TYPE_PRECISION (type) - 1)
7299 op0 = expand_and (op0, const1_rtx, target);
/* For EQ (or inverted NE) flip the low bit with xor.  */
7301 if ((code == EQ && ! invert) || (code == NE && invert))
7302 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7308 /* Now see if we are likely to be able to do this. Return if not. */
7309 if (! can_compare_p (operand_mode))
7311 icode = setcc_gen_code[(int) code];
7312 if (icode == CODE_FOR_nothing
7313 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7315 /* We can only do this if it is one of the special cases that
7316 can be handled without an scc insn. */
7317 if ((code == LT && integer_zerop (arg1))
7318 || (! only_cheap && code == GE && integer_zerop (arg1)))
7320 else if (BRANCH_COST >= 0
7321 && ! only_cheap && (code == NE || code == EQ)
7322 && TREE_CODE (type) != REAL_TYPE
7323 && ((abs_optab->handlers[(int) operand_mode].insn_code
7324 != CODE_FOR_nothing)
7325 || (ffs_optab->handlers[(int) operand_mode].insn_code
7326 != CODE_FOR_nothing)))
/* Committed now: expand the operands (see function comment).  */
7332 preexpand_calls (exp);
7333 if (subtarget == 0 || GET_CODE (subtarget) != REG
7334 || GET_MODE (subtarget) != operand_mode
7335 || ! safe_from_p (subtarget, arg1))
7338 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7339 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7342 target = gen_reg_rtx (mode);
7344 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7345 because, if the emit_store_flag does anything it will succeed and
7346 OP0 and OP1 will not be used subsequently. */
7348 result = emit_store_flag (target, code,
7349 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7350 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7351 operand_mode, unsignedp, 1);
/* Apply the deferred TRUTH_NOT_EXPR inversion by xor-ing with 1.  */
7356 result = expand_binop (mode, xor_optab, result, const1_rtx,
7357 result, 0, OPTAB_LIB_WIDEN);
7361 /* If this failed, we have to do this with set/compare/jump/set code. */
7362 if (target == 0 || GET_CODE (target) != REG
7363 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7364 target = gen_reg_rtx (GET_MODE (target));
/* Preload the "condition holds" value, then branch around the store of
   the opposite value when the condition is true.  */
7366 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7367 result = compare_from_rtx (op0, op1, code, unsignedp,
7368 operand_mode, NULL_RTX, 0);
/* The comparison folded to a constant: return 0 or 1 directly.  */
7369 if (GET_CODE (result) == CONST_INT)
7370 return (((result == const0_rtx && ! invert)
7371 || (result != const0_rtx && invert))
7372 ? const0_rtx : const1_rtx);
7374 label = gen_label_rtx ();
7375 if (bcc_gen_fctn[(int) code] == 0)
7378 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7379 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7385 /* Generate a tablejump instruction (used for switch statements). */
7387 #ifdef HAVE_tablejump
7389 /* INDEX is the value being switched on, with the lowest value
7390 in the table already subtracted.
7391 MODE is its expected mode (needed if INDEX is constant).
7392 RANGE is the length of the jump table.
7393 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7395 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7396 index value is out of range. */
7399 do_tablejump (index, mode, range, table_label, default_label)
7400 rtx index, range, table_label, default_label;
7401 enum machine_mode mode;
7403 register rtx temp, vector;
7405 /* Do an unsigned comparison (in the proper mode) between the index
7406 expression and the value which represents the length of the range.
7407 Since we just finished subtracting the lower bound of the range
7408 from the index expression, this comparison allows us to simultaneously
7409 check that the original index expression value is both greater than
7410 or equal to the minimum value of the range and less than or equal to
7411 the maximum value of the range. */
7413 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
7414 emit_jump_insn (gen_bltu (default_label));
7416 /* If index is in range, it must fit in Pmode.
7417 Convert to Pmode so we can index with it. */
7419 index = convert_to_mode (Pmode, index, 1);
7421 /* If flag_force_addr were to affect this address
7422 it could interfere with the tricky assumptions made
7423 about addresses that contain label-refs,
7424 which may be valid only very near the tablejump itself. */
7425 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7426 GET_MODE_SIZE, because this indicates how large insns are. The other
7427 uses should all be Pmode, because they are addresses. This code
7428 could fail if addresses and insns are not the same size. */
/* Compute table_label + index * entry_size, the address of the
   selected jump-table entry.  */
7429 index = memory_address_noforce
7431 gen_rtx (PLUS, Pmode,
7432 gen_rtx (MULT, Pmode, index,
7433 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7434 gen_rtx (LABEL_REF, Pmode, table_label)));
7435 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7436 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
/* The jump table is read-only; mark the MEM so cse/loop know it.  */
7437 RTX_UNCHANGING_P (vector) = 1;
7438 convert_move (temp, vector, 0);
7440 emit_jump_insn (gen_tablejump (temp, table_label));
7442 #ifndef CASE_VECTOR_PC_RELATIVE
7443 /* If we are generating PIC code or if the table is PC-relative, the
7444 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7450 #endif /* HAVE_tablejump */