1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
/* CEIL(x,y): X divided by Y, rounding any remainder upward.
   Both operands are evaluated once; arguments are fully parenthesized.  */
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
40 #ifdef STACK_GROWS_DOWNWARD
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
/* STACK_PUSH_CODE: the rtx code used to push onto the stack, unless the
   target has already defined it.  PRE_DEC when the stack grows downward,
   PRE_INC otherwise.  NOTE(review): the matching #else/#endif lines are
   elided in this excerpt (original line numbering shows gaps).  */
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
50 #define STACK_PUSH_CODE PRE_INC
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
/* NOTE(review): the variable this comment describes is elided in this
   excerpt (original lines 63-64 are missing).  */
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
87 static rtx saveregs_value;
/* Old-style (pre-prototype, K&R) forward declarations for the static
   helpers defined later in this file.  */
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
/* Indexed by (int) machine mode; filled in by the once-per-compilation
   initialization routine below.  */
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
112 /* MOVE_RATIO is the number of move instructions that is better than
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
119 /* A value of around 6 would minimize code size; infinity would minimize
121 #define MOVE_RATIO 15
125 /* This array records the insn_code of insns to perform block moves. */
126 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
128 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
/* Default to "unaligned accesses are cheap" when the target does not say.  */
130 #ifndef SLOW_UNALIGNED_ACCESS
131 #define SLOW_UNALIGNED_ACCESS 0
134 /* This is run once per compilation to set up which modes can be used
135 directly in memory and to initialize the block move optab. */
/* NOTE(review): the function header (presumably init_expr_once) and a
   number of body lines — local declarations, loop braces, and the
   #ifdef HAVE_movstr* guards around the optab assignments — are elided
   in this excerpt.  The visible logic: build a scratch SET insn, then
   for every machine mode probe whether some hard register can be loaded
   from / stored to memory addressed via the stack or frame pointer,
   recording the result in direct_load[] / direct_store[].  */
141 enum machine_mode mode;
142 /* Try indexing by frame ptr and try by stack ptr.
143 It is known that on the Convex the stack ptr isn't a valid index.
144 With luck, one or the other is valid on any machine. */
145 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
146 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
149 insn = emit_insn (gen_rtx (SET, 0, 0));
150 pat = PATTERN (insn);
152 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
153 mode = (enum machine_mode) ((int) mode + 1))
159 direct_load[(int) mode] = direct_store[(int) mode] = 0;
160 PUT_MODE (mem, mode);
161 PUT_MODE (mem1, mode);
163 /* See if there is some register that can be used in this mode and
164 directly loaded or stored from memory. */
166 if (mode != VOIDmode && mode != BLKmode)
167 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
168 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
171 if (! HARD_REGNO_MODE_OK (regno, mode))
174 reg = gen_rtx (REG, mode, regno);
/* Probe loads: (set reg mem) with the stack-pointer address, then with
   the frame-pointer address.  recog >= 0 means the insn is recognized.  */
177 SET_DEST (pat) = reg;
178 if (recog (pat, insn, &num_clobbers) >= 0)
179 direct_load[(int) mode] = 1;
181 SET_SRC (pat) = mem1;
182 SET_DEST (pat) = reg;
183 if (recog (pat, insn, &num_clobbers) >= 0)
184 direct_load[(int) mode] = 1;
/* Probe stores: (set mem reg), again with both address forms.  */
187 SET_DEST (pat) = mem;
188 if (recog (pat, insn, &num_clobbers) >= 0)
189 direct_store[(int) mode] = 1;
192 SET_DEST (pat) = mem1;
193 if (recog (pat, insn, &num_clobbers) >= 0)
194 direct_store[(int) mode] = 1;
197 movstr_optab[(int) mode] = CODE_FOR_nothing;
/* Fill in block-move insn codes for the modes whose movstr patterns the
   target provides.  NOTE(review): the surrounding #ifdef HAVE_movstr*
   guards are elided in this excerpt.  */
204 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
208 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
212 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
216 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
220 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
224 /* This is run at the start of compiling a function. */
/* NOTE(review): the function header (presumably init_expr) is elided.
   Resets the per-function expansion state.  */
231 pending_stack_adjust = 0;
232 inhibit_defer_pop = 0;
233 cleanups_this_call = 0;
238 /* Save all variables describing the current status into the structure *P.
239 This is used before starting a nested function. */
/* NOTE(review): header (presumably save_expr_status) elided.  Saves the
   per-function state into *P, then reinitializes it for the nested
   function — the mirror image of restore_expr_status below.  */
245 /* Instead of saving the postincrement queue, empty it. */
248 p->pending_stack_adjust = pending_stack_adjust;
249 p->inhibit_defer_pop = inhibit_defer_pop;
250 p->cleanups_this_call = cleanups_this_call;
251 p->saveregs_value = saveregs_value;
252 p->forced_labels = forced_labels;
254 pending_stack_adjust = 0;
255 inhibit_defer_pop = 0;
256 cleanups_this_call = 0;
261 /* Restore all variables describing the current status from the structure *P.
262 This is used after a nested function. */
265 restore_expr_status (p)
268 pending_stack_adjust = p->pending_stack_adjust;
269 inhibit_defer_pop = p->inhibit_defer_pop;
270 cleanups_this_call = p->cleanups_this_call;
271 saveregs_value = p->saveregs_value;
272 forced_labels = p->forced_labels;
275 /* Manage the queue of increment instructions to be output
276 for POSTINCREMENT_EXPR expressions, etc. */
/* Head of the chain of QUEUED rtxs; each links to the next via
   QUEUED_NEXT.  Flushed by emit_queue below.  */
278 static rtx pending_chain;
280 /* Queue up to increment (or change) VAR later. BODY says how:
281 BODY should be the same thing you would pass to emit_insn
282 to increment right away. It will go to emit_insn later on.
284 The value is a QUEUED expression to be used in place of VAR
285 where you want to guarantee the pre-incrementation value of VAR. */
288 enqueue_insn (var, body)
/* Push a new QUEUED node on the front of pending_chain and return it.  */
291 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
292 var, NULL_RTX, NULL_RTX, body, pending_chain);
293 return pending_chain;
296 /* Use protect_from_queue to convert a QUEUED expression
297 into something that you can put immediately into an instruction.
298 If the queued incrementation has not happened yet,
299 protect_from_queue returns the variable itself.
300 If the incrementation has happened, protect_from_queue returns a temp
301 that contains a copy of the old value of the variable.
303 Any time an rtx which might possibly be a QUEUED is to be put
304 into an instruction, it must be passed through protect_from_queue first.
305 QUEUED expressions are not meaningful in instructions.
307 Do not pass a value through protect_from_queue and then hold
308 on to it for a while before putting it in an instruction!
309 If the queue is flushed in between, incorrect code will result. */
312 protect_from_queue (x, modify)
316 register RTX_CODE code = GET_CODE (x);
318 #if 0 /* A QUEUED can hang around after the queue is forced out. */
319 /* Shortcut for most common case. */
320 if (pending_chain == 0)
326 /* A special hack for read access to (MEM (QUEUED ...))
327 to facilitate use of autoincrement.
328 Make a copy of the contents of the memory location
329 rather than a copy of the address, but not
330 if the value is of mode BLKmode. */
331 if (code == MEM && GET_MODE (x) != BLKmode
332 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
334 register rtx y = XEXP (x, 0);
335 XEXP (x, 0) = QUEUED_VAR (y);
338 register rtx temp = gen_reg_rtx (GET_MODE (x));
339 emit_insn_before (gen_move_insn (temp, x),
345 /* Otherwise, recursively protect the subexpressions of all
346 the kinds of rtx's that can contain a QUEUED. */
/* NOTE(review): the condition guarding this one-operand case (for MEM,
   per the comment above) is elided in this excerpt.  */
348 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
349 else if (code == PLUS || code == MULT)
351 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
352 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
/* From here down X is itself a QUEUED node.  */
356 /* If the increment has not happened, use the variable itself. */
357 if (QUEUED_INSN (x) == 0)
358 return QUEUED_VAR (x);
359 /* If the increment has happened and a pre-increment copy exists,
361 if (QUEUED_COPY (x) != 0)
362 return QUEUED_COPY (x);
363 /* The increment has happened but we haven't set up a pre-increment copy.
364 Set one up now, and use it. */
365 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
366 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
368 return QUEUED_COPY (x);
371 /* Return nonzero if X contains a QUEUED expression:
372 if it contains anything that will be altered by a queued increment.
373 We handle only combinations of MEM, PLUS, MINUS and MULT operators
374 since memory addresses generally contain only those. */
/* NOTE(review): the header of queued_subexp_p and the switch/case lines
   dispatching on CODE are elided in this excerpt.  */
380 register enum rtx_code code = GET_CODE (x);
386 return queued_subexp_p (XEXP (x, 0));
390 return queued_subexp_p (XEXP (x, 0))
391 || queued_subexp_p (XEXP (x, 1));
396 /* Perform all the pending incrementations. */
/* NOTE(review): header (presumably emit_queue) elided.  Emits each
   queued BODY, recording the resulting insn in QUEUED_INSN, and pops
   the chain until empty.  */
402 while (p = pending_chain)
404 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
405 pending_chain = QUEUED_NEXT (p);
416 /* Copy data from FROM to TO, where the machine modes are not the same.
417 Both modes may be integer, or both may be floating.
418 UNSIGNEDP should be nonzero if FROM is an unsigned type.
419 This causes zero-extension instead of sign-extension. */
422 convert_move (to, from, unsignedp)
423 register rtx to, from;
426 enum machine_mode to_mode = GET_MODE (to);
427 enum machine_mode from_mode = GET_MODE (from);
428 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
429 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
433 /* rtx code for making an equivalent value. */
434 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
/* Resolve any queued increments before touching the operands.  */
436 to = protect_from_queue (to, 1);
437 from = protect_from_queue (from, 0);
/* Mixing a float mode with a non-float mode is not supported here.  */
439 if (to_real != from_real)
442 /* If FROM is a SUBREG that indicates that we have already done at least
443 the required extension, strip it. We don't handle such SUBREGs as
446 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
447 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
448 >= GET_MODE_SIZE (to_mode))
449 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
450 from = gen_lowpart (to_mode, from), from_mode = to_mode;
452 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
/* Trivial case: same mode (or a constant without a mode) — plain move.  */
455 if (to_mode == from_mode
456 || (from_mode == VOIDmode && CONSTANT_P (from)))
458 emit_move_insn (to, from);
464 #ifdef HAVE_extendqfhf2
/* QFmode -> HFmode float extension.  Fixed a copy-paste typo: the guard
   is HAVE_extendqfhf2, but the test and insn code previously named the
   QFmode->SFmode pattern (extendqfsf2), emitting an insn of the wrong
   output mode — and failing to compile on targets that provide
   extendqfhf2 without extendqfsf2.  Now guard, test, and CODE_FOR_ all
   name the same pattern, matching every other extend case below.  */
465 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
467 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
/* Float widening conversions: each case is guarded by the target's
   HAVE_extendMN2 pattern and emits the corresponding named insn.
   NOTE(review): the enclosing braces, return statements, and #endif
   lines are elided in this excerpt.  */
471 #ifdef HAVE_extendqfsf2
472 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
474 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
478 #ifdef HAVE_extendqfdf2
479 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
481 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
485 #ifdef HAVE_extendqfxf2
486 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
488 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
492 #ifdef HAVE_extendqftf2
493 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
495 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
500 #ifdef HAVE_extendhfsf2
501 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
503 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
507 #ifdef HAVE_extendhfdf2
508 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
510 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
514 #ifdef HAVE_extendhfxf2
515 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
517 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
521 #ifdef HAVE_extendhftf2
522 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
524 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
529 #ifdef HAVE_extendsfdf2
530 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
532 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
536 #ifdef HAVE_extendsfxf2
537 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
539 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
543 #ifdef HAVE_extendsftf2
544 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
546 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
550 #ifdef HAVE_extenddfxf2
551 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
553 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
557 #ifdef HAVE_extenddftf2
558 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
560 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
/* Float narrowing conversions, same scheme with truncMN2 patterns.  */
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
572 #ifdef HAVE_truncsfqf2
573 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
575 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
579 #ifdef HAVE_truncdfqf2
580 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
586 #ifdef HAVE_truncxfqf2
587 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
593 #ifdef HAVE_trunctfqf2
594 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
600 #ifdef HAVE_truncsfhf2
601 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
603 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
607 #ifdef HAVE_truncdfhf2
608 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
610 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
614 #ifdef HAVE_truncxfhf2
615 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
617 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
621 #ifdef HAVE_trunctfhf2
622 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
624 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
628 #ifdef HAVE_truncdfsf2
629 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
631 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
635 #ifdef HAVE_truncxfsf2
636 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
638 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
642 #ifdef HAVE_trunctfsf2
643 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
645 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
649 #ifdef HAVE_truncxfdf2
650 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
652 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
656 #ifdef HAVE_trunctfdf2
657 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
659 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* No insn pattern matched: fall back to a library call.  NOTE(review):
   the switch on from_mode/to_mode selecting among these assignments is
   elided in this excerpt.  */
671 libcall = extendsfdf2_libfunc;
675 libcall = extendsfxf2_libfunc;
679 libcall = extendsftf2_libfunc;
688 libcall = truncdfsf2_libfunc;
692 libcall = extenddfxf2_libfunc;
696 libcall = extenddftf2_libfunc;
705 libcall = truncxfsf2_libfunc;
709 libcall = truncxfdf2_libfunc;
718 libcall = trunctfsf2_libfunc;
722 libcall = trunctfdf2_libfunc;
728 if (libcall == (rtx) 0)
729 /* This conversion is not implemented yet. */
732 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
733 emit_move_insn (to, hard_libcall_value (to_mode));
737 /* Now both modes are integers. */
739 /* Handle expanding beyond a word. */
740 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
741 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
748 enum machine_mode lowpart_mode;
749 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
751 /* Try converting directly if the insn is supported. */
752 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
755 /* If FROM is a SUBREG, put it into a register. Do this
756 so that we always generate the same set of insns for
757 better cse'ing; if an intermediate assignment occurred,
758 we won't be doing the operation directly on the SUBREG. */
759 if (optimize > 0 && GET_CODE (from) == SUBREG)
760 from = force_reg (from_mode, from);
761 emit_unop_insn (code, to, from, equiv_code);
764 /* Next, try converting via full word. */
765 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
766 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
767 != CODE_FOR_nothing))
769 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
770 emit_unop_insn (code, to,
771 gen_lowpart (word_mode, to), equiv_code);
775 /* No special multiword conversion insn; do it by hand. */
778 /* Get a copy of FROM widened to a word, if necessary. */
779 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
780 lowpart_mode = word_mode;
782 lowpart_mode = from_mode;
784 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
786 lowpart = gen_lowpart (lowpart_mode, to);
787 emit_move_insn (lowpart, lowfrom);
789 /* Compute the value to put in each remaining word. */
/* Zero-fill for unsigned extension; for signed extension, compute a
   sign-fill word below (via the slt pattern or an arithmetic shift).  */
791 fill_value = const0_rtx;
796 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
797 && STORE_FLAG_VALUE == -1)
799 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
801 fill_value = gen_reg_rtx (word_mode);
802 emit_insn (gen_slt (fill_value));
/* Arithmetic right shift by (width - 1) replicates the sign bit.  */
808 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
809 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
811 fill_value = convert_to_mode (word_mode, fill_value, 1);
815 /* Fill the remaining words. */
816 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
818 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
819 rtx subword = operand_subword (to, index, 1, to_mode);
824 if (fill_value != subword)
825 emit_move_insn (subword, fill_value);
828 insns = get_insns ();
831 emit_no_conflict_block (insns, to, from, NULL_RTX,
832 gen_rtx (equiv_code, to_mode, from));
836 /* Truncating multi-word to a word or less. */
837 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
838 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
840 convert_move (to, gen_lowpart (word_mode, from), 0);
844 /* Handle pointer conversion */ /* SPEE 900220 */
/* PSImode (partial single-word pointer) conversions go via SImode.  */
845 if (to_mode == PSImode)
847 if (from_mode != SImode)
848 from = convert_to_mode (SImode, from, unsignedp);
850 #ifdef HAVE_truncsipsi
853 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
856 #endif /* HAVE_truncsipsi */
860 if (from_mode == PSImode)
862 if (to_mode != SImode)
864 from = convert_to_mode (SImode, from, unsignedp);
869 #ifdef HAVE_extendpsisi
870 if (HAVE_extendpsisi)
872 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
875 #endif /* HAVE_extendpsisi */
880 /* Now follow all the conversions between integers
881 no more than a word long. */
883 /* For truncation, usually we can just refer to FROM in a narrower mode. */
884 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
885 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
886 GET_MODE_BITSIZE (from_mode)))
/* gen_lowpart needs FROM in a register unless it is a non-volatile,
   directly-loadable MEM with a mode-independent address.  */
888 if (!((GET_CODE (from) == MEM
889 && ! MEM_VOLATILE_P (from)
890 && direct_load[(int) to_mode]
891 && ! mode_dependent_address_p (XEXP (from, 0)))
892 || GET_CODE (from) == REG
893 || GET_CODE (from) == SUBREG))
894 from = force_reg (from_mode, from);
895 emit_move_insn (to, gen_lowpart (to_mode, from));
899 /* Handle extension. */
900 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
902 /* Convert directly if that works. */
903 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
906 /* If FROM is a SUBREG, put it into a register. Do this
907 so that we always generate the same set of insns for
908 better cse'ing; if an intermediate assignment occurred,
909 we won't be doing the operation directly on the SUBREG. */
910 if (optimize > 0 && GET_CODE (from) == SUBREG)
911 from = force_reg (from_mode, from);
912 emit_unop_insn (code, to, from, equiv_code);
917 enum machine_mode intermediate;
919 /* Search for a mode to convert via. */
920 for (intermediate = from_mode; intermediate != VOIDmode;
921 intermediate = GET_MODE_WIDER_MODE (intermediate))
922 if ((can_extend_p (to_mode, intermediate, unsignedp)
924 && (can_extend_p (intermediate, from_mode, unsignedp)
925 != CODE_FOR_nothing))
927 convert_move (to, convert_to_mode (intermediate, from,
928 unsignedp), unsignedp);
932 /* No suitable intermediate mode. */
937 /* Support special truncate insns for certain modes. */
/* Each case tries the target's truncMN2 pattern, else recurses with
   FROM forced into a register.  NOTE(review): #else/#endif lines are
   elided in this excerpt.  */
939 if (from_mode == DImode && to_mode == SImode)
941 #ifdef HAVE_truncdisi2
944 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
948 convert_move (to, force_reg (from_mode, from), unsignedp);
952 if (from_mode == DImode && to_mode == HImode)
954 #ifdef HAVE_truncdihi2
957 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
961 convert_move (to, force_reg (from_mode, from), unsignedp);
965 if (from_mode == DImode && to_mode == QImode)
967 #ifdef HAVE_truncdiqi2
970 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
974 convert_move (to, force_reg (from_mode, from), unsignedp);
978 if (from_mode == SImode && to_mode == HImode)
980 #ifdef HAVE_truncsihi2
983 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
987 convert_move (to, force_reg (from_mode, from), unsignedp);
991 if (from_mode == SImode && to_mode == QImode)
993 #ifdef HAVE_truncsiqi2
996 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1000 convert_move (to, force_reg (from_mode, from), unsignedp);
1004 if (from_mode == HImode && to_mode == QImode)
1006 #ifdef HAVE_trunchiqi2
1007 if (HAVE_trunchiqi2)
1009 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1013 convert_move (to, force_reg (from_mode, from), unsignedp);
1017 /* Handle truncation of volatile memrefs, and so on;
1018 the things that couldn't be truncated directly,
1019 and for which there was no special instruction. */
1020 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1022 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1023 emit_move_insn (to, temp);
1027 /* Mode combination is not recognized. */
1031 /* Return an rtx for a value that would result
1032 from converting X to mode MODE.
1033 Both X and MODE may be floating, or both integer.
1034 UNSIGNEDP is nonzero if X is an unsigned value.
1035 This can be done by referring to a part of X in place
1036 or by copying to a new temporary with conversion.
1038 This function *must not* call protect_from_queue
1039 except when putting X into an insn (in which case convert_move does it). */
1042 convert_to_mode (mode, x, unsignedp)
1043 enum machine_mode mode;
1049 /* If FROM is a SUBREG that indicates that we have already done at least
1050 the required extension, strip it. */
1052 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1053 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1054 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1055 x = gen_lowpart (mode, x);
/* Already in the right mode: nothing to do (return elided in excerpt).  */
1057 if (mode == GET_MODE (x))
1060 /* There is one case that we must handle specially: If we are converting
1061 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1062 we are to interpret the constant as unsigned, gen_lowpart will do
1063 the wrong if the constant appears negative. What we want to do is
1064 make the high-order word of the constant zero, not all ones. */
1066 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1067 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1068 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1069 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1071 /* We can do this with a gen_lowpart if both desired and current modes
1072 are integer, and this is either a constant integer, a register, or a
1073 non-volatile MEM. Except for the constant case, we must be narrowing
1076 if (GET_CODE (x) == CONST_INT
1077 || (GET_MODE_CLASS (mode) == MODE_INT
1078 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1079 && (GET_CODE (x) == CONST_DOUBLE
1080 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1081 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1082 && direct_load[(int) mode]
1083 || GET_CODE (x) == REG)))))
1084 return gen_lowpart (mode, x);
/* General case: convert into a fresh pseudo and return it.  */
1086 temp = gen_reg_rtx (mode);
1087 convert_move (temp, x, unsignedp);
1091 /* Generate several move instructions to copy LEN bytes
1092 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1093 The caller must pass FROM and TO
1094 through protect_from_queue before calling.
1095 ALIGN (in bytes) is maximum alignment we can assume. */
/* Bookkeeping for a block copy done as individual scalar moves.
   NOTE(review): several struct members (to, from, to_addr, from_addr,
   autinc_to, autinc_from, len, offset, reverse, per the uses below) are
   elided in this excerpt.  */
1097 struct move_by_pieces
/* Nonzero to emit explicit address adjustments: sign gives direction
   (-1 = pre-decrement, +1 = post-increment style).  */
1102 int explicit_inc_to;
1106 int explicit_inc_from;
1112 static void move_by_pieces_1 ();
1113 static int move_by_pieces_ninsns ();
1116 move_by_pieces (to, from, len, align)
1120 struct move_by_pieces data;
1121 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
/* One larger than the widest chunk we may move at once.  */
1122 int max_size = MOVE_MAX + 1;
1125 data.to_addr = to_addr;
1126 data.from_addr = from_addr;
/* Detect addresses that already auto-increment/decrement.  */
1130 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1131 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC)
1133 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1134 || GET_CODE (from_addr) == POST_INC
1135 || GET_CODE (from_addr) == POST_DEC);
1137 data.explicit_inc_from = 0;
1138 data.explicit_inc_to = 0;
/* Copy backwards when the destination address decrements.  */
1140 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1141 if (data.reverse) data.offset = len;
1144 /* If copying requires more than two move insns,
1145 copy addresses to registers (to make displacements shorter)
1146 and use post-increment if available. */
1147 if (!(data.autinc_from && data.autinc_to)
1148 && move_by_pieces_ninsns (len, align) > 2)
1150 #ifdef HAVE_PRE_DECREMENT
1151 if (data.reverse && ! data.autinc_from)
1153 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1154 data.autinc_from = 1;
1155 data.explicit_inc_from = -1;
1158 #ifdef HAVE_POST_INCREMENT
1159 if (! data.autinc_from)
1161 data.from_addr = copy_addr_to_reg (from_addr);
1162 data.autinc_from = 1;
1163 data.explicit_inc_from = 1;
1166 if (!data.autinc_from && CONSTANT_P (from_addr))
1167 data.from_addr = copy_addr_to_reg (from_addr);
1168 #ifdef HAVE_PRE_DECREMENT
1169 if (data.reverse && ! data.autinc_to)
1171 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1173 data.explicit_inc_to = -1;
1176 #ifdef HAVE_POST_INCREMENT
1177 if (! data.reverse && ! data.autinc_to)
1179 data.to_addr = copy_addr_to_reg (to_addr);
1181 data.explicit_inc_to = 1;
1184 if (!data.autinc_to && CONSTANT_P (to_addr))
1185 data.to_addr = copy_addr_to_reg (to_addr);
/* When unaligned access is cheap (or alignment is big enough), we may
   use chunks up to MOVE_MAX regardless of ALIGN.  */
1188 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1189 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1192 /* First move what we can in the largest integer mode, then go to
1193 successively smaller modes. */
1195 while (max_size > 1)
1197 enum machine_mode mode = VOIDmode, tmode;
1198 enum insn_code icode;
/* Find the widest integer mode strictly narrower than max_size.  */
1200 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1201 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1202 if (GET_MODE_SIZE (tmode) < max_size)
1205 if (mode == VOIDmode)
1208 icode = mov_optab->handlers[(int) mode].insn_code;
1209 if (icode != CODE_FOR_nothing
1210 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1211 GET_MODE_SIZE (mode)))
1212 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1214 max_size = GET_MODE_SIZE (mode);
1217 /* The code above should have handled everything. */
1222 /* Return number of insns required to move L bytes by pieces.
1223 ALIGN (in bytes) is maximum alignment we can assume. */
/* Mirrors the mode-selection loop of move_by_pieces, but only counts
   insns instead of emitting them.  */
1226 move_by_pieces_ninsns (l, align)
1230 register int n_insns = 0;
1231 int max_size = MOVE_MAX + 1;
1233 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1234 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1237 while (max_size > 1)
1239 enum machine_mode mode = VOIDmode, tmode;
1240 enum insn_code icode;
/* Find the widest integer mode strictly narrower than max_size.  */
1242 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1243 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1244 if (GET_MODE_SIZE (tmode) < max_size)
1247 if (mode == VOIDmode)
1250 icode = mov_optab->handlers[(int) mode].insn_code;
1251 if (icode != CODE_FOR_nothing
1252 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1253 GET_MODE_SIZE (mode)))
/* Count whole chunks of this mode; the remainder falls to narrower modes.  */
1254 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1256 max_size = GET_MODE_SIZE (mode);
1262 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1263 with move instructions for mode MODE. GENFUN is the gen_... function
1264 to make a move insn for that mode. DATA has all the other info. */
1267 move_by_pieces_1 (genfun, mode, data)
1269 enum machine_mode mode;
1270 struct move_by_pieces *data;
1272 register int size = GET_MODE_SIZE (mode);
1273 register rtx to1, from1;
1275 while (data->len >= size)
1277 if (data->reverse) data->offset -= size;
/* Form the operand MEMs: reuse the auto-inc address directly, or add
   the current byte offset to the base address.  */
1279 to1 = (data->autinc_to
1280 ? gen_rtx (MEM, mode, data->to_addr)
1281 : change_address (data->to, mode,
1282 plus_constant (data->to_addr, data->offset)));
1285 ? gen_rtx (MEM, mode, data->from_addr)
1286 : change_address (data->from, mode,
1287 plus_constant (data->from_addr, data->offset)));
/* Explicit pre-decrement of the addresses, when requested.  */
1289 #ifdef HAVE_PRE_DECREMENT
1290 if (data->explicit_inc_to < 0)
1291 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1292 if (data->explicit_inc_from < 0)
1293 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1296 emit_insn ((*genfun) (to1, from1));
/* Explicit post-increment of the addresses, when requested.  */
1297 #ifdef HAVE_POST_INCREMENT
1298 if (data->explicit_inc_to > 0)
1299 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1300 if (data->explicit_inc_from > 0)
1301 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1304 if (! data->reverse) data->offset += size;
1310 /* Emit code to move a block Y to a block X.
1311 This may be done with string-move instructions,
1312 with multiple scalar move instructions, or with a library call.
1314 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1316 SIZE is an rtx that says how long they are.
1317 ALIGN is the maximum alignment we can assume they have,
1318 measured in bytes. */
1321 emit_block_move (x, y, size, align)
/* Sanity checks: both operands must be BLKmode MEMs (the abort calls
   are elided in this excerpt).  */
1326 if (GET_MODE (x) != BLKmode)
1329 if (GET_MODE (y) != BLKmode)
1332 x = protect_from_queue (x, 1);
1333 y = protect_from_queue (y, 0);
1334 size = protect_from_queue (size, 0);
1336 if (GET_CODE (x) != MEM)
1338 if (GET_CODE (y) != MEM)
/* Small constant-size copies: cheapest as individual scalar moves.  */
1343 if (GET_CODE (size) == CONST_INT
1344 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO)
1345 move_by_pieces (x, y, INTVAL (size), align);
1348 /* Try the most limited insn first, because there's no point
1349 including more than one in the machine description unless
1350 the more limited one has some advantage. */
1352 rtx opalign = GEN_INT (align);
1353 enum machine_mode mode;
1355 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1356 mode = GET_MODE_WIDER_MODE (mode))
1358 enum insn_code code = movstr_optab[(int) mode];
1360 if (code != CODE_FOR_nothing
1361 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1362 here because if SIZE is less than the mode mask, as it is
1363 returned by the macro, it will definitely be less than the
1364 actual mode mask. */
1365 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1366 && (insn_operand_predicate[(int) code][0] == 0
1367 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1368 && (insn_operand_predicate[(int) code][1] == 0
1369 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1370 && (insn_operand_predicate[(int) code][3] == 0
1371 || (*insn_operand_predicate[(int) code][3]) (opalign,
1375 rtx last = get_last_insn ();
/* The size operand must satisfy the pattern's predicate too.  */
1378 op2 = convert_to_mode (mode, size, 1);
1379 if (insn_operand_predicate[(int) code][2] != 0
1380 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1381 op2 = copy_to_mode_reg (mode, op2);
1383 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* Pattern expansion failed: discard any insns it emitted and try the
   next wider mode.  */
1390 delete_insns_since (last);
/* Last resort: call the C library (memcpy, or bcopy with swapped
   argument order — note y/x vs x/y below).  */
1394 #ifdef TARGET_MEM_FUNCTIONS
1395 emit_library_call (memcpy_libfunc, 0,
1396 VOIDmode, 3, XEXP (x, 0), Pmode,
1398 convert_to_mode (Pmode, size, 1), Pmode);
1400 emit_library_call (bcopy_libfunc, 0,
1401 VOIDmode, 3, XEXP (y, 0), Pmode,
1403 convert_to_mode (Pmode, size, 1), Pmode);
1408 /* Copy all or part of a value X into registers starting at REGNO.
1409 The number of registers to be filled is NREGS. */
1412 move_block_to_reg (regno, x, nregs, mode)
1416 enum machine_mode mode;
/* A constant that cannot be a move operand is forced into memory first.  */
1421 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1422 x = validize_mem (force_const_mem (mode, x));
1424 /* See if the machine can do this with a load multiple insn. */
1425 #ifdef HAVE_load_multiple
1426 last = get_last_insn ();
1427 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
/* Pattern generation failed: remove any insns emitted meanwhile.  */
1435 delete_insns_since (last);
/* Fall back: copy X one word at a time into consecutive hard registers.  */
1438 for (i = 0; i < nregs; i++)
1439 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1440 operand_subword_force (x, i, mode));
1443 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1444 The number of registers to be filled is NREGS. */
1447 move_block_from_reg (regno, x, nregs)
1455 /* See if the machine can do this with a store multiple insn. */
1456 #ifdef HAVE_store_multiple
1457 last = get_last_insn ();
1458 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
/* Pattern generation failed: remove any insns emitted meanwhile.  */
1466 delete_insns_since (last);
/* Fall back: store each register into the corresponding word of X.  */
1469 for (i = 0; i < nregs; i++)
1471 rtx tem = operand_subword (x, i, 1, BLKmode);
1476 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1480 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1483 use_regs (regno, nregs)
/* Emit a USE rtx for each register so data-flow analysis keeps it live.  */
1489 for (i = 0; i < nregs; i++)
1490 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1493 /* Mark the instructions since PREV as a libcall block.
1494 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1503 /* Find the instructions to mark */
/* NOTE(review): the conditional selecting between the next two assignments
   is elided in this listing; presumably it tests whether PREV is nonzero
   (block starts after PREV, else it is the whole insn chain so far).  */
1505 insn_first = NEXT_INSN (prev);
1507 insn_first = get_insns ();
1509 insn_last = get_last_insn ();
/* Cross-link the first and last insns of the group: the last insn gets a
   REG_RETVAL note pointing at the first, and the first a REG_LIBCALL note
   pointing at the last.  */
1511 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1512 REG_NOTES (insn_last));
1514 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1515 REG_NOTES (insn_first));
1518 /* Write zeros through the storage of OBJECT.
1519 If OBJECT has BLKmode, SIZE is its length in bytes. */
1522 clear_storage (object, size)
1526 if (GET_MODE (object) == BLKmode)
/* Block-mode object: clear it with a library call.  */
1528 #ifdef TARGET_MEM_FUNCTIONS
/* memset (addr, 0, size)  */
1529 emit_library_call (memset_libfunc, 0,
1531 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1532 GEN_INT (size), Pmode);
/* bzero (addr, size)  */
1534 emit_library_call (bzero_libfunc, 0,
1536 XEXP (object, 0), Pmode,
1537 GEN_INT (size), Pmode);
/* Scalar object: a single store of zero suffices.  */
1541 emit_move_insn (object, const0_rtx);
1544 /* Generate code to copy Y into X.
1545 Both Y and X must have the same mode, except that
1546 Y can be a constant with VOIDmode.
1547 This mode cannot be BLKmode; use emit_block_move for that.
1549 Return the last instruction emitted. */
1552 emit_move_insn (x, y)
1555 enum machine_mode mode = GET_MODE (x);
1556 enum machine_mode submode;
1557 enum mode_class class = GET_MODE_CLASS (mode);
/* Perform any queued autoincrements: X is written, Y only read.  */
1560 x = protect_from_queue (x, 1);
1561 y = protect_from_queue (y, 0);
/* Modes must agree, except that Y may be a VOIDmode constant;
   BLKmode moves belong to emit_block_move instead.  */
1563 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
/* A constant that cannot be a move operand is placed in memory.  */
1566 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1567 y = force_const_mem (mode, y);
1569 /* If X or Y are memory references, verify that their addresses are valid
1571 if (GET_CODE (x) == MEM
1572 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1573 && ! push_operand (x, GET_MODE (x)))
1575 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1576 x = change_address (x, VOIDmode, XEXP (x, 0));
1578 if (GET_CODE (y) == MEM
1579 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1581 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1582 y = change_address (y, VOIDmode, XEXP (y, 0));
1584 if (mode == BLKmode)
/* Operands are now validized; delegate the actual move.  */
1587 return emit_move_insn_1 (x, y);
1590 /* Low level part of emit_move_insn.
1591 Called just like emit_move_insn, but assumes X and Y
1592 are basically valid. */
1595 emit_move_insn_1 (x, y)
1598 enum machine_mode mode = GET_MODE (x);
1599 enum machine_mode submode;
1600 enum mode_class class = GET_MODE_CLASS (mode);
/* For complex modes, SUBMODE is the scalar mode of one component
   (real or imaginary part).  */
1603 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1604 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1605 (class == MODE_COMPLEX_INT
1606 ? MODE_INT : MODE_FLOAT),
/* Easy case: the target has a mov pattern for this whole mode.  */
1609 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1611 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1613 /* Expand complex moves by moving real part and imag part, if possible. */
1614 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1615 && submode != BLKmode
1616 && (mov_optab->handlers[(int) submode].insn_code
1617 != CODE_FOR_nothing))
1619 /* Don't split destination if it is a stack push. */
1620 int stack = push_operand (x, GET_MODE (x));
1621 rtx prev = get_last_insn ();
1623 /* Tell flow that the whole of the destination is being set. */
1624 if (GET_CODE (x) == REG)
1625 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1627 /* If this is a stack, push the highpart first, so it
1628 will be in the argument order.
1630 In that case, change_address is used only to convert
1631 the mode, not to change the address. */
1632 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1633 ((stack ? change_address (x, submode, (rtx) 0)
1634 : gen_highpart (submode, x)),
1635 gen_highpart (submode, y)));
1636 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1637 ((stack ? change_address (x, submode, (rtx) 0)
1638 : gen_lowpart (submode, x)),
1639 gen_lowpart (submode, y)));
1643 return get_last_insn ();
1646 /* This will handle any multi-word mode that lacks a move_insn pattern.
1647 However, you will get better code if you define such patterns,
1648 even if they must turn into multiple assembler instructions. */
1649 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1652 rtx prev_insn = get_last_insn ();
/* Move word by word; loop bound is the size of MODE rounded up to words.  */
1655 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1658 rtx xpart = operand_subword (x, i, 1, mode);
1659 rtx ypart = operand_subword (y, i, 1, mode);
1661 /* If we can't get a part of Y, put Y into memory if it is a
1662 constant. Otherwise, force it into a register. If we still
1663 can't get a part of Y, abort. */
1664 if (ypart == 0 && CONSTANT_P (y))
1666 y = force_const_mem (mode, y);
1667 ypart = operand_subword (y, i, 1, mode);
1669 else if (ypart == 0)
1670 ypart = operand_subword_force (y, i, mode);
1672 if (xpart == 0 || ypart == 0)
1675 last_insn = emit_move_insn (xpart, ypart);
1677 /* Mark these insns as a libcall block. */
1678 group_insns (prev_insn);
1686 /* Pushing data onto the stack. */
1688 /* Push a block of length SIZE (perhaps variable)
1689 and return an rtx to address the beginning of the block.
1690 Note that it is not possible for the value returned to be a QUEUED.
1691 The value may be virtual_outgoing_args_rtx.
1693 EXTRA is the number of bytes of padding to push in addition to SIZE.
1694 BELOW nonzero means this padding comes at low addresses;
1695 otherwise, the padding comes at high addresses. */
1698 push_block (size, extra, below)
/* First grow the stack by SIZE + EXTRA bytes.  */
1703 if (CONSTANT_P (size))
1704 anti_adjust_stack (plus_constant (size, extra));
1705 else if (GET_CODE (size) == REG && extra == 0)
1706 anti_adjust_stack (size);
/* Variable size with padding: compute SIZE + EXTRA into a register.  */
1709 rtx temp = copy_to_mode_reg (Pmode, size);
1711 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1712 temp, 0, OPTAB_LIB_WIDEN);
1713 anti_adjust_stack (temp);
1716 #ifdef STACK_GROWS_DOWNWARD
/* Stack grows downward: the block starts at the outgoing-args pointer,
   skipping low-address padding if requested.  */
1717 temp = virtual_outgoing_args_rtx;
1718 if (extra != 0 && below)
1719 temp = plus_constant (temp, extra);
/* Stack grows upward: block starts SIZE (plus any high padding)
   below the outgoing-args pointer.  */
1721 if (GET_CODE (size) == CONST_INT
1722 temp = plus_constant (virtual_outgoing_args_rtx,
1723 - INTVAL (size) - (below ? 0 : extra));
1724 else if (extra != 0 && !below)
1725 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1726 negate_rtx (Pmode, plus_constant (size, extra)));
1728 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1729 negate_rtx (Pmode, size));
/* Return a legitimized address for the start of the block.  */
1732 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1738 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1741 /* Generate code to push X onto the stack, assuming it has mode MODE and
1743 MODE is redundant except when X is a CONST_INT (since they don't
1745 SIZE is an rtx for the size of data to be copied (in bytes),
1746 needed only if X is BLKmode.
1748 ALIGN (in bytes) is maximum alignment we can assume.
1750 If PARTIAL and REG are both nonzero, then copy that many of the first
1751 words of X into registers starting with REG, and push the rest of X.
1752 The amount of space pushed is decreased by PARTIAL words,
1753 rounded *down* to a multiple of PARM_BOUNDARY.
1754 REG must be a hard register in this case.
1755 If REG is zero but PARTIAL is not, take all other actions for an
1756 argument partially in registers, but do not actually load any
1759 EXTRA is the amount in bytes of extra space to leave next to this arg.
1760 This is ignored if an argument block has already been allocated.
1762 On a machine that lacks real push insns, ARGS_ADDR is the address of
1763 the bottom of the argument block for this call. We use indexing off there
1764 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1765 argument block has not been preallocated.
1767 ARGS_SO_FAR is the size of args previously pushed for this call. */
1770 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1771 args_addr, args_so_far)
1773 enum machine_mode mode;
/* Which end of the arg the target pads by default (depends on stack
   growth direction).  */
1784 enum direction stack_direction
1785 #ifdef STACK_GROWS_DOWNWARD
1791 /* Decide where to pad the argument: `downward' for below,
1792 `upward' for above, or `none' for don't pad it.
1793 Default is below for small data on big-endian machines; else above. */
1794 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1796 /* Invert direction if stack is post-update. */
1797 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1798 if (where_pad != none)
1799 where_pad = (where_pad == downward ? upward : downward);
/* Perform pending autoincrements on X before using it.  */
1801 xinner = x = protect_from_queue (x, 0);
1803 if (mode == BLKmode)
1805 /* Copy a block into the stack, entirely or partially. */
/* USED = bytes of X covered by registers; OFFSET = its misalignment
   relative to the parameter boundary.  */
1808 int used = partial * UNITS_PER_WORD;
1809 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1817 /* USED is now the # of bytes we need not copy to the stack
1818 because registers will take care of them. */
/* Skip the register-covered prefix of the source block.  */
1821 xinner = change_address (xinner, BLKmode,
1822 plus_constant (XEXP (xinner, 0), used));
1824 /* If the partial register-part of the arg counts in its stack size,
1825 skip the part of stack space corresponding to the registers.
1826 Otherwise, start copying to the beginning of the stack space,
1827 by setting SKIP to 0. */
1828 #ifndef REG_PARM_STACK_SPACE
1834 #ifdef PUSH_ROUNDING
1835 /* Do it with several push insns if that doesn't take lots of insns
1836 and if there is no difficulty with push insns that skip bytes
1837 on the stack for alignment purposes. */
1839 && GET_CODE (size) == CONST_INT
1841 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1843 /* Here we avoid the case of a structure whose weak alignment
1844 forces many pushes of a small amount of data,
1845 and such small pushes do rounding that causes trouble. */
1846 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1847 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1848 || PUSH_ROUNDING (align) == align)
1849 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1851 /* Push padding now if padding above and stack grows down,
1852 or if padding below and stack grows up.
1853 But if space already allocated, this has already been done. */
1854 if (extra && args_addr == 0
1855 && where_pad != none && where_pad != stack_direction)
1856 anti_adjust_stack (GEN_INT (extra));
/* Emit the data as a sequence of push insns.  */
1858 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1859 INTVAL (size) - used, align);
1862 #endif /* PUSH_ROUNDING */
1864 /* Otherwise make space on the stack and copy the data
1865 to the address of that space. */
1867 /* Deduct words put into registers from the size we must copy. */
1870 if (GET_CODE (size) == CONST_INT)
1871 size = GEN_INT (INTVAL (size) - used);
1873 size = expand_binop (GET_MODE (size), sub_optab, size,
1874 GEN_INT (used), NULL_RTX, 0,
1878 /* Get the address of the stack space.
1879 In this case, we do not deal with EXTRA separately.
1880 A single stack adjust will do. */
1883 temp = push_block (size, extra, where_pad == downward);
/* Space was preallocated: compute the slot address from ARGS_ADDR.  */
1886 else if (GET_CODE (args_so_far) == CONST_INT)
1887 temp = memory_address (BLKmode,
1888 plus_constant (args_addr,
1889 skip + INTVAL (args_so_far)));
1891 temp = memory_address (BLKmode,
1892 plus_constant (gen_rtx (PLUS, Pmode,
1893 args_addr, args_so_far),
1896 /* TEMP is the address of the block. Copy the data there. */
1897 if (GET_CODE (size) == CONST_INT
1898 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1901 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1902 INTVAL (size), align);
1905 /* Try the most limited insn first, because there's no point
1906 including more than one in the machine description unless
1907 the more limited one has some advantage. */
1908 #ifdef HAVE_movstrqi
1910 && GET_CODE (size) == CONST_INT
1911 && ((unsigned) INTVAL (size)
1912 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1914 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1915 xinner, size, GEN_INT (align)));
1919 #ifdef HAVE_movstrhi
1921 && GET_CODE (size) == CONST_INT
1922 && ((unsigned) INTVAL (size)
1923 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1925 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1926 xinner, size, GEN_INT (align)));
1930 #ifdef HAVE_movstrsi
1933 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1934 xinner, size, GEN_INT (align)));
1938 #ifdef HAVE_movstrdi
1941 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1942 xinner, size, GEN_INT (align)));
1947 #ifndef ACCUMULATE_OUTGOING_ARGS
1948 /* If the source is referenced relative to the stack pointer,
1949 copy it to another register to stabilize it. We do not need
1950 to do this if we know that we won't be changing sp. */
1952 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1953 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1954 temp = copy_to_reg (temp);
1957 /* Make inhibit_defer_pop nonzero around the library call
1958 to force it to pop the bcopy-arguments right away. */
1960 #ifdef TARGET_MEM_FUNCTIONS
1961 emit_library_call (memcpy_libfunc, 0,
1962 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
/* bcopy takes (src, dst), the reverse of memcpy's (dst, src).  */
1965 emit_library_call (bcopy_libfunc, 0,
1966 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1972 else if (partial > 0)
1974 /* Scalar partly in registers. */
1976 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1979 /* # words of start of argument
1980 that we must make space for but need not store. */
1981 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1982 int args_offset = INTVAL (args_so_far);
1985 /* Push padding now if padding above and stack grows down,
1986 or if padding below and stack grows up.
1987 But if space already allocated, this has already been done. */
1988 if (extra && args_addr == 0
1989 && where_pad != none && where_pad != stack_direction)
1990 anti_adjust_stack (GEN_INT (extra));
1992 /* If we make space by pushing it, we might as well push
1993 the real data. Otherwise, we can leave OFFSET nonzero
1994 and leave the space uninitialized. */
1998 /* Now NOT_STACK gets the number of words that we don't need to
1999 allocate on the stack. */
2000 not_stack = partial - offset;
2002 /* If the partial register-part of the arg counts in its stack size,
2003 skip the part of stack space corresponding to the registers.
2004 Otherwise, start copying to the beginning of the stack space,
2005 by setting SKIP to 0. */
2006 #ifndef REG_PARM_STACK_SPACE
/* A constant that cannot be a move operand goes into memory.  */
2012 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2013 x = validize_mem (force_const_mem (mode, x));
2015 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2016 SUBREGs of such registers are not allowed. */
2017 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2018 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2019 x = copy_to_reg (x);
2021 /* Loop over all the words allocated on the stack for this arg. */
2022 /* We can do it by words, because any scalar bigger than a word
2023 has a size a multiple of a word. */
2024 #ifndef PUSH_ARGS_REVERSED
2025 for (i = not_stack; i < size; i++)
2027 for (i = size - 1; i >= not_stack; i--)
2029 if (i >= not_stack + offset)
/* Recurse to push one word of X at its stack slot.  */
2030 emit_push_insn (operand_subword_force (x, i, mode),
2031 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2033 GEN_INT (args_offset + ((i - not_stack + skip)
2034 * UNITS_PER_WORD)));
2040 /* Push padding now if padding above and stack grows down,
2041 or if padding below and stack grows up.
2042 But if space already allocated, this has already been done. */
2043 if (extra && args_addr == 0
2044 && where_pad != none && where_pad != stack_direction)
2045 anti_adjust_stack (GEN_INT (extra));
2047 #ifdef PUSH_ROUNDING
2049 addr = gen_push_operand ();
2052 if (GET_CODE (args_so_far) == CONST_INT)
2054 = memory_address (mode,
2055 plus_constant (args_addr, INTVAL (args_so_far)));
2057 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2060 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2064 /* If part should go in registers, copy that part
2065 into the appropriate registers. Do this now, at the end,
2066 since mem-to-mem copies above may do function calls. */
2067 if (partial > 0 && reg != 0)
2068 move_block_to_reg (REGNO (reg), x, partial, mode);
/* Padding on the same side as stack growth is pushed last.  */
2070 if (extra && args_addr == 0 && where_pad == stack_direction)
2071 anti_adjust_stack (GEN_INT (extra));
2074 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2075 (emitting the queue unless NO_QUEUE is nonzero),
2076 for a value of mode OUTMODE,
2077 with NARGS different arguments, passed as alternating rtx values
2078 and machine_modes to convert them to.
2079 The rtx values should have been passed through protect_from_queue already.
2081 NO_QUEUE will be true if and only if the library call is a `const' call
2082 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2083 to the variable is_const in expand_call.
2085 NO_QUEUE must be true for const calls, because if it isn't, then
2086 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2087 and will be lost if the libcall sequence is optimized away.
2089 NO_QUEUE must be false for non-const calls, because if it isn't, the
2090 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2091 optimized. For instance, the instruction scheduler may incorrectly
2092 move memory references across the non-const call. */
2095 emit_library_call (va_alist)
2099 /* Total size in bytes of all the stack-parms scanned so far. */
2100 struct args_size args_size;
2101 /* Size of arguments before any adjustments (such as rounding). */
2102 struct args_size original_args_size;
2103 register int argnum;
2104 enum machine_mode outmode;
2111 CUMULATIVE_ARGS args_so_far;
2112 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2113 struct args_size offset; struct args_size size; };
2115 int old_inhibit_defer_pop = inhibit_defer_pop;
/* Fetch the fixed leading varargs: function, queue flag, mode, arg count.  */
2120 orgfun = fun = va_arg (p, rtx);
2121 no_queue = va_arg (p, int);
2122 outmode = va_arg (p, enum machine_mode);
2123 nargs = va_arg (p, int);
2125 /* Copy all the libcall-arguments out of the varargs data
2126 and into a vector ARGVEC.
2128 Compute how to pass each argument. We only support a very small subset
2129 of the full argument passing conventions to limit complexity here since
2130 library functions shouldn't have many args. */
2132 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2134 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2136 args_size.constant = 0;
/* First pass: record each argument and compute how it will be passed.  */
2139 for (count = 0; count < nargs; count++)
2141 rtx val = va_arg (p, rtx);
2142 enum machine_mode mode = va_arg (p, enum machine_mode);
2144 /* We cannot convert the arg value to the mode the library wants here;
2145 must do it earlier where we know the signedness of the arg. */
2147 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2150 /* On some machines, there's no way to pass a float to a library fcn.
2151 Pass it as a double instead. */
2152 #ifdef LIBGCC_NEEDS_DOUBLE
2153 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2154 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2157 /* There's no need to call protect_from_queue, because
2158 either emit_move_insn or emit_push_insn will do that. */
2160 /* Make sure it is a reasonable operand for a move or push insn. */
2161 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2162 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2163 val = force_operand (val, NULL_RTX);
2165 argvec[count].value = val;
2166 argvec[count].mode = mode;
2168 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2169 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2173 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2174 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2176 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2177 argvec[count].partial
2178 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2180 argvec[count].partial = 0;
2183 locate_and_pad_parm (mode, NULL_TREE,
2184 argvec[count].reg && argvec[count].partial == 0,
2185 NULL_TREE, &args_size, &argvec[count].offset,
2186 &argvec[count].size);
2188 if (argvec[count].size.var)
2191 #ifndef REG_PARM_STACK_SPACE
2192 if (argvec[count].partial)
2193 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2196 if (argvec[count].reg == 0 || argvec[count].partial != 0
2197 #ifdef REG_PARM_STACK_SPACE
2201 args_size.constant += argvec[count].size.constant;
2203 #ifdef ACCUMULATE_OUTGOING_ARGS
2204 /* If this arg is actually passed on the stack, it might be
2205 clobbering something we already put there (this library call might
2206 be inside the evaluation of an argument to a function whose call
2207 requires the stack). This will only occur when the library call
2208 has sufficient args to run out of argument registers. Abort in
2209 this case; if this ever occurs, code must be added to save and
2210 restore the arg slot. */
2212 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2216 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2220 /* If this machine requires an external definition for library
2221 functions, write one out. */
2222 assemble_external_libcall (fun);
2224 original_args_size = args_size;
2225 #ifdef STACK_BOUNDARY
/* Round total stack usage up to a multiple of the stack boundary.  */
2226 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2227 / STACK_BYTES) * STACK_BYTES);
2230 #ifdef REG_PARM_STACK_SPACE
2231 args_size.constant = MAX (args_size.constant,
2232 REG_PARM_STACK_SPACE (NULL_TREE));
2233 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2234 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2238 #ifdef ACCUMULATE_OUTGOING_ARGS
2239 if (args_size.constant > current_function_outgoing_args_size)
2240 current_function_outgoing_args_size = args_size.constant;
2241 args_size.constant = 0;
2244 #ifndef PUSH_ROUNDING
2245 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2248 #ifdef PUSH_ARGS_REVERSED
2249 #ifdef STACK_BOUNDARY
2250 /* If we push args individually in reverse order, perform stack alignment
2251 before the first push (the last arg). */
2253 anti_adjust_stack (GEN_INT (args_size.constant
2254 - original_args_size.constant));
2258 #ifdef PUSH_ARGS_REVERSED
2266 /* Push the args that need to be pushed. */
2268 for (count = 0; count < nargs; count++, argnum += inc)
2270 register enum machine_mode mode = argvec[argnum].mode;
2271 register rtx val = argvec[argnum].value;
2272 rtx reg = argvec[argnum].reg;
2273 int partial = argvec[argnum].partial;
2275 if (! (reg != 0 && partial == 0))
/* Fixed: the stack offset must be that of the argument being pushed,
   ARGVEC[ARGNUM], not ARGVEC[COUNT].  With PUSH_ARGS_REVERSED, ARGNUM
   counts down while COUNT counts up, so indexing by COUNT pushed each
   argument at a different argument's offset.  */
2276 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2277 argblock, GEN_INT (argvec[argnum].offset.constant));
2281 #ifndef PUSH_ARGS_REVERSED
2282 #ifdef STACK_BOUNDARY
2283 /* If we pushed args in forward order, perform stack alignment
2284 after pushing the last arg. */
2286 anti_adjust_stack (GEN_INT (args_size.constant
2287 - original_args_size.constant));
2291 #ifdef PUSH_ARGS_REVERSED
2297 /* Now load any reg parms into their regs. */
2299 for (count = 0; count < nargs; count++, argnum += inc)
2301 register enum machine_mode mode = argvec[argnum].mode;
2302 register rtx val = argvec[argnum].value;
2303 rtx reg = argvec[argnum].reg;
2304 int partial = argvec[argnum].partial;
2306 if (reg != 0 && partial == 0)
2307 emit_move_insn (reg, val);
2311 /* For version 1.37, try deleting this entirely. */
2315 /* Any regs containing parms remain in use through the call. */
2317 for (count = 0; count < nargs; count++)
2318 if (argvec[count].reg != 0)
2319 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2321 use_insns = get_insns ();
2324 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2326 /* Don't allow popping to be deferred, since then
2327 cse'ing of library calls could delete a call and leave the pop. */
2330 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2331 will set inhibit_defer_pop to that value. */
2333 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2334 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2335 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2336 old_inhibit_defer_pop + 1, use_insns, no_queue);
2338 /* Now restore inhibit_defer_pop to its actual original value. */
2342 /* Like emit_library_call except that an extra argument, VALUE,
2343 comes second and says where to store the result.
2344 (If VALUE is zero, the result comes in the function value register.) */
2347 emit_library_call_value (va_alist)
2351 /* Total size in bytes of all the stack-parms scanned so far. */
2352 struct args_size args_size;
2353 /* Size of arguments before any adjustments (such as rounding). */
2354 struct args_size original_args_size;
2355 register int argnum;
2356 enum machine_mode outmode;
2363 CUMULATIVE_ARGS args_so_far;
2364 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2365 struct args_size offset; struct args_size size; };
2367 int old_inhibit_defer_pop = inhibit_defer_pop;
2374 orgfun = fun = va_arg (p, rtx);
2375 value = va_arg (p, rtx);
2376 no_queue = va_arg (p, int);
2377 outmode = va_arg (p, enum machine_mode);
2378 nargs = va_arg (p, int);
2380 /* If this kind of value comes back in memory,
2381 decide where in memory it should come back. */
2382 if (RETURN_IN_MEMORY (type_for_mode (outmode, 0)))
2384 if (GET_CODE (value) == MEM)
2387 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2390 /* ??? Unfinished: must pass the memory address as an argument. */
2392 /* Copy all the libcall-arguments out of the varargs data
2393 and into a vector ARGVEC.
2395 Compute how to pass each argument. We only support a very small subset
2396 of the full argument passing conventions to limit complexity here since
2397 library functions shouldn't have many args. */
2399 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2401 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2403 args_size.constant = 0;
2408 /* If there's a structure value address to be passed,
2409 either pass it in the special place, or pass it as an extra argument. */
2412 rtx addr = XEXP (mem_value, 0);
2414 if (! struct_value_rtx)
2418 /* Make sure it is a reasonable operand for a move or push insn. */
2419 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2420 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2421 addr = force_operand (addr, NULL_RTX);
2423 argvec[count].value = addr;
2424 argvec[count].mode = outmode;
2425 argvec[count].partial = 0;
2427 argvec[count].reg = FUNCTION_ARG (args_so_far, outmode, NULL_TREE, 1);
2428 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2429 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, outmode, NULL_TREE, 1))
2433 locate_and_pad_parm (outmode, NULL_TREE,
2434 argvec[count].reg && argvec[count].partial == 0,
2435 NULL_TREE, &args_size, &argvec[count].offset,
2436 &argvec[count].size);
2439 if (argvec[count].reg == 0 || argvec[count].partial != 0
2440 #ifdef REG_PARM_STACK_SPACE
2444 args_size.constant += argvec[count].size.constant;
2446 FUNCTION_ARG_ADVANCE (args_so_far, outmode, (tree)0, 1);
2450 for (; count < nargs; count++)
2452 rtx val = va_arg (p, rtx);
2453 enum machine_mode mode = va_arg (p, enum machine_mode);
2455 /* We cannot convert the arg value to the mode the library wants here;
2456 must do it earlier where we know the signedness of the arg. */
2458 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2461 /* On some machines, there's no way to pass a float to a library fcn.
2462 Pass it as a double instead. */
2463 #ifdef LIBGCC_NEEDS_DOUBLE
2464 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2465 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2468 /* There's no need to call protect_from_queue, because
2469 either emit_move_insn or emit_push_insn will do that. */
2471 /* Make sure it is a reasonable operand for a move or push insn. */
2472 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2473 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2474 val = force_operand (val, NULL_RTX);
2476 argvec[count].value = val;
2477 argvec[count].mode = mode;
2479 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2480 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2484 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2485 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2487 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2488 argvec[count].partial
2489 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2491 argvec[count].partial = 0;
2494 locate_and_pad_parm (mode, NULL_TREE,
2495 argvec[count].reg && argvec[count].partial == 0,
2496 NULL_TREE, &args_size, &argvec[count].offset,
2497 &argvec[count].size);
2499 if (argvec[count].size.var)
2502 #ifndef REG_PARM_STACK_SPACE
2503 if (argvec[count].partial)
2504 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2507 if (argvec[count].reg == 0 || argvec[count].partial != 0
2508 #ifdef REG_PARM_STACK_SPACE
2512 args_size.constant += argvec[count].size.constant;
2514 #ifdef ACCUMULATE_OUTGOING_ARGS
2515 /* If this arg is actually passed on the stack, it might be
2516 clobbering something we already put there (this library call might
2517 be inside the evaluation of an argument to a function whose call
2518 requires the stack). This will only occur when the library call
2519 has sufficient args to run out of argument registers. Abort in
2520 this case; if this ever occurs, code must be added to save and
2521 restore the arg slot. */
2523 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2527 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2531 /* If this machine requires an external definition for library
2532 functions, write one out. */
2533 assemble_external_libcall (fun);
2535 original_args_size = args_size;
2536 #ifdef STACK_BOUNDARY
2537 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2538 / STACK_BYTES) * STACK_BYTES);
2541 #ifdef REG_PARM_STACK_SPACE
2542 args_size.constant = MAX (args_size.constant,
2543 REG_PARM_STACK_SPACE (NULL_TREE));
2544 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2545 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2549 #ifdef ACCUMULATE_OUTGOING_ARGS
2550 if (args_size.constant > current_function_outgoing_args_size)
2551 current_function_outgoing_args_size = args_size.constant;
2552 args_size.constant = 0;
2555 #ifndef PUSH_ROUNDING
2556 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2559 #ifdef PUSH_ARGS_REVERSED
2560 #ifdef STACK_BOUNDARY
2561 /* If we push args individually in reverse order, perform stack alignment
2562 before the first push (the last arg). */
2564 anti_adjust_stack (GEN_INT (args_size.constant
2565 - original_args_size.constant));
2569 #ifdef PUSH_ARGS_REVERSED
2577 /* Push the args that need to be pushed. */
2579 for (count = 0; count < nargs; count++, argnum += inc)
2581 register enum machine_mode mode = argvec[argnum].mode;
2582 register rtx val = argvec[argnum].value;
2583 rtx reg = argvec[argnum].reg;
2584 int partial = argvec[argnum].partial;
2586 if (! (reg != 0 && partial == 0))
2587 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2588 argblock, GEN_INT (argvec[count].offset.constant));
2592 #ifndef PUSH_ARGS_REVERSED
2593 #ifdef STACK_BOUNDARY
2594 /* If we pushed args in forward order, perform stack alignment
2595 after pushing the last arg. */
2597 anti_adjust_stack (GEN_INT (args_size.constant
2598 - original_args_size.constant));
2602 #ifdef PUSH_ARGS_REVERSED
2608 /* Now load any reg parms into their regs. */
2610 if (mem_value != 0 && struct_value_rtx != 0)
2611 emit_move_insn (struct_value_rtx, XEXP (mem_value, 0));
2613 for (count = 0; count < nargs; count++, argnum += inc)
2615 register enum machine_mode mode = argvec[argnum].mode;
2616 register rtx val = argvec[argnum].value;
2617 rtx reg = argvec[argnum].reg;
2618 int partial = argvec[argnum].partial;
2620 if (reg != 0 && partial == 0)
2621 emit_move_insn (reg, val);
2626 /* For version 1.37, try deleting this entirely. */
2631 /* Any regs containing parms remain in use through the call. */
2633 for (count = 0; count < nargs; count++)
2634 if (argvec[count].reg != 0)
2635 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2637 use_insns = get_insns ();
2640 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2642 /* Don't allow popping to be deferred, since then
2643 cse'ing of library calls could delete a call and leave the pop. */
2646 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2647 will set inhibit_defer_pop to that value. */
2649 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2650 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2651 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2652 old_inhibit_defer_pop + 1, use_insns, no_queue);
2654 /* Now restore inhibit_defer_pop to its actual original value. */
2657 /* Copy the value to the right place. */
2658 if (outmode != VOIDmode)
2663 value = hard_libcall_value (outmode);
2664 if (value != mem_value)
2665 emit_move_insn (value, mem_value);
2667 else if (value != 0)
2668 emit_move_insn (value, hard_libcall_value (outmode));
2672 /* Expand an assignment that stores the value of FROM into TO.
2673 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2674 (This may contain a QUEUED rtx.)
2675 Otherwise, the returned value is not meaningful.
2677 SUGGEST_REG is no longer actually used.
2678 It used to mean, copy the value through a register
2679 and return that register, if that is possible.
2680 But now we do this if WANT_VALUE.
2682 If the value stored is a constant, we return the constant. */
/* NOTE(review): this span is a fragmentary extraction — the embedded
   original line numbers jump (e.g. 2685 -> 2690), so declarations,
   braces, and statements are missing.  Code is left byte-identical;
   only comments were added.  Purpose (from the doc comment above):
   expand an assignment storing FROM into TO, returning an rtx for
   TO's value when WANT_VALUE is nonzero.  */
2685 expand_assignment (to, from, want_value, suggest_reg)
2690   register rtx to_rtx = 0;
2693   /* Don't crash if the lhs of the assignment was erroneous. */
2695   if (TREE_CODE (to) == ERROR_MARK)
2696     return expand_expr (from, NULL_RTX, VOIDmode, 0);
2698   /* Assignment of a structure component needs special treatment
2699      if the structure component's rtx is not simply a MEM.
2700      Assignment of an array element at a constant index
2701      has the same problem. */
2703   if (TREE_CODE (to) == COMPONENT_REF
2704       || TREE_CODE (to) == BIT_FIELD_REF
2705       || (TREE_CODE (to) == ARRAY_REF
2706 	  && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2707 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2709       enum machine_mode mode1;
2715       tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2716 				      &mode1, &unsignedp, &volatilep);
2718       /* If we are going to use store_bit_field and extract_bit_field,
2719 	 make sure to_rtx will be safe for multiple use. */
2721       if (mode1 == VOIDmode && want_value)
2722 	tem = stabilize_reference (tem);
2724       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2727 	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2729 	  if (GET_CODE (to_rtx) != MEM)
2731 	  to_rtx = change_address (to_rtx, VOIDmode,
2732 				   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2733 					    force_reg (Pmode, offset_rtx)));
2737 	  if (GET_CODE (to_rtx) == MEM)
2738 	    MEM_VOLATILE_P (to_rtx) = 1;
2739 #if 0  /* This was turned off because, when a field is volatile
2740 	  in an object which is not volatile, the object may be in a register,
2741 	  and then we would abort over here. */
2747       result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2749 			    /* Spurious cast makes HPUX compiler happy. */
2750 			    ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2753 			    /* Required alignment of containing datum.  */
2754 			    TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2755 			    int_size_in_bytes (TREE_TYPE (tem)));
2756       preserve_temp_slots (result);
2762   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
2763      Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
2766     to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2768   /* In case we are returning the contents of an object which overlaps
2769      the place the value is being stored, use a safe function when copying
2770      a value through a pointer into a structure value return block.  */
2771   if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2772       && current_function_returns_struct
2773       && !current_function_returns_pcc_struct)
2775       rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2776       rtx size = expr_size (from);
2778 #ifdef TARGET_MEM_FUNCTIONS
2779       emit_library_call (memcpy_libfunc, 0,
2780 			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2781 			 XEXP (from_rtx, 0), Pmode,
     /* NOTE(review): bcopy takes (src, dst) while memcpy takes (dst, src) —
        hence the swapped first two operands in the #else branch below.  */
2784       emit_library_call (bcopy_libfunc, 0,
2785 			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2786 			 XEXP (to_rtx, 0), Pmode,
2790       preserve_temp_slots (to_rtx);
2795   /* Compute FROM and store the value in the rtx we got.  */
2797   result = store_expr (from, to_rtx, want_value);
2798   preserve_temp_slots (result);
2803 /* Generate code for computing expression EXP,
2804 and storing the value into TARGET.
2805 Returns TARGET or an equivalent value.
2806 TARGET may contain a QUEUED rtx.
2808 If SUGGEST_REG is nonzero, copy the value through a register
2809 and return that register, if that is possible.
2811 If the value stored is a constant, we return the constant. */
/* NOTE(review): fragmentary extraction — embedded original line numbers
   jump, so parts of this function are missing from view.  Code left
   byte-identical; comments only.  Purpose (from the doc comment above):
   compute EXP and store its value into TARGET, returning TARGET or an
   equivalent value.  */
2814 store_expr (exp, target, suggest_reg)
2816      register rtx target;
2820   int dont_return_target = 0;
2822   if (TREE_CODE (exp) == COMPOUND_EXPR)
2824       /* Perform first part of compound expression, then assign from second
2826       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2828       return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2830   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2832       /* For conditional expression, get safe form of the target.  Then
2833 	 test the condition, doing the appropriate assignment on either
2834 	 side.  This avoids the creation of unnecessary temporaries.
2835 	 For non-BLKmode, it is more efficient not to do this.  */
2837       rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2840       target = protect_from_queue (target, 1);
2843       jumpifnot (TREE_OPERAND (exp, 0), lab1);
2844       store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2846       emit_jump_insn (gen_jump (lab2));
2849       store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2855   else if (suggest_reg && GET_CODE (target) == MEM
2856 	   && GET_MODE (target) != BLKmode)
2857     /* If target is in memory and caller wants value in a register instead,
2858        arrange that.  Pass TARGET as target for expand_expr so that,
2859        if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2860        We know expand_expr will not use the target in that case.  */
2862       temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2863 			  GET_MODE (target), 0);
2864       if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2865 	temp = copy_to_reg (temp);
2866       dont_return_target = 1;
2868   else if (queued_subexp_p (target))
2869     /* If target contains a postincrement, it is not safe
2870        to use as the returned value.  It would access the wrong
2871        place by the time the queued increment gets output.
2872        So copy the value through a temporary and use that temp
2875       if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2877 	  /* Expand EXP into a new pseudo.  */
2878 	  temp = gen_reg_rtx (GET_MODE (target));
2879 	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
2882 	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2883       dont_return_target = 1;
2885   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2886     /* If this is a scalar in a register that is stored in a wider mode
2887        than the declared mode, compute the result into its declared mode
2888        and then convert to the wider mode.  Our value is the computed
2891       temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2892       convert_move (SUBREG_REG (target), temp,
2893 		    SUBREG_PROMOTED_UNSIGNED_P (target));
2898       temp = expand_expr (exp, target, GET_MODE (target), 0);
2899       /* DO return TARGET if it's a specified hardware register.
2900 	 expand_return relies on this.  */
2901       if (!(target && GET_CODE (target) == REG
2902 	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
2903 	  && CONSTANT_P (temp))
2904 	dont_return_target = 1;
2907   /* If value was not generated in the target, store it there.
2908      Convert the value to TARGET's type first if nec.  */
2910   if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2912       target = protect_from_queue (target, 1);
2913       if (GET_MODE (temp) != GET_MODE (target)
2914 	  && GET_MODE (temp) != VOIDmode)
2916 	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2917 	  if (dont_return_target)
2919 	      /* In this case, we will return TEMP,
2920 		 so make sure it has the proper mode.
2921 		 But don't forget to store the value into TARGET.  */
2922 	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2923 	      emit_move_insn (target, temp);
2926 	    convert_move (target, temp, unsignedp);
2929       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2931 	  /* Handle copying a string constant into an array.
2932 	     The string constant may be shorter than the array.
2933 	     So copy just the string's actual length, and clear the rest.  */
2936 	  /* Get the size of the data type of the string,
2937 	     which is actually the size of the target.  */
2938 	  size = expr_size (exp);
2939 	  if (GET_CODE (size) == CONST_INT
2940 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
2941 	    emit_block_move (target, temp, size,
2942 			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2945 	      /* Compute the size of the data to copy from the string.  */
2947 		= fold (build (MIN_EXPR, sizetype,
2948 			       size_binop (CEIL_DIV_EXPR,
2949 					   TYPE_SIZE (TREE_TYPE (exp)),
2950 					   size_int (BITS_PER_UNIT)),
2952 			       build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2953 	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2957 	      /* Copy that much.  */
2958 	      emit_block_move (target, temp, copy_size_rtx,
2959 			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2961 	      /* Figure out how much is left in TARGET
2962 		 that we have to clear.  */
2963 	      if (GET_CODE (copy_size_rtx) == CONST_INT)
2965 		  temp = plus_constant (XEXP (target, 0),
2966 					TREE_STRING_LENGTH (exp));
2967 		  size = plus_constant (size,
2968 					- TREE_STRING_LENGTH (exp));
2972 		  enum machine_mode size_mode = Pmode;
2974 		  temp = force_reg (Pmode, XEXP (target, 0));
2975 		  temp = expand_binop (size_mode, add_optab, temp,
2976 				       copy_size_rtx, NULL_RTX, 0,
2979 		  size = expand_binop (size_mode, sub_optab, size,
2980 				       copy_size_rtx, NULL_RTX, 0,
     /* NOTE(review): skip the clearing code when the remaining size is
        negative or zero — the branch below jumps past the library call.  */
2983 		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2984 				 GET_MODE (size), 0, 0);
2985 		  label = gen_label_rtx ();
2986 		  emit_jump_insn (gen_blt (label));
2989 	      if (size != const0_rtx)
2991 #ifdef TARGET_MEM_FUNCTIONS
2992 		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2993 				     temp, Pmode, const0_rtx, Pmode, size, Pmode);
2995 		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2996 				     temp, Pmode, size, Pmode);
3003       else if (GET_MODE (temp) == BLKmode)
3004 	emit_block_move (target, temp, expr_size (exp),
3005 			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3007 	emit_move_insn (target, temp);
3009   if (dont_return_target)
3014 /* Store the value of constructor EXP into the rtx TARGET.
3015 TARGET is either a REG or a MEM. */
/* NOTE(review): fragmentary extraction — embedded original line numbers
   jump, so parts of this function are missing from view.  Code left
   byte-identical; comments only.  Purpose (from the doc comment above):
   store the value of constructor EXP into TARGET (a REG or a MEM).  */
3018 store_constructor (exp, target)
3022   tree type = TREE_TYPE (exp);
3024   /* We know our target cannot conflict, since safe_from_p has been called. */
3026   /* Don't try copying piece by piece into a hard register
3027      since that is vulnerable to being clobbered by EXP.
3028      Instead, construct in a pseudo register and then copy it all.  */
3029   if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3031       rtx temp = gen_reg_rtx (GET_MODE (target));
3032       store_constructor (exp, temp);
3033       emit_move_insn (target, temp);
3038   if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
3042       /* Inform later passes that the whole union value is dead.  */
3043       if (TREE_CODE (type) == UNION_TYPE)
3044 	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3046       /* If we are building a static constructor into a register,
3047 	 set the initial value as zero so we can fold the value into
3049       else if (GET_CODE (target) == REG && TREE_STATIC (exp))
3050 	emit_move_insn (target, const0_rtx);
3052       /* If the constructor has fewer fields than the structure,
3053 	 clear the whole structure first.  */
3054       else if (list_length (CONSTRUCTOR_ELTS (exp))
3055 	       != list_length (TYPE_FIELDS (type)))
3056 	clear_storage (target, int_size_in_bytes (type));
3058 	/* Inform later passes that the old value is dead.  */
3059 	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3061       /* Store each element of the constructor into
3062 	 the corresponding field of TARGET.  */
3064       for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3066 	  register tree field = TREE_PURPOSE (elt);
3067 	  register enum machine_mode mode;
3072 	  /* Just ignore missing fields.
3073 	     We cleared the whole structure, above,
3074 	     if any fields are missing.  */
3078 	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3079 	  unsignedp = TREE_UNSIGNED (field);
3080 	  mode = DECL_MODE (field);
3081 	  if (DECL_BIT_FIELD (field))
3084 	  if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
3085 	    /* ??? This case remains to be written.  */
3088 	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
3090 	  store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3091 		       /* The alignment of TARGET is
3092 			  at least what its type requires.  */
3094 		       TYPE_ALIGN (type) / BITS_PER_UNIT,
3095 		       int_size_in_bytes (type));
3098   else if (TREE_CODE (type) == ARRAY_TYPE)
3102       tree domain = TYPE_DOMAIN (type);
3103       HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3104       HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3105       tree elttype = TREE_TYPE (type);
3107       /* If the constructor has fewer fields than the structure,
3108 	 clear the whole structure first.  Similarly if this is a
3109 	 static constructor of a non-BLKmode object.  */
3111       if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3112 	  || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3113 	clear_storage (target, maxelt - minelt + 1);
3115 	/* Inform later passes that the old value is dead.  */
3116 	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3118       /* Store each element of the constructor into
3119 	 the corresponding element of TARGET, determined
3120 	 by counting the elements.  */
3121       for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3123 	   elt = TREE_CHAIN (elt), i++)
3125 	  register enum machine_mode mode;
3130 	  mode = TYPE_MODE (elttype);
3131 	  bitsize = GET_MODE_BITSIZE (mode);
3132 	  unsignedp = TREE_UNSIGNED (elttype);
3134 	  bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3136 	  store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3137 		       /* The alignment of TARGET is
3138 			  at least what its type requires.  */
3140 		       TYPE_ALIGN (type) / BITS_PER_UNIT,
3141 		       int_size_in_bytes (type));
3149 /* Store the value of EXP (an expression tree)
3150 into a subfield of TARGET which has mode MODE and occupies
3151 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3152 If MODE is VOIDmode, it means that we are storing into a bit-field.
3154 If VALUE_MODE is VOIDmode, return nothing in particular.
3155 UNSIGNEDP is not used in this case.
3157 Otherwise, return an rtx for the value stored. This rtx
3158 has mode VALUE_MODE if that is convenient to do.
3159 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3161 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3162 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
/* NOTE(review): fragmentary extraction — embedded original line numbers
   jump, so parts of this function are missing from view.  Code left
   byte-identical; comments only.  Purpose (from the doc comment above):
   store EXP into a subfield of TARGET spanning BITSIZE bits starting at
   BITPOS; returns an rtx for the stored value when VALUE_MODE is not
   VOIDmode.  */
3165 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3166 	     unsignedp, align, total_size)
3168      int bitsize, bitpos;
3169      enum machine_mode mode;
3171      enum machine_mode value_mode;
3176   HOST_WIDE_INT width_mask = 0;
3178   if (bitsize < HOST_BITS_PER_WIDE_INT)
3179     width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3181   /* If we are storing into an unaligned field of an aligned union that is
3182      in a register, we may have the mode of TARGET being an integer mode but
3183      MODE == BLKmode.  In that case, get an aligned object whose size and
3184      alignment are the same as TARGET and store TARGET into it (we can avoid
3185      the store if the field being stored is the entire width of TARGET).  Then
3186      call ourselves recursively to store the field into a BLKmode version of
3187      that object.  Finally, load from the object into TARGET.  This is not
3188      very efficient in general, but should only be slightly more expensive
3189      than the otherwise-required unaligned accesses.  Perhaps this can be
3190      cleaned up later.  */
3193       && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3195       rtx object = assign_stack_temp (GET_MODE (target),
3196 				      GET_MODE_SIZE (GET_MODE (target)), 0);
3197       rtx blk_object = copy_rtx (object);
3199       PUT_MODE (blk_object, BLKmode);
3201       if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3202 	emit_move_insn (object, target);
3204       store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3207       emit_move_insn (target, object);
3212   /* If the structure is in a register or if the component
3213      is a bit field, we cannot use addressing to access it.
3214      Use bit-field techniques or SUBREG to store in it.  */
3216   if (mode == VOIDmode
3217       || (mode != BLKmode && ! direct_store[(int) mode])
3218       || GET_CODE (target) == REG
3219       || GET_CODE (target) == SUBREG)
3221       rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3222       /* Store the value in the bitfield.  */
3223       store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3224       if (value_mode != VOIDmode)
3226 	  /* The caller wants an rtx for the value.  */
3227 	  /* If possible, avoid refetching from the bitfield itself.  */
3229 	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3232 	      enum machine_mode tmode;
3235 		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3236 	      tmode = GET_MODE (temp);
3237 	      if (tmode == VOIDmode)
     /* NOTE(review): sign-extend the just-stored value by shifting left
        then arithmetically right, instead of re-reading the bitfield.  */
3239 	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3240 	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3241 	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3243 	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
3244 				    NULL_RTX, value_mode, 0, align,
3251       rtx addr = XEXP (target, 0);
3254       /* If a value is wanted, it must be the lhs;
3255 	 so make the address stable for multiple use.  */
3257       if (value_mode != VOIDmode && GET_CODE (addr) != REG
3258 	  && ! CONSTANT_ADDRESS_P (addr)
3259 	  /* A frame-pointer reference is already stable.  */
3260 	  && ! (GET_CODE (addr) == PLUS
3261 		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
3262 		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
3263 		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3264 	addr = copy_to_reg (addr);
3266       /* Now build a reference to just the desired component.  */
3268       to_rtx = change_address (target, mode,
3269 			       plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3270       MEM_IN_STRUCT_P (to_rtx) = 1;
3272       return store_expr (exp, to_rtx, value_mode != VOIDmode);
3276 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3277 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3278 ARRAY_REFs at constant positions and find the ultimate containing object,
3281 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3282 bit position, and *PUNSIGNEDP to the signedness of the field.
3283 If the position of the field is variable, we store a tree
3284 giving the variable offset (in units) in *POFFSET.
3285 This offset is in addition to the bit position.
3286 If the position is not variable, we store 0 in *POFFSET.
3288 If any of the extraction expressions is volatile,
3289 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3291 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3292 is a mode that can be used to access the field. In that case, *PBITSIZE
3295 If the field describes a variable-sized object, *PMODE is set to
3296 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3297 this case, but the address of the object can be found. */
/* NOTE(review): fragmentary extraction — embedded original line numbers
   jump, so parts of this function are missing from view.  Code left
   byte-identical; comments only.  Purpose (from the doc comment above):
   walk nested COMPONENT_REF/BIT_FIELD_REF/ARRAY_REF expressions to find
   the ultimate containing object, filling in bit size/position, variable
   offset, mode, signedness, and volatility through the out-parameters.  */
3300 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
3305      enum machine_mode *pmode;
3310   enum machine_mode mode = VOIDmode;
3313   if (TREE_CODE (exp) == COMPONENT_REF)
3315       size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3316       if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3317 	mode = DECL_MODE (TREE_OPERAND (exp, 1));
3318       *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3320   else if (TREE_CODE (exp) == BIT_FIELD_REF)
3322       size_tree = TREE_OPERAND (exp, 1);
3323       *punsignedp = TREE_UNSIGNED (exp);
3327       mode = TYPE_MODE (TREE_TYPE (exp));
3328       *pbitsize = GET_MODE_BITSIZE (mode);
3329       *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
     /* A non-constant size means a variable-sized object: report it as
        BLKmode with bitsize -1 (see the function's doc comment).  */
3334       if (TREE_CODE (size_tree) != INTEGER_CST)
3335 	mode = BLKmode, *pbitsize = -1;
3337 	*pbitsize = TREE_INT_CST_LOW (size_tree);
3340   /* Compute cumulative bit-offset for nested component-refs and array-refs,
3341      and find the ultimate containing object.  */
3347       if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3349 	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
3350 		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3351 		      : TREE_OPERAND (exp, 2));
3353 	  if (TREE_CODE (pos) == PLUS_EXPR)
3356 	      if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3358 		  constant = TREE_OPERAND (pos, 0);
3359 		  var = TREE_OPERAND (pos, 1);
3361 	      else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3363 		  constant = TREE_OPERAND (pos, 1);
3364 		  var = TREE_OPERAND (pos, 0);
3368 	      *pbitpos += TREE_INT_CST_LOW (constant);
3370 		offset = size_binop (PLUS_EXPR, offset,
3371 				     size_binop (FLOOR_DIV_EXPR, var,
3372 						 size_int (BITS_PER_UNIT)));
3374 		offset = size_binop (FLOOR_DIV_EXPR, var,
3375 				     size_int (BITS_PER_UNIT));
3377 	  else if (TREE_CODE (pos) == INTEGER_CST)
3378 	    *pbitpos += TREE_INT_CST_LOW (pos);
3381 	      /* Assume here that the offset is a multiple of a unit.
3382 		 If not, there should be an explicitly added constant.  */
3384 		offset = size_binop (PLUS_EXPR, offset,
3385 				     size_binop (FLOOR_DIV_EXPR, pos,
3386 						 size_int (BITS_PER_UNIT)));
3388 		offset = size_binop (FLOOR_DIV_EXPR, pos,
3389 				     size_int (BITS_PER_UNIT));
3393       else if (TREE_CODE (exp) == ARRAY_REF
3394 	       && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3395 	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
3397 	  *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3398 		       * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
3400       else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3401 	       && ! ((TREE_CODE (exp) == NOP_EXPR
3402 		      || TREE_CODE (exp) == CONVERT_EXPR)
3403 		     && (TYPE_MODE (TREE_TYPE (exp))
3404 			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3407       /* If any reference in the chain is volatile, the effect is volatile.  */
3408       if (TREE_THIS_VOLATILE (exp))
3410       exp = TREE_OPERAND (exp, 0);
3413   /* If this was a bit-field, see if there is a mode that allows direct
3414      access in case EXP is in memory.  */
3415   if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
3417       mode = mode_for_size (*pbitsize, MODE_INT, 0);
3418       if (mode == BLKmode)
3425   /* We aren't finished fixing the callers to really handle nonzero offset.  */
3433 /* Given an rtx VALUE that may contain additions and multiplications,
3434 return an equivalent value that just refers to a register or memory.
3435 This is done by generating instructions to perform the arithmetic
3436 and returning a pseudo-register containing the value.
3438 The returned value may be a REG, SUBREG, MEM or constant. */
/* NOTE(review): fragmentary extraction — embedded original line numbers
   jump, so parts of this function are missing from view.  Code left
   byte-identical; comments only.  Purpose (from the doc comment above):
   reduce VALUE, which may contain additions and multiplications, to a
   REG, SUBREG, MEM or constant by emitting the arithmetic.  */
3441 force_operand (value, target)
3444   register optab binoptab = 0;
3445   /* Use a temporary to force order of execution of calls to
3449   /* Use subtarget as the target for operand 0 of a binary operation.  */
3450   register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3452   if (GET_CODE (value) == PLUS)
3453     binoptab = add_optab;
3454   else if (GET_CODE (value) == MINUS)
3455     binoptab = sub_optab;
3456   else if (GET_CODE (value) == MULT)
3458       op2 = XEXP (value, 1);
3459       if (!CONSTANT_P (op2)
3460 	  && !(GET_CODE (op2) == REG && op2 != subtarget))
3462       tmp = force_operand (XEXP (value, 0), subtarget);
3463       return expand_mult (GET_MODE (value), tmp,
3464 			  force_operand (op2, NULL_RTX),
3470       op2 = XEXP (value, 1);
3471       if (!CONSTANT_P (op2)
3472 	  && !(GET_CODE (op2) == REG && op2 != subtarget))
     /* NOTE(review): canonicalize subtraction of a constant into addition
        of its negation, so the virtual-register special case below can
        also apply to MINUS.  */
3474       if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3476 	  binoptab = add_optab;
3477 	  op2 = negate_rtx (GET_MODE (value), op2);
3480       /* Check for an addition with OP2 a constant integer and our first
3481 	 operand a PLUS of a virtual register and something else.  In that
3482 	 case, we want to emit the sum of the virtual register and the
3483 	 constant first and then add the other value.  This allows virtual
3484 	 register instantiation to simply modify the constant rather than
3485 	 creating another one around this addition.  */
3486       if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3487 	  && GET_CODE (XEXP (value, 0)) == PLUS
3488 	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3489 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3490 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3492 	  rtx temp = expand_binop (GET_MODE (value), binoptab,
3493 				   XEXP (XEXP (value, 0), 0), op2,
3494 				   subtarget, 0, OPTAB_LIB_WIDEN);
3495 	  return expand_binop (GET_MODE (value), binoptab, temp,
3496 			       force_operand (XEXP (XEXP (value, 0), 1), 0),
3497 			       target, 0, OPTAB_LIB_WIDEN);
3500       tmp = force_operand (XEXP (value, 0), subtarget);
3501       return expand_binop (GET_MODE (value), binoptab, tmp,
3502 			   force_operand (op2, NULL_RTX),
3503 			   target, 0, OPTAB_LIB_WIDEN);
3504       /* We give UNSIGNEDP = 0 to expand_binop
3505 	 because the only operations we are expanding here are signed ones.  */
3510 /* Subroutine of expand_expr:
3511 save the non-copied parts (LIST) of an expr (LHS), and return a list
3512 which can restore these values to their previous values,
3513 should something modify their storage. */
/* NOTE(review): fragmentary extraction — lines are missing between the
   visible ones.  Code left byte-identical; comments only.  Purpose (from
   the doc comment above): save the non-copied parts (LIST) of LHS into
   stack temporaries and return a list that can restore them later.  */
3516 save_noncopied_parts (lhs, list)
3523   for (tail = list; tail; tail = TREE_CHAIN (tail))
3524     if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
     /* Nested TREE_LIST: recurse and splice the resulting chain in.  */
3525       parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3528 	tree part = TREE_VALUE (tail);
3529 	tree part_type = TREE_TYPE (part);
3530 	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3531 	rtx target = assign_stack_temp (TYPE_MODE (part_type),
3532 					int_size_in_bytes (part_type), 0);
3533 	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3534 	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3535 	parts = tree_cons (to_be_saved,
3536 			   build (RTL_EXPR, part_type, NULL_TREE,
3539 	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3544 /* Subroutine of expand_expr:
3545 record the non-copied parts (LIST) of an expr (LHS), and return a list
3546 which specifies the initial values of these parts. */
/* NOTE(review): fragmentary extraction — lines are missing between the
   visible ones.  Code left byte-identical; comments only.  Purpose (from
   the doc comment above): record the non-copied parts (LIST) of LHS and
   return a list specifying their initial values.  */
3549 init_noncopied_parts (lhs, list)
3556   for (tail = list; tail; tail = TREE_CHAIN (tail))
3557     if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
     /* Nested TREE_LIST: recurse and splice the resulting chain in.  */
3558       parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3561 	tree part = TREE_VALUE (tail);
3562 	tree part_type = TREE_TYPE (part);
3563 	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3564 	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3569 /* Subroutine of expand_expr: return nonzero iff there is no way that
3570 EXP can reference X, which is being modified. */
/* NOTE(review): fragmentary extraction — embedded original line numbers
   jump, so parts of this function are missing from view.  Code left
   byte-identical; comments only.  Purpose (from the doc comment above):
   return nonzero iff there is no way EXP can reference X, which is
   being modified.  */
3573 safe_from_p (x, exp)
3583   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3584      find the underlying pseudo.  */
3585   if (GET_CODE (x) == SUBREG)
3588       if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3592   /* If X is a location in the outgoing argument area, it is always safe.  */
3593   if (GET_CODE (x) == MEM
3594       && (XEXP (x, 0) == virtual_outgoing_args_rtx
3595 	  || (GET_CODE (XEXP (x, 0)) == PLUS
3596 	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
     /* First dispatch on the CLASS of tree code (declaration, reference,
        expression, ...); specific codes are handled by the switch below.  */
3599   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3602       exp_rtl = DECL_RTL (exp);
3609       if (TREE_CODE (exp) == TREE_LIST)
3610 	return ((TREE_VALUE (exp) == 0
3611 		 || safe_from_p (x, TREE_VALUE (exp)))
3612 		&& (TREE_CHAIN (exp) == 0
3613 		    || safe_from_p (x, TREE_CHAIN (exp))));
3618       return safe_from_p (x, TREE_OPERAND (exp, 0));
3622       return (safe_from_p (x, TREE_OPERAND (exp, 0))
3623 	      && safe_from_p (x, TREE_OPERAND (exp, 1)));
3627   /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
3628      the expression.  If it is set, we conflict iff we are that rtx or
3629      both are in memory.  Otherwise, we check all operands of the
3630      expression recursively.  */
3632   switch (TREE_CODE (exp))
3635       return staticp (TREE_OPERAND (exp, 0));
3638       if (GET_CODE (x) == MEM)
3643       exp_rtl = CALL_EXPR_RTL (exp);
3646 	  /* Assume that the call will clobber all hard registers and
3648 	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3649 	      || GET_CODE (x) == MEM)
3656 	  exp_rtl = RTL_EXPR_RTL (exp);
3658 	/* We don't know what this can modify.  */
3663     case WITH_CLEANUP_EXPR:
3664       exp_rtl = RTL_EXPR_RTL (exp);
3668 	exp_rtl = SAVE_EXPR_RTL (exp);
3672       /* The only operand we look at is operand 1.  The rest aren't
3673 	 part of the expression.  */
3674       return safe_from_p (x, TREE_OPERAND (exp, 1));
3676     case METHOD_CALL_EXPR:
3677       /* This takes a rtx argument, but shouldn't appear here. */
3681   /* If we have an rtx, we do not need to scan our operands.  */
3685       nops = tree_code_length[(int) TREE_CODE (exp)];
3686       for (i = 0; i < nops; i++)
3687 	if (TREE_OPERAND (exp, i) != 0
3688 	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3692   /* If we have an rtl, find any enclosed object.  Then see if we conflict
3696       if (GET_CODE (exp_rtl) == SUBREG)
3698 	  exp_rtl = SUBREG_REG (exp_rtl);
3699 	  if (GET_CODE (exp_rtl) == REG
3700 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3704       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
3705 	 are memory and EXP is not readonly.  */
3706       return ! (rtx_equal_p (x, exp_rtl)
3707 		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3708 		    && ! TREE_READONLY (exp)));
3711   /* If we reach here, it is safe.  */
3715 /* Subroutine of expand_expr: return nonzero iff EXP is an
3716 expression whose type is statically determinable. */
3722 if (TREE_CODE (exp) == PARM_DECL
3723 || TREE_CODE (exp) == VAR_DECL
3724 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3725 || TREE_CODE (exp) == COMPONENT_REF
3726 || TREE_CODE (exp) == ARRAY_REF)
3731 /* expand_expr: generate code for computing expression EXP.
3732 An rtx for the computed value is returned. The value is never null.
3733 In the case of a void EXP, const0_rtx is returned.
3735 The value may be stored in TARGET if TARGET is nonzero.
3736 TARGET is just a suggestion; callers must assume that
3737 the rtx returned may not be the same as TARGET.
3739 If TARGET is CONST0_RTX, it means that the value will be ignored.
3741 If TMODE is not VOIDmode, it suggests generating the
3742 result in mode TMODE. But this is done only when convenient.
3743 Otherwise, TMODE is ignored and the value generated in its natural mode.
3744 TMODE is just a suggestion; callers must assume that
3745 the rtx returned may not have mode TMODE.
3747 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3748 with a constant address even if that address is not normally legitimate.
3749 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3751 If MODIFIER is EXPAND_SUM then when EXP is an addition
3752 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3753 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3754 products as above, or REG or MEM, or constant.
3755 Ordinarily in such cases we would output mul or add instructions
3756 and then return a pseudo reg containing the sum.
3758 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3759 it also marks a label as absolutely required (it can't be dead).
3760 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3761 This is used for outputting expressions used in initializers. */
3764 expand_expr (exp, target, tmode, modifier)
3767 enum machine_mode tmode;
3768 enum expand_modifier modifier;
3770 register rtx op0, op1, temp;
3771 tree type = TREE_TYPE (exp);
3772 int unsignedp = TREE_UNSIGNED (type);
3773 register enum machine_mode mode = TYPE_MODE (type);
3774 register enum tree_code code = TREE_CODE (exp);
3776 /* Use subtarget as the target for operand 0 of a binary operation. */
3777 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3778 rtx original_target = target;
3779 int ignore = target == const0_rtx;
3782 /* Don't use hard regs as subtargets, because the combiner
3783 can only handle pseudo regs. */
3784 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3786 /* Avoid subtargets inside loops,
3787 since they hide some invariant expressions. */
3788 if (preserve_subexpressions_p ())
3791 if (ignore) target = 0, original_target = 0;
3793 /* If will do cse, generate all results into pseudo registers
3794 since 1) that allows cse to find more things
3795 and 2) otherwise cse could produce an insn the machine
3798 if (! cse_not_expected && mode != BLKmode && target
3799 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3802 /* Ensure we reference a volatile object even if value is ignored. */
3803 if (ignore && TREE_THIS_VOLATILE (exp)
3804 && mode != VOIDmode && mode != BLKmode)
3806 target = gen_reg_rtx (mode);
3807 temp = expand_expr (exp, target, VOIDmode, modifier);
3809 emit_move_insn (target, temp);
3817 tree function = decl_function_context (exp);
3818 /* Handle using a label in a containing function. */
3819 if (function != current_function_decl && function != 0)
3821 struct function *p = find_function_data (function);
3822 /* Allocate in the memory associated with the function
3823 that the label is in. */
3824 push_obstacks (p->function_obstack,
3825 p->function_maybepermanent_obstack);
3827 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3828 label_rtx (exp), p->forced_labels);
3831 else if (modifier == EXPAND_INITIALIZER)
3832 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3833 label_rtx (exp), forced_labels);
3834 temp = gen_rtx (MEM, FUNCTION_MODE,
3835 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3836 if (function != current_function_decl && function != 0)
3837 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3842 if (DECL_RTL (exp) == 0)
3844 error_with_decl (exp, "prior parameter's size depends on `%s'");
3845 return CONST0_RTX (mode);
3851 if (DECL_RTL (exp) == 0)
3853 /* Ensure variable marked as used
3854 even if it doesn't go through a parser. */
3855 TREE_USED (exp) = 1;
3856 /* Handle variables inherited from containing functions. */
3857 context = decl_function_context (exp);
3859 /* We treat inline_function_decl as an alias for the current function
3860 because that is the inline function whose vars, types, etc.
3861 are being merged into the current function.
3862 See expand_inline_function. */
3863 if (context != 0 && context != current_function_decl
3864 && context != inline_function_decl
3865 /* If var is static, we don't need a static chain to access it. */
3866 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3867 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3871 /* Mark as non-local and addressable. */
3872 DECL_NONLOCAL (exp) = 1;
3873 mark_addressable (exp);
3874 if (GET_CODE (DECL_RTL (exp)) != MEM)
3876 addr = XEXP (DECL_RTL (exp), 0);
3877 if (GET_CODE (addr) == MEM)
3878 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3880 addr = fix_lexical_addr (addr, exp);
3881 return change_address (DECL_RTL (exp), mode, addr);
3884 /* This is the case of an array whose size is to be determined
3885 from its initializer, while the initializer is still being parsed.
3887 if (GET_CODE (DECL_RTL (exp)) == MEM
3888 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3889 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3890 XEXP (DECL_RTL (exp), 0));
3891 if (GET_CODE (DECL_RTL (exp)) == MEM
3892 && modifier != EXPAND_CONST_ADDRESS
3893 && modifier != EXPAND_SUM
3894 && modifier != EXPAND_INITIALIZER)
3896 /* DECL_RTL probably contains a constant address.
3897 On RISC machines where a constant address isn't valid,
3898 make some insns to get that address into a register. */
3899 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3901 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3902 return change_address (DECL_RTL (exp), VOIDmode,
3903 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3906 /* If the mode of DECL_RTL does not match that of the decl, it
3907 must be a promoted value. We return a SUBREG of the wanted mode,
3908 but mark it so that we know that it was already extended. */
3910 if (GET_CODE (DECL_RTL (exp)) == REG
3911 && GET_MODE (DECL_RTL (exp)) != mode)
3913 enum machine_mode decl_mode = DECL_MODE (exp);
3915 /* Get the signedness used for this variable. Ensure we get the
3916 same mode we got when the variable was declared. */
3918 PROMOTE_MODE (decl_mode, unsignedp, type);
3920 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3923 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3924 SUBREG_PROMOTED_VAR_P (temp) = 1;
3925 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3929 return DECL_RTL (exp);
3932 return immed_double_const (TREE_INT_CST_LOW (exp),
3933 TREE_INT_CST_HIGH (exp),
3937 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3940 /* If optimized, generate immediate CONST_DOUBLE
3941 which will be turned into memory by reload if necessary.
3943 We used to force a register so that loop.c could see it. But
3944 this does not allow gen_* patterns to perform optimizations with
3945 the constants. It also produces two insns in cases like "x = 1.0;".
3946 On most machines, floating-point constants are not permitted in
3947 many insns, so we'd end up copying it to a register in any case.
3949 Now, we do the copying in expand_binop, if appropriate. */
3950 return immed_real_const (exp);
3954 if (! TREE_CST_RTL (exp))
3955 output_constant_def (exp);
3957 /* TREE_CST_RTL probably contains a constant address.
3958 On RISC machines where a constant address isn't valid,
3959 make some insns to get that address into a register. */
3960 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3961 && modifier != EXPAND_CONST_ADDRESS
3962 && modifier != EXPAND_INITIALIZER
3963 && modifier != EXPAND_SUM
3964 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3965 return change_address (TREE_CST_RTL (exp), VOIDmode,
3966 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3967 return TREE_CST_RTL (exp);
3970 context = decl_function_context (exp);
3971 /* We treat inline_function_decl as an alias for the current function
3972 because that is the inline function whose vars, types, etc.
3973 are being merged into the current function.
3974 See expand_inline_function. */
3975 if (context == current_function_decl || context == inline_function_decl)
3978 /* If this is non-local, handle it. */
3981 temp = SAVE_EXPR_RTL (exp);
3982 if (temp && GET_CODE (temp) == REG)
3984 put_var_into_stack (exp);
3985 temp = SAVE_EXPR_RTL (exp);
3987 if (temp == 0 || GET_CODE (temp) != MEM)
3989 return change_address (temp, mode,
3990 fix_lexical_addr (XEXP (temp, 0), exp));
3992 if (SAVE_EXPR_RTL (exp) == 0)
3994 if (mode == BLKmode)
3996 = assign_stack_temp (mode,
3997 int_size_in_bytes (TREE_TYPE (exp)), 0);
4000 enum machine_mode var_mode = mode;
4002 if (TREE_CODE (type) == INTEGER_TYPE
4003 || TREE_CODE (type) == ENUMERAL_TYPE
4004 || TREE_CODE (type) == BOOLEAN_TYPE
4005 || TREE_CODE (type) == CHAR_TYPE
4006 || TREE_CODE (type) == REAL_TYPE
4007 || TREE_CODE (type) == POINTER_TYPE
4008 || TREE_CODE (type) == OFFSET_TYPE)
4010 PROMOTE_MODE (var_mode, unsignedp, type);
4013 temp = gen_reg_rtx (var_mode);
4016 SAVE_EXPR_RTL (exp) = temp;
4017 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4018 if (!optimize && GET_CODE (temp) == REG)
4019 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4023 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4024 must be a promoted value. We return a SUBREG of the wanted mode,
4025 but mark it so that we know that it was already extended. Note
4026 that `unsignedp' was modified above in this case. */
4028 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4029 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4031 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4032 SUBREG_PROMOTED_VAR_P (temp) = 1;
4033 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4037 return SAVE_EXPR_RTL (exp);
4040 /* Exit the current loop if the body-expression is true. */
4042 rtx label = gen_label_rtx ();
4043 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
4044 expand_exit_loop (NULL_PTR);
4050 expand_start_loop (1);
4051 expand_expr_stmt (TREE_OPERAND (exp, 0));
4058 tree vars = TREE_OPERAND (exp, 0);
4059 int vars_need_expansion = 0;
4061 /* Need to open a binding contour here because
4062 if there are any cleanups they must be contained here. */
4063 expand_start_bindings (0);
4065 /* Mark the corresponding BLOCK for output in its proper place. */
4066 if (TREE_OPERAND (exp, 2) != 0
4067 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4068 insert_block (TREE_OPERAND (exp, 2));
4070 /* If VARS have not yet been expanded, expand them now. */
4073 if (DECL_RTL (vars) == 0)
4075 vars_need_expansion = 1;
4078 expand_decl_init (vars);
4079 vars = TREE_CHAIN (vars);
4082 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4084 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4090 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4092 emit_insns (RTL_EXPR_SEQUENCE (exp));
4093 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4094 return RTL_EXPR_RTL (exp);
4097 /* All elts simple constants => refer to a constant in memory. But
4098 if this is a non-BLKmode mode, let it store a field at a time
4099 since that should make a CONST_INT or CONST_DOUBLE when we
4101 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
4103 rtx constructor = output_constant_def (exp);
4104 if (modifier != EXPAND_CONST_ADDRESS
4105 && modifier != EXPAND_INITIALIZER
4106 && modifier != EXPAND_SUM
4107 && !memory_address_p (GET_MODE (constructor),
4108 XEXP (constructor, 0)))
4109 constructor = change_address (constructor, VOIDmode,
4110 XEXP (constructor, 0));
4117 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4118 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4123 if (target == 0 || ! safe_from_p (target, exp))
4125 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4126 target = gen_reg_rtx (mode);
4129 enum tree_code c = TREE_CODE (type);
4131 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4132 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
4133 MEM_IN_STRUCT_P (target) = 1;
4136 store_constructor (exp, target);
4142 tree exp1 = TREE_OPERAND (exp, 0);
4145 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4146 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4147 This code has the same general effect as simply doing
4148 expand_expr on the save expr, except that the expression PTR
4149 is computed for use as a memory address. This means different
4150 code, suitable for indexing, may be generated. */
4151 if (TREE_CODE (exp1) == SAVE_EXPR
4152 && SAVE_EXPR_RTL (exp1) == 0
4153 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4154 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4155 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4157 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4158 VOIDmode, EXPAND_SUM);
4159 op0 = memory_address (mode, temp);
4160 op0 = copy_all_regs (op0);
4161 SAVE_EXPR_RTL (exp1) = op0;
4165 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4166 op0 = memory_address (mode, op0);
4169 temp = gen_rtx (MEM, mode, op0);
4170 /* If address was computed by addition,
4171 mark this as an element of an aggregate. */
4172 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4173 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4174 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4175 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4176 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4177 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4178 || (TREE_CODE (exp1) == ADDR_EXPR
4179 && (exp2 = TREE_OPERAND (exp1, 0))
4180 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4181 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4182 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
4183 MEM_IN_STRUCT_P (temp) = 1;
4184 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4185 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4186 a location is accessed through a pointer to const does not mean
4187 that the value there can never change. */
4188 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4194 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
4195 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4197 /* Nonconstant array index or nonconstant element size.
4198 Generate the tree for *(&array+index) and expand that,
4199 except do it in a language-independent way
4200 and don't complain about non-lvalue arrays.
4201 `mark_addressable' should already have been called
4202 for any array for which this case will be reached. */
4204 /* Don't forget the const or volatile flag from the array element. */
4205 tree variant_type = build_type_variant (type,
4206 TREE_READONLY (exp),
4207 TREE_THIS_VOLATILE (exp));
4208 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
4209 TREE_OPERAND (exp, 0));
4210 tree index = TREE_OPERAND (exp, 1);
4213 /* Convert the integer argument to a type the same size as a pointer
4214 so the multiply won't overflow spuriously. */
4215 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
4216 index = convert (type_for_size (POINTER_SIZE, 0), index);
4218 /* Don't think the address has side effects
4219 just because the array does.
4220 (In some cases the address might have side effects,
4221 and we fail to record that fact here. However, it should not
4222 matter, since expand_expr should not care.) */
4223 TREE_SIDE_EFFECTS (array_adr) = 0;
4225 elt = build1 (INDIRECT_REF, type,
4226 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
4228 fold (build (MULT_EXPR,
4229 TYPE_POINTER_TO (variant_type),
4230 index, size_in_bytes (type))))));
4232 /* Volatility, etc., of new expression is same as old expression. */
4233 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4234 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4235 TREE_READONLY (elt) = TREE_READONLY (exp);
4237 return expand_expr (elt, target, tmode, modifier);
4240 /* Fold an expression like: "foo"[2].
4241 This is not done in fold so it won't happen inside &. */
4244 tree arg0 = TREE_OPERAND (exp, 0);
4245 tree arg1 = TREE_OPERAND (exp, 1);
4247 if (TREE_CODE (arg0) == STRING_CST
4248 && TREE_CODE (arg1) == INTEGER_CST
4249 && !TREE_INT_CST_HIGH (arg1)
4250 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4252 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
4254 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
4255 TREE_TYPE (exp) = integer_type_node;
4256 return expand_expr (exp, target, tmode, modifier);
4258 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
4260 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4261 TREE_TYPE (exp) = integer_type_node;
4262 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
4267 /* If this is a constant index into a constant array,
4268 just get the value from the array. Handle both the cases when
4269 we have an explicit constructor and when our operand is a variable
4270 that was declared const. */
4272 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4273 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4275 tree index = fold (TREE_OPERAND (exp, 1));
4276 if (TREE_CODE (index) == INTEGER_CST
4277 && TREE_INT_CST_HIGH (index) == 0)
4279 int i = TREE_INT_CST_LOW (index);
4280 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4283 elem = TREE_CHAIN (elem);
4285 return expand_expr (fold (TREE_VALUE (elem)), target,
4290 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
4291 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4292 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
4293 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4294 && DECL_INITIAL (TREE_OPERAND (exp, 0))
4296 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
4299 tree index = fold (TREE_OPERAND (exp, 1));
4300 if (TREE_CODE (index) == INTEGER_CST
4301 && TREE_INT_CST_HIGH (index) == 0)
4303 int i = TREE_INT_CST_LOW (index);
4304 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
4306 if (TREE_CODE (init) == CONSTRUCTOR)
4308 tree elem = CONSTRUCTOR_ELTS (init);
4311 elem = TREE_CHAIN (elem);
4313 return expand_expr (fold (TREE_VALUE (elem)), target,
4316 else if (TREE_CODE (init) == STRING_CST
4317 && i < TREE_STRING_LENGTH (init))
4319 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4320 return convert_to_mode (mode, temp, 0);
4324 /* Treat array-ref with constant index as a component-ref. */
4328 /* If the operand is a CONSTRUCTOR, we can just extract the
4329 appropriate field if it is present. */
4330 if (code != ARRAY_REF
4331 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4335 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4336 elt = TREE_CHAIN (elt))
4337 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4338 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4342 enum machine_mode mode1;
4347 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4348 &mode1, &unsignedp, &volatilep);
4350 /* In some cases, we will be offsetting OP0's address by a constant.
4351 So get it as a sum, if possible. If we will be using it
4352 directly in an insn, we validate it. */
4353 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4355 /* If this is a constant, put it into a register if it is a
4356 legitimate constant and memory if it isn't. */
4357 if (CONSTANT_P (op0))
4359 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4360 if (LEGITIMATE_CONSTANT_P (op0))
4361 op0 = force_reg (mode, op0);
4363 op0 = validize_mem (force_const_mem (mode, op0));
4368 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4370 if (GET_CODE (op0) != MEM)
4372 op0 = change_address (op0, VOIDmode,
4373 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4374 force_reg (Pmode, offset_rtx)));
4377 /* Don't forget about volatility even if this is a bitfield. */
4378 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4380 op0 = copy_rtx (op0);
4381 MEM_VOLATILE_P (op0) = 1;
4384 if (mode1 == VOIDmode
4385 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4386 && modifier != EXPAND_CONST_ADDRESS
4387 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4388 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
4390 /* In cases where an aligned union has an unaligned object
4391 as a field, we might be extracting a BLKmode value from
4392 an integer-mode (e.g., SImode) object. Handle this case
4393 by doing the extract into an object as wide as the field
4394 (which we know to be the width of a basic mode), then
4395 storing into memory, and changing the mode to BLKmode. */
4396 enum machine_mode ext_mode = mode;
4398 if (ext_mode == BLKmode)
4399 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4401 if (ext_mode == BLKmode)
4404 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4405 unsignedp, target, ext_mode, ext_mode,
4406 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4407 int_size_in_bytes (TREE_TYPE (tem)));
4408 if (mode == BLKmode)
4410 rtx new = assign_stack_temp (ext_mode,
4411 bitsize / BITS_PER_UNIT, 0);
4413 emit_move_insn (new, op0);
4414 op0 = copy_rtx (new);
4415 PUT_MODE (op0, BLKmode);
4421 /* Get a reference to just this component. */
4422 if (modifier == EXPAND_CONST_ADDRESS
4423 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4424 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4425 (bitpos / BITS_PER_UNIT)));
4427 op0 = change_address (op0, mode1,
4428 plus_constant (XEXP (op0, 0),
4429 (bitpos / BITS_PER_UNIT)));
4430 MEM_IN_STRUCT_P (op0) = 1;
4431 MEM_VOLATILE_P (op0) |= volatilep;
4432 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4435 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4436 convert_move (target, op0, unsignedp);
4442 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
4443 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4444 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4445 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4446 MEM_IN_STRUCT_P (temp) = 1;
4447 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4448 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4449 a location is accessed through a pointer to const does not mean
4450 that the value there can never change. */
4451 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4456 /* Intended for a reference to a buffer of a file-object in Pascal.
4457 But it's not certain that a special tree code will really be
4458 necessary for these. INDIRECT_REF might work for them. */
4462 /* IN_EXPR: Inlined pascal set IN expression.
4465 rlo = set_low - (set_low%bits_per_word);
4466 the_word = set [ (index - rlo)/bits_per_word ];
4467 bit_index = index % bits_per_word;
4468 bitmask = 1 << bit_index;
4469 return !!(the_word & bitmask); */
4471 preexpand_calls (exp);
4473 tree set = TREE_OPERAND (exp, 0);
4474 tree index = TREE_OPERAND (exp, 1);
4475 tree set_type = TREE_TYPE (set);
4477 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4478 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4484 rtx diff, quo, rem, addr, bit, result;
4485 rtx setval, setaddr;
4486 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4489 target = gen_reg_rtx (mode);
4491 /* If domain is empty, answer is no. */
4492 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4495 index_val = expand_expr (index, 0, VOIDmode, 0);
4496 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4497 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4498 setval = expand_expr (set, 0, VOIDmode, 0);
4499 setaddr = XEXP (setval, 0);
4501 /* Compare index against bounds, if they are constant. */
4502 if (GET_CODE (index_val) == CONST_INT
4503 && GET_CODE (lo_r) == CONST_INT
4504 && INTVAL (index_val) < INTVAL (lo_r))
4507 if (GET_CODE (index_val) == CONST_INT
4508 && GET_CODE (hi_r) == CONST_INT
4509 && INTVAL (hi_r) < INTVAL (index_val))
4512 /* If we get here, we have to generate the code for both cases
4513 (in range and out of range). */
4515 op0 = gen_label_rtx ();
4516 op1 = gen_label_rtx ();
4518 if (! (GET_CODE (index_val) == CONST_INT
4519 && GET_CODE (lo_r) == CONST_INT))
4521 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4522 GET_MODE (index_val), 0, 0);
4523 emit_jump_insn (gen_blt (op1));
4526 if (! (GET_CODE (index_val) == CONST_INT
4527 && GET_CODE (hi_r) == CONST_INT))
4529 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4530 GET_MODE (index_val), 0, 0);
4531 emit_jump_insn (gen_bgt (op1));
4534 /* Calculate the element number of bit zero in the first word
4536 if (GET_CODE (lo_r) == CONST_INT)
4537 rlow = GEN_INT (INTVAL (lo_r)
4538 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4540 rlow = expand_binop (index_mode, and_optab, lo_r,
4541 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4542 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4544 diff = expand_binop (index_mode, sub_optab,
4545 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4547 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4548 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4549 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4550 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4551 addr = memory_address (byte_mode,
4552 expand_binop (index_mode, add_optab,
4553 diff, setaddr, NULL_RTX, 0,
4555 /* Extract the bit we want to examine */
4556 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4557 gen_rtx (MEM, byte_mode, addr),
4558 make_tree (TREE_TYPE (index), rem),
4560 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4561 GET_MODE (target) == byte_mode ? target : 0,
4562 1, OPTAB_LIB_WIDEN);
4564 if (result != target)
4565 convert_move (target, result, 1);
4567 /* Output the code to handle the out-of-range case. */
4570 emit_move_insn (target, const0_rtx);
4575 case WITH_CLEANUP_EXPR:
4576 if (RTL_EXPR_RTL (exp) == 0)
4579 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4581 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4582 /* That's it for this cleanup. */
4583 TREE_OPERAND (exp, 2) = 0;
4585 return RTL_EXPR_RTL (exp);
4588 /* Check for a built-in function. */
4589 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4590 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4591 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4592 return expand_builtin (exp, target, subtarget, tmode, ignore);
4593 /* If this call was expanded already by preexpand_calls,
4594 just return the result we got. */
4595 if (CALL_EXPR_RTL (exp) != 0)
4596 return CALL_EXPR_RTL (exp);
4597 return expand_call (exp, target, ignore);
4599 case NON_LVALUE_EXPR:
4602 case REFERENCE_EXPR:
4603 if (TREE_CODE (type) == VOID_TYPE || ignore)
4605 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4608 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4609 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4610 if (TREE_CODE (type) == UNION_TYPE)
4612 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4615 if (mode == BLKmode)
4617 if (TYPE_SIZE (type) == 0
4618 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4620 target = assign_stack_temp (BLKmode,
4621 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4622 + BITS_PER_UNIT - 1)
4623 / BITS_PER_UNIT, 0);
4626 target = gen_reg_rtx (mode);
4628 if (GET_CODE (target) == MEM)
4629 /* Store data into beginning of memory target. */
4630 store_expr (TREE_OPERAND (exp, 0),
4631 change_address (target, TYPE_MODE (valtype), 0), 0);
4633 else if (GET_CODE (target) == REG)
4634 /* Store this field into a union of the proper type. */
4635 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4636 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4638 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4642 /* Return the entire union. */
4645 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4646 if (GET_MODE (op0) == mode)
4648 /* If arg is a constant integer being extended from a narrower mode,
4649 we must really truncate to get the extended bits right. Otherwise
4650 (unsigned long) (unsigned char) ("\377"[0])
4651 would come out as ffffffff. */
4652 if (GET_MODE (op0) == VOIDmode
4653 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4654 < GET_MODE_BITSIZE (mode)))
4656 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4657 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4659 if (width < HOST_BITS_PER_WIDE_INT)
4661 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4662 : CONST_DOUBLE_LOW (op0));
4663 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4664 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4665 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4667 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4669 op0 = GEN_INT (val);
4673 op0 = (simplify_unary_operation
4674 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4675 ? ZERO_EXTEND : SIGN_EXTEND),
4677 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4682 if (GET_MODE (op0) == VOIDmode)
4684 if (modifier == EXPAND_INITIALIZER)
4685 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4686 if (flag_force_mem && GET_CODE (op0) == MEM)
4687 op0 = copy_to_reg (op0);
4690 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4692 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4696 /* We come here from MINUS_EXPR when the second operand is a constant. */
4698 this_optab = add_optab;
4700 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4701 something else, make sure we add the register to the constant and
4702 then to the other thing. This case can occur during strength
4703 reduction and doing it this way will produce better code if the
4704 frame pointer or argument pointer is eliminated.
4706 fold-const.c will ensure that the constant is always in the inner
4707 PLUS_EXPR, so the only case we need to do anything about is if
4708 sp, ap, or fp is our second argument, in which case we must swap
4709 the innermost first argument and our second argument. */
4711 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4712 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4713 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4714 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4715 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4716 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4718 tree t = TREE_OPERAND (exp, 1);
4720 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4721 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4724 /* If the result is to be Pmode and we are adding an integer to
4725 something, we might be forming a constant. So try to use
4726 plus_constant. If it produces a sum and we can't accept it,
4727 use force_operand. This allows P = &ARR[const] to generate
4728 efficient code on machines where a SYMBOL_REF is not a valid
4731 If this is an EXPAND_SUM call, always return the sum. */
4732 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4733 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4734 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4737 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4739 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4740 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4741 op1 = force_operand (op1, target);
4745 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4746 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4747 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4750 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4752 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4753 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4754 op0 = force_operand (op0, target);
4758 /* No sense saving up arithmetic to be done
4759 if it's all in the wrong mode to form part of an address.
4760 And force_operand won't know whether to sign-extend or
4762 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4763 || mode != Pmode) goto binop;
4765 preexpand_calls (exp);
4766 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4769 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4770 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4772 /* Make sure any term that's a sum with a constant comes last. */
4773 if (GET_CODE (op0) == PLUS
4774 && CONSTANT_P (XEXP (op0, 1)))
4780 /* If adding to a sum including a constant,
4781 associate it to put the constant outside. */
4782 if (GET_CODE (op1) == PLUS
4783 && CONSTANT_P (XEXP (op1, 1)))
4785 rtx constant_term = const0_rtx;
4787 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4790 /* Ensure that MULT comes first if there is one. */
4791 else if (GET_CODE (op0) == MULT)
4792 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4794 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4796 /* Let's also eliminate constants from op0 if possible. */
4797 op0 = eliminate_constant_term (op0, &constant_term);
4799 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4800 their sum should be a constant. Form it into OP1, since the
4801 result we want will then be OP0 + OP1. */
4803 temp = simplify_binary_operation (PLUS, mode, constant_term,
4808 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4811 /* Put a constant term last and put a multiplication first. */
4812 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4813 temp = op1, op1 = op0, op0 = temp;
4815 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4816 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4819 /* Handle difference of two symbolic constants,
4820 for the sake of an initializer. */
4821 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4822 && really_constant_p (TREE_OPERAND (exp, 0))
4823 && really_constant_p (TREE_OPERAND (exp, 1)))
4825 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4826 VOIDmode, modifier);
4827 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4828 VOIDmode, modifier);
4829 return gen_rtx (MINUS, mode, op0, op1);
4831 /* Convert A - const to A + (-const). */
4832 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4834 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4835 fold (build1 (NEGATE_EXPR, type,
4836 TREE_OPERAND (exp, 1))));
4839 this_optab = sub_optab;
4843 preexpand_calls (exp);
4844 /* If first operand is constant, swap them.
4845 Thus the following special case checks need only
4846 check the second operand. */
4847 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4849 register tree t1 = TREE_OPERAND (exp, 0);
4850 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4851 TREE_OPERAND (exp, 1) = t1;
4854 /* Attempt to return something suitable for generating an
4855 indexed address, for machines that support that. */
4857 if (modifier == EXPAND_SUM && mode == Pmode
4858 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4859 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4861 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4863 /* Apply distributive law if OP0 is x+c. */
4864 if (GET_CODE (op0) == PLUS
4865 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4866 return gen_rtx (PLUS, mode,
4867 gen_rtx (MULT, mode, XEXP (op0, 0),
4868 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4869 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4870 * INTVAL (XEXP (op0, 1))));
4872 if (GET_CODE (op0) != REG)
4873 op0 = force_operand (op0, NULL_RTX);
4874 if (GET_CODE (op0) != REG)
4875 op0 = copy_to_mode_reg (mode, op0);
4877 return gen_rtx (MULT, mode, op0,
4878 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4881 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4884 /* Check for multiplying things that have been extended
4885 from a narrower type. If this machine supports multiplying
4886 in that narrower type with a result in the desired type,
4887 do it that way, and avoid the explicit type-conversion. */
4888 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4889 && TREE_CODE (type) == INTEGER_TYPE
4890 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4891 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4892 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4893 && int_fits_type_p (TREE_OPERAND (exp, 1),
4894 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4895 /* Don't use a widening multiply if a shift will do. */
4896 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4897 > HOST_BITS_PER_WIDE_INT)
4898 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4900 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4901 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4903 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4904 /* If both operands are extended, they must either both
4905 be zero-extended or both be sign-extended. */
4906 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4908 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4910 enum machine_mode innermode
4911 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4912 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4913 ? umul_widen_optab : smul_widen_optab);
4914 if (mode == GET_MODE_WIDER_MODE (innermode)
4915 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4917 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4918 NULL_RTX, VOIDmode, 0);
4919 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4920 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4923 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4924 NULL_RTX, VOIDmode, 0);
4928 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4929 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4930 return expand_mult (mode, op0, op1, target, unsignedp);
4932 case TRUNC_DIV_EXPR:
4933 case FLOOR_DIV_EXPR:
4935 case ROUND_DIV_EXPR:
4936 case EXACT_DIV_EXPR:
4937 preexpand_calls (exp);
4938 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4940 /* Possible optimization: compute the dividend with EXPAND_SUM
4941 then if the divisor is constant can optimize the case
4942 where some terms of the dividend have coeffs divisible by it. */
4943 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4944 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4945 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4948 this_optab = flodiv_optab;
4951 case TRUNC_MOD_EXPR:
4952 case FLOOR_MOD_EXPR:
4954 case ROUND_MOD_EXPR:
4955 preexpand_calls (exp);
4956 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4958 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4959 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4960 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4962 case FIX_ROUND_EXPR:
4963 case FIX_FLOOR_EXPR:
4965 abort (); /* Not used for C. */
4967 case FIX_TRUNC_EXPR:
4968 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4970 target = gen_reg_rtx (mode);
4971 expand_fix (target, op0, unsignedp);
4975 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4977 target = gen_reg_rtx (mode);
4978 /* expand_float can't figure out what to do if FROM has VOIDmode.
4979 So give it the correct mode. With -O, cse will optimize this. */
4980 if (GET_MODE (op0) == VOIDmode)
4981 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4983 expand_float (target, op0,
4984 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4988 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4989 temp = expand_unop (mode, neg_optab, op0, target, 0);
4995 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4997 /* Handle complex values specially. */
4999 enum machine_mode opmode
5000 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5002 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5003 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5004 return expand_complex_abs (opmode, op0, target, unsignedp);
5007 /* Unsigned abs is simply the operand. Testing here means we don't
5008 risk generating incorrect code below. */
5009 if (TREE_UNSIGNED (type))
5012 /* First try to do it with a special abs instruction. */
5013 temp = expand_unop (mode, abs_optab, op0, target, 0);
5017 /* If this machine has expensive jumps, we can do integer absolute
5018 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5019 where W is the width of MODE. */
5021 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5023 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5024 size_int (GET_MODE_BITSIZE (mode) - 1),
5027 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5030 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5037 /* If that does not win, use conditional jump and negate. */
5038 target = original_target;
5039 temp = gen_label_rtx ();
5040 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5041 || (GET_CODE (target) == REG
5042 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5043 target = gen_reg_rtx (mode);
5044 emit_move_insn (target, op0);
5045 emit_cmp_insn (target,
5046 expand_expr (convert (type, integer_zero_node),
5047 NULL_RTX, VOIDmode, 0),
5048 GE, NULL_RTX, mode, 0, 0);
5050 emit_jump_insn (gen_bge (temp));
5051 op0 = expand_unop (mode, neg_optab, target, target, 0);
5053 emit_move_insn (target, op0);
5060 target = original_target;
5061 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5062 || (GET_CODE (target) == REG
5063 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5064 target = gen_reg_rtx (mode);
5065 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5066 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5068 /* First try to do it with a special MIN or MAX instruction.
5069 If that does not win, use a conditional jump to select the proper
5071 this_optab = (TREE_UNSIGNED (type)
5072 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5073 : (code == MIN_EXPR ? smin_optab : smax_optab));
5075 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5081 emit_move_insn (target, op0);
5082 op0 = gen_label_rtx ();
5083 if (code == MAX_EXPR)
5084 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5085 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5086 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5088 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5089 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5090 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5091 if (temp == const0_rtx)
5092 emit_move_insn (target, op1);
5093 else if (temp != const_true_rtx)
5095 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5096 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5099 emit_move_insn (target, op1);
5104 /* ??? Can optimize when the operand of this is a bitwise operation,
5105 by using a different bitwise operation. */
5107 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5108 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5114 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5115 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5120 /* ??? Can optimize bitwise operations with one arg constant.
5121 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5122 and (a bitwise1 b) bitwise2 b (etc)
5123 but that is probably not worth while. */
5125 /* BIT_AND_EXPR is for bitwise anding.
5126 TRUTH_AND_EXPR is for anding two boolean values
5127 when we want in all cases to compute both of them.
5128 In general it is fastest to do TRUTH_AND_EXPR by
5129 computing both operands as actual zero-or-1 values
5130 and then bitwise anding. In cases where there cannot
5131 be any side effects, better code would be made by
5132 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5133 but the question is how to recognize those cases. */
5135 case TRUTH_AND_EXPR:
5137 this_optab = and_optab;
5140 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5143 this_optab = ior_optab;
5146 case TRUTH_XOR_EXPR:
5148 this_optab = xor_optab;
5155 preexpand_calls (exp);
5156 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5158 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5159 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5162 /* Could determine the answer when only additive constants differ.
5163 Also, the addition of one can be handled by changing the condition. */
5170 preexpand_calls (exp);
5171 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5174 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5175 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5177 && GET_CODE (original_target) == REG
5178 && (GET_MODE (original_target)
5179 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5181 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5182 if (temp != original_target)
5183 temp = copy_to_reg (temp);
5184 op1 = gen_label_rtx ();
5185 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5186 GET_MODE (temp), unsignedp, 0);
5187 emit_jump_insn (gen_beq (op1));
5188 emit_move_insn (temp, const1_rtx);
5192 /* If no set-flag instruction, must generate a conditional
5193 store into a temporary variable. Drop through
5194 and handle this like && and ||. */
5196 case TRUTH_ANDIF_EXPR:
5197 case TRUTH_ORIF_EXPR:
5198 if (target == 0 || ! safe_from_p (target, exp)
5199 /* Make sure we don't have a hard reg (such as function's return
5200 value) live across basic blocks, if not optimizing. */
5201 || (!optimize && GET_CODE (target) == REG
5202 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5203 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5204 emit_clr_insn (target);
5205 op1 = gen_label_rtx ();
5206 jumpifnot (exp, op1);
5207 emit_0_to_1_insn (target);
5211 case TRUTH_NOT_EXPR:
5212 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5213 /* The parser is careful to generate TRUTH_NOT_EXPR
5214 only with operands that are always zero or one. */
5215 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5216 target, 1, OPTAB_LIB_WIDEN);
5222 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5224 return expand_expr (TREE_OPERAND (exp, 1),
5225 (ignore ? const0_rtx : target),
5230 /* Note that COND_EXPRs whose type is a structure or union
5231 are required to be constructed to contain assignments of
5232 a temporary variable, so that we can evaluate them here
5233 for side effect only. If type is void, we must do likewise. */
5235 /* If an arm of the branch requires a cleanup,
5236 only that cleanup is performed. */
5239 tree binary_op = 0, unary_op = 0;
5240 tree old_cleanups = cleanups_this_call;
5241 cleanups_this_call = 0;
5243 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5244 convert it to our mode, if necessary. */
5245 if (integer_onep (TREE_OPERAND (exp, 1))
5246 && integer_zerop (TREE_OPERAND (exp, 2))
5247 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5249 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5250 if (GET_MODE (op0) == mode)
5253 target = gen_reg_rtx (mode);
5254 convert_move (target, op0, unsignedp);
5258 /* If we are not to produce a result, we have no target. Otherwise,
5259 if a target was specified use it; it will not be used as an
5260 intermediate target unless it is safe. If no target, use a
5263 if (mode == VOIDmode || ignore)
5265 else if (original_target
5266 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5267 temp = original_target;
5268 else if (mode == BLKmode)
5270 if (TYPE_SIZE (type) == 0
5271 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5273 temp = assign_stack_temp (BLKmode,
5274 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5275 + BITS_PER_UNIT - 1)
5276 / BITS_PER_UNIT, 0);
5279 temp = gen_reg_rtx (mode);
5281 /* Check for X ? A + B : A. If we have this, we can copy
5282 A to the output and conditionally add B. Similarly for unary
5283 operations. Don't do this if X has side-effects because
5284 those side effects might affect A or B and the "?" operation is
5285 a sequence point in ANSI. (We test for side effects later.) */
5287 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5288 && operand_equal_p (TREE_OPERAND (exp, 2),
5289 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5290 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5291 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5292 && operand_equal_p (TREE_OPERAND (exp, 1),
5293 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5294 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5295 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5296 && operand_equal_p (TREE_OPERAND (exp, 2),
5297 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5298 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5299 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5300 && operand_equal_p (TREE_OPERAND (exp, 1),
5301 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5302 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5304 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5305 operation, do this as A + (X != 0). Similarly for other simple
5306 binary operators. */
5307 if (singleton && binary_op
5308 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5309 && (TREE_CODE (binary_op) == PLUS_EXPR
5310 || TREE_CODE (binary_op) == MINUS_EXPR
5311 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5312 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5313 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5314 && integer_onep (TREE_OPERAND (binary_op, 1))
5315 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5318 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5319 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5320 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5321 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5324 /* If we had X ? A : A + 1, do this as A + (X == 0).
5326 We have to invert the truth value here and then put it
5327 back later if do_store_flag fails. We cannot simply copy
5328 TREE_OPERAND (exp, 0) to another variable and modify that
5329 because invert_truthvalue can modify the tree pointed to
5331 if (singleton == TREE_OPERAND (exp, 1))
5332 TREE_OPERAND (exp, 0)
5333 = invert_truthvalue (TREE_OPERAND (exp, 0));
5335 result = do_store_flag (TREE_OPERAND (exp, 0),
5336 (safe_from_p (temp, singleton)
5338 mode, BRANCH_COST <= 1);
5342 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5343 return expand_binop (mode, boptab, op1, result, temp,
5344 unsignedp, OPTAB_LIB_WIDEN);
5346 else if (singleton == TREE_OPERAND (exp, 1))
5347 TREE_OPERAND (exp, 0)
5348 = invert_truthvalue (TREE_OPERAND (exp, 0));
5352 op0 = gen_label_rtx ();
5354 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5358 /* If the target conflicts with the other operand of the
5359 binary op, we can't use it. Also, we can't use the target
5360 if it is a hard register, because evaluating the condition
5361 might clobber it. */
5363 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5364 || (GET_CODE (temp) == REG
5365 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5366 temp = gen_reg_rtx (mode);
5367 store_expr (singleton, temp, 0);
5370 expand_expr (singleton,
5371 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
5372 if (cleanups_this_call)
5374 sorry ("aggregate value in COND_EXPR");
5375 cleanups_this_call = 0;
5377 if (singleton == TREE_OPERAND (exp, 1))
5378 jumpif (TREE_OPERAND (exp, 0), op0);
5380 jumpifnot (TREE_OPERAND (exp, 0), op0);
5382 if (binary_op && temp == 0)
5383 /* Just touch the other operand. */
5384 expand_expr (TREE_OPERAND (binary_op, 1),
5385 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5387 store_expr (build (TREE_CODE (binary_op), type,
5388 make_tree (type, temp),
5389 TREE_OPERAND (binary_op, 1)),
5392 store_expr (build1 (TREE_CODE (unary_op), type,
5393 make_tree (type, temp)),
5398 /* This is now done in jump.c and is better done there because it
5399 produces shorter register lifetimes. */
5401 /* Check for both possibilities either constants or variables
5402 in registers (but not the same as the target!). If so, can
5403 save branches by assigning one, branching, and assigning the
5405 else if (temp && GET_MODE (temp) != BLKmode
5406 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5407 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5408 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5409 && DECL_RTL (TREE_OPERAND (exp, 1))
5410 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5411 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5412 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5413 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5414 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5415 && DECL_RTL (TREE_OPERAND (exp, 2))
5416 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5417 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5419 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5420 temp = gen_reg_rtx (mode);
5421 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5422 jumpifnot (TREE_OPERAND (exp, 0), op0);
5423 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5427 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5428 comparison operator. If we have one of these cases, set the
5429 output to A, branch on A (cse will merge these two references),
5430 then set the output to FOO. */
5432 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5433 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5434 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5435 TREE_OPERAND (exp, 1), 0)
5436 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5437 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5439 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5440 temp = gen_reg_rtx (mode);
5441 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5442 jumpif (TREE_OPERAND (exp, 0), op0);
5443 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5447 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5448 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5449 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5450 TREE_OPERAND (exp, 2), 0)
5451 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5452 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5454 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5455 temp = gen_reg_rtx (mode);
5456 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5457 jumpifnot (TREE_OPERAND (exp, 0), op0);
5458 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5463 op1 = gen_label_rtx ();
5464 jumpifnot (TREE_OPERAND (exp, 0), op0);
5466 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5468 expand_expr (TREE_OPERAND (exp, 1),
5469 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5470 if (cleanups_this_call)
5472 sorry ("aggregate value in COND_EXPR");
5473 cleanups_this_call = 0;
5477 emit_jump_insn (gen_jump (op1));
5481 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5483 expand_expr (TREE_OPERAND (exp, 2),
5484 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5487 if (cleanups_this_call)
5489 sorry ("aggregate value in COND_EXPR");
5490 cleanups_this_call = 0;
5496 cleanups_this_call = old_cleanups;
5502 /* Something needs to be initialized, but we didn't know
5503 where that thing was when building the tree. For example,
5504 it could be the return value of a function, or a parameter
5505 to a function which lays down in the stack, or a temporary
5506 variable which must be passed by reference.
5508 We guarantee that the expression will either be constructed
5509 or copied into our original target. */
5511 tree slot = TREE_OPERAND (exp, 0);
5514 if (TREE_CODE (slot) != VAR_DECL)
5519 if (DECL_RTL (slot) != 0)
5521 target = DECL_RTL (slot);
5522 /* If we have already expanded the slot, so don't do
5524 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5529 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5530 /* All temp slots at this level must not conflict. */
5531 preserve_temp_slots (target);
5532 DECL_RTL (slot) = target;
5536 /* I bet this needs to be done, and I bet that it needs to
5537 be above, inside the else clause. The reason is
5538 simple, how else is it going to get cleaned up? (mrs)
5540 The reason is probably did not work before, and was
5541 commented out is because this was re-expanding already
5542 expanded target_exprs (target == 0 and DECL_RTL (slot)
5543 != 0) also cleaning them up many times as well. :-( */
5545 /* Since SLOT is not known to the called function
5546 to belong to its stack frame, we must build an explicit
5547 cleanup. This case occurs when we must build up a reference
5548 to pass the reference as an argument. In this case,
5549 it is very likely that such a reference need not be
5552 if (TREE_OPERAND (exp, 2) == 0)
5553 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5554 if (TREE_OPERAND (exp, 2))
5555 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5556 cleanups_this_call);
5561 /* This case does occur, when expanding a parameter which
5562 needs to be constructed on the stack. The target
5563 is the actual stack address that we want to initialize.
5564 The function we call will perform the cleanup in this case. */
5566 DECL_RTL (slot) = target;
5569 exp1 = TREE_OPERAND (exp, 1);
5570 /* Mark it as expanded. */
5571 TREE_OPERAND (exp, 1) = NULL_TREE;
5573 return expand_expr (exp1, target, tmode, modifier);
5578 tree lhs = TREE_OPERAND (exp, 0);
5579 tree rhs = TREE_OPERAND (exp, 1);
5580 tree noncopied_parts = 0;
5581 tree lhs_type = TREE_TYPE (lhs);
5583 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5584 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5585 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5586 TYPE_NONCOPIED_PARTS (lhs_type));
5587 while (noncopied_parts != 0)
5589 expand_assignment (TREE_VALUE (noncopied_parts),
5590 TREE_PURPOSE (noncopied_parts), 0, 0);
5591 noncopied_parts = TREE_CHAIN (noncopied_parts);
5598 /* If lhs is complex, expand calls in rhs before computing it.
5599 That's so we don't compute a pointer and save it over a call.
5600 If lhs is simple, compute it first so we can give it as a
5601 target if the rhs is just a call. This avoids an extra temp and copy
5602 and that prevents a partial-subsumption which makes bad code.
5603 Actually we could treat component_ref's of vars like vars. */
5605 tree lhs = TREE_OPERAND (exp, 0);
5606 tree rhs = TREE_OPERAND (exp, 1);
5607 tree noncopied_parts = 0;
5608 tree lhs_type = TREE_TYPE (lhs);
5612 if (TREE_CODE (lhs) != VAR_DECL
5613 && TREE_CODE (lhs) != RESULT_DECL
5614 && TREE_CODE (lhs) != PARM_DECL)
5615 preexpand_calls (exp);
5617 /* Check for |= or &= of a bitfield of size one into another bitfield
5618 of size 1. In this case, (unless we need the result of the
5619 assignment) we can do this more efficiently with a
5620 test followed by an assignment, if necessary.
5622 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5623 things change so we do, this code should be enhanced to
5626 && TREE_CODE (lhs) == COMPONENT_REF
5627 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5628 || TREE_CODE (rhs) == BIT_AND_EXPR)
5629 && TREE_OPERAND (rhs, 0) == lhs
5630 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5631 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5632 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5634 rtx label = gen_label_rtx ();
5636 do_jump (TREE_OPERAND (rhs, 1),
5637 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5638 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5639 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5640 (TREE_CODE (rhs) == BIT_IOR_EXPR
5642 : integer_zero_node)),
5644 do_pending_stack_adjust ();
5649 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5650 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5651 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5652 TYPE_NONCOPIED_PARTS (lhs_type));
5654 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5655 while (noncopied_parts != 0)
5657 expand_assignment (TREE_PURPOSE (noncopied_parts),
5658 TREE_VALUE (noncopied_parts), 0, 0);
5659 noncopied_parts = TREE_CHAIN (noncopied_parts);
5664 case PREINCREMENT_EXPR:
5665 case PREDECREMENT_EXPR:
5666 return expand_increment (exp, 0);
5668 case POSTINCREMENT_EXPR:
5669 case POSTDECREMENT_EXPR:
5670 /* Faster to treat as pre-increment if result is not used. */
5671 return expand_increment (exp, ! ignore);
5674 /* Are we taking the address of a nested function? */
5675 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5676 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5678 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5679 op0 = force_operand (op0, target);
5683 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5684 (modifier == EXPAND_INITIALIZER
5685 ? modifier : EXPAND_CONST_ADDRESS));
5686 if (GET_CODE (op0) != MEM)
5689 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5690 return XEXP (op0, 0);
5691 op0 = force_operand (XEXP (op0, 0), target);
5693 if (flag_force_addr && GET_CODE (op0) != REG)
5694 return force_reg (Pmode, op0);
5697 case ENTRY_VALUE_EXPR:
5700 /* COMPLEX type for Extended Pascal & Fortran */
5703 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5707 /* Get the rtx code of the operands. */
5708 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5709 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5712 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5714 prev = get_last_insn ();
5716 /* Tell flow that the whole of the destination is being set. */
5717 if (GET_CODE (target) == REG)
5718 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5720 /* Move the real (op0) and imaginary (op1) parts to their location. */
5721 emit_move_insn (gen_realpart (mode, target), op0);
5722 emit_move_insn (gen_imagpart (mode, target), op1);
5724 /* Complex construction should appear as a single unit. */
5731 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5732 return gen_realpart (mode, op0);
5735 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5736 return gen_imagpart (mode, op0);
5740 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5744 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5747 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5749 prev = get_last_insn ();
5751 /* Tell flow that the whole of the destination is being set. */
5752 if (GET_CODE (target) == REG)
5753 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5755 /* Store the realpart and the negated imagpart to target. */
5756 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5758 imag_t = gen_imagpart (mode, target);
5759 temp = expand_unop (mode, neg_optab,
5760 gen_imagpart (mode, op0), imag_t, 0);
5762 emit_move_insn (imag_t, temp);
5764 /* Conjugate should appear as a single unit */
5774 return (*lang_expand_expr) (exp, target, tmode, modifier);
5777 /* Here to do an ordinary binary operator, generating an instruction
5778 from the optab already placed in `this_optab'. */
5780 preexpand_calls (exp);
5781 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5783 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5784 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5786 temp = expand_binop (mode, this_optab, op0, op1, target,
5787 unsignedp, OPTAB_LIB_WIDEN);
5793 /* Return the alignment in bits of EXP, a pointer valued expression.
5794 But don't return more than MAX_ALIGN no matter what.
5795 The alignment returned is, by default, the alignment of the thing that
5796 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5798 Otherwise, look at the expression to see if we can do better, i.e., if the
5799 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): this excerpt omits several original source lines (the
   embedded line numbers jump); braces, case labels, and parameter
   declarations for this function are partially missing below.  The
   visible code is reproduced verbatim.  */
5802 get_pointer_alignment (exp, max_align)
5806 unsigned align, inner;
/* A non-pointer EXP carries no alignment information: bail out.  */
5808 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Default: the declared alignment of the pointed-to type, capped.  */
5811 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5812 align = MIN (align, max_align);
/* Peel through the expression looking for a tighter bound.  */
5816 switch (TREE_CODE (exp))
5820 case NON_LVALUE_EXPR:
/* Conversions: keep the best of the outer and inner alignments.  */
5821 exp = TREE_OPERAND (exp, 0);
5822 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5824 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5825 inner = MIN (inner, max_align);
5826 align = MAX (align, inner);
5830 /* If sum of pointer + int, restrict our maximum alignment to that
5831 imposed by the integer. If not, we can't do any better than
5833 if (TREE_CODE (exp, 1)) != INTEGER_CST is checked next; a variable
offset defeats any tighter bound.  NOTE(review): the loop condition on
line 5836 is truncated in this excerpt -- its closing test is missing. */
5833 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5836 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5841 exp = TREE_OPERAND (exp, 0);
5845 /* See what we are pointing at and look at its alignment. */
5846 exp = TREE_OPERAND (exp, 0);
/* Functions align to FUNCTION_BOUNDARY; declarations ('d' class) use
   DECL_ALIGN; constants ('c' class) may get a target-specific boost.  */
5847 if (TREE_CODE (exp) == FUNCTION_DECL)
5848 align = MAX (align, FUNCTION_BOUNDARY)
5849 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5850 align = MAX (align, DECL_ALIGN (exp));
5851 #ifdef CONSTANT_ALIGNMENT
5852 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5853 align = CONSTANT_ALIGNMENT (exp, align);
/* Never report more than the caller's cap.  */
5855 return MIN (align, max_align);
5863 /* Return the tree node and offset if a given argument corresponds to
5864 a string constant. */
/* NOTE(review): lines are missing from this excerpt (embedded numbers
   jump); in particular the stores to *PTR_OFFSET in the PLUS_EXPR arms
   are not visible.  Visible code reproduced verbatim.
   Returns the STRING_CST node, with *PTR_OFFSET set to the byte offset
   into it; returns through a fall-out path (not visible) otherwise.  */
5867 string_constant (arg, ptr_offset)
/* Direct case: ARG is `&"literal"' -- offset is zero.  */
5873 if (TREE_CODE (arg) == ADDR_EXPR
5874 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5876 *ptr_offset = integer_zero_node;
5877 return TREE_OPERAND (arg, 0);
/* Sum case: `&"literal" + offset', in either operand order.  */
5879 else if (TREE_CODE (arg) == PLUS_EXPR)
5881 tree arg0 = TREE_OPERAND (arg, 0);
5882 tree arg1 = TREE_OPERAND (arg, 1);
5887 if (TREE_CODE (arg0) == ADDR_EXPR
5888 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5891 return TREE_OPERAND (arg0, 0);
5893 else if (TREE_CODE (arg1) == ADDR_EXPR
5894 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5897 return TREE_OPERAND (arg1, 0);
5904 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5905 way, because it could contain a zero byte in the middle.
5906 TREE_STRING_LENGTH is the size of the character array, not the string.
5908 Unfortunately, string_constant can't access the values of const char
5909 arrays with initializers, so neither can we do so here. */
/* NOTE(review): the function's signature line and several body lines
   (including the loop body at 5930 and early-return paths) are missing
   from this excerpt -- presumably `c_strlen (src)' returning a size
   tree, or 0 when the length is unknown.  Verify against the full
   source.  Visible code reproduced verbatim.  */
5919 src = string_constant (src, &offset_node);
5922 max = TREE_STRING_LENGTH (src);
5923 ptr = TREE_STRING_POINTER (src);
/* A non-constant offset: we can still succeed if the string has no
   embedded NUL, since then length = max - offset regardless.  */
5924 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5926 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5927 compute the offset to the following null if we don't know where to
5928 start searching for it. */
5930 for (i = 0; i < max; i++)
5933 /* We don't know the starting offset, but we do know that the string
5934 has no internal zero bytes. We can assume that the offset falls
5935 within the bounds of the string; otherwise, the programmer deserves
5936 what he gets. Subtract the offset from the length of the string,
5938 /* This would perhaps not be valid if we were dealing with named
5939 arrays in addition to literal string constants. */
5940 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5943 /* We have a known offset into the string. Start searching there for
5944 a null character. */
5945 if (offset_node == 0)
5949 /* Did we get a long long offset? If so, punt. */
5950 if (TREE_INT_CST_HIGH (offset_node) != 0)
5952 offset = TREE_INT_CST_LOW (offset_node);
5954 /* If the offset is known to be out of bounds, warn, and call strlen at
5956 if (offset < 0 || offset > max)
5958 warning ("offset outside bounds of constant string");
5961 /* Use strlen to search for the first zero byte. Since any strings
5962 constructed with build_string will have nulls appended, we win even
5963 if we get handed something like (char[4])"abcd".
5965 Since OFFSET is our starting index into the string, no further
5966 calculation is needed. */
5967 return size_int (strlen (ptr + offset));
5970 /* Expand an expression EXP that calls a built-in function,
5971 with result going to TARGET if that's convenient
5972 (and in mode MODE if that's convenient).
5973 SUBTARGET may be used as the target for computing one of EXP's operands.
5974 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): this excerpt omits many original source lines (the
   embedded numbers jump), so braces, fall-out paths, and several
   declarations are not visible.  Visible code reproduced verbatim,
   except for three fixes noted inline:
     - line 6047: comment typo "the the" -> "to the";
     - line 6164: mojibake `¤t...' restored to `&current...'
       (an HTML-entity corruption of "&curren");
     - line 6193: `tree_cons' was missing its third (chain) argument;
       restored to ELTS so the list accumulates, matching the later
       `nreverse (elts)' on line 6197.  TODO confirm against the
       original source.  */
5977 expand_builtin (exp, target, subtarget, mode, ignore)
5981 enum machine_mode mode;
5984 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5985 tree arglist = TREE_OPERAND (exp, 1);
5988 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5989 optab builtin_optab;
/* Dispatch on which builtin this CALL_EXPR names.  */
5991 switch (DECL_FUNCTION_CODE (fndecl))
5996 /* build_function_call changes these into ABS_EXPR. */
6001 case BUILT_IN_FSQRT:
6002 /* If not optimizing, call the library function. */
6007 /* Arg could be wrong type if user redeclared this fcn wrong. */
6008 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6009 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
6011 /* Stabilize and compute the argument. */
6012 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6013 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
/* Copy nodes so the save_expr does not clobber a shared tree.  */
6015 exp = copy_node (exp);
6016 arglist = copy_node (arglist);
6017 TREE_OPERAND (exp, 1) = arglist;
6018 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6020 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6022 /* Make a suitable register to place result in. */
6023 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
/* Pick the optab for the specific math builtin.  */
6028 switch (DECL_FUNCTION_CODE (fndecl))
6031 builtin_optab = sin_optab; break;
6033 builtin_optab = cos_optab; break;
6034 case BUILT_IN_FSQRT:
6035 builtin_optab = sqrt_optab; break;
6040 /* Compute into TARGET.
6041 Set TARGET to wherever the result comes back. */
6042 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6043 builtin_optab, op0, target, 0);
6045 /* If we were unable to expand via the builtin, stop the
6046 sequence (without outputting the insns) and break, causing
6047 a call to the library function. */
6054 /* Check the results by default. But if flag_fast_math is turned on,
6055 then assume sqrt will always be called with valid arguments. */
6057 if (! flag_fast_math)
6059 /* Don't define the builtin FP instructions
6060 if your machine is not IEEE. */
6061 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6064 lab1 = gen_label_rtx ();
6066 /* Test the result; if it is NaN, set errno=EDOM because
6067 the argument was not in the domain. */
6068 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6069 emit_jump_insn (gen_beq (lab1));
6073 #ifdef GEN_ERRNO_RTX
6074 rtx errno_rtx = GEN_ERRNO_RTX;
6077 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6080 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6083 /* We can't set errno=EDOM directly; let the library call do it.
6084 Pop the arguments right away in case the call gets deleted. */
6086 expand_call (exp, target, 0);
6093 /* Output the entire sequence. */
6094 insns = get_insns ();
6100 case BUILT_IN_SAVEREGS:
6101 /* Don't do __builtin_saveregs more than once in a function.
6102 Save the result of the first call and reuse it. */
6103 if (saveregs_value != 0)
6104 return saveregs_value;
6106 /* When this function is called, it means that registers must be
6107 saved on entry to this function. So we migrate the
6108 call to the first insn of this function. */
6111 rtx valreg, saved_valreg;
6113 /* Now really call the function. `expand_call' does not call
6114 expand_builtin, so there is no danger of infinite recursion here. */
6117 #ifdef EXPAND_BUILTIN_SAVEREGS
6118 /* Do whatever the machine needs done in this case. */
6119 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6121 /* The register where the function returns its value
6122 is likely to have something else in it, such as an argument.
6123 So preserve that register around the call. */
6124 if (value_mode != VOIDmode)
6126 valreg = hard_libcall_value (value_mode);
6127 saved_valreg = gen_reg_rtx (value_mode);
6128 emit_move_insn (saved_valreg, valreg);
6131 /* Generate the call, putting the value in a pseudo. */
6132 temp = expand_call (exp, target, ignore);
6134 if (value_mode != VOIDmode)
6135 emit_move_insn (valreg, saved_valreg);
6141 saveregs_value = temp;
6143 /* This won't work inside a SEQUENCE--it really has to be
6144 at the start of the function. */
6145 if (in_sequence_p ())
6147 /* Better to do this than to crash. */
6148 error ("`va_start' used within `({...})'");
6152 /* Put the sequence after the NOTE that starts the function. */
6153 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6157 /* __builtin_args_info (N) returns word N of the arg space info
6158 for the current function. The number and meanings of words
6159 is controlled by the definition of CUMULATIVE_ARGS. */
6160 case BUILT_IN_ARGS_INFO:
6162 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
/* Fixed: was mojibake `¤t_function_args_info' -- an HTML-entity
   corruption of the `&curren' prefix of the line below.  */
6164 int *word_ptr = (int *) &current_function_args_info;
6165 tree type, elts, result;
/* Sanity check: the word-at-a-time view below requires CUMULATIVE_ARGS
   to be an exact multiple of int in size.  */
6167 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6168 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6169 __FILE__, __LINE__);
6173 tree arg = TREE_VALUE (arglist);
6174 if (TREE_CODE (arg) != INTEGER_CST)
6175 error ("argument of `__builtin_args_info' must be constant");
6178 int wordnum = TREE_INT_CST_LOW (arg);
6180 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6181 error ("argument of `__builtin_args_info' out of range");
6183 return GEN_INT (word_ptr[wordnum]);
6187 error ("missing argument in `__builtin_args_info'");
/* No argument: build a constant array of all the info words.
   Fixed: tree_cons takes (purpose, value, chain); the chain argument
   ELTS had been dropped, which would break list accumulation and the
   nreverse below.  */
6192 for (i = 0; i < nwords; i++)
6193 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
6195 type = build_array_type (integer_type_node,
6196 build_index_type (build_int_2 (nwords, 0)));
6197 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6198 TREE_CONSTANT (result) = 1;
6199 TREE_STATIC (result) = 1;
6200 result = build (INDIRECT_REF, build_pointer_type (type), result);
6201 TREE_CONSTANT (result) = 1;
6202 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6206 /* Return the address of the first anonymous stack arg. */
6207 case BUILT_IN_NEXT_ARG:
6209 tree fntype = TREE_TYPE (current_function_decl);
/* Require a prototype ending in `...' -- i.e. a varargs function.  */
6210 if (!(TYPE_ARG_TYPES (fntype) != 0
6211 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6212 != void_type_node)))
6214 error ("`va_start' used in function with fixed args");
6219 return expand_binop (Pmode, add_optab,
6220 current_function_internal_arg_pointer,
6221 current_function_arg_offset_rtx,
6222 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6224 case BUILT_IN_CLASSIFY_TYPE:
6227 tree type = TREE_TYPE (TREE_VALUE (arglist));
6228 enum tree_code code = TREE_CODE (type);
/* Map the tree code of the argument's type to the language-independent
   type classes declared in typeclass.h.  */
6229 if (code == VOID_TYPE)
6230 return GEN_INT (void_type_class);
6231 if (code == INTEGER_TYPE)
6232 return GEN_INT (integer_type_class);
6233 if (code == CHAR_TYPE)
6234 return GEN_INT (char_type_class);
6235 if (code == ENUMERAL_TYPE)
6236 return GEN_INT (enumeral_type_class);
6237 if (code == BOOLEAN_TYPE)
6238 return GEN_INT (boolean_type_class);
6239 if (code == POINTER_TYPE)
6240 return GEN_INT (pointer_type_class);
6241 if (code == REFERENCE_TYPE)
6242 return GEN_INT (reference_type_class);
6243 if (code == OFFSET_TYPE)
6244 return GEN_INT (offset_type_class);
6245 if (code == REAL_TYPE)
6246 return GEN_INT (real_type_class);
6247 if (code == COMPLEX_TYPE)
6248 return GEN_INT (complex_type_class);
6249 if (code == FUNCTION_TYPE)
6250 return GEN_INT (function_type_class);
6251 if (code == METHOD_TYPE)
6252 return GEN_INT (method_type_class);
6253 if (code == RECORD_TYPE)
6254 return GEN_INT (record_type_class);
6255 if (code == UNION_TYPE)
6256 return GEN_INT (union_type_class);
6257 if (code == ARRAY_TYPE)
6258 return GEN_INT (array_type_class);
6259 if (code == STRING_TYPE)
6260 return GEN_INT (string_type_class);
6261 if (code == SET_TYPE)
6262 return GEN_INT (set_type_class);
6263 if (code == FILE_TYPE)
6264 return GEN_INT (file_type_class);
6265 if (code == LANG_TYPE)
6266 return GEN_INT (lang_type_class);
6268 return GEN_INT (no_type_class);
6270 case BUILT_IN_CONSTANT_P:
/* True iff the argument is a constant node ('c' tree-code class).  */
6274 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6275 ? const1_rtx : const0_rtx);
6277 case BUILT_IN_FRAME_ADDRESS:
6278 /* The argument must be a nonnegative integer constant.
6279 It counts the number of frames to scan up the stack.
6280 The value is the address of that frame. */
6281 case BUILT_IN_RETURN_ADDRESS:
6282 /* The argument must be a nonnegative integer constant.
6283 It counts the number of frames to scan up the stack.
6284 The value is the return address saved in that frame. */
6286 /* Warning about missing arg was already issued. */
6288 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6290 error ("invalid arg to `__builtin_return_address'");
6293 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6295 error ("invalid arg to `__builtin_return_address'");
6300 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6301 rtx tem = frame_pointer_rtx;
6304 /* Some machines need special handling before we can access arbitrary
6305 frames. For example, on the sparc, we must first flush all
6306 register windows to the stack. */
6307 #ifdef SETUP_FRAME_ADDRESSES
6308 SETUP_FRAME_ADDRESSES ();
6311 /* On the sparc, the return address is not in the frame, it is
6312 in a register. There is no way to access it off of the current
6313 frame pointer, but it can be accessed off the previous frame
6314 pointer by reading the value from the register window save
6316 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6317 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6321 /* Scan back COUNT frames to the specified frame. */
6322 for (i = 0; i < count; i++)
6324 /* Assume the dynamic chain pointer is in the word that
6325 the frame address points to, unless otherwise specified. */
6326 #ifdef DYNAMIC_CHAIN_ADDRESS
6327 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6329 tem = memory_address (Pmode, tem);
6330 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6333 /* For __builtin_frame_address, return what we've got. */
6334 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6337 /* For __builtin_return_address,
6338 Get the return address from that frame. */
6339 #ifdef RETURN_ADDR_RTX
6340 return RETURN_ADDR_RTX (count, tem);
6342 tem = memory_address (Pmode,
6343 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6344 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6348 case BUILT_IN_ALLOCA:
6350 /* Arg could be non-integer if user redeclared this fcn wrong. */
6351 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6353 current_function_calls_alloca = 1;
6354 /* Compute the argument. */
6355 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6357 /* Allocate the desired space. */
6358 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6360 /* Record the new stack level for nonlocal gotos. */
6361 if (nonlocal_goto_handler_slot != 0)
6362 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6366 /* If not optimizing, call the library function. */
6371 /* Arg could be non-integer if user redeclared this fcn wrong. */
6372 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6375 /* Compute the argument. */
6376 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6377 /* Compute ffs, into TARGET if possible.
6378 Set TARGET to wherever the result comes back. */
6379 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6380 ffs_optab, op0, target, 1);
6385 case BUILT_IN_STRLEN:
6386 /* If not optimizing, call the library function. */
6391 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6392 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6396 tree src = TREE_VALUE (arglist);
6397 tree len = c_strlen (src);
6400 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6402 rtx result, src_rtx, char_rtx;
6403 enum machine_mode insn_mode = value_mode, char_mode;
6404 enum insn_code icode;
6406 /* If the length is known, just return it. */
6408 return expand_expr (len, target, mode, 0);
6410 /* If SRC is not a pointer type, don't do this operation inline. */
6414 /* Call a function if we can't compute strlen in the right mode. */
6416 while (insn_mode != VOIDmode)
6418 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6419 if (icode != CODE_FOR_nothing)
6422 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6424 if (insn_mode == VOIDmode)
6427 /* Make a place to write the result of the instruction. */
6430 && GET_CODE (result) == REG
6431 && GET_MODE (result) == insn_mode
6432 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6433 result = gen_reg_rtx (insn_mode);
6435 /* Make sure the operands are acceptable to the predicates. */
6437 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6438 result = gen_reg_rtx (insn_mode);
6440 src_rtx = memory_address (BLKmode,
6441 expand_expr (src, NULL_RTX, Pmode,
6443 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6444 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6446 char_rtx = const0_rtx;
6447 char_mode = insn_operand_mode[(int)icode][2];
6448 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6449 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6451 emit_insn (GEN_FCN (icode) (result,
6452 gen_rtx (MEM, BLKmode, src_rtx),
6453 char_rtx, GEN_INT (align)));
6455 /* Return the value in the proper mode for this function. */
6456 if (GET_MODE (result) == value_mode)
6458 else if (target != 0)
6460 convert_move (target, result, 0);
6464 return convert_to_mode (value_mode, result, 0);
6467 case BUILT_IN_STRCPY:
6468 /* If not optimizing, call the library function. */
6473 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6474 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6475 || TREE_CHAIN (arglist) == 0
6476 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6480 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
/* Count the trailing NUL, then fall into the memcpy path below with
   the length appended as a third argument.  */
6485 len = size_binop (PLUS_EXPR, len, integer_one_node);
6487 chainon (arglist, build_tree_list (NULL_TREE, len));
6491 case BUILT_IN_MEMCPY:
6492 /* If not optimizing, call the library function. */
6497 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6498 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6499 || TREE_CHAIN (arglist) == 0
6500 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6501 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6502 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6506 tree dest = TREE_VALUE (arglist);
6507 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6508 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6511 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6513 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6514 rtx dest_rtx, dest_mem, src_mem;
6516 /* If either SRC or DEST is not a pointer type, don't do
6517 this operation in-line. */
6518 if (src_align == 0 || dest_align == 0)
/* Undo the extra length argument strcpy chained on above, so the
   library call sees the original two-argument form.  */
6520 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6521 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6525 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6526 dest_mem = gen_rtx (MEM, BLKmode,
6527 memory_address (BLKmode, dest_rtx));
6528 src_mem = gen_rtx (MEM, BLKmode,
6529 memory_address (BLKmode,
6530 expand_expr (src, NULL_RTX,
6534 /* Copy word part most expediently. */
6535 emit_block_move (dest_mem, src_mem,
6536 expand_expr (len, NULL_RTX, VOIDmode, 0),
6537 MIN (src_align, dest_align));
6541 /* These comparison functions need an instruction that returns an actual
6542 index. An ordinary compare that just sets the condition codes
6544 #ifdef HAVE_cmpstrsi
6545 case BUILT_IN_STRCMP:
6546 /* If not optimizing, call the library function. */
6551 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6552 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6553 || TREE_CHAIN (arglist) == 0
6554 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6556 else if (!HAVE_cmpstrsi)
6559 tree arg1 = TREE_VALUE (arglist);
6560 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6564 len = c_strlen (arg1);
6566 len = size_binop (PLUS_EXPR, integer_one_node, len);
6567 len2 = c_strlen (arg2);
6569 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6571 /* If we don't have a constant length for the first, use the length
6572 of the second, if we know it. We don't require a constant for
6573 this case; some cost analysis could be done if both are available
6574 but neither is constant. For now, assume they're equally cheap.
6576 If both strings have constant lengths, use the smaller. This
6577 could arise if optimization results in strcpy being called with
6578 two fixed strings, or if the code was machine-generated. We should
6579 add some code to the `memcmp' handler below to deal with such
6580 situations, someday. */
6581 if (!len || TREE_CODE (len) != INTEGER_CST)
6588 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6590 if (tree_int_cst_lt (len2, len))
/* Append the chosen length and fall into the memcmp path.  */
6594 chainon (arglist, build_tree_list (NULL_TREE, len));
6598 case BUILT_IN_MEMCMP:
6599 /* If not optimizing, call the library function. */
6604 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6605 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6606 || TREE_CHAIN (arglist) == 0
6607 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6608 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6609 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6611 else if (!HAVE_cmpstrsi)
6614 tree arg1 = TREE_VALUE (arglist);
6615 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6616 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6620 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6622 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6623 enum machine_mode insn_mode
6624 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6626 /* If we don't have POINTER_TYPE, call the function. */
6627 if (arg1_align == 0 || arg2_align == 0)
6629 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6630 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6634 /* Make a place to write the result of the instruction. */
6637 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6638 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6639 result = gen_reg_rtx (insn_mode);
6641 emit_insn (gen_cmpstrsi (result,
6642 gen_rtx (MEM, BLKmode,
6643 expand_expr (arg1, NULL_RTX, Pmode,
6645 gen_rtx (MEM, BLKmode,
6646 expand_expr (arg2, NULL_RTX, Pmode,
6648 expand_expr (len, NULL_RTX, VOIDmode, 0),
6649 GEN_INT (MIN (arg1_align, arg2_align))));
6651 /* Return the value in the proper mode for this function. */
6652 mode = TYPE_MODE (TREE_TYPE (exp));
6653 if (GET_MODE (result) == mode)
6655 else if (target != 0)
6657 convert_move (target, result, 0);
6661 return convert_to_mode (mode, result, 0);
6664 case BUILT_IN_STRCMP:
6665 case BUILT_IN_MEMCMP:
6669 default: /* just do library call, if unknown builtin */
6670 error ("built-in function `%s' not currently supported",
6671 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6674 /* The switch statement above can drop through to cause the function
6675 to be called normally. */
6677 return expand_call (exp, target, ignore);
6680 /* Expand code for a post- or pre- increment or decrement
6681 and return the RTX for the result.
6682 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
/* NOTE(review): this excerpt omits some original source lines (the
   embedded numbers jump); visible code reproduced verbatim.  Two
   comment typos fixed below ("intead" -> "instead", "know" ->
   "known").  */
6685 expand_increment (exp, post)
6689 register rtx op0, op1;
6690 register rtx temp, value;
6691 register tree incremented = TREE_OPERAND (exp, 0);
6692 optab this_optab = add_optab;
6694 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6695 int op0_is_copy = 0;
6697 /* Stabilize any component ref that might need to be
6698 evaluated more than once below. */
6700 || TREE_CODE (incremented) == BIT_FIELD_REF
6701 || (TREE_CODE (incremented) == COMPONENT_REF
6702 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6703 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6704 incremented = stabilize_reference (incremented);
6706 /* Compute the operands as RTX.
6707 Note whether OP0 is the actual lvalue or a copy of it:
6708 I believe it is a copy iff it is a register or subreg
6709 and insns were generated in computing it. */
6711 temp = get_last_insn ();
6712 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6714 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6715 in place but instead must do sign- or zero-extension during assignment,
6716 so we copy it into a new register and let the code below use it as
6719 Note that we can safely modify this SUBREG since it is known not to be
6720 shared (it was made by the expand_expr call above). */
6722 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6723 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6725 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6726 && temp != get_last_insn ());
6727 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6729 /* Decide whether incrementing or decrementing. */
6730 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6731 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6732 this_optab = sub_optab;
6734 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6735 then we cannot just increment OP0. We must therefore contrive to
6736 increment the original value. Then, for postincrement, we can return
6737 OP0 since it is a copy of the old value. For preincrement, we want
6738 to always expand here, since this generates better or equivalent code. */
6739 if (!post || op0_is_copy)
6741 /* This is the easiest way to increment the value wherever it is.
6742 Problems with multiple evaluation of INCREMENTED are prevented
6743 because either (1) it is a component_ref or preincrement,
6744 in which case it was stabilized above, or (2) it is an array_ref
6745 with constant index in an array in a register, which is
6746 safe to reevaluate. */
6747 tree newexp = build ((this_optab == add_optab
6748 ? PLUS_EXPR : MINUS_EXPR),
6751 TREE_OPERAND (exp, 1));
6752 temp = expand_assignment (incremented, newexp, ! post, 0);
6753 return post ? op0 : temp;
6756 /* Convert decrement by a constant into a negative increment. */
6757 if (this_optab == sub_optab
6758 && GET_CODE (op1) == CONST_INT)
6760 op1 = GEN_INT (- INTVAL (op1));
6761 this_optab = add_optab;
6766 /* We have a true reference to the value in OP0.
6767 If there is an insn to add or subtract in this mode, queue it. */
6769 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6770 op0 = stabilize (op0);
6773 icode = (int) this_optab->handlers[(int) mode].insn_code;
6774 if (icode != (int) CODE_FOR_nothing
6775 /* Make sure that OP0 is valid for operands 0 and 1
6776 of the insn we want to queue. */
6777 && (*insn_operand_predicate[icode][0]) (op0, mode)
6778 && (*insn_operand_predicate[icode][1]) (op0, mode))
6780 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6781 op1 = force_reg (mode, op1);
6783 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6787 /* Preincrement, or we can't increment with one simple insn. */
6789 /* Save a copy of the value before inc or dec, to return it later. */
6790 temp = value = copy_to_reg (op0);
6792 /* Arrange to return the incremented value. */
6793 /* Copy the rtx because expand_binop will protect from the queue,
6794 and the results of that would be invalid for us to return
6795 if our caller does emit_queue before using our result. */
6796 temp = copy_rtx (value = op0);
6798 /* Increment however we can. */
6799 op1 = expand_binop (mode, this_optab, value, op1, op0,
6800 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6801 /* Make sure the value is stored into OP0. */
6803 emit_move_insn (op0, op1);
6808 /* Expand all function calls contained within EXP, innermost ones first.
6809 But don't look within expressions that have sequence points.
6810 For each CALL_EXPR, record the rtx for its value
6811 in the CALL_EXPR_RTL field. */
/* NOTE(review): this excerpt omits some original source lines (the
   embedded numbers jump); visible code reproduced verbatim.  */
6814 preexpand_calls (exp)
6817 register int nops, i;
/* TYPE here holds a tree-code CLASS character, not a type node.  */
6818 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6820 if (! do_preexpand_calls)
6823 /* Only expressions and references can contain calls. */
6825 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6828 switch (TREE_CODE (exp))
6831 /* Do nothing if already expanded. */
6832 if (CALL_EXPR_RTL (exp) != 0)
6835 /* Do nothing to built-in functions. */
6836 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6837 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6838 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6839 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6844 case TRUTH_ANDIF_EXPR:
6845 case TRUTH_ORIF_EXPR:
6846 /* If we find one of these, then we can be sure
6847 the adjust will be done for it (since it makes jumps).
6848 Do it now, so that if this is inside an argument
6849 of a function, we don't get the stack adjustment
6850 after some other args have already been pushed. */
6851 do_pending_stack_adjust ();
6856 case WITH_CLEANUP_EXPR:
/* A SAVE_EXPR already expanded must not be re-walked.  */
6860 if (SAVE_EXPR_RTL (exp) != 0)
/* Default: recurse into each operand that can itself contain calls.  */
6864 nops = tree_code_length[(int) TREE_CODE (exp)];
6865 for (i = 0; i < nops; i++)
6866 if (TREE_OPERAND (exp, i) != 0)
6868 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6869 if (type == 'e' || type == '<' || type == '1' || type == '2'
6871 preexpand_calls (TREE_OPERAND (exp, i));
6875 /* At the start of a function, record that we have no previously-pushed
6876 arguments waiting to be popped. */
6879 init_pending_stack_adjust ()
6881 pending_stack_adjust = 0;
6884 /* When exiting from function, if safe, clear out any pending stack adjust
6885 so the adjustment won't get done. */
6888 clear_pending_stack_adjust ()
6890 #ifdef EXIT_IGNORE_STACK
/* Only safe when the epilogue ignores the stack pointer and the
   function cannot be inlined (inlining would splice this body into a
   caller where the adjustment still matters).  */
6891 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6892 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6893 && ! flag_inline_functions)
6894 pending_stack_adjust = 0;
6898 /* Pop any previously-pushed arguments that have not been popped yet. */
6901 do_pending_stack_adjust ()
/* Deferred pops are disabled while inhibit_defer_pop is nonzero.  */
6903 if (inhibit_defer_pop == 0)
6905 if (pending_stack_adjust != 0)
6906 adjust_stack (GEN_INT (pending_stack_adjust));
6907 pending_stack_adjust = 0;
6911 /* Expand all cleanups up to OLD_CLEANUPS.
6912 Needed here, and also for language-dependent calls. */
6915 expand_cleanups_to (old_cleanups)
/* Walk the cleanups_this_call list, emitting each cleanup expression
   for side effects, until reaching the caller's saved list head.  */
6918 while (cleanups_this_call != old_cleanups)
6920 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6921 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6925 /* Expand conditional expressions. */
6927 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6928 LABEL is an rtx of code CODE_LABEL, in this function and all the
/* Thin wrapper: a null true-label means "fall through when nonzero", so
   only the false (zero) outcome branches.  */
6932 jumpifnot (exp, label)
6936 do_jump (exp, label, NULL_RTX);
6939 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* Mirror of jumpifnot: null false-label, so only the nonzero outcome
   branches.  (The `jumpif (exp, label)' header line is elided from this
   excerpt.)  */
6946 do_jump (exp, NULL_RTX, label);
6949 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6950 the result is zero, or IF_TRUE_LABEL if the result is one.
6951 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6952 meaning fall through in that case.
6954 do_jump always does any pending stack adjust except when it does not
6955 actually perform a jump. An example where there is no jump
6956 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6958 This function is responsible for optimizing cases such as
6959 &&, || and comparison operators in EXP. */
6962 do_jump (exp, if_false_label, if_true_label)
6964 rtx if_false_label, if_true_label;
6966 register enum tree_code code = TREE_CODE (exp);
6967 /* Some cases need to create a label to jump to
6968 in order to properly fall through.
6969 These cases set DROP_THROUGH_LABEL nonzero. */
6970 rtx drop_through_label = 0;
/* Dispatch on CODE.  Many cases reduce EXP and recurse into do_jump;
   the comparison cases instead set COMPARISON (an rtx condition) and
   fall out to the shared branch-emission code near the bottom.  Several
   case labels are elided from this excerpt.  */
/* Compile-time integer constant: the branch direction is known now.
   TEMP is the label to take, or zero meaning fall through.  */
6984 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6990 /* This is not true with #pragma weak */
6992 /* The address of something can never be zero. */
6994 emit_jump (if_true_label);
/* Conversions (NOP/CONVERT and friends): taking the address-requiring
   forms aside, a widening conversion cannot change zero vs. nonzero,
   so jump on the operand directly.  */
6999 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7000 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7001 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
7004 /* If we are narrowing the operand, we have to do the compare in the
7006 if ((TYPE_PRECISION (TREE_TYPE (exp))
7007 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7009 case NON_LVALUE_EXPR:
7010 case REFERENCE_EXPR:
7015 /* These cannot change zero->non-zero or vice versa. */
7016 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7020 /* This is never less insns than evaluating the PLUS_EXPR followed by
7021 a test and can be longer if the test is eliminated. */
7023 /* Reduce to minus. */
7024 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7025 TREE_OPERAND (exp, 0),
7026 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7027 TREE_OPERAND (exp, 1))));
7028 /* Process as MINUS. */
7032 /* Non-zero iff operands of minus differ. */
7033 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7034 TREE_OPERAND (exp, 0),
7035 TREE_OPERAND (exp, 1)),
7040 /* If we are AND'ing with a small constant, do this comparison in the
7041 smallest type that fits. If the machine doesn't have comparisons
7042 that small, it will be converted back to the wider comparison.
7043 This helps if we are testing the sign bit of a narrower object.
7044 combine can't do this for us because it can't know whether a
7045 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7047 if (! SLOW_BYTE_ACCESS
7048 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7049 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7050 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7051 && (type = type_for_size (i + 1, 1)) != 0
7052 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7053 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7054 != CODE_FOR_nothing))
7056 do_jump (convert (type, exp), if_false_label, if_true_label);
/* Logical NOT just swaps the two target labels.  */
7061 case TRUTH_NOT_EXPR:
7062 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* Short-circuit && and ||: jump on the first operand, fall through to
   test the second.  A missing target label is replaced by
   DROP_THROUGH_LABEL, emitted at the bottom of this function.  */
7065 case TRUTH_ANDIF_EXPR:
7066 if (if_false_label == 0)
7067 if_false_label = drop_through_label = gen_label_rtx ();
7068 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7069 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7072 case TRUTH_ORIF_EXPR:
7073 if (if_true_label == 0)
7074 if_true_label = drop_through_label = gen_label_rtx ();
7075 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7076 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* Comma expression: first operand is evaluated only for side effects;
   the jump is decided by the second operand.  */
7080 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7083 do_pending_stack_adjust ();
7084 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* Bit-field style references: if the field fits a narrower mode the
   machine can compare, test it in that narrower type.  */
7091 int bitsize, bitpos, unsignedp;
7092 enum machine_mode mode;
7097 /* Get description of this reference. We don't actually care
7098 about the underlying object here. */
7099 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7100 &mode, &unsignedp, &volatilep);
7102 type = type_for_size (bitsize, unsignedp);
7103 if (! SLOW_BYTE_ACCESS
7104 && type != 0 && bitsize >= 0
7105 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7106 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7107 != CODE_FOR_nothing))
7109 do_jump (convert (type, exp), if_false_label, if_true_label);
7116 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7117 if (integer_onep (TREE_OPERAND (exp, 1))
7118 && integer_zerop (TREE_OPERAND (exp, 2)))
7119 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7121 else if (integer_zerop (TREE_OPERAND (exp, 1))
7122 && integer_onep (TREE_OPERAND (exp, 2)))
7123 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* General ?: -- test the condition, then emit jumps for each arm.
   Arms with no explicit target use DROP_THROUGH_LABEL.  */
7127 register rtx label1 = gen_label_rtx ();
7128 drop_through_label = gen_label_rtx ();
7129 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7130 /* Now the THEN-expression. */
7131 do_jump (TREE_OPERAND (exp, 1),
7132 if_false_label ? if_false_label : drop_through_label,
7133 if_true_label ? if_true_label : drop_through_label);
7134 /* In case the do_jump just above never jumps. */
7135 do_pending_stack_adjust ();
7136 emit_label (label1);
7137 /* Now the ELSE-expression. */
7138 do_jump (TREE_OPERAND (exp, 2),
7139 if_false_label ? if_false_label : drop_through_label,
7140 if_true_label ? if_true_label : drop_through_label);
/* Comparison operators: comparing against zero reduces to jumping on
   the operand itself; multiword integer modes with no compare insn go
   through the do_jump_by_parts_* routines; otherwise set COMPARISON
   for the shared emission code below.  */
7145 if (integer_zerop (TREE_OPERAND (exp, 1)))
7146 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7147 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7150 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7151 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7153 comparison = compare (exp, EQ, EQ);
7157 if (integer_zerop (TREE_OPERAND (exp, 1)))
7158 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7159 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7162 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7163 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7165 comparison = compare (exp, NE, NE);
7169 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7171 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7172 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7174 comparison = compare (exp, LT, LTU);
7178 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7180 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7181 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7183 comparison = compare (exp, LE, LEU);
7187 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7189 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7190 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7192 comparison = compare (exp, GT, GTU);
7196 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7198 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7199 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7201 comparison = compare (exp, GE, GEU);
/* Default: evaluate EXP to an rtx and compare that value against zero.  */
7206 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7208 /* This is not needed any more and causes poor code since it causes
7209 comparisons and tests from non-SI objects to have different code
7211 /* Copy to register to avoid generating bad insns by cse
7212 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7213 if (!cse_not_expected && GET_CODE (temp) == MEM)
7214 temp = copy_to_reg (temp);
7216 do_pending_stack_adjust ();
7217 if (GET_CODE (temp) == CONST_INT)
7218 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7219 else if (GET_CODE (temp) == LABEL_REF)
7220 comparison = const_true_rtx;
7221 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7222 && !can_compare_p (GET_MODE (temp)))
7223 /* Note swapping the labels gives us not-equal. */
7224 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7225 else if (GET_MODE (temp) != VOIDmode)
7226 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7227 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7228 GET_MODE (temp), NULL_RTX, 0);
7233 /* Do any postincrements in the expression that was tested. */
7236 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7237 straight into a conditional jump instruction as the jump condition.
7238 Otherwise, all the work has been done already. */
/* A constant COMPARISON means the outcome is known: emit at most one
   unconditional jump (or none, when the corresponding label is null).  */
7240 if (comparison == const_true_rtx)
7243 emit_jump (if_true_label);
7245 else if (comparison == const0_rtx)
7248 emit_jump (if_false_label);
7250 else if (comparison)
7251 do_jump_for_compare (comparison, if_false_label, if_true_label);
7255 if (drop_through_label)
7257 /* If do_jump produces code that might be jumped around,
7258 do any stack adjusts from that code, before the place
7259 where control merges in. */
7260 do_pending_stack_adjust ();
7261 emit_label (drop_through_label);
7265 /* Given a comparison expression EXP for values too wide to be compared
7266 with one insn, test the comparison and jump to the appropriate label.
7267 The code of EXP is ignored; we always test GT if SWAP is 0,
7268 and LT if SWAP is 1. */
7271 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7274 rtx if_false_label, if_true_label;
/* SWAP selects which operand is treated as the left-hand side, which
   turns the fixed GT test into an LT test.  */
7276 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7277 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7278 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7279 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7280 rtx drop_through_label = 0;
7281 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* Absent labels are replaced by a drop-through label emitted at the
   end, so each outcome always has somewhere to branch.  */
7284 if (! if_true_label || ! if_false_label)
7285 drop_through_label = gen_label_rtx ();
7286 if (! if_true_label)
7287 if_true_label = drop_through_label;
7288 if (! if_false_label)
7289 if_false_label = drop_through_label;
7291 /* Compare a word at a time, high order first. */
7292 for (i = 0; i < nwords; i++)
7295 rtx op0_word, op1_word;
/* WORDS_BIG_ENDIAN decides which subword index holds the most
   significant word; the loop always visits high-order words first.  */
7297 if (WORDS_BIG_ENDIAN)
7299 op0_word = operand_subword_force (op0, i, mode);
7300 op1_word = operand_subword_force (op1, i, mode);
7304 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7305 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7308 /* All but high-order word must be compared as unsigned. */
7309 comp = compare_from_rtx (op0_word, op1_word,
7310 (unsignedp || i > 0) ? GTU : GT,
7311 unsignedp, word_mode, NULL_RTX, 0);
7312 if (comp == const_true_rtx)
7313 emit_jump (if_true_label);
7314 else if (comp != const0_rtx)
7315 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7317 /* Consider lower words only if these are equal. */
7318 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7320 if (comp == const_true_rtx)
7321 emit_jump (if_false_label);
7322 else if (comp != const0_rtx)
7323 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* Fell out of the loop: every word compared equal, so the strict
   greater-than relation is false.  */
7327 emit_jump (if_false_label);
7328 if (drop_through_label)
7329 emit_label (drop_through_label);
7332 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7333 with one insn, test the comparison and jump to the appropriate label. */
7336 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7338 rtx if_false_label, if_true_label;
7340 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7341 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7342 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7343 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7345 rtx drop_through_label = 0;
/* Only the false label needs a substitute: each per-word mismatch
   branches there, and overall equality falls out of the loop.  */
7347 if (! if_false_label)
7348 drop_through_label = if_false_label = gen_label_rtx ();
/* Compare corresponding words; any inequality decides the answer.  */
7350 for (i = 0; i < nwords; i++)
7352 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7353 operand_subword_force (op1, i, mode),
7354 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7355 word_mode, NULL_RTX, 0);
7356 if (comp == const_true_rtx)
7357 emit_jump (if_false_label);
7358 else if (comp != const0_rtx)
7359 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* All words matched: the values are equal.  */
7363 emit_jump (if_true_label);
7364 if (drop_through_label)
7365 emit_label (drop_through_label);
7368 /* Jump according to whether OP0 is 0.
7369 We assume that OP0 has an integer mode that is too wide
7370 for the available compare insns. */
7373 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7375 rtx if_false_label, if_true_label;
7377 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7379 rtx drop_through_label = 0;
/* As in do_jump_by_parts_equality: a missing false label becomes the
   drop-through label at the end.  */
7381 if (! if_false_label)
7382 drop_through_label = if_false_label = gen_label_rtx ();
/* Test each word against zero (unsigned EQ); any nonzero word means
   OP0 as a whole is nonzero.  */
7384 for (i = 0; i < nwords; i++)
7386 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7388 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7389 if (comp == const_true_rtx)
7390 emit_jump (if_false_label);
7391 else if (comp != const0_rtx)
7392 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Every word was zero: OP0 == 0.  */
7396 emit_jump (if_true_label);
7397 if (drop_through_label)
7398 emit_label (drop_through_label);
7401 /* Given a comparison expression in rtl form, output conditional branches to
7402 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7405 do_jump_for_compare (comparison, if_false_label, if_true_label)
7406 rtx comparison, if_false_label, if_true_label;
/* Easy case first: when a branch generator exists for this condition
   code, branch directly to the true label; a following unconditional
   jump (below) covers the false label if one was given.  */
7410 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7411 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7416 emit_jump (if_false_label);
7418 else if (if_false_label)
7421 rtx prev = PREV_INSN (get_last_insn ());
7424 /* Output the branch with the opposite condition. Then try to invert
7425 what is generated. If more than one insn is a branch, or if the
7426 branch is not the last insn written, abort. If we can't invert
7427 the branch, emit make a true label, redirect this jump to that,
7428 emit a jump to the false label and define the true label. */
7430 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7431 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7435 /* Here we get the insn before what was just emitted.
7436 On some machines, emitting the branch can discard
7437 the previous compare insn and emit a replacement. */
7439 /* If there's only one preceding insn... */
7440 insn = get_insns ();
7442 insn = NEXT_INSN (prev);
/* Scan forward for the JUMP_INSN that was just emitted; it must be
   the last insn in the stream or the invariants above are violated.  */
7444 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7445 if (GET_CODE (insn) == JUMP_INSN)
7452 if (branch != get_last_insn ())
7455 if (! invert_jump (branch, if_false_label))
/* Inversion failed: route the uninvertible branch through a fresh
   local true label instead.  */
7457 if_true_label = gen_label_rtx ();
7458 redirect_jump (branch, if_true_label);
7459 emit_jump (if_false_label);
7460 emit_label (if_true_label);
7465 /* Generate code to calculate EXP using a store-flag instruction
7466 and set (CC0) according to the result.
7467 (including code to compute the values to be compared)
7468 SIGNED_CODE should be the rtx operation for this comparison for
7469 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7471 We force a stack adjustment unless there are currently
7472 things pushed on the stack that aren't yet used. */
7475 compare (exp, signed_code, unsigned_code)
7477 enum rtx_code signed_code, unsigned_code;
7480 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7482 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7483 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7484 register enum machine_mode mode = TYPE_MODE (type);
7485 int unsignedp = TREE_UNSIGNED (type);
/* Signedness of the operand type selects which rtx code to use.  */
7486 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
/* NOTE(review): the conditional controlling the expr_size argument is
   elided here; presumably a size rtx is passed only for BLKmode
   operands -- confirm against compare_from_rtx's contract below.  */
7488 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7490 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7491 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7494 /* Like compare but expects the values to compare as two rtx's.
7495 The decision as to signed or unsigned comparison must be made by the caller.
7497 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7500 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7501 size of MODE should be used. */
7504 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7505 register rtx op0, op1;
7508 enum machine_mode mode;
7514 /* If one operand is constant, make it the second one. Only do this
7515 if the other operand is not constant as well. */
7517 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7518 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
/* Swapping the operands requires swapping the condition code too.  */
7523 code = swap_condition (code);
/* Force operands out of memory.  NOTE(review): the guard controlling
   this (likely a flag such as force_mem or cse considerations) is
   elided from this excerpt.  */
7528 op0 = force_not_mem (op0);
7529 op1 = force_not_mem (op1);
7532 do_pending_stack_adjust ();
/* Two integer constants can be folded now; return the known result
   (const0_rtx / const_true_rtx) instead of emitting a compare.  */
7534 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7535 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7539 /* There's no need to do this now that combine.c can eliminate lots of
7540 sign extensions. This can be less efficient in certain cases on other
7543 /* If this is a signed equality comparison, we can do it as an
7544 unsigned comparison since zero-extension is cheaper than sign
7545 extension and comparisons with zero are done as unsigned. This is
7546 the case even on machines that can do fast sign extension, since
7547 zero-extension is easier to combine with other operations than
7548 sign-extension is. If we are comparing against a constant, we must
7549 convert it to what it would look like unsigned. */
7550 if ((code == EQ || code == NE) && ! unsignedp
7551 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7553 if (GET_CODE (op1) == CONST_INT
7554 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7555 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)))
7560 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
/* Return a condition rtx testing cc0 against zero; callers feed this
   straight into a conditional-branch pattern.  */
7562 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7565 /* Generate code to calculate EXP using a store-flag instruction
7566 and return an rtx for the result. EXP is either a comparison
7567 or a TRUTH_NOT_EXPR whose operand is a comparison.
7569 If TARGET is nonzero, store the result there if convenient.
7571 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7574 Return zero if there is no suitable set-flag instruction
7575 available on this machine.
7577 Once expand_expr has been called on the arguments of the comparison,
7578 we are committed to doing the store flag, since it is not safe to
7579 re-evaluate the expression. We emit the store-flag insn by calling
7580 emit_store_flag, but only expand the arguments if we have a reason
7581 to believe that emit_store_flag will be successful. If we think that
7582 it will, but it isn't, we have to simulate the store-flag with a
7583 set/jump/set sequence. */
7586 do_store_flag (exp, target, mode, only_cheap)
7589 enum machine_mode mode;
7593 tree arg0, arg1, type;
7595 enum machine_mode operand_mode;
7599 enum insn_code icode;
7600 rtx subtarget = target;
7601 rtx result, label, pattern, jump_pat;
7603 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7604 result at the end. We can't simply invert the test since it would
7605 have already been inverted if it were valid. This case occurs for
7606 some floating-point comparisons. */
7608 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7609 invert = 1, exp = TREE_OPERAND (exp, 0);
7611 arg0 = TREE_OPERAND (exp, 0);
7612 arg1 = TREE_OPERAND (exp, 1);
7613 type = TREE_TYPE (arg0);
7614 operand_mode = TYPE_MODE (type);
7615 unsignedp = TREE_UNSIGNED (type);
7617 /* We won't bother with BLKmode store-flag operations because it would mean
7618 passing a lot of information to emit_store_flag. */
7619 if (operand_mode == BLKmode)
7625 /* Get the rtx comparison code to use. We know that EXP is a comparison
7626 operation of some type. Some comparisons against 1 and -1 can be
7627 converted to comparisons with zero. Do so here so that the tests
7628 below will be aware that we have a comparison with zero. These
7629 tests will not catch constants in the first operand, but constants
7630 are rarely passed as the first operand. */
/* E.g. `x < 1' becomes `x <= 0', `x > -1' becomes `x >= 0'; the case
   labels for each tree code are elided from this excerpt.  */
7632 switch (TREE_CODE (exp))
7641 if (integer_onep (arg1))
7642 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7644 code = unsignedp ? LTU : LT;
7647 if (integer_all_onesp (arg1))
7648 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7650 code = unsignedp ? LEU : LE;
7653 if (integer_all_onesp (arg1))
7654 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7656 code = unsignedp ? GTU : GT;
7659 if (integer_onep (arg1))
7660 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7662 code = unsignedp ? GEU : GE;
7668 /* Put a constant second. */
7669 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7671 tem = arg0; arg0 = arg1; arg1 = tem;
7672 code = swap_condition (code);
7675 /* If this is an equality or inequality test of a single bit, we can
7676 do this by shifting the bit being tested to the low-order bit and
7677 masking the result with the constant 1. If the condition was EQ,
7678 we xor it with 1. This does not require an scc insn and is faster
7679 than an scc insn even if we have it. */
7681 if ((code == NE || code == EQ)
7682 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7683 && integer_pow2p (TREE_OPERAND (arg0, 1))
7684 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7686 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7687 NULL_RTX, VOIDmode, 0)));
/* SUBTARGET is only reusable when it is a register of the right mode
   and is not referenced by the other operand's expansion.  */
7689 if (subtarget == 0 || GET_CODE (subtarget) != REG
7690 || GET_MODE (subtarget) != operand_mode
7691 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7694 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
/* Shift the tested bit into bit 0 ...  */
7697 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7698 size_int (bitnum), target, 1);
7700 if (GET_MODE (op0) != mode)
7701 op0 = convert_to_mode (mode, op0, 1);
/* ... and mask away the higher bits, unless the shift already left
   only one bit (the sign bit case).  */
7703 if (bitnum != TYPE_PRECISION (type) - 1)
7704 op0 = expand_and (op0, const1_rtx, target);
/* EQ wants the opposite of the bit value, so flip bit 0 with xor.  */
7706 if ((code == EQ && ! invert) || (code == NE && invert))
7707 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7713 /* Now see if we are likely to be able to do this. Return if not. */
7714 if (! can_compare_p (operand_mode))
7716 icode = setcc_gen_code[(int) code];
7717 if (icode == CODE_FOR_nothing
7718 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7720 /* We can only do this if it is one of the special cases that
7721 can be handled without an scc insn. */
7722 if ((code == LT && integer_zerop (arg1))
7723 || (! only_cheap && code == GE && integer_zerop (arg1)))
7725 else if (BRANCH_COST >= 0
7726 && ! only_cheap && (code == NE || code == EQ)
7727 && TREE_CODE (type) != REAL_TYPE
7728 && ((abs_optab->handlers[(int) operand_mode].insn_code
7729 != CODE_FOR_nothing)
7730 || (ffs_optab->handlers[(int) operand_mode].insn_code
7731 != CODE_FOR_nothing)))
/* Point of no return: expanding the operands commits us to emitting
   the store-flag (see the function comment above).  */
7737 preexpand_calls (exp);
7738 if (subtarget == 0 || GET_CODE (subtarget) != REG
7739 || GET_MODE (subtarget) != operand_mode
7740 || ! safe_from_p (subtarget, arg1))
7743 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7744 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7747 target = gen_reg_rtx (mode);
7749 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7750 because, if the emit_store_flag does anything it will succeed and
7751 OP0 and OP1 will not be used subsequently. */
7753 result = emit_store_flag (target, code,
7754 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7755 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7756 operand_mode, unsignedp, 1);
/* A TRUTH_NOT_EXPR wrapper is honored by xor-ing the 0/1 result.  */
7761 result = expand_binop (mode, xor_optab, result, const1_rtx,
7762 result, 0, OPTAB_LIB_WIDEN);
7766 /* If this failed, we have to do this with set/compare/jump/set code. */
7767 if (target == 0 || GET_CODE (target) != REG
7768 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7769 target = gen_reg_rtx (GET_MODE (target));
/* set/compare/jump/set: preload the "true" value, branch over the
   correction when the comparison holds, else store the "false" value.  */
7771 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7772 result = compare_from_rtx (op0, op1, code, unsignedp,
7773 operand_mode, NULL_RTX, 0);
7774 if (GET_CODE (result) == CONST_INT)
7775 return (((result == const0_rtx && ! invert)
7776 || (result != const0_rtx && invert))
7777 ? const0_rtx : const1_rtx);
7779 label = gen_label_rtx ();
7780 if (bcc_gen_fctn[(int) code] == 0)
7783 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7784 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7790 /* Generate a tablejump instruction (used for switch statements). */
7792 #ifdef HAVE_tablejump
7794 /* INDEX is the value being switched on, with the lowest value
7795 in the table already subtracted.
7796 MODE is its expected mode (needed if INDEX is constant).
7797 RANGE is the length of the jump table.
7798 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7800 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7801 index value is out of range. */
7804 do_tablejump (index, mode, range, table_label, default_label)
7805 rtx index, range, table_label, default_label;
7806 enum machine_mode mode;
7808 register rtx temp, vector;
7810 /* Do an unsigned comparison (in the proper mode) between the index
7811 expression and the value which represents the length of the range.
7812 Since we just finished subtracting the lower bound of the range
7813 from the index expression, this comparison allows us to simultaneously
7814 check that the original index expression value is both greater than
7815 or equal to the minimum value of the range and less than or equal to
7816 the maximum value of the range. */
7818 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
7819 emit_jump_insn (gen_bltu (default_label));
7821 /* If index is in range, it must fit in Pmode.
7822 Convert to Pmode so we can index with it. */
7824 index = convert_to_mode (Pmode, index, 1);
7826 /* If flag_force_addr were to affect this address
7827 it could interfere with the tricky assumptions made
7828 about addresses that contain label-refs,
7829 which may be valid only very near the tablejump itself. */
7830 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7831 GET_MODE_SIZE, because this indicates how large insns are. The other
7832 uses should all be Pmode, because they are addresses. This code
7833 could fail if addresses and insns are not the same size. */
/* Address of entry INDEX: table_label + index * entry_size.  */
7834 index = memory_address_noforce
7836 gen_rtx (PLUS, Pmode,
7837 gen_rtx (MULT, Pmode, index,
7838 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7839 gen_rtx (LABEL_REF, Pmode, table_label)));
/* Load the selected entry into a register; the table MEM is marked
   RTX_UNCHANGING_P since the dispatch table is read-only.  */
7840 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7841 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7842 RTX_UNCHANGING_P (vector) = 1;
7843 convert_move (temp, vector, 0);
7845 emit_jump_insn (gen_tablejump (temp, table_label));
7847 #ifndef CASE_VECTOR_PC_RELATIVE
7848 /* If we are generating PIC code or if the table is PC-relative, the
7849 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7855 #endif /* HAVE_tablejump */