1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
/* NOTE(review): this excerpt appears to have many lines elided
   (matching #else/#endif lines and some comment text are missing);
   the surviving code below is preserved byte-for-byte.  */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
/* CEIL(x,y): divide X by Y, rounding up.  */
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
40 #ifdef STACK_GROWS_DOWNWARD
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
/* Default the rtx code used to push on the stack when the target
   does not define STACK_PUSH_CODE itself.  */
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
50 #define STACK_PUSH_CODE PRE_INC
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
   returned (the end of this comment appears truncated in this excerpt). */
87 static rtx saveregs_value;
/* Forward declarations for static helpers defined later in this file
   (old-style, parameterless declarations).  */
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
112 /* MOVE_RATIO is the number of move instructions that is better than
    a block move (continuation of this comment is elided in this excerpt). */
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
119 /* A value of around 6 would minimize code size; infinity would minimize
    it differently -- NOTE(review): rest of comment elided in this excerpt. */
121 #define MOVE_RATIO 15
125 /* This array records the insn_code of insns to perform block moves. */
126 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
128 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
130 #ifndef SLOW_UNALIGNED_ACCESS
131 #define SLOW_UNALIGNED_ACCESS 0
134 /* This is run once per compilation to set up which modes can be used
135 directly in memory and to initialize the block move optab. */
/* NOTE(review): the function header and several body lines of this
   definition are elided from this excerpt; surviving lines preserved
   byte-for-byte.  */
141 enum machine_mode mode;
142 /* Try indexing by frame ptr and try by stack ptr.
143 It is known that on the Convex the stack ptr isn't a valid index.
144 With luck, one or the other is valid on any machine. */
145 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
146 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
/* A scratch (set 0 0) insn; its SET_SRC/SET_DEST are overwritten
   below and fed to recog to probe which load/store patterns the
   target accepts in each mode.  */
149 insn = emit_insn (gen_rtx (SET, 0, 0));
150 pat = PATTERN (insn);
152 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
153 mode = (enum machine_mode) ((int) mode + 1))
159 direct_load[(int) mode] = direct_store[(int) mode] = 0;
160 PUT_MODE (mem, mode);
161 PUT_MODE (mem1, mode);
163 /* See if there is some register that can be used in this mode and
164 directly loaded or stored from memory. */
166 if (mode != VOIDmode && mode != BLKmode)
167 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
168 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
171 if (! HARD_REGNO_MODE_OK (regno, mode))
174 reg = gen_rtx (REG, mode, regno);
/* reg <- mem (stack-pointer address): if recog accepts the
   pattern, this mode can be loaded directly.  */
177 SET_DEST (pat) = reg;
178 if (recog (pat, insn, &num_clobbers) >= 0)
179 direct_load[(int) mode] = 1;
/* reg <- mem1 (frame-pointer address).  */
181 SET_SRC (pat) = mem1;
182 SET_DEST (pat) = reg;
183 if (recog (pat, insn, &num_clobbers) >= 0)
184 direct_load[(int) mode] = 1;
/* mem <- reg: direct store probe, both address forms.  */
187 SET_DEST (pat) = mem;
188 if (recog (pat, insn, &num_clobbers) >= 0)
189 direct_store[(int) mode] = 1;
192 SET_DEST (pat) = mem1;
193 if (recog (pat, insn, &num_clobbers) >= 0)
194 direct_store[(int) mode] = 1;
/* Default every mode to "no block-move insn", then record the
   movstr patterns this target actually provides.  */
197 movstr_optab[(int) mode] = CODE_FOR_nothing;
204 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
208 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
212 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
216 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
220 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
224 /* This is run at the start of compiling a function. */
/* Reset the per-function expression-expansion state declared above.
   NOTE(review): the function header is elided from this excerpt.  */
231 pending_stack_adjust = 0;
232 inhibit_defer_pop = 0;
233 cleanups_this_call = 0;
238 /* Save all variables describing the current status into the structure *P.
239 This is used before starting a nested function. */
245 /* Instead of saving the postincrement queue, empty it. */
/* Copy each piece of global expansion state into *P ...  */
248 p->pending_stack_adjust = pending_stack_adjust;
249 p->inhibit_defer_pop = inhibit_defer_pop;
250 p->cleanups_this_call = cleanups_this_call;
251 p->saveregs_value = saveregs_value;
252 p->forced_labels = forced_labels;
/* ... then reinitialize the globals for the nested function.  */
254 pending_stack_adjust = 0;
255 inhibit_defer_pop = 0;
256 cleanups_this_call = 0;
261 /* Restore all variables describing the current status from the structure *P.
262 This is used after a nested function. */
265 restore_expr_status (p)
/* Inverse of save_expr_status: copy the saved state back into the
   file-scope globals.  */
268 pending_stack_adjust = p->pending_stack_adjust;
269 inhibit_defer_pop = p->inhibit_defer_pop;
270 cleanups_this_call = p->cleanups_this_call;
271 saveregs_value = p->saveregs_value;
272 forced_labels = p->forced_labels;
275 /* Manage the queue of increment instructions to be output
276 for POSTINCREMENT_EXPR expressions, etc. */
/* Head of the chain of QUEUED rtxs; drained by emit_queue below.  */
278 static rtx pending_chain;
280 /* Queue up to increment (or change) VAR later. BODY says how:
281 BODY should be the same thing you would pass to emit_insn
282 to increment right away. It will go to emit_insn later on.
284 The value is a QUEUED expression to be used in place of VAR
285 where you want to guarantee the pre-incrementation value of VAR. */
288 enqueue_insn (var, body)
/* Push a new QUEUED node onto the front of pending_chain; the two
   NULL_RTX slots (insn, copy) are filled in later.  */
291 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
292 var, NULL_RTX, NULL_RTX, body, pending_chain);
293 return pending_chain;
296 /* Use protect_from_queue to convert a QUEUED expression
297 into something that you can put immediately into an instruction.
298 If the queued incrementation has not happened yet,
299 protect_from_queue returns the variable itself.
300 If the incrementation has happened, protect_from_queue returns a temp
301 that contains a copy of the old value of the variable.
303 Any time an rtx which might possibly be a QUEUED is to be put
304 into an instruction, it must be passed through protect_from_queue first.
305 QUEUED expressions are not meaningful in instructions.
307 Do not pass a value through protect_from_queue and then hold
308 on to it for a while before putting it in an instruction!
309 If the queue is flushed in between, incorrect code will result. */
/* NOTE(review): several lines of this definition are elided from
   this excerpt; surviving lines preserved byte-for-byte.  */
312 protect_from_queue (x, modify)
316 register RTX_CODE code = GET_CODE (x);
318 #if 0 /* A QUEUED can hang around after the queue is forced out. */
319 /* Shortcut for most common case. */
320 if (pending_chain == 0)
326 /* A special hack for read access to (MEM (QUEUED ...))
327 to facilitate use of autoincrement.
328 Make a copy of the contents of the memory location
329 rather than a copy of the address, but not
330 if the value is of mode BLKmode. */
331 if (code == MEM && GET_MODE (x) != BLKmode
332 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
334 register rtx y = XEXP (x, 0);
335 XEXP (x, 0) = QUEUED_VAR (y);
/* Copy the memory contents into a fresh pseudo before the queued
   increment is emitted, so the pre-increment value is preserved.  */
338 register rtx temp = gen_reg_rtx (GET_MODE (x));
339 emit_insn_before (gen_move_insn (temp, x),
345 /* Otherwise, recursively protect the subexpressions of all
346 the kinds of rtx's that can contain a QUEUED. */
348 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
349 else if (code == PLUS || code == MULT)
351 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
352 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
/* From here down, X itself is a QUEUED rtx.  */
356 /* If the increment has not happened, use the variable itself. */
357 if (QUEUED_INSN (x) == 0)
358 return QUEUED_VAR (x);
359 /* If the increment has happened and a pre-increment copy exists,
   use that copy (remainder of comment elided in this excerpt). */
361 if (QUEUED_COPY (x) != 0)
362 return QUEUED_COPY (x);
363 /* The increment has happened but we haven't set up a pre-increment copy.
364 Set one up now, and use it. */
365 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
366 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
368 return QUEUED_COPY (x);
371 /* Return nonzero if X contains a QUEUED expression:
372 if it contains anything that will be altered by a queued increment.
373 We handle only combinations of MEM, PLUS, MINUS and MULT operators
374 since memory addresses generally contain only those. */
380 register enum rtx_code code = GET_CODE (x);
/* Unary case (MEM): look inside the address.  */
386 return queued_subexp_p (XEXP (x, 0));
/* Binary case (PLUS/MINUS/MULT): a QUEUED may hide in either operand.  */
390 return queued_subexp_p (XEXP (x, 0))
391 || queued_subexp_p (XEXP (x, 1));
396 /* Perform all the pending incrementations. */
/* Emit each queued BODY in order, recording the emitted insn in the
   QUEUED node so protect_from_queue can tell the increment happened.  */
402 while (p = pending_chain)
404 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
405 pending_chain = QUEUED_NEXT (p);
416 /* Copy data from FROM to TO, where the machine modes are not the same.
417 Both modes may be integer, or both may be floating.
418 UNSIGNEDP should be nonzero if FROM is an unsigned type.
419 This causes zero-extension instead of sign-extension. */
/* NOTE(review): many lines of this definition are elided from this
   excerpt; surviving lines preserved byte-for-byte except for the
   QFmode->HFmode fix noted below.  */
422 convert_move (to, from, unsignedp)
423 register rtx to, from;
426 enum machine_mode to_mode = GET_MODE (to);
427 enum machine_mode from_mode = GET_MODE (from);
428 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
429 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
433 /* rtx code for making an equivalent value. */
434 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
436 to = protect_from_queue (to, 1);
437 from = protect_from_queue (from, 0);
439 if (to_real != from_real)
442 /* If FROM is a SUBREG that indicates that we have already done at least
443 the required extension, strip it. We don't handle such SUBREGs as
446 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
447 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
448 >= GET_MODE_SIZE (to_mode))
449 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
450 from = gen_lowpart (to_mode, from), from_mode = to_mode;
452 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
455 if (to_mode == from_mode
456 || (from_mode == VOIDmode && CONSTANT_P (from)))
458 emit_move_insn (to, from);
/* Floating-point extensions: each case is guarded by the target's
   HAVE_ macro and emits the matching named pattern.  */
/* FIX(review): this QF->HF case previously tested HAVE_extendqfsf2
   and emitted CODE_FOR_extendqfsf2 (the QF->SF insn) even though it
   is guarded by #ifdef HAVE_extendqfhf2 and matches to_mode ==
   HFmode; that emitted an SFmode-result insn for an HFmode
   destination.  Use the qfhf2 pattern, consistent with every other
   extend/trunc case below.  */
464 #ifdef HAVE_extendqfhf2
465 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
467 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
471 #ifdef HAVE_extendqfsf2
472 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
474 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
478 #ifdef HAVE_extendqfdf2
479 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
481 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
485 #ifdef HAVE_extendqfxf2
486 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
488 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
492 #ifdef HAVE_extendqftf2
493 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
495 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
500 #ifdef HAVE_extendhfsf2
501 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
503 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
507 #ifdef HAVE_extendhfdf2
508 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
510 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
514 #ifdef HAVE_extendhfxf2
515 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
517 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
521 #ifdef HAVE_extendhftf2
522 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
524 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
529 #ifdef HAVE_extendsfdf2
530 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
532 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
536 #ifdef HAVE_extendsfxf2
537 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
539 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
543 #ifdef HAVE_extendsftf2
544 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
546 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
550 #ifdef HAVE_extenddfxf2
551 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
553 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
557 #ifdef HAVE_extenddftf2
558 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
560 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
/* Floating-point truncations, same scheme as the extensions.  */
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
572 #ifdef HAVE_truncsfqf2
573 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
575 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
579 #ifdef HAVE_truncdfqf2
580 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
586 #ifdef HAVE_truncxfqf2
587 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
593 #ifdef HAVE_trunctfqf2
594 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
600 #ifdef HAVE_truncsfhf2
601 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
603 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
607 #ifdef HAVE_truncdfhf2
608 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
610 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
614 #ifdef HAVE_truncxfhf2
615 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
617 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
621 #ifdef HAVE_trunctfhf2
622 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
624 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
628 #ifdef HAVE_truncdfsf2
629 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
631 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
635 #ifdef HAVE_truncxfsf2
636 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
638 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
642 #ifdef HAVE_trunctfsf2
643 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
645 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
649 #ifdef HAVE_truncxfdf2
650 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
652 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
656 #ifdef HAVE_trunctfdf2
657 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
659 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* No special insn for this float conversion: pick the library
   routine for the mode pair, then emit the call.  */
671 libcall = extendsfdf2_libfunc;
675 libcall = extendsfxf2_libfunc;
679 libcall = extendsftf2_libfunc;
688 libcall = truncdfsf2_libfunc;
692 libcall = extenddfxf2_libfunc;
696 libcall = extenddftf2_libfunc;
705 libcall = truncxfsf2_libfunc;
709 libcall = truncxfdf2_libfunc;
718 libcall = trunctfsf2_libfunc;
722 libcall = trunctfdf2_libfunc;
728 if (libcall == (rtx) 0)
729 /* This conversion is not implemented yet. */
732 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
733 emit_move_insn (to, hard_libcall_value (to_mode));
737 /* Now both modes are integers. */
739 /* Handle expanding beyond a word. */
740 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
741 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
748 enum machine_mode lowpart_mode;
749 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
751 /* Try converting directly if the insn is supported. */
752 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
755 /* If FROM is a SUBREG, put it into a register. Do this
756 so that we always generate the same set of insns for
757 better cse'ing; if an intermediate assignment occurred,
758 we won't be doing the operation directly on the SUBREG. */
759 if (optimize > 0 && GET_CODE (from) == SUBREG)
760 from = force_reg (from_mode, from);
761 emit_unop_insn (code, to, from, equiv_code);
764 /* Next, try converting via full word. */
765 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
766 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
767 != CODE_FOR_nothing))
769 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
770 emit_unop_insn (code, to,
771 gen_lowpart (word_mode, to), equiv_code);
775 /* No special multiword conversion insn; do it by hand. */
778 /* Get a copy of FROM widened to a word, if necessary. */
779 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
780 lowpart_mode = word_mode;
782 lowpart_mode = from_mode;
784 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
786 lowpart = gen_lowpart (lowpart_mode, to);
787 emit_move_insn (lowpart, lowfrom);
789 /* Compute the value to put in each remaining word. */
791 fill_value = const0_rtx;
/* For signed sources, a target "set if less than" insn producing
   STORE_FLAG_VALUE == -1 yields the sign-fill word in one insn.  */
796 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
797 && STORE_FLAG_VALUE == -1)
799 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
801 fill_value = gen_reg_rtx (word_mode);
802 emit_insn (gen_slt (fill_value));
/* Otherwise replicate the sign bit by an arithmetic right shift
   of the low part by (width - 1).  */
808 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
809 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
811 fill_value = convert_to_mode (word_mode, fill_value, 1);
815 /* Fill the remaining words. */
816 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
818 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
819 rtx subword = operand_subword (to, index, 1, to_mode);
824 if (fill_value != subword)
825 emit_move_insn (subword, fill_value);
828 insns = get_insns ();
831 emit_no_conflict_block (insns, to, from, NULL_RTX,
832 gen_rtx (equiv_code, to_mode, from));
/* Truncating a multiword source down to one word or less: just
   convert from its low word.  */
836 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
838 convert_move (to, gen_lowpart (word_mode, from), 0);
842 /* Handle pointer conversion */ /* SPEE 900220 */
843 if (to_mode == PSImode)
845 if (from_mode != SImode)
846 from = convert_to_mode (SImode, from, unsignedp);
848 #ifdef HAVE_truncsipsi
851 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
854 #endif /* HAVE_truncsipsi */
858 if (from_mode == PSImode)
860 if (to_mode != SImode)
862 from = convert_to_mode (SImode, from, unsignedp);
867 #ifdef HAVE_extendpsisi
868 if (HAVE_extendpsisi)
870 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
873 #endif /* HAVE_extendpsisi */
878 /* Now follow all the conversions between integers
879 no more than a word long. */
881 /* For truncation, usually we can just refer to FROM in a narrower mode. */
882 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
883 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
884 GET_MODE_BITSIZE (from_mode))
885 && ((GET_CODE (from) == MEM
886 && ! MEM_VOLATILE_P (from)
887 && direct_load[(int) to_mode]
888 && ! mode_dependent_address_p (XEXP (from, 0)))
889 || GET_CODE (from) == REG
890 || GET_CODE (from) == SUBREG))
892 emit_move_insn (to, gen_lowpart (to_mode, from));
896 /* For truncation, usually we can just refer to FROM in a narrower mode. */
897 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
899 /* Convert directly if that works. */
900 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
903 /* If FROM is a SUBREG, put it into a register. Do this
904 so that we always generate the same set of insns for
905 better cse'ing; if an intermediate assignment occurred,
906 we won't be doing the operation directly on the SUBREG. */
907 if (optimize > 0 && GET_CODE (from) == SUBREG)
908 from = force_reg (from_mode, from);
909 emit_unop_insn (code, to, from, equiv_code);
914 enum machine_mode intermediate;
916 /* Search for a mode to convert via. */
917 for (intermediate = from_mode; intermediate != VOIDmode;
918 intermediate = GET_MODE_WIDER_MODE (intermediate))
919 if ((can_extend_p (to_mode, intermediate, unsignedp)
921 && (can_extend_p (intermediate, from_mode, unsignedp)
922 != CODE_FOR_nothing))
924 convert_move (to, convert_to_mode (intermediate, from,
925 unsignedp), unsignedp);
929 /* No suitable intermediate mode. */
934 /* Support special truncate insns for certain modes. */
936 if (from_mode == DImode && to_mode == SImode)
938 #ifdef HAVE_truncdisi2
941 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
945 convert_move (to, force_reg (from_mode, from), unsignedp);
949 if (from_mode == DImode && to_mode == HImode)
951 #ifdef HAVE_truncdihi2
954 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
958 convert_move (to, force_reg (from_mode, from), unsignedp);
962 if (from_mode == DImode && to_mode == QImode)
964 #ifdef HAVE_truncdiqi2
967 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
971 convert_move (to, force_reg (from_mode, from), unsignedp);
975 if (from_mode == SImode && to_mode == HImode)
977 #ifdef HAVE_truncsihi2
980 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
984 convert_move (to, force_reg (from_mode, from), unsignedp);
988 if (from_mode == SImode && to_mode == QImode)
990 #ifdef HAVE_truncsiqi2
993 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
997 convert_move (to, force_reg (from_mode, from), unsignedp);
1001 if (from_mode == HImode && to_mode == QImode)
1003 #ifdef HAVE_trunchiqi2
1004 if (HAVE_trunchiqi2)
1006 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1010 convert_move (to, force_reg (from_mode, from), unsignedp);
1014 /* Handle truncation of volatile memrefs, and so on;
1015 the things that couldn't be truncated directly,
1016 and for which there was no special instruction. */
1017 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1019 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1020 emit_move_insn (to, temp);
1024 /* Mode combination is not recognized. */
1028 /* Return an rtx for a value that would result
1029 from converting X to mode MODE.
1030 Both X and MODE may be floating, or both integer.
1031 UNSIGNEDP is nonzero if X is an unsigned value.
1032 This can be done by referring to a part of X in place
1033 or by copying to a new temporary with conversion.
1035 This function *must not* call protect_from_queue
1036 except when putting X into an insn (in which case convert_move does it). */
1039 convert_to_mode (mode, x, unsignedp)
1040 enum machine_mode mode;
1046 /* If FROM is a SUBREG that indicates that we have already done at least
1047 the required extension, strip it. */
1049 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1050 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1051 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1052 x = gen_lowpart (mode, x);
/* Already the right mode: return X unchanged (return elided in
   this excerpt).  */
1054 if (mode == GET_MODE (x))
1057 /* There is one case that we must handle specially: If we are converting
1058 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1059 we are to interpret the constant as unsigned, gen_lowpart will do
1060 the wrong if the constant appears negative. What we want to do is
1061 make the high-order word of the constant zero, not all ones. */
1063 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1064 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1065 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1066 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1068 /* We can do this with a gen_lowpart if both desired and current modes
1069 are integer, and this is either a constant integer, a register, or a
1070 non-volatile MEM. Except for the constant case, we must be narrowing
   the operand (rest of comment elided in this excerpt). */
1073 if (GET_CODE (x) == CONST_INT
1074 || (GET_MODE_CLASS (mode) == MODE_INT
1075 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1076 && (GET_CODE (x) == CONST_DOUBLE
1077 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1078 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1079 && direct_load[(int) mode]
1080 || GET_CODE (x) == REG)))))
1081 return gen_lowpart (mode, x);
/* General case: copy into a fresh pseudo with conversion.  */
1083 temp = gen_reg_rtx (mode);
1084 convert_move (temp, x, unsignedp);
1088 /* Generate several move instructions to copy LEN bytes
1089 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1090 The caller must pass FROM and TO
1091 through protect_from_queue before calling.
1092 ALIGN (in bytes) is maximum alignment we can assume. */
1094 struct move_by_pieces
/* Nonzero when we emit explicit add insns to advance to_addr /
   from_addr ourselves (sign gives the direction) rather than
   relying on auto-increment addressing.  */
1099 int explicit_inc_to;
1103 int explicit_inc_from;
1109 static void move_by_pieces_1 ();
1110 static int move_by_pieces_ninsns ();
/* Copy LEN bytes from FROM to TO by emitting individual move insns,
   widest usable integer mode first.  NOTE(review): several lines of
   this definition are elided from this excerpt.  */
1113 move_by_pieces (to, from, len, align)
1117 struct move_by_pieces data;
1118 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1119 int max_size = MOVE_MAX + 1;
1122 data.to_addr = to_addr;
1123 data.from_addr = from_addr;
/* Record whether each address already uses an auto-inc/dec form.  */
1127 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1128 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC)
1130 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1131 || GET_CODE (from_addr) == POST_INC
1132 || GET_CODE (from_addr) == POST_DEC);
1134 data.explicit_inc_from = 0;
1135 data.explicit_inc_to = 0;
/* A decrementing destination address forces a high-to-low copy.  */
1137 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1138 if (data.reverse) data.offset = len;
1141 /* If copying requires more than two move insns,
1142 copy addresses to registers (to make displacements shorter)
1143 and use post-increment if available. */
1144 if (!(data.autinc_from && data.autinc_to)
1145 && move_by_pieces_ninsns (len, align) > 2)
1147 #ifdef HAVE_PRE_DECREMENT
1148 if (data.reverse && ! data.autinc_from)
1150 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1151 data.autinc_from = 1;
1152 data.explicit_inc_from = -1;
1155 #ifdef HAVE_POST_INCREMENT
1156 if (! data.autinc_from)
1158 data.from_addr = copy_addr_to_reg (from_addr);
1159 data.autinc_from = 1;
1160 data.explicit_inc_from = 1;
1163 if (!data.autinc_from && CONSTANT_P (from_addr))
1164 data.from_addr = copy_addr_to_reg (from_addr);
1165 #ifdef HAVE_PRE_DECREMENT
1166 if (data.reverse && ! data.autinc_to)
1168 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1170 data.explicit_inc_to = -1;
1173 #ifdef HAVE_POST_INCREMENT
1174 if (! data.reverse && ! data.autinc_to)
1176 data.to_addr = copy_addr_to_reg (to_addr);
1178 data.explicit_inc_to = 1;
1181 if (!data.autinc_to && CONSTANT_P (to_addr))
1182 data.to_addr = copy_addr_to_reg (to_addr);
/* On strict-alignment (or slow-unaligned) targets, never use a mode
   wider than the known alignment.  */
1185 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1186 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1189 /* First move what we can in the largest integer mode, then go to
1190 successively smaller modes. */
1192 while (max_size > 1)
1194 enum machine_mode mode = VOIDmode, tmode;
1195 enum insn_code icode;
/* Find the widest integer mode strictly narrower than max_size.  */
1197 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1198 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1199 if (GET_MODE_SIZE (tmode) < max_size)
1202 if (mode == VOIDmode)
1205 icode = mov_optab->handlers[(int) mode].insn_code;
1206 if (icode != CODE_FOR_nothing
1207 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1208 GET_MODE_SIZE (mode)))
1209 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1211 max_size = GET_MODE_SIZE (mode);
1214 /* The code above should have handled everything. */
1219 /* Return number of insns required to move L bytes by pieces.
1220 ALIGN (in bytes) is maximum alignment we can assume. */
/* Mirrors the mode-selection loop of move_by_pieces, but only
   counts insns instead of emitting them.  */
1223 move_by_pieces_ninsns (l, align)
1227 register int n_insns = 0;
1228 int max_size = MOVE_MAX + 1;
1230 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1231 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1234 while (max_size > 1)
1236 enum machine_mode mode = VOIDmode, tmode;
1237 enum insn_code icode;
1239 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1240 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1241 if (GET_MODE_SIZE (tmode) < max_size)
1244 if (mode == VOIDmode)
1247 icode = mov_optab->handlers[(int) mode].insn_code;
1248 if (icode != CODE_FOR_nothing
1249 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1250 GET_MODE_SIZE (mode)))
/* As many full moves of this mode as fit; the remainder is
   handled by narrower modes on later iterations.  */
1251 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1253 max_size = GET_MODE_SIZE (mode);
1259 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1260 with move instructions for mode MODE. GENFUN is the gen_... function
1261 to make a move insn for that mode. DATA has all the other info. */
1264 move_by_pieces_1 (genfun, mode, data)
1266 enum machine_mode mode;
1267 struct move_by_pieces *data;
1269 register int size = GET_MODE_SIZE (mode);
1270 register rtx to1, from1;
1272 while (data->len >= size)
1274 if (data->reverse) data->offset -= size;
/* Build the MEM operands: raw auto-inc address, or the base MEM
   re-addressed at the current offset.  */
1276 to1 = (data->autinc_to
1277 ? gen_rtx (MEM, mode, data->to_addr)
1278 : change_address (data->to, mode,
1279 plus_constant (data->to_addr, data->offset)));
1282 ? gen_rtx (MEM, mode, data->from_addr)
1283 : change_address (data->from, mode,
1284 plus_constant (data->from_addr, data->offset)));
/* Explicit pre-decrement of the address registers (reverse copy).  */
1286 #ifdef HAVE_PRE_DECREMENT
1287 if (data->explicit_inc_to < 0)
1288 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1289 if (data->explicit_inc_from < 0)
1290 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1293 emit_insn ((*genfun) (to1, from1));
/* Explicit post-increment of the address registers (forward copy).  */
1294 #ifdef HAVE_POST_INCREMENT
1295 if (data->explicit_inc_to > 0)
1296 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1297 if (data->explicit_inc_from > 0)
1298 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1301 if (! data->reverse) data->offset += size;
1307 /* Emit code to move a block Y to a block X.
1308 This may be done with string-move instructions,
1309 with multiple scalar move instructions, or with a library call.
1311 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1313 SIZE is an rtx that says how long they are.
1314 ALIGN is the maximum alignment we can assume they have,
1315 measured in bytes. */
/* NOTE(review): several lines of this definition are elided from
   this excerpt; surviving lines preserved byte-for-byte.  */
1318 emit_block_move (x, y, size, align)
/* Sanity checks on the operands (abort calls elided here).  */
1323 if (GET_MODE (x) != BLKmode)
1326 if (GET_MODE (y) != BLKmode)
1329 x = protect_from_queue (x, 1);
1330 y = protect_from_queue (y, 0);
1331 size = protect_from_queue (size, 0);
1333 if (GET_CODE (x) != MEM)
1335 if (GET_CODE (y) != MEM)
/* Small constant-size copies: cheaper as individual move insns.  */
1340 if (GET_CODE (size) == CONST_INT
1341 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1342 move_by_pieces (x, y, INTVAL (size), align);
1345 /* Try the most limited insn first, because there's no point
1346 including more than one in the machine description unless
1347 the more limited one has some advantage. */
1349 rtx opalign = GEN_INT (align);
1350 enum machine_mode mode;
1352 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1353 mode = GET_MODE_WIDER_MODE (mode))
1355 enum insn_code code = movstr_optab[(int) mode];
/* Check the movstr pattern exists, SIZE fits in MODE, and every
   operand predicate (dest, src, align) accepts its operand.  */
1357 if (code != CODE_FOR_nothing
1358 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1359 here because if SIZE is less than the mode mask, as it is
1360 returned by the macro, it will definitely be less than the
1361 actual mode mask. */
1362 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1363 && (insn_operand_predicate[(int) code][0] == 0
1364 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1365 && (insn_operand_predicate[(int) code][1] == 0
1366 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1367 && (insn_operand_predicate[(int) code][3] == 0
1368 || (*insn_operand_predicate[(int) code][3]) (opalign,
1372 rtx last = get_last_insn ();
1375 op2 = convert_to_mode (mode, size, 1);
1376 if (insn_operand_predicate[(int) code][2] != 0
1377 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1378 op2 = copy_to_mode_reg (mode, op2);
1380 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* Pattern generation failed: discard any insns it emitted and
   try a wider mode.  */
1387 delete_insns_since (last);
/* Last resort: call the C library's block-copy routine.  */
1391 #ifdef TARGET_MEM_FUNCTIONS
1392 emit_library_call (memcpy_libfunc, 0,
1393 VOIDmode, 3, XEXP (x, 0), Pmode,
1395 convert_to_mode (Pmode, size, 1), Pmode);
1397 emit_library_call (bcopy_libfunc, 0,
1398 VOIDmode, 3, XEXP (y, 0), Pmode,
1400 convert_to_mode (Pmode, size, 1), Pmode);
1405 /* Copy all or part of a value X into registers starting at REGNO.
1406 The number of registers to be filled is NREGS. */
/* NOTE(review): sampled listing -- some declarations and statements between
   the numbered lines are elided.  Code kept byte-identical.  */
1409 move_block_to_reg (regno, x, nregs, mode)
1413 enum machine_mode mode;
/* A constant the target cannot accept directly is spilled to the
   constant pool and accessed through a validized MEM.  */
1418 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1419 x = validize_mem (force_const_mem (mode, x));
1421 /* See if the machine can do this with a load multiple insn. */
1422 #ifdef HAVE_load_multiple
1423 last = get_last_insn ();
1424 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
/* If the load-multiple expansion failed, discard its insns.  */
1432 delete_insns_since (last);
/* Fallback: one word-sized move per register, forcing each subword
   of X into an operable form.  */
1435 for (i = 0; i < nregs; i++)
1436 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1437 operand_subword_force (x, i, mode));
1440 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1441 The number of registers to be filled is NREGS. */
/* NOTE(review): sampled listing -- declarations and some statements are
   elided between the numbered lines.  Code kept byte-identical.  */
1444 move_block_from_reg (regno, x, nregs)
1452 /* See if the machine can do this with a store multiple insn. */
1453 #ifdef HAVE_store_multiple
1454 last = get_last_insn ();
1455 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
/* Store-multiple expansion failed; remove its insns and fall through.  */
1463 delete_insns_since (last);
/* Fallback: store each register into the corresponding subword of X.
   BLKmode is passed since X has no scalar mode of its own.  */
1466 for (i = 0; i < nregs; i++)
1468 rtx tem = operand_subword (x, i, 1, BLKmode);
1473 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1477 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
/* Emits one USE rtx per hard register so flow analysis keeps them live
   (typically for argument registers before a call).  */
1480 use_regs (regno, nregs)
1486 for (i = 0; i < nregs; i++)
1487 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1490 /* Mark the instructions since PREV as a libcall block.
1491 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
/* NOTE(review): the function header line is elided in this listing;
   from the call site (`group_insns (prev_insn)` in emit_move_insn_1)
   this is presumably `group_insns (prev)` -- verify in full source.  */
1500 /* Find the instructions to mark */
/* If PREV is nonzero the block starts just after it; otherwise (elided
   branch) it starts at the first insn of the current sequence.  */
1502 insn_first = NEXT_INSN (prev);
1504 insn_first = get_insns ();
1506 insn_last = get_last_insn ();
/* Cross-link the two ends of the block with REG_RETVAL / REG_LIBCALL
   notes so later passes treat the span as a single libcall.  */
1508 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1509 REG_NOTES (insn_last));
1511 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1512 REG_NOTES (insn_first));
1515 /* Write zeros through the storage of OBJECT.
1516 If OBJECT has BLKmode, SIZE is its length in bytes. */
1519 clear_storage (object, size)
/* BLKmode objects are cleared with a library call; scalar objects
   (else branch below) with a single move of const0_rtx.  */
1523 if (GET_MODE (object) == BLKmode)
/* memset takes (dest, value, len); bzero takes (dest, len).  */
1525 #ifdef TARGET_MEM_FUNCTIONS
1526 emit_library_call (memset_libfunc, 0,
1528 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1529 GEN_INT (size), Pmode);
1531 emit_library_call (bzero_libfunc, 0,
1533 XEXP (object, 0), Pmode,
1534 GEN_INT (size), Pmode);
1538 emit_move_insn (object, const0_rtx);
1541 /* Generate code to copy Y into X.
1542 Both Y and X must have the same mode, except that
1543 Y can be a constant with VOIDmode.
1544 This mode cannot be BLKmode; use emit_block_move for that.
1546 Return the last instruction emitted. */
/* NOTE(review): sampled listing -- some lines between the numbered ones
   are elided (e.g. the abort bodies).  Code kept byte-identical.  */
1549 emit_move_insn (x, y)
1552 enum machine_mode mode = GET_MODE (x);
1553 enum machine_mode submode;
1554 enum mode_class class = GET_MODE_CLASS (mode);
/* Flush queued side effects: X written, Y read.  */
1557 x = protect_from_queue (x, 1);
1558 y = protect_from_queue (y, 0);
/* Reject BLKmode and mode mismatches (Y may be VOIDmode constant).  */
1560 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
/* Constants the target cannot take directly go to the constant pool.  */
1563 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1564 y = force_const_mem (mode, y);
1566 /* If X or Y are memory references, verify that their addresses are valid
/* An invalid destination address is allowed through only when X is a
   stack push operand; otherwise the address gets re-legitimized.  */
1568 if (GET_CODE (x) == MEM
1569 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1570 && ! push_operand (x, GET_MODE (x)))
1572 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1573 x = change_address (x, VOIDmode, XEXP (x, 0));
1575 if (GET_CODE (y) == MEM
1576 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1578 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1579 y = change_address (y, VOIDmode, XEXP (y, 0));
1581 if (mode == BLKmode)
/* All validation done; the low-level helper picks the actual insns.  */
1584 return emit_move_insn_1 (x, y);
1587 /* Low level part of emit_move_insn.
1588 Called just like emit_move_insn, but assumes X and Y
1589 are basically valid. */
/* NOTE(review): sampled listing -- braces, loop headers and several
   statements between the numbered lines are elided.  Code byte-identical.  */
1592 emit_move_insn_1 (x, y)
1595 enum machine_mode mode = GET_MODE (x);
1596 enum machine_mode submode;
1597 enum mode_class class = GET_MODE_CLASS (mode);
/* For complex modes, compute the scalar mode of one component so the
   move can be split into real/imaginary part moves below.  */
1600 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1601 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1602 (class == MODE_COMPLEX_INT
1603 ? MODE_INT : MODE_FLOAT),
/* Case 1: the machine has a mov pattern for this mode -- use it.  */
1606 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1608 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1610 /* Expand complex moves by moving real part and imag part, if possible. */
1611 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1612 && submode != BLKmode
1613 && (mov_optab->handlers[(int) submode].insn_code
1614 != CODE_FOR_nothing))
1616 /* Don't split destination if it is a stack push. */
1617 int stack = push_operand (x, GET_MODE (x));
1618 rtx prev = get_last_insn ();
1620 /* Tell flow that the whole of the destination is being set. */
1621 if (GET_CODE (x) == REG)
1622 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1624 /* If this is a stack, push the highpart first, so it
1625 will be in the argument order.
1627 In that case, change_address is used only to convert
1628 the mode, not to change the address. */
1629 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1630 ((stack ? change_address (x, submode, (rtx) 0)
1631 : gen_highpart (submode, x)),
1632 gen_highpart (submode, y)));
1633 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1634 ((stack ? change_address (x, submode, (rtx) 0)
1635 : gen_lowpart (submode, x)),
1636 gen_lowpart (submode, y)));
1640 return get_last_insn ();
1643 /* This will handle any multi-word mode that lacks a move_insn pattern.
1644 However, you will get better code if you define such patterns,
1645 even if they must turn into multiple assembler instructions. */
1646 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1649 rtx prev_insn = get_last_insn ();
/* Loop over the words of the value (ceiling division by word size;
   the `for (i = 0; ...' header line is elided in this listing).  */
1652 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1655 rtx xpart = operand_subword (x, i, 1, mode);
1656 rtx ypart = operand_subword (y, i, 1, mode);
1658 /* If we can't get a part of Y, put Y into memory if it is a
1659 constant. Otherwise, force it into a register. If we still
1660 can't get a part of Y, abort. */
1661 if (ypart == 0 && CONSTANT_P (y))
1663 y = force_const_mem (mode, y);
1664 ypart = operand_subword (y, i, 1, mode);
1666 else if (ypart == 0)
1667 ypart = operand_subword_force (y, i, mode);
1669 if (xpart == 0 || ypart == 0)
1672 last_insn = emit_move_insn (xpart, ypart);
1674 /* Mark these insns as a libcall block. */
1675 group_insns (prev_insn);
1683 /* Pushing data onto the stack. */
1685 /* Push a block of length SIZE (perhaps variable)
1686 and return an rtx to address the beginning of the block.
1687 Note that it is not possible for the value returned to be a QUEUED.
1688 The value may be virtual_outgoing_args_rtx.
1690 EXTRA is the number of bytes of padding to push in addition to SIZE.
1691 BELOW nonzero means this padding comes at low addresses;
1692 otherwise, the padding comes at high addresses. */
/* NOTE(review): sampled listing -- declarations and some statements are
   elided between the numbered lines.  Code kept byte-identical.  */
1695 push_block (size, extra, below)
/* First adjust the stack pointer by SIZE + EXTRA, choosing the cheapest
   form the operands allow.  */
1700 if (CONSTANT_P (size))
1701 anti_adjust_stack (plus_constant (size, extra));
1702 else if (GET_CODE (size) == REG && extra == 0)
1703 anti_adjust_stack (size);
/* Variable size with nonzero EXTRA: compute size+extra in a register.  */
1706 rtx temp = copy_to_mode_reg (Pmode, size);
1708 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1709 temp, 0, OPTAB_LIB_WIDEN);
1710 anti_adjust_stack (temp);
/* Then compute the address of the block's beginning: when the stack
   grows downward it is the (possibly padded) outgoing-args pointer;
   otherwise it is that pointer minus the pushed amount.  */
1713 #ifdef STACK_GROWS_DOWNWARD
1714 temp = virtual_outgoing_args_rtx;
1715 if (extra != 0 && below)
1716 temp = plus_constant (temp, extra);
1718 if (GET_CODE (size) == CONST_INT)
1719 temp = plus_constant (virtual_outgoing_args_rtx,
1720 - INTVAL (size) - (below ? 0 : extra));
1721 else if (extra != 0 && !below)
1722 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1723 negate_rtx (Pmode, plus_constant (size, extra)));
1725 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1726 negate_rtx (Pmode, size));
1729 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
/* NOTE(review): the next line is the surviving body of a separate tiny
   function (header elided) -- presumably gen_push_operand, which yields
   the auto-modify stack-push address (PRE_DEC/PRE_INC per
   STACK_PUSH_CODE); verify against the full source.  */
1735 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1738 /* Generate code to push X onto the stack, assuming it has mode MODE and
1740 MODE is redundant except when X is a CONST_INT (since they don't
1742 SIZE is an rtx for the size of data to be copied (in bytes),
1743 needed only if X is BLKmode.
1745 ALIGN (in bytes) is maximum alignment we can assume.
1747 If PARTIAL and REG are both nonzero, then copy that many of the first
1748 words of X into registers starting with REG, and push the rest of X.
1749 The amount of space pushed is decreased by PARTIAL words,
1750 rounded *down* to a multiple of PARM_BOUNDARY.
1751 REG must be a hard register in this case.
1752 If REG is zero but PARTIAL is not, take any all others actions for an
1753 argument partially in registers, but do not actually load any
1756 EXTRA is the amount in bytes of extra space to leave next to this arg.
1757 This is ignored if an argument block has already been allocated.
1759 On a machine that lacks real push insns, ARGS_ADDR is the address of
1760 the bottom of the argument block for this call. We use indexing off there
1761 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1762 argument block has not been preallocated.
1764 ARGS_SO_FAR is the size of args previously pushed for this call. */
/* NOTE(review): sampled listing -- parameter declarations, braces and a
   number of statements between the numbered lines are elided.  Code is
   kept byte-identical.  */
1767 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1768 args_addr, args_so_far)
1770 enum machine_mode mode;
1781 enum direction stack_direction
1782 #ifdef STACK_GROWS_DOWNWARD
1788 /* Decide where to pad the argument: `downward' for below,
1789 `upward' for above, or `none' for don't pad it.
1790 Default is below for small data on big-endian machines; else above. */
1791 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1793 /* Invert direction if stack is post-update. */
1794 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1795 if (where_pad != none)
1796 where_pad = (where_pad == downward ? upward : downward);
1798 xinner = x = protect_from_queue (x, 0);
1800 if (mode == BLKmode)
1802 /* Copy a block into the stack, entirely or partially. */
/* USED = bytes of X already accounted for by the register part;
   OFFSET = its misalignment within a parameter boundary unit.  */
1805 int used = partial * UNITS_PER_WORD;
1806 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1814 /* USED is now the # of bytes we need not copy to the stack
1815 because registers will take care of them. */
1818 xinner = change_address (xinner, BLKmode,
1819 plus_constant (XEXP (xinner, 0), used));
1821 /* If the partial register-part of the arg counts in its stack size,
1822 skip the part of stack space corresponding to the registers.
1823 Otherwise, start copying to the beginning of the stack space,
1824 by setting SKIP to 0. */
1825 #ifndef REG_PARM_STACK_SPACE
1831 #ifdef PUSH_ROUNDING
1832 /* Do it with several push insns if that doesn't take lots of insns
1833 and if there is no difficulty with push insns that skip bytes
1834 on the stack for alignment purposes. */
1836 && GET_CODE (size) == CONST_INT
1838 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1840 /* Here we avoid the case of a structure whose weak alignment
1841 forces many pushes of a small amount of data,
1842 and such small pushes do rounding that causes trouble. */
1843 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1844 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1845 || PUSH_ROUNDING (align) == align)
1846 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1848 /* Push padding now if padding above and stack grows down,
1849 or if padding below and stack grows up.
1850 But if space already allocated, this has already been done. */
1851 if (extra && args_addr == 0
1852 && where_pad != none && where_pad != stack_direction)
1853 anti_adjust_stack (GEN_INT (extra));
/* Move the non-register part of X with individual push insns.  */
1855 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1856 INTVAL (size) - used, align);
1859 #endif /* PUSH_ROUNDING */
1861 /* Otherwise make space on the stack and copy the data
1862 to the address of that space. */
1864 /* Deduct words put into registers from the size we must copy. */
1867 if (GET_CODE (size) == CONST_INT)
1868 size = GEN_INT (INTVAL (size) - used);
1870 size = expand_binop (GET_MODE (size), sub_optab, size,
1871 GEN_INT (used), NULL_RTX, 0,
1875 /* Get the address of the stack space.
1876 In this case, we do not deal with EXTRA separately.
1877 A single stack adjust will do. */
1880 temp = push_block (size, extra, where_pad == downward);
/* With a preallocated argument block, index off ARGS_ADDR instead.  */
1883 else if (GET_CODE (args_so_far) == CONST_INT)
1884 temp = memory_address (BLKmode,
1885 plus_constant (args_addr,
1886 skip + INTVAL (args_so_far)));
1888 temp = memory_address (BLKmode,
1889 plus_constant (gen_rtx (PLUS, Pmode,
1890 args_addr, args_so_far),
1893 /* TEMP is the address of the block. Copy the data there. */
1894 if (GET_CODE (size) == CONST_INT
1895 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1898 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1899 INTVAL (size), align);
1902 /* Try the most limited insn first, because there's no point
1903 including more than one in the machine description unless
1904 the more limited one has some advantage. */
/* Each movstr variant is usable only when the constant size fits in
   the signed range of its counter mode (QI/HI/SI/DI).  */
1905 #ifdef HAVE_movstrqi
1907 && GET_CODE (size) == CONST_INT
1908 && ((unsigned) INTVAL (size)
1909 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1911 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1912 xinner, size, GEN_INT (align)));
1916 #ifdef HAVE_movstrhi
1918 && GET_CODE (size) == CONST_INT
1919 && ((unsigned) INTVAL (size)
1920 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1922 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1923 xinner, size, GEN_INT (align)));
1927 #ifdef HAVE_movstrsi
1930 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1931 xinner, size, GEN_INT (align)));
1935 #ifdef HAVE_movstrdi
1938 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1939 xinner, size, GEN_INT (align)));
1944 #ifndef ACCUMULATE_OUTGOING_ARGS
1945 /* If the source is referenced relative to the stack pointer,
1946 copy it to another register to stabilize it. We do not need
1947 to do this if we know that we won't be changing sp. */
1949 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1950 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1951 temp = copy_to_reg (temp);
1954 /* Make inhibit_defer_pop nonzero around the library call
1955 to force it to pop the bcopy-arguments right away. */
1957 #ifdef TARGET_MEM_FUNCTIONS
1958 emit_library_call (memcpy_libfunc, 0,
1959 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1962 emit_library_call (bcopy_libfunc, 0,
1963 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1969 else if (partial > 0)
1971 /* Scalar partly in registers. */
1973 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1976 /* # words of start of argument
1977 that we must make space for but need not store. */
1978 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1979 int args_offset = INTVAL (args_so_far);
1982 /* Push padding now if padding above and stack grows down,
1983 or if padding below and stack grows up.
1984 But if space already allocated, this has already been done. */
1985 if (extra && args_addr == 0
1986 && where_pad != none && where_pad != stack_direction)
1987 anti_adjust_stack (GEN_INT (extra));
1989 /* If we make space by pushing it, we might as well push
1990 the real data. Otherwise, we can leave OFFSET nonzero
1991 and leave the space uninitialized. */
1995 /* Now NOT_STACK gets the number of words that we don't need to
1996 allocate on the stack. */
1997 not_stack = partial - offset;
1999 /* If the partial register-part of the arg counts in its stack size,
2000 skip the part of stack space corresponding to the registers.
2001 Otherwise, start copying to the beginning of the stack space,
2002 by setting SKIP to 0. */
2003 #ifndef REG_PARM_STACK_SPACE
2009 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2010 x = validize_mem (force_const_mem (mode, x));
2012 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2013 SUBREGs of such registers are not allowed. */
2014 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2015 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2016 x = copy_to_reg (x);
2018 /* Loop over all the words allocated on the stack for this arg. */
2019 /* We can do it by words, because any scalar bigger than a word
2020 has a size a multiple of a word. */
2021 #ifndef PUSH_ARGS_REVERSED
2022 for (i = not_stack; i < size; i++)
2024 for (i = size - 1; i >= not_stack; i--)
2026 if (i >= not_stack + offset)
/* Recurse to push one word at the proper offset from ARGS_SO_FAR.  */
2027 emit_push_insn (operand_subword_force (x, i, mode),
2028 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2030 GEN_INT (args_offset + ((i - not_stack + skip)
2031 * UNITS_PER_WORD)));
2037 /* Push padding now if padding above and stack grows down,
2038 or if padding below and stack grows up.
2039 But if space already allocated, this has already been done. */
2040 if (extra && args_addr == 0
2041 && where_pad != none && where_pad != stack_direction)
2042 anti_adjust_stack (GEN_INT (extra));
/* Ordinary scalar case: a push insn if available, else a move to the
   computed slot address in the preallocated block.  */
2044 #ifdef PUSH_ROUNDING
2046 addr = gen_push_operand ();
2049 if (GET_CODE (args_so_far) == CONST_INT)
2051 = memory_address (mode,
2052 plus_constant (args_addr, INTVAL (args_so_far)));
2054 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2057 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2061 /* If part should go in registers, copy that part
2062 into the appropriate registers. Do this now, at the end,
2063 since mem-to-mem copies above may do function calls. */
2064 if (partial > 0 && reg != 0)
2065 move_block_to_reg (REGNO (reg), x, partial, mode);
2067 if (extra && args_addr == 0 && where_pad == stack_direction)
2068 anti_adjust_stack (GEN_INT (extra));
2071 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2072 (emitting the queue unless NO_QUEUE is nonzero),
2073 for a value of mode OUTMODE,
2074 with NARGS different arguments, passed as alternating rtx values
2075 and machine_modes to convert them to.
2076 The rtx values should have been passed through protect_from_queue already.
2078 NO_QUEUE will be true if and only if the library call is a `const' call
2079 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2080 to the variable is_const in expand_call.
2082 NO_QUEUE must be true for const calls, because if it isn't, then
2083 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2084 and will be lost if the libcall sequence is optimized away.
2086 NO_QUEUE must be false for non-const calls, because if it isn't, the
2087 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2088 optimized. For instance, the instruction scheduler may incorrectly
2089 move memory references across the non-const call. */
/* NOTE(review): sampled listing -- varargs setup, declarations and many
   statements between the numbered lines are elided.  Code byte-identical.  */
2092 emit_library_call (va_alist)
2096 /* Total size in bytes of all the stack-parms scanned so far. */
2097 struct args_size args_size;
2098 /* Size of arguments before any adjustments (such as rounding). */
2099 struct args_size original_args_size;
2100 register int argnum;
2101 enum machine_mode outmode;
2108 CUMULATIVE_ARGS args_so_far;
/* Per-argument record: the value, its mode, its register (if any), how
   many words go in registers, and its stack offset/size.  */
2109 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2110 struct args_size offset; struct args_size size; };
2112 int old_inhibit_defer_pop = inhibit_defer_pop;
/* Fixed leading varargs: function, queue flag, return mode, arg count.  */
2117 orgfun = fun = va_arg (p, rtx);
2118 no_queue = va_arg (p, int);
2119 outmode = va_arg (p, enum machine_mode);
2120 nargs = va_arg (p, int);
2122 /* Copy all the libcall-arguments out of the varargs data
2123 and into a vector ARGVEC.
2125 Compute how to pass each argument. We only support a very small subset
2126 of the full argument passing conventions to limit complexity here since
2127 library functions shouldn't have many args. */
2129 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2131 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2133 args_size.constant = 0;
2136 for (count = 0; count < nargs; count++)
2138 rtx val = va_arg (p, rtx);
2139 enum machine_mode mode = va_arg (p, enum machine_mode);
2141 /* We cannot convert the arg value to the mode the library wants here;
2142 must do it earlier where we know the signedness of the arg. */
2144 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2147 /* On some machines, there's no way to pass a float to a library fcn.
2148 Pass it as a double instead. */
2149 #ifdef LIBGCC_NEEDS_DOUBLE
2150 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2151 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2154 /* There's no need to call protect_from_queue, because
2155 either emit_move_insn or emit_push_insn will do that. */
2157 /* Make sure it is a reasonable operand for a move or push insn. */
2158 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2159 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2160 val = force_operand (val, NULL_RTX);
2162 argvec[count].value = val;
2163 argvec[count].mode = mode;
2165 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2166 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2170 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2171 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2173 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2174 argvec[count].partial
2175 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2177 argvec[count].partial = 0;
2180 locate_and_pad_parm (mode, NULL_TREE,
2181 argvec[count].reg && argvec[count].partial == 0,
2182 NULL_TREE, &args_size, &argvec[count].offset,
2183 &argvec[count].size);
2185 if (argvec[count].size.var)
2188 #ifndef REG_PARM_STACK_SPACE
2189 if (argvec[count].partial)
2190 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2193 if (argvec[count].reg == 0 || argvec[count].partial != 0
2194 #ifdef REG_PARM_STACK_SPACE
2198 args_size.constant += argvec[count].size.constant;
2200 #ifdef ACCUMULATE_OUTGOING_ARGS
2201 /* If this arg is actually passed on the stack, it might be
2202 clobbering something we already put there (this library call might
2203 be inside the evaluation of an argument to a function whose call
2204 requires the stack). This will only occur when the library call
2205 has sufficient args to run out of argument registers. Abort in
2206 this case; if this ever occurs, code must be added to save and
2207 restore the arg slot. */
2209 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2213 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2217 /* If this machine requires an external definition for library
2218 functions, write one out. */
2219 assemble_external_libcall (fun);
2221 original_args_size = args_size;
/* Round the total stack-argument size up to the stack boundary.  */
2222 #ifdef STACK_BOUNDARY
2223 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2224 / STACK_BYTES) * STACK_BYTES);
2227 #ifdef REG_PARM_STACK_SPACE
2228 args_size.constant = MAX (args_size.constant,
2229 REG_PARM_STACK_SPACE (NULL_TREE));
2230 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2231 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2235 #ifdef ACCUMULATE_OUTGOING_ARGS
2236 if (args_size.constant > current_function_outgoing_args_size)
2237 current_function_outgoing_args_size = args_size.constant;
2238 args_size.constant = 0;
2241 #ifndef PUSH_ROUNDING
2242 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2245 #ifdef PUSH_ARGS_REVERSED
2246 #ifdef STACK_BOUNDARY
2247 /* If we push args individually in reverse order, perform stack alignment
2248 before the first push (the last arg). */
2250 anti_adjust_stack (GEN_INT (args_size.constant
2251 - original_args_size.constant))
2255 #ifdef PUSH_ARGS_REVERSED
2263 /* Push the args that need to be pushed. */
/* ARGNUM starts at one end of ARGVEC and steps by INC (both set in the
   elided lines above) so pushes happen in the machine's required order.  */
2265 for (count = 0; count < nargs; count++, argnum += inc)
2267 register enum machine_mode mode = argvec[argnum].mode;
2268 register rtx val = argvec[argnum].value;
2269 rtx reg = argvec[argnum].reg;
2270 int partial = argvec[argnum].partial;
2272 if (! (reg != 0 && partial == 0))
/* NOTE(review): the offset uses COUNT while every other field uses
   ARGNUM; when PUSH_ARGS_REVERSED makes them differ, this pairs each
   value with the wrong stack offset.  Looks like a bug (later GCC
   releases index by ARGNUM here) -- verify before changing.  */
2273 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2274 argblock, GEN_INT (argvec[count].offset.constant));
2278 #ifndef PUSH_ARGS_REVERSED
2279 #ifdef STACK_BOUNDARY
2280 /* If we pushed args in forward order, perform stack alignment
2281 after pushing the last arg. */
2283 anti_adjust_stack (GEN_INT (args_size.constant
2284 - original_args_size.constant));
2288 #ifdef PUSH_ARGS_REVERSED
2294 /* Now load any reg parms into their regs. */
2296 for (count = 0; count < nargs; count++, argnum += inc)
2298 register enum machine_mode mode = argvec[argnum].mode;
2299 register rtx val = argvec[argnum].value;
2300 rtx reg = argvec[argnum].reg;
2301 int partial = argvec[argnum].partial;
2303 if (reg != 0 && partial == 0)
2304 emit_move_insn (reg, val);
2308 /* For version 1.37, try deleting this entirely. */
2312 /* Any regs containing parms remain in use through the call. */
2314 for (count = 0; count < nargs; count++)
2315 if (argvec[count].reg != 0)
2316 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2318 use_insns = get_insns ();
2321 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2323 /* Don't allow popping to be deferred, since then
2324 cse'ing of library calls could delete a call and leave the pop. */
2327 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2328 will set inhibit_defer_pop to that value. */
2330 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2331 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2332 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2333 old_inhibit_defer_pop + 1, use_insns, no_queue);
2335 /* Now restore inhibit_defer_pop to its actual original value. */
2339 /* Like emit_library_call except that an extra argument, VALUE,
2340 comes second and says where to store the result.
2341 (If VALUE is zero, the result comes in the function value register.) */
2344 emit_library_call_value (va_alist)
2348 /* Total size in bytes of all the stack-parms scanned so far. */
2349 struct args_size args_size;
2350 /* Size of arguments before any adjustments (such as rounding). */
2351 struct args_size original_args_size;
2352 register int argnum;
2353 enum machine_mode outmode;
2360 CUMULATIVE_ARGS args_so_far;
2361 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2362 struct args_size offset; struct args_size size; };
2364 int old_inhibit_defer_pop = inhibit_defer_pop;
2371 orgfun = fun = va_arg (p, rtx);
2372 value = va_arg (p, rtx);
2373 no_queue = va_arg (p, int);
2374 outmode = va_arg (p, enum machine_mode);
2375 nargs = va_arg (p, int);
2377 /* If this kind of value comes back in memory,
2378 decide where in memory it should come back. */
2379 if (RETURN_IN_MEMORY (type_for_mode (outmode, 0)))
2381 if (GET_CODE (value) == MEM)
2384 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2387 /* ??? Unfinished: must pass the memory address as an argument. */
2389 /* Copy all the libcall-arguments out of the varargs data
2390 and into a vector ARGVEC.
2392 Compute how to pass each argument. We only support a very small subset
2393 of the full argument passing conventions to limit complexity here since
2394 library functions shouldn't have many args. */
2396 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2398 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2400 args_size.constant = 0;
2405 /* If there's a structure value address to be passed,
2406 either pass it in the special place, or pass it as an extra argument. */
2409 rtx addr = XEXP (mem_value, 0);
2411 if (! struct_value_rtx)
2415 /* Make sure it is a reasonable operand for a move or push insn. */
2416 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2417 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2418 addr = force_operand (addr, NULL_RTX);
2420 argvec[count].value = addr;
2421 argvec[count].mode = outmode;
2422 argvec[count].partial = 0;
2424 argvec[count].reg = FUNCTION_ARG (args_so_far, outmode, NULL_TREE, 1);
2425 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2426 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, outmode, NULL_TREE, 1))
2430 locate_and_pad_parm (outmode, NULL_TREE,
2431 argvec[count].reg && argvec[count].partial == 0,
2432 NULL_TREE, &args_size, &argvec[count].offset,
2433 &argvec[count].size);
2436 if (argvec[count].reg == 0 || argvec[count].partial != 0
2437 #ifdef REG_PARM_STACK_SPACE
2441 args_size.constant += argvec[count].size.constant;
2443 FUNCTION_ARG_ADVANCE (args_so_far, outmode, (tree)0, 1);
2447 for (; count < nargs; count++)
2449 rtx val = va_arg (p, rtx);
2450 enum machine_mode mode = va_arg (p, enum machine_mode);
2452 /* We cannot convert the arg value to the mode the library wants here;
2453 must do it earlier where we know the signedness of the arg. */
2455 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2458 /* On some machines, there's no way to pass a float to a library fcn.
2459 Pass it as a double instead. */
2460 #ifdef LIBGCC_NEEDS_DOUBLE
2461 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2462 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2465 /* There's no need to call protect_from_queue, because
2466 either emit_move_insn or emit_push_insn will do that. */
2468 /* Make sure it is a reasonable operand for a move or push insn. */
2469 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2470 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2471 val = force_operand (val, NULL_RTX);
2473 argvec[count].value = val;
2474 argvec[count].mode = mode;
2476 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2477 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2481 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2482 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2484 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2485 argvec[count].partial
2486 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2488 argvec[count].partial = 0;
2491 locate_and_pad_parm (mode, NULL_TREE,
2492 argvec[count].reg && argvec[count].partial == 0,
2493 NULL_TREE, &args_size, &argvec[count].offset,
2494 &argvec[count].size);
2496 if (argvec[count].size.var)
2499 #ifndef REG_PARM_STACK_SPACE
2500 if (argvec[count].partial)
2501 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2504 if (argvec[count].reg == 0 || argvec[count].partial != 0
2505 #ifdef REG_PARM_STACK_SPACE
2509 args_size.constant += argvec[count].size.constant;
2511 #ifdef ACCUMULATE_OUTGOING_ARGS
2512 /* If this arg is actually passed on the stack, it might be
2513 clobbering something we already put there (this library call might
2514 be inside the evaluation of an argument to a function whose call
2515 requires the stack). This will only occur when the library call
2516 has sufficient args to run out of argument registers. Abort in
2517 this case; if this ever occurs, code must be added to save and
2518 restore the arg slot. */
2520 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2524 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2528 /* If this machine requires an external definition for library
2529 functions, write one out. */
2530 assemble_external_libcall (fun);
2532 original_args_size = args_size;
2533 #ifdef STACK_BOUNDARY
2534 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2535 / STACK_BYTES) * STACK_BYTES);
2538 #ifdef REG_PARM_STACK_SPACE
2539 args_size.constant = MAX (args_size.constant,
2540 REG_PARM_STACK_SPACE (NULL_TREE));
2541 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2542 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2546 #ifdef ACCUMULATE_OUTGOING_ARGS
2547 if (args_size.constant > current_function_outgoing_args_size)
2548 current_function_outgoing_args_size = args_size.constant;
2549 args_size.constant = 0;
2552 #ifndef PUSH_ROUNDING
2553 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2556 #ifdef PUSH_ARGS_REVERSED
2557 #ifdef STACK_BOUNDARY
2558 /* If we push args individually in reverse order, perform stack alignment
2559 before the first push (the last arg). */
2561 anti_adjust_stack (GEN_INT (args_size.constant
2562 - original_args_size.constant));
2566 #ifdef PUSH_ARGS_REVERSED
2574 /* Push the args that need to be pushed. */
2576 for (count = 0; count < nargs; count++, argnum += inc)
2578 register enum machine_mode mode = argvec[argnum].mode;
2579 register rtx val = argvec[argnum].value;
2580 rtx reg = argvec[argnum].reg;
2581 int partial = argvec[argnum].partial;
2583 if (! (reg != 0 && partial == 0))
2584 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2585 argblock, GEN_INT (argvec[count].offset.constant));
2589 #ifndef PUSH_ARGS_REVERSED
2590 #ifdef STACK_BOUNDARY
2591 /* If we pushed args in forward order, perform stack alignment
2592 after pushing the last arg. */
2594 anti_adjust_stack (GEN_INT (args_size.constant
2595 - original_args_size.constant));
2599 #ifdef PUSH_ARGS_REVERSED
2605 /* Now load any reg parms into their regs. */
2607 if (mem_value != 0 && struct_value_rtx != 0)
2608 emit_move_insn (struct_value_rtx, XEXP (mem_value, 0));
2610 for (count = 0; count < nargs; count++, argnum += inc)
2612 register enum machine_mode mode = argvec[argnum].mode;
2613 register rtx val = argvec[argnum].value;
2614 rtx reg = argvec[argnum].reg;
2615 int partial = argvec[argnum].partial;
2617 if (reg != 0 && partial == 0)
2618 emit_move_insn (reg, val);
2623 /* For version 1.37, try deleting this entirely. */
2628 /* Any regs containing parms remain in use through the call. */
2630 for (count = 0; count < nargs; count++)
2631 if (argvec[count].reg != 0)
2632 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2634 use_insns = get_insns ();
2637 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2639 /* Don't allow popping to be deferred, since then
2640 cse'ing of library calls could delete a call and leave the pop. */
2643 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2644 will set inhibit_defer_pop to that value. */
2646 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2647 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2648 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2649 old_inhibit_defer_pop + 1, use_insns, no_queue);
2651 /* Now restore inhibit_defer_pop to its actual original value. */
2654 /* Copy the value to the right place. */
2655 if (outmode != VOIDmode)
2660 value = hard_libcall_value (outmode);
2661 if (value != mem_value)
2662 emit_move_insn (value, mem_value);
2664 else if (value != 0)
2665 emit_move_insn (value, hard_libcall_value (outmode));
/* NOTE(review): this chunk is a sampled extract of GCC expr.c; many original
   lines (braces, declarations, else-arms) are missing between the numbered
   lines below.  Comments added here describe only what the visible code
   shows; gaps are flagged rather than guessed at.  */
2669 /* Expand an assignment that stores the value of FROM into TO.
2670 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2671 (This may contain a QUEUED rtx.)
2672 Otherwise, the returned value is not meaningful.
2674 SUGGEST_REG is no longer actually used.
2675 It used to mean, copy the value through a register
2676 and return that register, if that is possible.
2677 But now we do this if WANT_VALUE.
2679 If the value stored is a constant, we return the constant. */
2682 expand_assignment (to, from, want_value, suggest_reg)
/* Parameter declarations and opening brace are missing from this extract.  */
2687 register rtx to_rtx = 0;
2690 /* Don't crash if the lhs of the assignment was erroneous. */
2692 if (TREE_CODE (to) == ERROR_MARK)
2693 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2695 /* Assignment of a structure component needs special treatment
2696 if the structure component's rtx is not simply a MEM.
2697 Assignment of an array element at a constant index
2698 has the same problem. */
2700 if (TREE_CODE (to) == COMPONENT_REF
2701 || TREE_CODE (to) == BIT_FIELD_REF
2702 || (TREE_CODE (to) == ARRAY_REF
2703 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2704 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2706 enum machine_mode mode1;
/* Decompose the reference into containing object + bit position/size.  */
2712 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2713 &mode1, &unsignedp, &volatilep);
2715 /* If we are going to use store_bit_field and extract_bit_field,
2716 make sure to_rtx will be safe for multiple use. */
2718 if (mode1 == VOIDmode && want_value)
2719 tem = stabilize_reference (tem);
2721 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
/* Variable offset: add it to the address.  The guard line for this arm
   (testing OFFSET != 0, presumably) is missing from the extract.  */
2724 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2726 if (GET_CODE (to_rtx) != MEM)
2728 to_rtx = change_address (to_rtx, VOIDmode,
2729 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2730 force_reg (Pmode, offset_rtx)));
2734 if (GET_CODE (to_rtx) == MEM)
2735 MEM_VOLATILE_P (to_rtx) = 1;
2736 #if 0 /* This was turned off because, when a field is volatile
2737 in an object which is not volatile, the object may be in a register,
2738 and then we would abort over here. */
2744 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2746 /* Spurious cast makes HPUX compiler happy. */
2747 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2750 /* Required alignment of containing datum. */
2751 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2752 int_size_in_bytes (TREE_TYPE (tem)));
2753 preserve_temp_slots (result);
2759 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2760 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2763 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2765 /* In case we are returning the contents of an object which overlaps
2766 the place the value is being stored, use a safe function when copying
2767 a value through a pointer into a structure value return block. */
2768 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2769 && current_function_returns_struct
2770 && !current_function_returns_pcc_struct)
2772 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2773 rtx size = expr_size (from);
/* Note the argument order differs: memcpy is (dest, src, len),
   bcopy is (src, dest, len).  */
2775 #ifdef TARGET_MEM_FUNCTIONS
2776 emit_library_call (memcpy_libfunc, 0,
2777 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2778 XEXP (from_rtx, 0), Pmode,
2781 emit_library_call (bcopy_libfunc, 0,
2782 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2783 XEXP (to_rtx, 0), Pmode,
2787 preserve_temp_slots (to_rtx);
2792 /* Compute FROM and store the value in the rtx we got. */
2794 result = store_expr (from, to_rtx, want_value);
2795 preserve_temp_slots (result);
/* NOTE(review): sampled extract — interior lines are missing (e.g. several
   closing braces and else-arms).  Added comments are limited to what the
   visible lines establish.  */
2800 /* Generate code for computing expression EXP,
2801 and storing the value into TARGET.
2802 Returns TARGET or an equivalent value.
2803 TARGET may contain a QUEUED rtx.
2805 If SUGGEST_REG is nonzero, copy the value through a register
2806 and return that register, if that is possible.
2808 If the value stored is a constant, we return the constant. */
2811 store_expr (exp, target, suggest_reg)
2813 register rtx target;
2817 int dont_return_target = 0;
2819 if (TREE_CODE (exp) == COMPOUND_EXPR)
2821 /* Perform first part of compound expression, then assign from second
2823 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2825 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2827 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2829 /* For conditional expression, get safe form of the target. Then
2830 test the condition, doing the appropriate assignment on either
2831 side. This avoids the creation of unnecessary temporaries.
2832 For non-BLKmode, it is more efficient not to do this. */
2834 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2837 target = protect_from_queue (target, 1);
2840 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2841 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2843 emit_jump_insn (gen_jump (lab2));
2846 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2852 else if (suggest_reg && GET_CODE (target) == MEM
2853 && GET_MODE (target) != BLKmode)
2854 /* If target is in memory and caller wants value in a register instead,
2855 arrange that. Pass TARGET as target for expand_expr so that,
2856 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2857 We know expand_expr will not use the target in that case. */
2859 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2860 GET_MODE (target), 0);
2861 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2862 temp = copy_to_reg (temp);
2863 dont_return_target = 1;
2865 else if (queued_subexp_p (target))
2866 /* If target contains a postincrement, it is not safe
2867 to use as the returned value. It would access the wrong
2868 place by the time the queued increment gets output.
2869 So copy the value through a temporary and use that temp
2872 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2874 /* Expand EXP into a new pseudo. */
2875 temp = gen_reg_rtx (GET_MODE (target));
2876 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2879 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2880 dont_return_target = 1;
2882 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2883 /* If this is a scalar in a register that is stored in a wider mode
2884 than the declared mode, compute the result into its declared mode
2885 and then convert to the wider mode. Our value is the computed
2888 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2889 convert_move (SUBREG_REG (target), temp,
2890 SUBREG_PROMOTED_UNSIGNED_P (target));
/* Default arm (the `else` line is missing from this extract).  */
2895 temp = expand_expr (exp, target, GET_MODE (target), 0);
2896 /* DO return TARGET if it's a specified hardware register.
2897 expand_return relies on this. */
2898 if (!(target && GET_CODE (target) == REG
2899 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2900 && CONSTANT_P (temp))
2901 dont_return_target = 1;
2904 /* If value was not generated in the target, store it there.
2905 Convert the value to TARGET's type first if nec. */
2907 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2909 target = protect_from_queue (target, 1);
2910 if (GET_MODE (temp) != GET_MODE (target)
2911 && GET_MODE (temp) != VOIDmode)
2913 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2914 if (dont_return_target)
2916 /* In this case, we will return TEMP,
2917 so make sure it has the proper mode.
2918 But don't forget to store the value into TARGET. */
2919 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2920 emit_move_insn (target, temp);
2923 convert_move (target, temp, unsignedp);
2926 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2928 /* Handle copying a string constant into an array.
2929 The string constant may be shorter than the array.
2930 So copy just the string's actual length, and clear the rest. */
2933 /* Get the size of the data type of the string,
2934 which is actually the size of the target. */
2935 size = expr_size (exp);
2936 if (GET_CODE (size) == CONST_INT
2937 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2938 emit_block_move (target, temp, size,
2939 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2942 /* Compute the size of the data to copy from the string. */
/* copy_size = MIN (size of target type in bytes, string length).  */
2944 = fold (build (MIN_EXPR, sizetype,
2945 size_binop (CEIL_DIV_EXPR,
2946 TYPE_SIZE (TREE_TYPE (exp)),
2947 size_int (BITS_PER_UNIT)),
2949 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2950 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2954 /* Copy that much. */
2955 emit_block_move (target, temp, copy_size_rtx,
2956 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2958 /* Figure out how much is left in TARGET
2959 that we have to clear. */
2960 if (GET_CODE (copy_size_rtx) == CONST_INT)
2962 temp = plus_constant (XEXP (target, 0),
2963 TREE_STRING_LENGTH (exp));
2964 size = plus_constant (size,
2965 - TREE_STRING_LENGTH (exp));
/* Non-constant copy size: compute the remaining address/size at runtime
   and skip the clearing when the remainder would be negative.  */
2969 enum machine_mode size_mode = Pmode;
2971 temp = force_reg (Pmode, XEXP (target, 0));
2972 temp = expand_binop (size_mode, add_optab, temp,
2973 copy_size_rtx, NULL_RTX, 0,
2976 size = expand_binop (size_mode, sub_optab, size,
2977 copy_size_rtx, NULL_RTX, 0,
2980 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2981 GET_MODE (size), 0, 0);
2982 label = gen_label_rtx ();
2983 emit_jump_insn (gen_blt (label));
2986 if (size != const0_rtx)
2988 #ifdef TARGET_MEM_FUNCTIONS
2989 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2990 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2992 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2993 temp, Pmode, size, Pmode);
3000 else if (GET_MODE (temp) == BLKmode)
3001 emit_block_move (target, temp, expr_size (exp),
3002 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3004 emit_move_insn (target, temp);
3006 if (dont_return_target)
/* NOTE(review): sampled extract — declarations of ELT, I, BITSIZE etc. and
   several braces are missing between the numbered lines.  */
3011 /* Store the value of constructor EXP into the rtx TARGET.
3012 TARGET is either a REG or a MEM. */
3015 store_constructor (exp, target)
3019 tree type = TREE_TYPE (exp);
3021 /* We know our target cannot conflict, since safe_from_p has been called. */
3023 /* Don't try copying piece by piece into a hard register
3024 since that is vulnerable to being clobbered by EXP.
3025 Instead, construct in a pseudo register and then copy it all. */
3026 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3028 rtx temp = gen_reg_rtx (GET_MODE (target));
3029 store_constructor (exp, temp);
3030 emit_move_insn (target, temp);
3035 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
3039 /* Inform later passes that the whole union value is dead. */
3040 if (TREE_CODE (type) == UNION_TYPE)
3041 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3043 /* If we are building a static constructor into a register,
3044 set the initial value as zero so we can fold the value into
3046 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
3047 emit_move_insn (target, const0_rtx);
3049 /* If the constructor has fewer fields than the structure,
3050 clear the whole structure first. */
3051 else if (list_length (CONSTRUCTOR_ELTS (exp))
3052 != list_length (TYPE_FIELDS (type)))
3053 clear_storage (target, int_size_in_bytes (type));
3055 /* Inform later passes that the old value is dead. */
3056 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3058 /* Store each element of the constructor into
3059 the corresponding field of TARGET. */
3061 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3063 register tree field = TREE_PURPOSE (elt);
3064 register enum machine_mode mode;
3069 /* Just ignore missing fields.
3070 We cleared the whole structure, above,
3071 if any fields are missing. */
3075 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3076 unsignedp = TREE_UNSIGNED (field);
3077 mode = DECL_MODE (field);
3078 if (DECL_BIT_FIELD (field))
3081 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
3082 /* ??? This case remains to be written. */
3085 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
3087 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3088 /* The alignment of TARGET is
3089 at least what its type requires. */
3091 TYPE_ALIGN (type) / BITS_PER_UNIT,
3092 int_size_in_bytes (type));
3095 else if (TREE_CODE (type) == ARRAY_TYPE)
3099 tree domain = TYPE_DOMAIN (type);
3100 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3101 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3102 tree elttype = TREE_TYPE (type);
3104 /* If the constructor has fewer fields than the structure,
3105 clear the whole structure first. Similarly if this is a
3106 static constructor of a non-BLKmode object. */
3108 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3109 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3110 clear_storage (target, maxelt - minelt + 1);
3112 /* Inform later passes that the old value is dead. */
3113 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3115 /* Store each element of the constructor into
3116 the corresponding element of TARGET, determined
3117 by counting the elements. */
3118 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3120 elt = TREE_CHAIN (elt), i++)
3122 register enum machine_mode mode;
3127 mode = TYPE_MODE (elttype);
3128 bitsize = GET_MODE_BITSIZE (mode);
3129 unsignedp = TREE_UNSIGNED (elttype);
/* Element I lives at I * element-size bits from the start of TARGET.  */
3131 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3133 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3134 /* The alignment of TARGET is
3135 at least what its type requires. */
3137 TYPE_ALIGN (type) / BITS_PER_UNIT,
3138 int_size_in_bytes (type));
/* NOTE(review): sampled extract — some parameter declarations, locals and
   braces are missing from the visible lines.  */
3146 /* Store the value of EXP (an expression tree)
3147 into a subfield of TARGET which has mode MODE and occupies
3148 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3149 If MODE is VOIDmode, it means that we are storing into a bit-field.
3151 If VALUE_MODE is VOIDmode, return nothing in particular.
3152 UNSIGNEDP is not used in this case.
3154 Otherwise, return an rtx for the value stored. This rtx
3155 has mode VALUE_MODE if that is convenient to do.
3156 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3158 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3159 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3162 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3163 unsignedp, align, total_size)
3165 int bitsize, bitpos;
3166 enum machine_mode mode;
3168 enum machine_mode value_mode;
3173 HOST_WIDE_INT width_mask = 0;
/* Mask of BITSIZE low-order 1 bits, used below to re-extract the value.
   Left at 0 when BITSIZE fills (or exceeds) a host word.  */
3175 if (bitsize < HOST_BITS_PER_WIDE_INT)
3176 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3178 /* If we are storing into an unaligned field of an aligned union that is
3179 in a register, we may have the mode of TARGET being an integer mode but
3180 MODE == BLKmode. In that case, get an aligned object whose size and
3181 alignment are the same as TARGET and store TARGET into it (we can avoid
3182 the store if the field being stored is the entire width of TARGET). Then
3183 call ourselves recursively to store the field into a BLKmode version of
3184 that object. Finally, load from the object into TARGET. This is not
3185 very efficient in general, but should only be slightly more expensive
3186 than the otherwise-required unaligned accesses. Perhaps this can be
3187 cleaned up later. */
3190 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3192 rtx object = assign_stack_temp (GET_MODE (target),
3193 GET_MODE_SIZE (GET_MODE (target)), 0);
3194 rtx blk_object = copy_rtx (object);
3196 PUT_MODE (blk_object, BLKmode);
3198 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3199 emit_move_insn (object, target);
3201 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3204 emit_move_insn (target, object);
3209 /* If the structure is in a register or if the component
3210 is a bit field, we cannot use addressing to access it.
3211 Use bit-field techniques or SUBREG to store in it. */
3213 if (mode == VOIDmode
3214 || (mode != BLKmode && ! direct_store[(int) mode])
3215 || GET_CODE (target) == REG
3216 || GET_CODE (target) == SUBREG)
3218 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3219 /* Store the value in the bitfield. */
3220 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3221 if (value_mode != VOIDmode)
3223 /* The caller wants an rtx for the value. */
3224 /* If possible, avoid refetching from the bitfield itself. */
3226 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3229 enum machine_mode tmode;
/* Unsigned: mask down to BITSIZE bits.  Signed (below): shift left
   then arithmetic-shift right to sign-extend within TMODE.  */
3232 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3233 tmode = GET_MODE (temp);
3234 if (tmode == VOIDmode)
3236 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3237 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3238 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3240 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3241 NULL_RTX, value_mode, 0, align,
/* MEM case: address the component directly.  */
3248 rtx addr = XEXP (target, 0);
3251 /* If a value is wanted, it must be the lhs;
3252 so make the address stable for multiple use. */
3254 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3255 && ! CONSTANT_ADDRESS_P (addr)
3256 /* A frame-pointer reference is already stable. */
3257 && ! (GET_CODE (addr) == PLUS
3258 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3259 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3260 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3261 addr = copy_to_reg (addr);
3263 /* Now build a reference to just the desired component. */
3265 to_rtx = change_address (target, mode,
3266 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3267 MEM_IN_STRUCT_P (to_rtx) = 1;
3269 return store_expr (exp, to_rtx, value_mode != VOIDmode);
/* NOTE(review): sampled extract — loop header and several braces of the
   decomposition loop are missing from the visible lines.  */
3273 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3274 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3275 ARRAY_REFs at constant positions and find the ultimate containing object,
3278 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3279 bit position, and *PUNSIGNEDP to the signedness of the field.
3280 If the position of the field is variable, we store a tree
3281 giving the variable offset (in units) in *POFFSET.
3282 This offset is in addition to the bit position.
3283 If the position is not variable, we store 0 in *POFFSET.
3285 If any of the extraction expressions is volatile,
3286 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3288 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3289 is a mode that can be used to access the field. In that case, *PBITSIZE
3292 If the field describes a variable-sized object, *PMODE is set to
3293 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3294 this case, but the address of the object can be found. */
3297 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
3302 enum machine_mode *pmode;
3307 enum machine_mode mode = VOIDmode;
/* First, determine the size and signedness of the outermost reference.  */
3310 if (TREE_CODE (exp) == COMPONENT_REF)
3312 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3313 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3314 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3315 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3317 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3319 size_tree = TREE_OPERAND (exp, 1);
3320 *punsignedp = TREE_UNSIGNED (exp);
3324 mode = TYPE_MODE (TREE_TYPE (exp));
3325 *pbitsize = GET_MODE_BITSIZE (mode);
3326 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
/* Variable-sized field: report mode BLKmode / size -1 per the contract.  */
3331 if (TREE_CODE (size_tree) != INTEGER_CST)
3332 mode = BLKmode, *pbitsize = -1;
3334 *pbitsize = TREE_INT_CST_LOW (size_tree);
3337 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3338 and find the ultimate containing object. */
3344 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3346 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3347 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3348 : TREE_OPERAND (exp, 2));
/* POS may be constant, variable, or constant + variable; split the
   constant part into *PBITPOS and accumulate the rest in OFFSET.  */
3350 if (TREE_CODE (pos) == PLUS_EXPR)
3353 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3355 constant = TREE_OPERAND (pos, 0);
3356 var = TREE_OPERAND (pos, 1);
3358 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3360 constant = TREE_OPERAND (pos, 1);
3361 var = TREE_OPERAND (pos, 0);
3365 *pbitpos += TREE_INT_CST_LOW (constant);
3367 offset = size_binop (PLUS_EXPR, offset,
3368 size_binop (FLOOR_DIV_EXPR, var,
3369 size_int (BITS_PER_UNIT)));
3371 offset = size_binop (FLOOR_DIV_EXPR, var,
3372 size_int (BITS_PER_UNIT));
3374 else if (TREE_CODE (pos) == INTEGER_CST)
3375 *pbitpos += TREE_INT_CST_LOW (pos);
3378 /* Assume here that the offset is a multiple of a unit.
3379 If not, there should be an explicitly added constant. */
3381 offset = size_binop (PLUS_EXPR, offset,
3382 size_binop (FLOOR_DIV_EXPR, pos,
3383 size_int (BITS_PER_UNIT)));
3385 offset = size_binop (FLOOR_DIV_EXPR, pos,
3386 size_int (BITS_PER_UNIT));
3390 else if (TREE_CODE (exp) == ARRAY_REF
3391 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3392 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
3394 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3395 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
3397 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3398 && ! ((TREE_CODE (exp) == NOP_EXPR
3399 || TREE_CODE (exp) == CONVERT_EXPR)
3400 && (TYPE_MODE (TREE_TYPE (exp))
3401 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3404 /* If any reference in the chain is volatile, the effect is volatile. */
3405 if (TREE_THIS_VOLATILE (exp))
3407 exp = TREE_OPERAND (exp, 0);
3410 /* If this was a bit-field, see if there is a mode that allows direct
3411 access in case EXP is in memory. */
3412 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
3414 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3415 if (mode == BLKmode)
3422 /* We aren't finished fixing the callers to really handle nonzero offset. */
/* NOTE(review): sampled extract — the MULT/PLUS branch bodies are missing
   some lines (braces, an else); comments below describe only visible code.  */
3430 /* Given an rtx VALUE that may contain additions and multiplications,
3431 return an equivalent value that just refers to a register or memory.
3432 This is done by generating instructions to perform the arithmetic
3433 and returning a pseudo-register containing the value.
3435 The returned value may be a REG, SUBREG, MEM or constant. */
3438 force_operand (value, target)
3441 register optab binoptab = 0;
3442 /* Use a temporary to force order of execution of calls to
3446 /* Use subtarget as the target for operand 0 of a binary operation. */
3447 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3449 if (GET_CODE (value) == PLUS)
3450 binoptab = add_optab;
3451 else if (GET_CODE (value) == MINUS)
3452 binoptab = sub_optab;
3453 else if (GET_CODE (value) == MULT)
3455 op2 = XEXP (value, 1);
3456 if (!CONSTANT_P (op2)
3457 && !(GET_CODE (op2) == REG && op2 != subtarget))
3459 tmp = force_operand (XEXP (value, 0), subtarget);
3460 return expand_mult (GET_MODE (value), tmp,
3461 force_operand (op2, NULL_RTX),
/* PLUS/MINUS branch (the `if (binoptab)` guard line is missing here).  */
3467 op2 = XEXP (value, 1);
3468 if (!CONSTANT_P (op2)
3469 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Canonicalize subtraction of a constant to addition of its negation.  */
3471 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3473 binoptab = add_optab;
3474 op2 = negate_rtx (GET_MODE (value), op2);
3477 /* Check for an addition with OP2 a constant integer and our first
3478 operand a PLUS of a virtual register and something else. In that
3479 case, we want to emit the sum of the virtual register and the
3480 constant first and then add the other value. This allows virtual
3481 register instantiation to simply modify the constant rather than
3482 creating another one around this addition. */
3483 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3484 && GET_CODE (XEXP (value, 0)) == PLUS
3485 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3486 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3487 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3489 rtx temp = expand_binop (GET_MODE (value), binoptab,
3490 XEXP (XEXP (value, 0), 0), op2,
3491 subtarget, 0, OPTAB_LIB_WIDEN);
3492 return expand_binop (GET_MODE (value), binoptab, temp,
3493 force_operand (XEXP (XEXP (value, 0), 1), 0),
3494 target, 0, OPTAB_LIB_WIDEN);
3497 tmp = force_operand (XEXP (value, 0), subtarget);
3498 return expand_binop (GET_MODE (value), binoptab, tmp,
3499 force_operand (op2, NULL_RTX),
3500 target, 0, OPTAB_LIB_WIDEN);
3501 /* We give UNSIGNEDP = 0 to expand_binop
3502 because the only operations we are expanding here are signed ones. */
/* NOTE(review): sampled extract — parameter declarations, the else-arm brace
   and the return statement are missing from the visible lines.  */
3507 /* Subroutine of expand_expr:
3508 save the non-copied parts (LIST) of an expr (LHS), and return a list
3509 which can restore these values to their previous values,
3510 should something modify their storage. */
3513 save_noncopied_parts (lhs, list)
3520 for (tail = list; tail; tail = TREE_CHAIN (tail))
3521 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
/* Nested TREE_LIST: recurse and splice the result into PARTS.  */
3522 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3525 tree part = TREE_VALUE (tail);
3526 tree part_type = TREE_TYPE (part);
3527 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
/* Save each part's current value into a stack temporary.  */
3528 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3529 int_size_in_bytes (part_type), 0);
3530 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3531 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3532 parts = tree_cons (to_be_saved,
3533 build (RTL_EXPR, part_type, NULL_TREE,
3536 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
/* NOTE(review): sampled extract — declarations and the return statement are
   missing from the visible lines.  */
3541 /* Subroutine of expand_expr:
3542 record the non-copied parts (LIST) of an expr (LHS), and return a list
3543 which specifies the initial values of these parts. */
3546 init_noncopied_parts (lhs, list)
3553 for (tail = list; tail; tail = TREE_CHAIN (tail))
3554 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
/* Nested TREE_LIST: recurse, as in save_noncopied_parts.  */
3555 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3558 tree part = TREE_VALUE (tail);
3559 tree part_type = TREE_TYPE (part);
3560 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3561 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
/* NOTE(review): sampled extract — several case labels, `return 0/1`
   statements and braces are missing between the visible lines.  */
3566 /* Subroutine of expand_expr: return nonzero iff there is no way that
3567 EXP can reference X, which is being modified. */
3570 safe_from_p (x, exp)
3580 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3581 find the underlying pseudo. */
3582 if (GET_CODE (x) == SUBREG)
3585 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3589 /* If X is a location in the outgoing argument area, it is always safe. */
3590 if (GET_CODE (x) == MEM
3591 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3592 || (GET_CODE (XEXP (x, 0)) == PLUS
3593 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
/* First dispatch on the tree-code class ('d', '<', '1', '2', etc.).  */
3596 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3599 exp_rtl = DECL_RTL (exp);
3606 if (TREE_CODE (exp) == TREE_LIST)
3607 return ((TREE_VALUE (exp) == 0
3608 || safe_from_p (x, TREE_VALUE (exp)))
3609 && (TREE_CHAIN (exp) == 0
3610 || safe_from_p (x, TREE_CHAIN (exp))));
3615 return safe_from_p (x, TREE_OPERAND (exp, 0));
3619 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3620 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3624 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3625 the expression. If it is set, we conflict iff we are that rtx or
3626 both are in memory. Otherwise, we check all operands of the
3627 expression recursively. */
3629 switch (TREE_CODE (exp))
3632 return staticp (TREE_OPERAND (exp, 0));
3635 if (GET_CODE (x) == MEM)
3640 exp_rtl = CALL_EXPR_RTL (exp);
3643 /* Assume that the call will clobber all hard registers and
3645 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3646 || GET_CODE (x) == MEM)
3653 exp_rtl = RTL_EXPR_RTL (exp);
3655 /* We don't know what this can modify. */
3660 case WITH_CLEANUP_EXPR:
3661 exp_rtl = RTL_EXPR_RTL (exp);
3665 exp_rtl = SAVE_EXPR_RTL (exp);
3669 /* The only operand we look at is operand 1. The rest aren't
3670 part of the expression. */
3671 return safe_from_p (x, TREE_OPERAND (exp, 1));
3673 case METHOD_CALL_EXPR:
3674 /* This takes a rtx argument, but shouldn't appear here. */
3678 /* If we have an rtx, we do not need to scan our operands. */
/* Generic fallback: recursively check every operand of EXP.  */
3682 nops = tree_code_length[(int) TREE_CODE (exp)];
3683 for (i = 0; i < nops; i++)
3684 if (TREE_OPERAND (exp, i) != 0
3685 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3689 /* If we have an rtl, find any enclosed object. Then see if we conflict
3693 if (GET_CODE (exp_rtl) == SUBREG)
3695 exp_rtl = SUBREG_REG (exp_rtl);
3696 if (GET_CODE (exp_rtl) == REG
3697 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3701 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3702 are memory and EXP is not readonly. */
3703 return ! (rtx_equal_p (x, exp_rtl)
3704 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3705 && ! TREE_READONLY (exp)));
3708 /* If we reach here, it is safe. */
3712 /* Subroutine of expand_expr: return nonzero iff EXP is an
3713 expression whose type is statically determinable. */
3719 if (TREE_CODE (exp) == PARM_DECL
3720 || TREE_CODE (exp) == VAR_DECL
3721 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3722 || TREE_CODE (exp) == COMPONENT_REF
3723 || TREE_CODE (exp) == ARRAY_REF)
3728 /* expand_expr: generate code for computing expression EXP.
3729 An rtx for the computed value is returned. The value is never null.
3730 In the case of a void EXP, const0_rtx is returned.
3732 The value may be stored in TARGET if TARGET is nonzero.
3733 TARGET is just a suggestion; callers must assume that
3734 the rtx returned may not be the same as TARGET.
3736 If TARGET is CONST0_RTX, it means that the value will be ignored.
3738 If TMODE is not VOIDmode, it suggests generating the
3739 result in mode TMODE. But this is done only when convenient.
3740 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3741 TMODE is just a suggestion; callers must assume that
3742 the rtx returned may not have mode TMODE.
3744 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3745 with a constant address even if that address is not normally legitimate.
3746 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3748 If MODIFIER is EXPAND_SUM then when EXP is an addition
3749 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3750 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3751 products as above, or REG or MEM, or constant.
3752 Ordinarily in such cases we would output mul or add instructions
3753 and then return a pseudo reg containing the sum.
3755 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3756 it also marks a label as absolutely required (it can't be dead).
3757 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3758 This is used for outputting expressions used in initializers. */
3761 expand_expr (exp, target, tmode, modifier)
3764 enum machine_mode tmode;
3765 enum expand_modifier modifier;
3767 register rtx op0, op1, temp;
3768 tree type = TREE_TYPE (exp);
3769 int unsignedp = TREE_UNSIGNED (type);
3770 register enum machine_mode mode = TYPE_MODE (type);
3771 register enum tree_code code = TREE_CODE (exp);
3773 /* Use subtarget as the target for operand 0 of a binary operation. */
3774 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3775 rtx original_target = target;
3776 int ignore = target == const0_rtx;
3779 /* Don't use hard regs as subtargets, because the combiner
3780 can only handle pseudo regs. */
3781 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3783 /* Avoid subtargets inside loops,
3784 since they hide some invariant expressions. */
3785 if (preserve_subexpressions_p ())
3788 if (ignore) target = 0, original_target = 0;
3790 /* If will do cse, generate all results into pseudo registers
3791 since 1) that allows cse to find more things
3792 and 2) otherwise cse could produce an insn the machine
3795 if (! cse_not_expected && mode != BLKmode && target
3796 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3799 /* Ensure we reference a volatile object even if value is ignored. */
3800 if (ignore && TREE_THIS_VOLATILE (exp)
3801 && mode != VOIDmode && mode != BLKmode)
3803 target = gen_reg_rtx (mode);
3804 temp = expand_expr (exp, target, VOIDmode, modifier);
3806 emit_move_insn (target, temp);
3814 tree function = decl_function_context (exp);
3815 /* Handle using a label in a containing function. */
3816 if (function != current_function_decl && function != 0)
3818 struct function *p = find_function_data (function);
3819 /* Allocate in the memory associated with the function
3820 that the label is in. */
3821 push_obstacks (p->function_obstack,
3822 p->function_maybepermanent_obstack);
3824 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3825 label_rtx (exp), p->forced_labels);
3828 else if (modifier == EXPAND_INITIALIZER)
3829 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3830 label_rtx (exp), forced_labels);
3831 temp = gen_rtx (MEM, FUNCTION_MODE,
3832 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3833 if (function != current_function_decl && function != 0)
3834 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3839 if (DECL_RTL (exp) == 0)
3841 error_with_decl (exp, "prior parameter's size depends on `%s'");
3842 return CONST0_RTX (mode);
3848 if (DECL_RTL (exp) == 0)
3850 /* Ensure variable marked as used
3851 even if it doesn't go through a parser. */
3852 TREE_USED (exp) = 1;
3853 /* Handle variables inherited from containing functions. */
3854 context = decl_function_context (exp);
3856 /* We treat inline_function_decl as an alias for the current function
3857 because that is the inline function whose vars, types, etc.
3858 are being merged into the current function.
3859 See expand_inline_function. */
3860 if (context != 0 && context != current_function_decl
3861 && context != inline_function_decl
3862 /* If var is static, we don't need a static chain to access it. */
3863 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3864 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3868 /* Mark as non-local and addressable. */
3869 DECL_NONLOCAL (exp) = 1;
3870 mark_addressable (exp);
3871 if (GET_CODE (DECL_RTL (exp)) != MEM)
3873 addr = XEXP (DECL_RTL (exp), 0);
3874 if (GET_CODE (addr) == MEM)
3875 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3877 addr = fix_lexical_addr (addr, exp);
3878 return change_address (DECL_RTL (exp), mode, addr);
3881 /* This is the case of an array whose size is to be determined
3882 from its initializer, while the initializer is still being parsed.
3884 if (GET_CODE (DECL_RTL (exp)) == MEM
3885 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3886 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3887 XEXP (DECL_RTL (exp), 0));
3888 if (GET_CODE (DECL_RTL (exp)) == MEM
3889 && modifier != EXPAND_CONST_ADDRESS
3890 && modifier != EXPAND_SUM
3891 && modifier != EXPAND_INITIALIZER)
3893 /* DECL_RTL probably contains a constant address.
3894 On RISC machines where a constant address isn't valid,
3895 make some insns to get that address into a register. */
3896 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3898 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3899 return change_address (DECL_RTL (exp), VOIDmode,
3900 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3903 /* If the mode of DECL_RTL does not match that of the decl, it
3904 must be a promoted value. We return a SUBREG of the wanted mode,
3905 but mark it so that we know that it was already extended. */
3907 if (GET_CODE (DECL_RTL (exp)) == REG
3908 && GET_MODE (DECL_RTL (exp)) != mode)
3910 enum machine_mode decl_mode = DECL_MODE (exp);
3912 /* Get the signedness used for this variable. Ensure we get the
3913 same mode we got when the variable was declared. */
3915 PROMOTE_MODE (decl_mode, unsignedp, type);
3917 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3920 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3921 SUBREG_PROMOTED_VAR_P (temp) = 1;
3922 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3926 return DECL_RTL (exp);
3929 return immed_double_const (TREE_INT_CST_LOW (exp),
3930 TREE_INT_CST_HIGH (exp),
3934 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3937 /* If optimized, generate immediate CONST_DOUBLE
3938 which will be turned into memory by reload if necessary.
3940 We used to force a register so that loop.c could see it. But
3941 this does not allow gen_* patterns to perform optimizations with
3942 the constants. It also produces two insns in cases like "x = 1.0;".
3943 On most machines, floating-point constants are not permitted in
3944 many insns, so we'd end up copying it to a register in any case.
3946 Now, we do the copying in expand_binop, if appropriate. */
3947 return immed_real_const (exp);
3951 if (! TREE_CST_RTL (exp))
3952 output_constant_def (exp);
3954 /* TREE_CST_RTL probably contains a constant address.
3955 On RISC machines where a constant address isn't valid,
3956 make some insns to get that address into a register. */
3957 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3958 && modifier != EXPAND_CONST_ADDRESS
3959 && modifier != EXPAND_INITIALIZER
3960 && modifier != EXPAND_SUM
3961 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3962 return change_address (TREE_CST_RTL (exp), VOIDmode,
3963 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3964 return TREE_CST_RTL (exp);
3967 context = decl_function_context (exp);
3968 /* We treat inline_function_decl as an alias for the current function
3969 because that is the inline function whose vars, types, etc.
3970 are being merged into the current function.
3971 See expand_inline_function. */
3972 if (context == current_function_decl || context == inline_function_decl)
3975 /* If this is non-local, handle it. */
3978 temp = SAVE_EXPR_RTL (exp);
3979 if (temp && GET_CODE (temp) == REG)
3981 put_var_into_stack (exp);
3982 temp = SAVE_EXPR_RTL (exp);
3984 if (temp == 0 || GET_CODE (temp) != MEM)
3986 return change_address (temp, mode,
3987 fix_lexical_addr (XEXP (temp, 0), exp));
3989 if (SAVE_EXPR_RTL (exp) == 0)
3991 if (mode == BLKmode)
3993 = assign_stack_temp (mode,
3994 int_size_in_bytes (TREE_TYPE (exp)), 0);
3997 enum machine_mode var_mode = mode;
3999 if (TREE_CODE (type) == INTEGER_TYPE
4000 || TREE_CODE (type) == ENUMERAL_TYPE
4001 || TREE_CODE (type) == BOOLEAN_TYPE
4002 || TREE_CODE (type) == CHAR_TYPE
4003 || TREE_CODE (type) == REAL_TYPE
4004 || TREE_CODE (type) == POINTER_TYPE
4005 || TREE_CODE (type) == OFFSET_TYPE)
4007 PROMOTE_MODE (var_mode, unsignedp, type);
4010 temp = gen_reg_rtx (var_mode);
4013 SAVE_EXPR_RTL (exp) = temp;
4014 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4015 if (!optimize && GET_CODE (temp) == REG)
4016 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4020 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4021 must be a promoted value. We return a SUBREG of the wanted mode,
4022 but mark it so that we know that it was already extended. Note
4023 that `unsignedp' was modified above in this case. */
4025 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4026 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4028 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4029 SUBREG_PROMOTED_VAR_P (temp) = 1;
4030 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4034 return SAVE_EXPR_RTL (exp);
4037 /* Exit the current loop if the body-expression is true. */
4039 rtx label = gen_label_rtx ();
4040 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
4041 expand_exit_loop (NULL_PTR);
4047 expand_start_loop (1);
4048 expand_expr_stmt (TREE_OPERAND (exp, 0));
4055 tree vars = TREE_OPERAND (exp, 0);
4056 int vars_need_expansion = 0;
4058 /* Need to open a binding contour here because
4059 if there are any cleanups they must be contained here. */
4060 expand_start_bindings (0);
4062 /* Mark the corresponding BLOCK for output in its proper place. */
4063 if (TREE_OPERAND (exp, 2) != 0
4064 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4065 insert_block (TREE_OPERAND (exp, 2));
4067 /* If VARS have not yet been expanded, expand them now. */
4070 if (DECL_RTL (vars) == 0)
4072 vars_need_expansion = 1;
4075 expand_decl_init (vars);
4076 vars = TREE_CHAIN (vars);
4079 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4081 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4087 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4089 emit_insns (RTL_EXPR_SEQUENCE (exp));
4090 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4091 return RTL_EXPR_RTL (exp);
4094 /* All elts simple constants => refer to a constant in memory. But
4095 if this is a non-BLKmode mode, let it store a field at a time
4096 since that should make a CONST_INT or CONST_DOUBLE when we
4098 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
4100 rtx constructor = output_constant_def (exp);
4101 if (modifier != EXPAND_CONST_ADDRESS
4102 && modifier != EXPAND_INITIALIZER
4103 && modifier != EXPAND_SUM
4104 && !memory_address_p (GET_MODE (constructor),
4105 XEXP (constructor, 0)))
4106 constructor = change_address (constructor, VOIDmode,
4107 XEXP (constructor, 0));
4114 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4115 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4120 if (target == 0 || ! safe_from_p (target, exp))
4122 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4123 target = gen_reg_rtx (mode);
4126 enum tree_code c = TREE_CODE (type);
4128 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4129 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
4130 MEM_IN_STRUCT_P (target) = 1;
4133 store_constructor (exp, target);
4139 tree exp1 = TREE_OPERAND (exp, 0);
4142 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4143 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4144 This code has the same general effect as simply doing
4145 expand_expr on the save expr, except that the expression PTR
4146 is computed for use as a memory address. This means different
4147 code, suitable for indexing, may be generated. */
4148 if (TREE_CODE (exp1) == SAVE_EXPR
4149 && SAVE_EXPR_RTL (exp1) == 0
4150 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4151 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4152 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4154 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4155 VOIDmode, EXPAND_SUM);
4156 op0 = memory_address (mode, temp);
4157 op0 = copy_all_regs (op0);
4158 SAVE_EXPR_RTL (exp1) = op0;
4162 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4163 op0 = memory_address (mode, op0);
4166 temp = gen_rtx (MEM, mode, op0);
4167 /* If address was computed by addition,
4168 mark this as an element of an aggregate. */
4169 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4170 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4171 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4172 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4173 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4174 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4175 || (TREE_CODE (exp1) == ADDR_EXPR
4176 && (exp2 = TREE_OPERAND (exp1, 0))
4177 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4178 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4179 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
4180 MEM_IN_STRUCT_P (temp) = 1;
4181 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4182 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4183 a location is accessed through a pointer to const does not mean
4184 that the value there can never change. */
4185 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4191 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
4192 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4194 /* Nonconstant array index or nonconstant element size.
4195 Generate the tree for *(&array+index) and expand that,
4196 except do it in a language-independent way
4197 and don't complain about non-lvalue arrays.
4198 `mark_addressable' should already have been called
4199 for any array for which this case will be reached. */
4201 /* Don't forget the const or volatile flag from the array element. */
4202 tree variant_type = build_type_variant (type,
4203 TREE_READONLY (exp),
4204 TREE_THIS_VOLATILE (exp));
4205 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
4206 TREE_OPERAND (exp, 0));
4207 tree index = TREE_OPERAND (exp, 1);
4210 /* Convert the integer argument to a type the same size as a pointer
4211 so the multiply won't overflow spuriously. */
4212 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
4213 index = convert (type_for_size (POINTER_SIZE, 0), index);
4215 /* Don't think the address has side effects
4216 just because the array does.
4217 (In some cases the address might have side effects,
4218 and we fail to record that fact here. However, it should not
4219 matter, since expand_expr should not care.) */
4220 TREE_SIDE_EFFECTS (array_adr) = 0;
4222 elt = build1 (INDIRECT_REF, type,
4223 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
4225 fold (build (MULT_EXPR,
4226 TYPE_POINTER_TO (variant_type),
4227 index, size_in_bytes (type))))));
4229 /* Volatility, etc., of new expression is same as old expression. */
4230 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4231 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4232 TREE_READONLY (elt) = TREE_READONLY (exp);
4234 return expand_expr (elt, target, tmode, modifier);
4237 /* Fold an expression like: "foo"[2].
4238 This is not done in fold so it won't happen inside &. */
4241 tree arg0 = TREE_OPERAND (exp, 0);
4242 tree arg1 = TREE_OPERAND (exp, 1);
4244 if (TREE_CODE (arg0) == STRING_CST
4245 && TREE_CODE (arg1) == INTEGER_CST
4246 && !TREE_INT_CST_HIGH (arg1)
4247 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4249 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
4251 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
4252 TREE_TYPE (exp) = integer_type_node;
4253 return expand_expr (exp, target, tmode, modifier);
4255 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
4257 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4258 TREE_TYPE (exp) = integer_type_node;
4259 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
4264 /* If this is a constant index into a constant array,
4265 just get the value from the array. Handle both the cases when
4266 we have an explicit constructor and when our operand is a variable
4267 that was declared const. */
4269 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4270 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4272 tree index = fold (TREE_OPERAND (exp, 1));
4273 if (TREE_CODE (index) == INTEGER_CST
4274 && TREE_INT_CST_HIGH (index) == 0)
4276 int i = TREE_INT_CST_LOW (index);
4277 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4280 elem = TREE_CHAIN (elem);
4282 return expand_expr (fold (TREE_VALUE (elem)), target,
4287 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
4288 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4289 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
4290 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4291 && DECL_INITIAL (TREE_OPERAND (exp, 0))
4293 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
4296 tree index = fold (TREE_OPERAND (exp, 1));
4297 if (TREE_CODE (index) == INTEGER_CST
4298 && TREE_INT_CST_HIGH (index) == 0)
4300 int i = TREE_INT_CST_LOW (index);
4301 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
4303 if (TREE_CODE (init) == CONSTRUCTOR)
4305 tree elem = CONSTRUCTOR_ELTS (init);
4308 elem = TREE_CHAIN (elem);
4310 return expand_expr (fold (TREE_VALUE (elem)), target,
4313 else if (TREE_CODE (init) == STRING_CST
4314 && i < TREE_STRING_LENGTH (init))
4316 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4317 return convert_to_mode (mode, temp, 0);
4321 /* Treat array-ref with constant index as a component-ref. */
4325 /* If the operand is a CONSTRUCTOR, we can just extract the
4326 appropriate field if it is present. */
4327 if (code != ARRAY_REF
4328 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4332 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4333 elt = TREE_CHAIN (elt))
4334 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4335 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4339 enum machine_mode mode1;
4344 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4345 &mode1, &unsignedp, &volatilep);
4347 /* In some cases, we will be offsetting OP0's address by a constant.
4348 So get it as a sum, if possible. If we will be using it
4349 directly in an insn, we validate it. */
4350 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4352 /* If this is a constant, put it into a register if it is a
4353 legitimate constant and memory if it isn't. */
4354 if (CONSTANT_P (op0))
4356 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4357 if (LEGITIMATE_CONSTANT_P (op0))
4358 op0 = force_reg (mode, op0);
4360 op0 = validize_mem (force_const_mem (mode, op0));
4365 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4367 if (GET_CODE (op0) != MEM)
4369 op0 = change_address (op0, VOIDmode,
4370 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4371 force_reg (Pmode, offset_rtx)));
4374 /* Don't forget about volatility even if this is a bitfield. */
4375 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4377 op0 = copy_rtx (op0);
4378 MEM_VOLATILE_P (op0) = 1;
4381 if (mode1 == VOIDmode
4382 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4383 && modifier != EXPAND_CONST_ADDRESS
4384 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4385 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
4387 /* In cases where an aligned union has an unaligned object
4388 as a field, we might be extracting a BLKmode value from
4389 an integer-mode (e.g., SImode) object. Handle this case
4390 by doing the extract into an object as wide as the field
4391 (which we know to be the width of a basic mode), then
4392 storing into memory, and changing the mode to BLKmode. */
4393 enum machine_mode ext_mode = mode;
4395 if (ext_mode == BLKmode)
4396 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4398 if (ext_mode == BLKmode)
4401 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4402 unsignedp, target, ext_mode, ext_mode,
4403 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4404 int_size_in_bytes (TREE_TYPE (tem)));
4405 if (mode == BLKmode)
4407 rtx new = assign_stack_temp (ext_mode,
4408 bitsize / BITS_PER_UNIT, 0);
4410 emit_move_insn (new, op0);
4411 op0 = copy_rtx (new);
4412 PUT_MODE (op0, BLKmode);
4418 /* Get a reference to just this component. */
4419 if (modifier == EXPAND_CONST_ADDRESS
4420 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4421 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4422 (bitpos / BITS_PER_UNIT)));
4424 op0 = change_address (op0, mode1,
4425 plus_constant (XEXP (op0, 0),
4426 (bitpos / BITS_PER_UNIT)));
4427 MEM_IN_STRUCT_P (op0) = 1;
4428 MEM_VOLATILE_P (op0) |= volatilep;
4429 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4432 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4433 convert_move (target, op0, unsignedp);
4439 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
4440 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4441 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4442 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4443 MEM_IN_STRUCT_P (temp) = 1;
4444 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4445 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4446 a location is accessed through a pointer to const does not mean
4447 that the value there can never change. */
4448 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4453 /* Intended for a reference to a buffer of a file-object in Pascal.
4454 But it's not certain that a special tree code will really be
4455 necessary for these. INDIRECT_REF might work for them. */
4459 /* IN_EXPR: Inlined pascal set IN expression.
4462 rlo = set_low - (set_low%bits_per_word);
4463 the_word = set [ (index - rlo)/bits_per_word ];
4464 bit_index = index % bits_per_word;
4465 bitmask = 1 << bit_index;
4466 return !!(the_word & bitmask); */
4468 preexpand_calls (exp);
4470 tree set = TREE_OPERAND (exp, 0);
4471 tree index = TREE_OPERAND (exp, 1);
4472 tree set_type = TREE_TYPE (set);
4474 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4475 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4481 rtx diff, quo, rem, addr, bit, result;
4482 rtx setval, setaddr;
4483 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4486 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4488 /* If domain is empty, answer is no. */
4489 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4492 index_val = expand_expr (index, 0, VOIDmode, 0);
4493 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4494 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4495 setval = expand_expr (set, 0, VOIDmode, 0);
4496 setaddr = XEXP (setval, 0);
4498 /* Compare index against bounds, if they are constant. */
4499 if (GET_CODE (index_val) == CONST_INT
4500 && GET_CODE (lo_r) == CONST_INT)
4502 if (INTVAL (index_val) < INTVAL (lo_r))
4506 if (GET_CODE (index_val) == CONST_INT
4507 && GET_CODE (hi_r) == CONST_INT)
4509 if (INTVAL (hi_r) < INTVAL (index_val))
4513 /* If we get here, we have to generate the code for both cases
4514 (in range and out of range). */
4516 op0 = gen_label_rtx ();
4517 op1 = gen_label_rtx ();
4519 if (! (GET_CODE (index_val) == CONST_INT
4520 && GET_CODE (lo_r) == CONST_INT))
4522 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
4523 emit_jump_insn (gen_blt (op1));
4526 if (! (GET_CODE (index_val) == CONST_INT
4527 && GET_CODE (hi_r) == CONST_INT))
4529 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
4530 emit_jump_insn (gen_bgt (op1));
4533 /* Calculate the element number of bit zero in the first word
4535 if (GET_CODE (lo_r) == CONST_INT)
4536 rlow = gen_rtx (CONST_INT, VOIDmode,
4537 INTVAL (lo_r) & ~ (1 << BITS_PER_UNIT));
4539 rlow = expand_binop (index_mode, and_optab,
4540 lo_r, gen_rtx (CONST_INT, VOIDmode,
4541 ~ (1 << BITS_PER_UNIT)),
4542 0, 0, OPTAB_LIB_WIDEN);
4544 diff = expand_binop (index_mode, sub_optab,
4545 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4547 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4548 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4550 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4551 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4553 addr = memory_address (byte_mode,
4554 expand_binop (index_mode, add_optab,
4556 /* Extract the bit we want to examine */
4557 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4558 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4559 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4560 1, OPTAB_LIB_WIDEN);
4561 emit_move_insn (target, result);
4563 /* Output the code to handle the out-of-range case. */
4566 emit_move_insn (target, const0_rtx);
4571 case WITH_CLEANUP_EXPR:
4572 if (RTL_EXPR_RTL (exp) == 0)
4575 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4577 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4578 /* That's it for this cleanup. */
4579 TREE_OPERAND (exp, 2) = 0;
4581 return RTL_EXPR_RTL (exp);
4584 /* Check for a built-in function. */
4585 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4586 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4587 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4588 return expand_builtin (exp, target, subtarget, tmode, ignore);
4589 /* If this call was expanded already by preexpand_calls,
4590 just return the result we got. */
4591 if (CALL_EXPR_RTL (exp) != 0)
4592 return CALL_EXPR_RTL (exp);
4593 return expand_call (exp, target, ignore);
4595 case NON_LVALUE_EXPR:
4598 case REFERENCE_EXPR:
4599 if (TREE_CODE (type) == VOID_TYPE || ignore)
4601 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4604 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4605 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4606 if (TREE_CODE (type) == UNION_TYPE)
4608 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4611 if (mode == BLKmode)
4613 if (TYPE_SIZE (type) == 0
4614 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4616 target = assign_stack_temp (BLKmode,
4617 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4618 + BITS_PER_UNIT - 1)
4619 / BITS_PER_UNIT, 0);
4622 target = gen_reg_rtx (mode);
4624 if (GET_CODE (target) == MEM)
4625 /* Store data into beginning of memory target. */
4626 store_expr (TREE_OPERAND (exp, 0),
4627 change_address (target, TYPE_MODE (valtype), 0), 0);
4629 else if (GET_CODE (target) == REG)
4630 /* Store this field into a union of the proper type. */
4631 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4632 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4634 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4638 /* Return the entire union. */
4641 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4642 if (GET_MODE (op0) == mode)
4644 /* If arg is a constant integer being extended from a narrower mode,
4645 we must really truncate to get the extended bits right. Otherwise
4646 (unsigned long) (unsigned char) ("\377"[0])
4647 would come out as ffffffff. */
4648 if (GET_MODE (op0) == VOIDmode
4649 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4650 < GET_MODE_BITSIZE (mode)))
4652 /* MODE must be narrower than HOST_BITS_PER_INT. */
4653 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4655 if (width < HOST_BITS_PER_WIDE_INT)
4657 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4658 : CONST_DOUBLE_LOW (op0));
4659 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4660 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4661 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4663 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4665 op0 = GEN_INT (val);
4669 op0 = (simplify_unary_operation
4670 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4671 ? ZERO_EXTEND : SIGN_EXTEND),
4673 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4678 if (GET_MODE (op0) == VOIDmode)
4680 if (modifier == EXPAND_INITIALIZER)
4681 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4682 if (flag_force_mem && GET_CODE (op0) == MEM)
4683 op0 = copy_to_reg (op0);
4686 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4688 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4692 /* We come here from MINUS_EXPR when the second operand is a constant. */
4694 this_optab = add_optab;
4696 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4697 something else, make sure we add the register to the constant and
4698 then to the other thing. This case can occur during strength
4699 reduction and doing it this way will produce better code if the
4700 frame pointer or argument pointer is eliminated.
4702 fold-const.c will ensure that the constant is always in the inner
4703 PLUS_EXPR, so the only case we need to do anything about is if
4704 sp, ap, or fp is our second argument, in which case we must swap
4705 the innermost first argument and our second argument. */
4707 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4708 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4709 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4710 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4711 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4712 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4714 tree t = TREE_OPERAND (exp, 1);
4716 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4717 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4720 /* If the result is to be Pmode and we are adding an integer to
4721 something, we might be forming a constant. So try to use
4722 plus_constant. If it produces a sum and we can't accept it,
4723 use force_operand. This allows P = &ARR[const] to generate
4724 efficient code on machines where a SYMBOL_REF is not a valid
4727 If this is an EXPAND_SUM call, always return the sum. */
4728 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4729 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4730 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4733 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4735 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4736 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4737 op1 = force_operand (op1, target);
4741 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4742 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4743 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4746 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4748 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4749 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4750 op0 = force_operand (op0, target);
4754 /* No sense saving up arithmetic to be done
4755 if it's all in the wrong mode to form part of an address.
4756 And force_operand won't know whether to sign-extend or
4758 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4759 || mode != Pmode) goto binop;
4761 preexpand_calls (exp);
4762 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4765 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4766 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4768 /* Make sure any term that's a sum with a constant comes last. */
4769 if (GET_CODE (op0) == PLUS
4770 && CONSTANT_P (XEXP (op0, 1)))
4776 /* If adding to a sum including a constant,
4777 associate it to put the constant outside. */
4778 if (GET_CODE (op1) == PLUS
4779 && CONSTANT_P (XEXP (op1, 1)))
4781 rtx constant_term = const0_rtx;
4783 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4786 /* Ensure that MULT comes first if there is one. */
4787 else if (GET_CODE (op0) == MULT)
4788 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4790 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4792 /* Let's also eliminate constants from op0 if possible. */
4793 op0 = eliminate_constant_term (op0, &constant_term);
4795 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4796 their sum should be a constant. Form it into OP1, since the
4797 result we want will then be OP0 + OP1. */
4799 temp = simplify_binary_operation (PLUS, mode, constant_term,
4804 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4807 /* Put a constant term last and put a multiplication first. */
4808 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4809 temp = op1, op1 = op0, op0 = temp;
4811 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4812 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4815 /* Handle difference of two symbolic constants,
4816 for the sake of an initializer. */
4817 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4818 && really_constant_p (TREE_OPERAND (exp, 0))
4819 && really_constant_p (TREE_OPERAND (exp, 1)))
4821 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4822 VOIDmode, modifier);
4823 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4824 VOIDmode, modifier);
4825 return gen_rtx (MINUS, mode, op0, op1);
4827 /* Convert A - const to A + (-const). */
4828 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4830 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4831 fold (build1 (NEGATE_EXPR, type,
4832 TREE_OPERAND (exp, 1))));
4835 this_optab = sub_optab;
4839 preexpand_calls (exp);
4840 /* If first operand is constant, swap them.
4841 Thus the following special case checks need only
4842 check the second operand. */
4843 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4845 register tree t1 = TREE_OPERAND (exp, 0);
4846 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4847 TREE_OPERAND (exp, 1) = t1;
4850 /* Attempt to return something suitable for generating an
4851 indexed address, for machines that support that. */
4853 if (modifier == EXPAND_SUM && mode == Pmode
4854 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4855 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4857 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4859 /* Apply distributive law if OP0 is x+c. */
4860 if (GET_CODE (op0) == PLUS
4861 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4862 return gen_rtx (PLUS, mode,
4863 gen_rtx (MULT, mode, XEXP (op0, 0),
4864 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4865 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4866 * INTVAL (XEXP (op0, 1))));
4868 if (GET_CODE (op0) != REG)
4869 op0 = force_operand (op0, NULL_RTX);
4870 if (GET_CODE (op0) != REG)
4871 op0 = copy_to_mode_reg (mode, op0);
4873 return gen_rtx (MULT, mode, op0,
4874 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4877 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4880 /* Check for multiplying things that have been extended
4881 from a narrower type. If this machine supports multiplying
4882 in that narrower type with a result in the desired type,
4883 do it that way, and avoid the explicit type-conversion. */
4884 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4885 && TREE_CODE (type) == INTEGER_TYPE
4886 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4887 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4888 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4889 && int_fits_type_p (TREE_OPERAND (exp, 1),
4890 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4891 /* Don't use a widening multiply if a shift will do. */
4892 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4893 > HOST_BITS_PER_WIDE_INT)
4894 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4896 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4897 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4899 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4900 /* If both operands are extended, they must either both
4901 be zero-extended or both be sign-extended. */
4902 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4904 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4906 enum machine_mode innermode
4907 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4908 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4909 ? umul_widen_optab : smul_widen_optab);
4910 if (mode == GET_MODE_WIDER_MODE (innermode)
4911 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4913 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4914 NULL_RTX, VOIDmode, 0);
4915 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4916 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4919 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4920 NULL_RTX, VOIDmode, 0);
4924 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4925 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4926 return expand_mult (mode, op0, op1, target, unsignedp);
4928 case TRUNC_DIV_EXPR:
4929 case FLOOR_DIV_EXPR:
4931 case ROUND_DIV_EXPR:
4932 case EXACT_DIV_EXPR:
4933 preexpand_calls (exp);
4934 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4936 /* Possible optimization: compute the dividend with EXPAND_SUM
4937 then if the divisor is constant can optimize the case
4938 where some terms of the dividend have coeffs divisible by it. */
4939 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4940 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4941 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4944 this_optab = flodiv_optab;
4947 case TRUNC_MOD_EXPR:
4948 case FLOOR_MOD_EXPR:
4950 case ROUND_MOD_EXPR:
4951 preexpand_calls (exp);
4952 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4954 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4955 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4956 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4958 case FIX_ROUND_EXPR:
4959 case FIX_FLOOR_EXPR:
4961 abort (); /* Not used for C. */
4963 case FIX_TRUNC_EXPR:
4964 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4966 target = gen_reg_rtx (mode);
4967 expand_fix (target, op0, unsignedp);
4971 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4973 target = gen_reg_rtx (mode);
4974 /* expand_float can't figure out what to do if FROM has VOIDmode.
4975 So give it the correct mode. With -O, cse will optimize this. */
4976 if (GET_MODE (op0) == VOIDmode)
4977 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4979 expand_float (target, op0,
4980 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4984 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4985 temp = expand_unop (mode, neg_optab, op0, target, 0);
4991 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4993 /* Handle complex values specially. */
4995 enum machine_mode opmode
4996 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4998 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4999 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5000 return expand_complex_abs (opmode, op0, target, unsignedp);
5003 /* Unsigned abs is simply the operand. Testing here means we don't
5004 risk generating incorrect code below. */
5005 if (TREE_UNSIGNED (type))
5008 /* First try to do it with a special abs instruction. */
5009 temp = expand_unop (mode, abs_optab, op0, target, 0);
5013 /* If this machine has expensive jumps, we can do integer absolute
5014 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5015 where W is the width of MODE. */
5017 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5019 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5020 size_int (GET_MODE_BITSIZE (mode) - 1),
5023 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5026 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5033 /* If that does not win, use conditional jump and negate. */
5034 target = original_target;
5035 temp = gen_label_rtx ();
5036 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5037 || (GET_CODE (target) == REG
5038 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5039 target = gen_reg_rtx (mode);
5040 emit_move_insn (target, op0);
5041 emit_cmp_insn (target,
5042 expand_expr (convert (type, integer_zero_node),
5043 NULL_RTX, VOIDmode, 0),
5044 GE, NULL_RTX, mode, 0, 0);
5046 emit_jump_insn (gen_bge (temp));
5047 op0 = expand_unop (mode, neg_optab, target, target, 0);
5049 emit_move_insn (target, op0);
5056 target = original_target;
5057 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5058 || (GET_CODE (target) == REG
5059 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5060 target = gen_reg_rtx (mode);
5061 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5062 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5064 /* First try to do it with a special MIN or MAX instruction.
5065 If that does not win, use a conditional jump to select the proper
5067 this_optab = (TREE_UNSIGNED (type)
5068 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5069 : (code == MIN_EXPR ? smin_optab : smax_optab));
5071 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5077 emit_move_insn (target, op0);
5078 op0 = gen_label_rtx ();
5079 if (code == MAX_EXPR)
5080 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5081 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5082 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5084 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5085 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5086 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5087 if (temp == const0_rtx)
5088 emit_move_insn (target, op1);
5089 else if (temp != const_true_rtx)
5091 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5092 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5095 emit_move_insn (target, op1);
5100 /* ??? Can optimize when the operand of this is a bitwise operation,
5101 by using a different bitwise operation. */
5103 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5104 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5110 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5111 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5116 /* ??? Can optimize bitwise operations with one arg constant.
5117 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5118 and (a bitwise1 b) bitwise2 b (etc)
5119 but that is probably not worth while. */
5121 /* BIT_AND_EXPR is for bitwise anding.
5122 TRUTH_AND_EXPR is for anding two boolean values
5123 when we want in all cases to compute both of them.
5124 In general it is fastest to do TRUTH_AND_EXPR by
5125 computing both operands as actual zero-or-1 values
5126 and then bitwise anding. In cases where there cannot
5127 be any side effects, better code would be made by
5128 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5129 but the question is how to recognize those cases. */
5131 case TRUTH_AND_EXPR:
5133 this_optab = and_optab;
5136 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5139 this_optab = ior_optab;
5142 case TRUTH_XOR_EXPR:
5144 this_optab = xor_optab;
5151 preexpand_calls (exp);
5152 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5154 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5155 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5158 /* Could determine the answer when only additive constants differ.
5159 Also, the addition of one can be handled by changing the condition. */
5166 preexpand_calls (exp);
5167 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5170 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5171 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5173 && GET_CODE (original_target) == REG
5174 && (GET_MODE (original_target)
5175 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5177 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5178 if (temp != original_target)
5179 temp = copy_to_reg (temp);
5180 op1 = gen_label_rtx ();
5181 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5182 GET_MODE (temp), unsignedp, 0);
5183 emit_jump_insn (gen_beq (op1));
5184 emit_move_insn (temp, const1_rtx);
5188 /* If no set-flag instruction, must generate a conditional
5189 store into a temporary variable. Drop through
5190 and handle this like && and ||. */
5192 case TRUTH_ANDIF_EXPR:
5193 case TRUTH_ORIF_EXPR:
5194 if (target == 0 || ! safe_from_p (target, exp)
5195 /* Make sure we don't have a hard reg (such as function's return
5196 value) live across basic blocks, if not optimizing. */
5197 || (!optimize && GET_CODE (target) == REG
5198 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5199 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5200 emit_clr_insn (target);
5201 op1 = gen_label_rtx ();
5202 jumpifnot (exp, op1);
5203 emit_0_to_1_insn (target);
5207 case TRUTH_NOT_EXPR:
5208 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5209 /* The parser is careful to generate TRUTH_NOT_EXPR
5210 only with operands that are always zero or one. */
5211 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5212 target, 1, OPTAB_LIB_WIDEN);
5218 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5220 return expand_expr (TREE_OPERAND (exp, 1),
5221 (ignore ? const0_rtx : target),
5226 /* Note that COND_EXPRs whose type is a structure or union
5227 are required to be constructed to contain assignments of
5228 a temporary variable, so that we can evaluate them here
5229 for side effect only. If type is void, we must do likewise. */
5231 /* If an arm of the branch requires a cleanup,
5232 only that cleanup is performed. */
5235 tree binary_op = 0, unary_op = 0;
5236 tree old_cleanups = cleanups_this_call;
5237 cleanups_this_call = 0;
5239 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5240 convert it to our mode, if necessary. */
5241 if (integer_onep (TREE_OPERAND (exp, 1))
5242 && integer_zerop (TREE_OPERAND (exp, 2))
5243 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5245 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5246 if (GET_MODE (op0) == mode)
5249 target = gen_reg_rtx (mode);
5250 convert_move (target, op0, unsignedp);
5254 /* If we are not to produce a result, we have no target. Otherwise,
5255 if a target was specified use it; it will not be used as an
5256 intermediate target unless it is safe. If no target, use a
5259 if (mode == VOIDmode || ignore)
5261 else if (original_target
5262 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5263 temp = original_target;
5264 else if (mode == BLKmode)
5266 if (TYPE_SIZE (type) == 0
5267 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5269 temp = assign_stack_temp (BLKmode,
5270 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5271 + BITS_PER_UNIT - 1)
5272 / BITS_PER_UNIT, 0);
5275 temp = gen_reg_rtx (mode);
5277 /* Check for X ? A + B : A. If we have this, we can copy
5278 A to the output and conditionally add B. Similarly for unary
5279 operations. Don't do this if X has side-effects because
5280 those side effects might affect A or B and the "?" operation is
5281 a sequence point in ANSI. (We test for side effects later.) */
5283 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5284 && operand_equal_p (TREE_OPERAND (exp, 2),
5285 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5286 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5287 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5288 && operand_equal_p (TREE_OPERAND (exp, 1),
5289 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5290 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5291 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5292 && operand_equal_p (TREE_OPERAND (exp, 2),
5293 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5294 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5295 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5296 && operand_equal_p (TREE_OPERAND (exp, 1),
5297 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5298 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5300 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5301 operation, do this as A + (X != 0). Similarly for other simple
5302 binary operators. */
5303 if (singleton && binary_op
5304 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5305 && (TREE_CODE (binary_op) == PLUS_EXPR
5306 || TREE_CODE (binary_op) == MINUS_EXPR
5307 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5308 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5309 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5310 && integer_onep (TREE_OPERAND (binary_op, 1))
5311 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5314 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5315 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5316 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5317 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5320 /* If we had X ? A : A + 1, do this as A + (X == 0).
5322 We have to invert the truth value here and then put it
5323 back later if do_store_flag fails. We cannot simply copy
5324 TREE_OPERAND (exp, 0) to another variable and modify that
5325 because invert_truthvalue can modify the tree pointed to
5327 if (singleton == TREE_OPERAND (exp, 1))
5328 TREE_OPERAND (exp, 0)
5329 = invert_truthvalue (TREE_OPERAND (exp, 0));
5331 result = do_store_flag (TREE_OPERAND (exp, 0),
5332 (safe_from_p (temp, singleton)
5334 mode, BRANCH_COST <= 1);
5338 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5339 return expand_binop (mode, boptab, op1, result, temp,
5340 unsignedp, OPTAB_LIB_WIDEN);
5342 else if (singleton == TREE_OPERAND (exp, 1))
5343 TREE_OPERAND (exp, 0)
5344 = invert_truthvalue (TREE_OPERAND (exp, 0));
5348 op0 = gen_label_rtx ();
5350 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5354 /* If the target conflicts with the other operand of the
5355 binary op, we can't use it. Also, we can't use the target
5356 if it is a hard register, because evaluating the condition
5357 might clobber it. */
5359 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5360 || (GET_CODE (temp) == REG
5361 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5362 temp = gen_reg_rtx (mode);
5363 store_expr (singleton, temp, 0);
5366 expand_expr (singleton,
5367 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
5368 if (cleanups_this_call)
5370 sorry ("aggregate value in COND_EXPR");
5371 cleanups_this_call = 0;
5373 if (singleton == TREE_OPERAND (exp, 1))
5374 jumpif (TREE_OPERAND (exp, 0), op0);
5376 jumpifnot (TREE_OPERAND (exp, 0), op0);
5378 if (binary_op && temp == 0)
5379 /* Just touch the other operand. */
5380 expand_expr (TREE_OPERAND (binary_op, 1),
5381 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5383 store_expr (build (TREE_CODE (binary_op), type,
5384 make_tree (type, temp),
5385 TREE_OPERAND (binary_op, 1)),
5388 store_expr (build1 (TREE_CODE (unary_op), type,
5389 make_tree (type, temp)),
5394 /* This is now done in jump.c and is better done there because it
5395 produces shorter register lifetimes. */
5397 /* Check for both possibilities either constants or variables
5398 in registers (but not the same as the target!). If so, can
5399 save branches by assigning one, branching, and assigning the
5401 else if (temp && GET_MODE (temp) != BLKmode
5402 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5403 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5404 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5405 && DECL_RTL (TREE_OPERAND (exp, 1))
5406 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5407 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5408 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5409 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5410 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5411 && DECL_RTL (TREE_OPERAND (exp, 2))
5412 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5413 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5415 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5416 temp = gen_reg_rtx (mode);
5417 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5418 jumpifnot (TREE_OPERAND (exp, 0), op0);
5419 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5423 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5424 comparison operator. If we have one of these cases, set the
5425 output to A, branch on A (cse will merge these two references),
5426 then set the output to FOO. */
5428 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5429 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5430 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5431 TREE_OPERAND (exp, 1), 0)
5432 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5433 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5435 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5436 temp = gen_reg_rtx (mode);
5437 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5438 jumpif (TREE_OPERAND (exp, 0), op0);
5439 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5443 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5444 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5445 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5446 TREE_OPERAND (exp, 2), 0)
5447 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5448 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5450 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5451 temp = gen_reg_rtx (mode);
5452 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5453 jumpifnot (TREE_OPERAND (exp, 0), op0);
5454 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5459 op1 = gen_label_rtx ();
5460 jumpifnot (TREE_OPERAND (exp, 0), op0);
5462 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5464 expand_expr (TREE_OPERAND (exp, 1),
5465 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5466 if (cleanups_this_call)
5468 sorry ("aggregate value in COND_EXPR");
5469 cleanups_this_call = 0;
5473 emit_jump_insn (gen_jump (op1));
5477 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5479 expand_expr (TREE_OPERAND (exp, 2),
5480 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5483 if (cleanups_this_call)
5485 sorry ("aggregate value in COND_EXPR");
5486 cleanups_this_call = 0;
5492 cleanups_this_call = old_cleanups;
5498 /* Something needs to be initialized, but we didn't know
5499 where that thing was when building the tree. For example,
5500 it could be the return value of a function, or a parameter
5501 to a function which lays down in the stack, or a temporary
5502 variable which must be passed by reference.
5504 We guarantee that the expression will either be constructed
5505 or copied into our original target. */
5507 tree slot = TREE_OPERAND (exp, 0);
5510 if (TREE_CODE (slot) != VAR_DECL)
5515 if (DECL_RTL (slot) != 0)
5517 target = DECL_RTL (slot);
5518 /* If we have already expanded the slot, so don't do
5520 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5525 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5526 /* All temp slots at this level must not conflict. */
5527 preserve_temp_slots (target);
5528 DECL_RTL (slot) = target;
5532 /* I bet this needs to be done, and I bet that it needs to
5533 be above, inside the else clause. The reason is
5534 simple: how else is it going to get cleaned up? (mrs)
5536 The reason this probably did not work before, and was
5537 commented out, is that it was re-expanding already
5538 expanded target_exprs (target == 0 and DECL_RTL (slot)
5539 != 0), also cleaning them up many times as well. :-( */
5541 /* Since SLOT is not known to the called function
5542 to belong to its stack frame, we must build an explicit
5543 cleanup. This case occurs when we must build up a reference
5544 to pass the reference as an argument. In this case,
5545 it is very likely that such a reference need not be
5548 if (TREE_OPERAND (exp, 2) == 0)
5549 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5550 if (TREE_OPERAND (exp, 2))
5551 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5552 cleanups_this_call);
5557 /* This case does occur, when expanding a parameter which
5558 needs to be constructed on the stack. The target
5559 is the actual stack address that we want to initialize.
5560 The function we call will perform the cleanup in this case. */
5562 DECL_RTL (slot) = target;
5565 exp1 = TREE_OPERAND (exp, 1);
5566 /* Mark it as expanded. */
5567 TREE_OPERAND (exp, 1) = NULL_TREE;
5569 return expand_expr (exp1, target, tmode, modifier);
5574 tree lhs = TREE_OPERAND (exp, 0);
5575 tree rhs = TREE_OPERAND (exp, 1);
5576 tree noncopied_parts = 0;
5577 tree lhs_type = TREE_TYPE (lhs);
5579 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5580 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5581 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5582 TYPE_NONCOPIED_PARTS (lhs_type));
5583 while (noncopied_parts != 0)
5585 expand_assignment (TREE_VALUE (noncopied_parts),
5586 TREE_PURPOSE (noncopied_parts), 0, 0);
5587 noncopied_parts = TREE_CHAIN (noncopied_parts);
5594 /* If lhs is complex, expand calls in rhs before computing it.
5595 That's so we don't compute a pointer and save it over a call.
5596 If lhs is simple, compute it first so we can give it as a
5597 target if the rhs is just a call. This avoids an extra temp and copy
5598 and that prevents a partial-subsumption which makes bad code.
5599 Actually we could treat component_ref's of vars like vars. */
5601 tree lhs = TREE_OPERAND (exp, 0);
5602 tree rhs = TREE_OPERAND (exp, 1);
5603 tree noncopied_parts = 0;
5604 tree lhs_type = TREE_TYPE (lhs);
5608 if (TREE_CODE (lhs) != VAR_DECL
5609 && TREE_CODE (lhs) != RESULT_DECL
5610 && TREE_CODE (lhs) != PARM_DECL)
5611 preexpand_calls (exp);
5613 /* Check for |= or &= of a bitfield of size one into another bitfield
5614 of size 1. In this case, (unless we need the result of the
5615 assignment) we can do this more efficiently with a
5616 test followed by an assignment, if necessary.
5618 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5619 things change so we do, this code should be enhanced to
5622 && TREE_CODE (lhs) == COMPONENT_REF
5623 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5624 || TREE_CODE (rhs) == BIT_AND_EXPR)
5625 && TREE_OPERAND (rhs, 0) == lhs
5626 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5627 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5628 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5630 rtx label = gen_label_rtx ();
5632 do_jump (TREE_OPERAND (rhs, 1),
5633 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5634 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5635 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5636 (TREE_CODE (rhs) == BIT_IOR_EXPR
5638 : integer_zero_node)),
5640 do_pending_stack_adjust ();
5645 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5646 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5647 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5648 TYPE_NONCOPIED_PARTS (lhs_type));
5650 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5651 while (noncopied_parts != 0)
5653 expand_assignment (TREE_PURPOSE (noncopied_parts),
5654 TREE_VALUE (noncopied_parts), 0, 0);
5655 noncopied_parts = TREE_CHAIN (noncopied_parts);
5660 case PREINCREMENT_EXPR:
5661 case PREDECREMENT_EXPR:
5662 return expand_increment (exp, 0);
5664 case POSTINCREMENT_EXPR:
5665 case POSTDECREMENT_EXPR:
5666 /* Faster to treat as pre-increment if result is not used. */
5667 return expand_increment (exp, ! ignore);
5670 /* Are we taking the address of a nested function? */
5671 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5672 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5674 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5675 op0 = force_operand (op0, target);
5679 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5680 (modifier == EXPAND_INITIALIZER
5681 ? modifier : EXPAND_CONST_ADDRESS));
5682 if (GET_CODE (op0) != MEM)
5685 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5686 return XEXP (op0, 0);
5687 op0 = force_operand (XEXP (op0, 0), target);
5689 if (flag_force_addr && GET_CODE (op0) != REG)
5690 return force_reg (Pmode, op0);
5693 case ENTRY_VALUE_EXPR:
5696 /* COMPLEX type for Extended Pascal & Fortran */
5699 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5703 /* Get the rtx code of the operands. */
5704 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5705 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5708 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5710 prev = get_last_insn ();
5712 /* Tell flow that the whole of the destination is being set. */
5713 if (GET_CODE (target) == REG)
5714 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5716 /* Move the real (op0) and imaginary (op1) parts to their location. */
5717 emit_move_insn (gen_realpart (mode, target), op0);
5718 emit_move_insn (gen_imagpart (mode, target), op1);
5720 /* Complex construction should appear as a single unit. */
5727 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5728 return gen_realpart (mode, op0);
5731 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5732 return gen_imagpart (mode, op0);
5736 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5740 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5743 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5745 prev = get_last_insn ();
5747 /* Tell flow that the whole of the destination is being set. */
5748 if (GET_CODE (target) == REG)
5749 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5751 /* Store the realpart and the negated imagpart to target. */
5752 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5754 imag_t = gen_imagpart (mode, target);
5755 temp = expand_unop (mode, neg_optab,
5756 gen_imagpart (mode, op0), imag_t, 0);
5758 emit_move_insn (imag_t, temp);
5760 /* Conjugate should appear as a single unit */
5770 return (*lang_expand_expr) (exp, target, tmode, modifier);
5773 /* Here to do an ordinary binary operator, generating an instruction
5774 from the optab already placed in `this_optab'. */
5776 preexpand_calls (exp);
5777 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5779 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5780 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5782 temp = expand_binop (mode, this_optab, op0, op1, target,
5783 unsignedp, OPTAB_LIB_WIDEN);
5789 /* Return the alignment in bits of EXP, a pointer valued expression.
5790 But don't return more than MAX_ALIGN no matter what.
5791 The alignment returned is, by default, the alignment of the thing that
5792 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5794 Otherwise, look at the expression to see if we can do better, i.e., if the
5795 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): this numbered listing omits interleaved source lines (gaps
   in the left-hand numbering), so braces, declarations and some case labels
   are not visible.  Comments below describe only the visible code.  */
5798 get_pointer_alignment (exp, max_align)
/* Old-style (K&R) definition; the parameter declarations and return type
   are in the omitted lines.  Returns an alignment in bits.  */
5802 unsigned align, inner;
/* A non-pointer EXP cannot be analyzed; the bail-out return is in an
   omitted line.  */
5804 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Start with the declared alignment of the pointed-to type, capped at
   MAX_ALIGN.  */
5807 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5808 align = MIN (align, max_align);
5812 switch (TREE_CODE (exp))
5816 case NON_LVALUE_EXPR:
/* Peel a conversion: the converted-from pointer may point at something
   more tightly aligned, so keep the larger of the two alignments.  */
5817 exp = TREE_OPERAND (exp, 0);
5818 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5820 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5821 inner = MIN (inner, max_align);
5822 align = MAX (align, inner);
5826 /* If sum of pointer + int, restrict our maximum alignment to that
5827 imposed by the integer. If not, we can't do any better than
5829 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* Reduce ALIGN until the constant byte offset is a multiple of it
   (loop body is in omitted lines).  */
5832 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5837 exp = TREE_OPERAND (exp, 0);
5841 /* See what we are pointing at and look at its alignment. */
5842 exp = TREE_OPERAND (exp, 0);
/* Presumably the ADDR_EXPR case (label omitted): functions, declarations
   and constants each carry their own alignment information.  */
5843 if (TREE_CODE (exp) == FUNCTION_DECL)
5844 align = MAX (align, FUNCTION_BOUNDARY);
5845 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5846 align = MAX (align, DECL_ALIGN (exp));
5847 #ifdef CONSTANT_ALIGNMENT
5848 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5849 align = CONSTANT_ALIGNMENT (exp, align);
/* Never report more than the caller's ceiling.  */
5851 return MIN (align, max_align);
5859 /* Return the tree node and offset if a given argument corresponds to
5860 a string constant. */
/* NOTE(review): interior lines are missing from this listing; the visible
   code recognizes `&"str"` and `&"str" + offset` / `offset + &"str"` forms.
   On a match the STRING_CST is returned and *PTR_OFFSET receives the
   offset; the failure return is in an omitted line.  */
5863 string_constant (arg, ptr_offset)
/* Direct address of a string constant: offset is zero.  */
5869 if (TREE_CODE (arg) == ADDR_EXPR
5870 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5872 *ptr_offset = integer_zero_node;
5873 return TREE_OPERAND (arg, 0);
/* PLUS: either operand may be the address of the string constant;
   the other is the offset (stored through PTR_OFFSET in omitted lines).  */
5875 else if (TREE_CODE (arg) == PLUS_EXPR)
5877 tree arg0 = TREE_OPERAND (arg, 0);
5878 tree arg1 = TREE_OPERAND (arg, 1);
5883 if (TREE_CODE (arg0) == ADDR_EXPR
5884 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5887 return TREE_OPERAND (arg0, 0);
5889 else if (TREE_CODE (arg1) == ADDR_EXPR
5890 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5893 return TREE_OPERAND (arg1, 0);
5900 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5901 way, because it could contain a zero byte in the middle.
5902 TREE_STRING_LENGTH is the size of the character array, not the string.
5904 Unfortunately, string_constant can't access the values of const char
5905 arrays with initializers, so neither can we do so here. */
/* NOTE(review): the function header of c_strlen falls in an omitted run
   of lines; what follows is its body.  Returns a tree for the string
   length, or 0 (in omitted failure paths) when it cannot be computed.  */
/* Resolve SRC to a STRING_CST plus constant offset, if possible.  */
5915 src = string_constant (src, &offset_node);
5918 max = TREE_STRING_LENGTH (src);
5919 ptr = TREE_STRING_POINTER (src);
5920 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5922 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5923 compute the offset to the following null if we don't know where to
5924 start searching for it. */
/* Scan for an embedded NUL; the early-return on finding one is in an
   omitted line.  */
5926 for (i = 0; i < max; i++)
5929 /* We don't know the starting offset, but we do know that the string
5930 has no internal zero bytes. We can assume that the offset falls
5931 within the bounds of the string; otherwise, the programmer deserves
5932 what he gets. Subtract the offset from the length of the string,
5934 /* This would perhaps not be valid if we were dealing with named
5935 arrays in addition to literal string constants. */
5936 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5939 /* We have a known offset into the string. Start searching there for
5940 a null character. */
5941 if (offset_node == 0)
5945 /* Did we get a long long offset? If so, punt. */
5946 if (TREE_INT_CST_HIGH (offset_node) != 0)
5948 offset = TREE_INT_CST_LOW (offset_node);
5950 /* If the offset is known to be out of bounds, warn, and call strlen at
5952 if (offset < 0 || offset > max)
5954 warning ("offset outside bounds of constant string");
5957 /* Use strlen to search for the first zero byte. Since any strings
5958 constructed with build_string will have nulls appended, we win even
5959 if we get handed something like (char[4])"abcd".
5961 Since OFFSET is our starting index into the string, no further
5962 calculation is needed. */
5963 return size_int (strlen (ptr + offset));
5966 /* Expand an expression EXP that calls a built-in function,
5967 with result going to TARGET if that's convenient
5968 (and in mode MODE if that's convenient).
5969 SUBTARGET may be used as the target for computing one of EXP's operands.
5970 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): this listing drops many interleaved source lines (see the
   gaps in the embedded numbering): braces, some case labels, declarations
   and `if (arglist == 0 ...)` guard heads are not visible.  Comments added
   below describe only what the visible lines establish.  */
5973 expand_builtin (exp, target, subtarget, mode, ignore)
5977 enum machine_mode mode;
/* EXP is a CALL_EXPR whose callee address names a DECL_BUILT_IN function;
   dispatch is on the builtin's function code.  */
5980 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5981 tree arglist = TREE_OPERAND (exp, 1);
5984 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5985 optab builtin_optab;
5987 switch (DECL_FUNCTION_CODE (fndecl))
5992 /* build_function_call changes these into ABS_EXPR. */
5997 case BUILT_IN_FSQRT:
5998 /* If not optimizing, call the library function. */
6003 /* Arg could be wrong type if user redeclared this fcn wrong. */
6004 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6005 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
6007 /* Stabilize and compute the argument. */
/* Wrap the argument in SAVE_EXPR (on fresh tree nodes, so the original
   CALL_EXPR is not clobbered) unless it is already a simple decl.  */
6008 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6009 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6011 exp = copy_node (exp);
6012 arglist = copy_node (arglist);
6013 TREE_OPERAND (exp, 1) = arglist;
6014 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6016 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6018 /* Make a suitable register to place result in. */
6019 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
/* Select the unary optab matching the builtin.  */
6024 switch (DECL_FUNCTION_CODE (fndecl))
6027 builtin_optab = sin_optab; break;
6029 builtin_optab = cos_optab; break;
6030 case BUILT_IN_FSQRT:
6031 builtin_optab = sqrt_optab; break;
6036 /* Compute into TARGET.
6037 Set TARGET to wherever the result comes back. */
6038 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6039 builtin_optab, op0, target, 0);
6041 /* If we were unable to expand via the builtin, stop the
6042 sequence (without outputting the insns) and break, causing
6043 a call to the library function. */
6050 /* Check the results by default. But if flag_fast_math is turned on,
6051 then assume sqrt will always be called with valid arguments. */
6053 if (! flag_fast_math)
6055 /* Don't define the builtin FP instructions
6056 if your machine is not IEEE. */
6057 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6060 lab1 = gen_label_rtx ();
6062 /* Test the result; if it is NaN, set errno=EDOM because
6063 the argument was not in the domain. */
/* A value unequal to itself is a NaN under IEEE semantics.  */
6064 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6065 emit_jump_insn (gen_beq (lab1));
6069 #ifdef GEN_ERRNO_RTX
6070 rtx errno_rtx = GEN_ERRNO_RTX;
/* Fallback location for errno when the target provides none.  */
6073 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6076 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6079 /* We can't set errno=EDOM directly; let the library call do it.
6080 Pop the arguments right away in case the call gets deleted. */
6082 expand_call (exp, target, 0);
6089 /* Output the entire sequence. */
6090 insns = get_insns ();
6096 case BUILT_IN_SAVEREGS:
6097 /* Don't do __builtin_saveregs more than once in a function.
6098 Save the result of the first call and reuse it. */
6099 if (saveregs_value != 0)
6100 return saveregs_value;
6102 /* When this function is called, it means that registers must be
6103 saved on entry to this function. So we migrate the
6104 call to the first insn of this function. */
6107 rtx valreg, saved_valreg;
6109 /* Now really call the function. `expand_call' does not call
6110 expand_builtin, so there is no danger of infinite recursion here. */
6113 #ifdef EXPAND_BUILTIN_SAVEREGS
6114 /* Do whatever the machine needs done in this case. */
6115 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6117 /* The register where the function returns its value
6118 is likely to have something else in it, such as an argument.
6119 So preserve that register around the call. */
6120 if (value_mode != VOIDmode)
6122 valreg = hard_libcall_value (value_mode);
6123 saved_valreg = gen_reg_rtx (value_mode);
6124 emit_move_insn (saved_valreg, valreg);
6127 /* Generate the call, putting the value in a pseudo. */
6128 temp = expand_call (exp, target, ignore);
/* Restore the clobbered return-value register.  */
6130 if (value_mode != VOIDmode)
6131 emit_move_insn (valreg, saved_valreg);
6137 saveregs_value = temp;
6139 /* This won't work inside a SEQUENCE--it really has to be
6140 at the start of the function. */
6141 if (in_sequence_p ())
6143 /* Better to do this than to crash. */
6144 error ("`va_start' used within `({...})'");
6148 /* Put the sequence after the NOTE that starts the function. */
6149 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6153 /* __builtin_args_info (N) returns word N of the arg space info
6154 for the current function. The number and meanings of words
6155 is controlled by the definition of CUMULATIVE_ARGS. */
6156 case BUILT_IN_ARGS_INFO:
6158 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
/* NOTE(review): "¤t_..." below is mojibake; the original source reads
   "(int *) &current_function_args_info" ("&curr" was eaten by an HTML
   entity during extraction) — confirm against a clean copy of expr.c.  */
6160 int *word_ptr = (int *) ¤t_function_args_info;
6161 tree type, elts, result;
6163 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6164 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6165 __FILE__, __LINE__);
6169 tree arg = TREE_VALUE (arglist);
6170 if (TREE_CODE (arg) != INTEGER_CST)
6171 error ("argument of `__builtin_args_info' must be constant");
6174 int wordnum = TREE_INT_CST_LOW (arg);
6176 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6177 error ("argument of `__builtin_args_info' out of range");
6179 return GEN_INT (word_ptr[wordnum]);
6183 error ("missing argument in `__builtin_args_info'");
/* With no argument, build a constant array of all the info words.
   NOTE(review): tree_cons normally takes three arguments (purpose, value,
   chain); the chain argument here is presumably on an omitted
   continuation line.  */
6188 for (i = 0; i < nwords; i++)
6189 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6191 type = build_array_type (integer_type_node,
6192 build_index_type (build_int_2 (nwords, 0)));
6193 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6194 TREE_CONSTANT (result) = 1;
6195 TREE_STATIC (result) = 1;
6196 result = build (INDIRECT_REF, build_pointer_type (type), result);
6197 TREE_CONSTANT (result) = 1;
6198 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6202 /* Return the address of the first anonymous stack arg. */
6203 case BUILT_IN_NEXT_ARG:
/* Only meaningful in a varargs function: the last declared arg type
   must not be void.  */
6205 tree fntype = TREE_TYPE (current_function_decl);
6206 if (!(TYPE_ARG_TYPES (fntype) != 0
6207 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6208 != void_type_node)))
6210 error ("`va_start' used in function with fixed args");
6215 return expand_binop (Pmode, add_optab,
6216 current_function_internal_arg_pointer,
6217 current_function_arg_offset_rtx,
6218 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6220 case BUILT_IN_CLASSIFY_TYPE:
/* Map the argument's tree type code to a member of enum type_class
   (from "typeclass.h"); result is a compile-time constant.  */
6223 tree type = TREE_TYPE (TREE_VALUE (arglist));
6224 enum tree_code code = TREE_CODE (type);
6225 if (code == VOID_TYPE)
6226 return GEN_INT (void_type_class);
6227 if (code == INTEGER_TYPE)
6228 return GEN_INT (integer_type_class);
6229 if (code == CHAR_TYPE)
6230 return GEN_INT (char_type_class);
6231 if (code == ENUMERAL_TYPE)
6232 return GEN_INT (enumeral_type_class);
6233 if (code == BOOLEAN_TYPE)
6234 return GEN_INT (boolean_type_class);
6235 if (code == POINTER_TYPE)
6236 return GEN_INT (pointer_type_class);
6237 if (code == REFERENCE_TYPE)
6238 return GEN_INT (reference_type_class);
6239 if (code == OFFSET_TYPE)
6240 return GEN_INT (offset_type_class);
6241 if (code == REAL_TYPE)
6242 return GEN_INT (real_type_class);
6243 if (code == COMPLEX_TYPE)
6244 return GEN_INT (complex_type_class);
6245 if (code == FUNCTION_TYPE)
6246 return GEN_INT (function_type_class);
6247 if (code == METHOD_TYPE)
6248 return GEN_INT (method_type_class);
6249 if (code == RECORD_TYPE)
6250 return GEN_INT (record_type_class);
6251 if (code == UNION_TYPE)
6252 return GEN_INT (union_type_class);
6253 if (code == ARRAY_TYPE)
6254 return GEN_INT (array_type_class);
6255 if (code == STRING_TYPE)
6256 return GEN_INT (string_type_class);
6257 if (code == SET_TYPE)
6258 return GEN_INT (set_type_class);
6259 if (code == FILE_TYPE)
6260 return GEN_INT (file_type_class);
6261 if (code == LANG_TYPE)
6262 return GEN_INT (lang_type_class);
6264 return GEN_INT (no_type_class);
6266 case BUILT_IN_CONSTANT_P:
/* True iff the argument folds to a constant tree node.  */
6270 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6271 ? const1_rtx : const0_rtx);
6273 case BUILT_IN_FRAME_ADDRESS:
6274 /* The argument must be a nonnegative integer constant.
6275 It counts the number of frames to scan up the stack.
6276 The value is the address of that frame. */
6277 case BUILT_IN_RETURN_ADDRESS:
6278 /* The argument must be a nonnegative integer constant.
6279 It counts the number of frames to scan up the stack.
6280 The value is the return address saved in that frame. */
6282 /* Warning about missing arg was already issued. */
6284 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6286 error ("invalid arg to `__builtin_return_address'");
6289 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6291 error ("invalid arg to `__builtin_return_address'");
6296 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6297 rtx tem = frame_pointer_rtx;
6300 /* Scan back COUNT frames to the specified frame. */
6301 for (i = 0; i < count; i++)
6303 /* Assume the dynamic chain pointer is in the word that
6304 the frame address points to, unless otherwise specified. */
6305 #ifdef DYNAMIC_CHAIN_ADDRESS
6306 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6308 tem = memory_address (Pmode, tem);
6309 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6312 /* For __builtin_frame_address, return what we've got. */
6313 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6316 /* For __builtin_return_address,
6317 Get the return address from that frame. */
6318 #ifdef RETURN_ADDR_RTX
6319 return RETURN_ADDR_RTX (count, tem);
/* Default: the return address is assumed to be one word past the
   frame address.  */
6321 tem = memory_address (Pmode,
6322 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6323 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6327 case BUILT_IN_ALLOCA:
6329 /* Arg could be non-integer if user redeclared this fcn wrong. */
6330 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6332 current_function_calls_alloca = 1;
6333 /* Compute the argument. */
6334 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6336 /* Allocate the desired space. */
6337 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6339 /* Record the new stack level for nonlocal gotos. */
6340 if (nonlocal_goto_handler_slot != 0)
6341 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX)
/* Presumably the BUILT_IN_FFS case (label in an omitted line).  */
6345 /* If not optimizing, call the library function. */
6350 /* Arg could be non-integer if user redeclared this fcn wrong. */
6351 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6354 /* Compute the argument. */
6355 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6356 /* Compute ffs, into TARGET if possible.
6357 Set TARGET to wherever the result comes back. */
6358 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6359 ffs_optab, op0, target, 1);
6364 case BUILT_IN_STRLEN:
6365 /* If not optimizing, call the library function. */
6370 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6371 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6375 tree src = TREE_VALUE (arglist);
6376 tree len = c_strlen (src);
6379 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6381 rtx result, src_rtx, char_rtx;
6382 enum machine_mode insn_mode = value_mode, char_mode;
6383 enum insn_code icode;
6385 /* If the length is known, just return it. */
6387 return expand_expr (len, target, mode, 0);
6389 /* If SRC is not a pointer type, don't do this operation inline. */
6393 /* Call a function if we can't compute strlen in the right mode. */
/* Search VALUE_MODE and successively wider modes for a strlen insn.  */
6395 while (insn_mode != VOIDmode)
6397 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6398 if (icode != CODE_FOR_nothing)
6401 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6403 if (insn_mode == VOIDmode)
6406 /* Make a place to write the result of the instruction. */
/* Reuse TARGET only if it is a pseudo-register of the right mode
   (the `!(target != 0' head of this condition is in an omitted line).  */
6409 && GET_CODE (result) == REG
6410 && GET_MODE (result) == insn_mode
6411 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6412 result = gen_reg_rtx (insn_mode);
6414 /* Make sure the operands are acceptable to the predicates. */
6416 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6417 result = gen_reg_rtx (insn_mode);
6419 src_rtx = memory_address (BLKmode,
6420 expand_expr (src, NULL_RTX, Pmode,
6422 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6423 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
/* Operand 2 is the character to scan for — NUL for strlen.  */
6425 char_rtx = const0_rtx;
6426 char_mode = insn_operand_mode[(int)icode][2];
6427 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6428 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6430 emit_insn (GEN_FCN (icode) (result,
6431 gen_rtx (MEM, BLKmode, src_rtx),
6432 char_rtx, GEN_INT (align)));
6434 /* Return the value in the proper mode for this function. */
6435 if (GET_MODE (result) == value_mode)
6437 else if (target != 0)
6439 convert_move (target, result, 0);
6443 return convert_to_mode (value_mode, result, 0);
6446 case BUILT_IN_STRCPY:
6447 /* If not optimizing, call the library function. */
6452 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6453 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6454 || TREE_CHAIN (arglist) == 0
6455 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
/* If the source length is a known constant, turn the strcpy into a
   memcpy of len+1 bytes and fall through to the MEMCPY case.  */
6459 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6464 len = size_binop (PLUS_EXPR, len, integer_one_node);
6466 chainon (arglist, build_tree_list (NULL_TREE, len));
6470 case BUILT_IN_MEMCPY:
6471 /* If not optimizing, call the library function. */
6476 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6477 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6478 || TREE_CHAIN (arglist) == 0
6479 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6480 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6481 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6485 tree dest = TREE_VALUE (arglist);
6486 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6487 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6490 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6492 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6493 rtx dest_rtx, dest_mem, src_mem;
6495 /* If either SRC or DEST is not a pointer type, don't do
6496 this operation in-line. */
6497 if (src_align == 0 || dest_align == 0)
/* Undo the strcpy->memcpy arglist extension before falling back to a
   real library call.  */
6499 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6500 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6504 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6505 dest_mem = gen_rtx (MEM, BLKmode,
6506 memory_address (BLKmode, dest_rtx));
6507 src_mem = gen_rtx (MEM, BLKmode,
6508 memory_address (BLKmode,
6509 expand_expr (src, NULL_RTX,
6513 /* Copy word part most expediently. */
6514 emit_block_move (dest_mem, src_mem,
6515 expand_expr (len, NULL_RTX, VOIDmode, 0),
6516 MIN (src_align, dest_align));
6520 /* These comparison functions need an instruction that returns an actual
6521 index. An ordinary compare that just sets the condition codes
6523 #ifdef HAVE_cmpstrsi
6524 case BUILT_IN_STRCMP:
6525 /* If not optimizing, call the library function. */
6530 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6531 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6532 || TREE_CHAIN (arglist) == 0
6533 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6535 else if (!HAVE_cmpstrsi)
6538 tree arg1 = TREE_VALUE (arglist);
6539 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
/* len/len2: lengths (+1 for the NUL) when the operands are constant
   strings; 0 otherwise.  */
6543 len = c_strlen (arg1);
6545 len = size_binop (PLUS_EXPR, integer_one_node, len);
6546 len2 = c_strlen (arg2);
6548 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6550 /* If we don't have a constant length for the first, use the length
6551 of the second, if we know it. We don't require a constant for
6552 this case; some cost analysis could be done if both are available
6553 but neither is constant. For now, assume they're equally cheap.
6555 If both strings have constant lengths, use the smaller. This
6556 could arise if optimization results in strcpy being called with
6557 two fixed strings, or if the code was machine-generated. We should
6558 add some code to the `memcmp' handler below to deal with such
6559 situations, someday. */
6560 if (!len || TREE_CODE (len) != INTEGER_CST)
6567 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6569 if (tree_int_cst_lt (len2, len))
/* Append the computed length and fall through to the memcmp case.  */
6573 chainon (arglist, build_tree_list (NULL_TREE, len));
6577 case BUILT_IN_MEMCMP:
6578 /* If not optimizing, call the library function. */
6583 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6584 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6585 || TREE_CHAIN (arglist) == 0
6586 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6587 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6588 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6590 else if (!HAVE_cmpstrsi)
6593 tree arg1 = TREE_VALUE (arglist);
6594 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6595 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6599 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6601 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6602 enum machine_mode insn_mode
6603 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6605 /* If we don't have POINTER_TYPE, call the function. */
6606 if (arg1_align == 0 || arg2_align == 0)
6608 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6609 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6613 /* Make a place to write the result of the instruction. */
6616 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6617 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6618 result = gen_reg_rtx (insn_mode);
6620 emit_insn (gen_cmpstrsi (result,
6621 gen_rtx (MEM, BLKmode,
6622 expand_expr (arg1, NULL_RTX, Pmode,
6624 gen_rtx (MEM, BLKmode,
6625 expand_expr (arg2, NULL_RTX, Pmode,
6627 expand_expr (len, NULL_RTX, VOIDmode, 0),
6628 GEN_INT (MIN (arg1_align, arg2_align))));
6630 /* Return the value in the proper mode for this function. */
6631 mode = TYPE_MODE (TREE_TYPE (exp));
6632 if (GET_MODE (result) == mode)
6634 else if (target != 0)
6636 convert_move (target, result, 0);
6640 return convert_to_mode (mode, result, 0);
/* Without HAVE_cmpstrsi these builtins simply become library calls.  */
6643 case BUILT_IN_STRCMP:
6644 case BUILT_IN_MEMCMP:
6648 default: /* just do library call, if unknown builtin */
6649 error ("built-in function `%s' not currently supported",
6650 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6653 /* The switch statement above can drop through to cause the function
6654 to be called normally. */
6656 return expand_call (exp, target, ignore);
6659 /* Expand code for a post- or pre- increment or decrement
6660 and return the RTX for the result.
6661 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
/* NOTE(review): interior source lines are missing from this listing;
   comments describe only the visible code.  */
6664 expand_increment (exp, post)
6668 register rtx op0, op1;
6669 register rtx temp, value;
6670 register tree incremented = TREE_OPERAND (exp, 0);
/* Assume increment; switched to sub_optab below for decrements.  */
6671 optab this_optab = add_optab;
6673 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6674 int op0_is_copy = 0;
6676 /* Stabilize any component ref that might need to be
6677 evaluated more than once below. */
6679 || TREE_CODE (incremented) == BIT_FIELD_REF
6680 || (TREE_CODE (incremented) == COMPONENT_REF
6681 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6682 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6683 incremented = stabilize_reference (incremented);
6685 /* Compute the operands as RTX.
6686 Note whether OP0 is the actual lvalue or a copy of it:
6687 I believe it is a copy iff it is a register or subreg
6688 and insns were generated in computing it. */
6690 temp = get_last_insn ();
6691 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6693 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6694 in place but instead must do sign- or zero-extension during assignment,
6695 so we copy it into a new register and let the code below use it as
6698 Note that we can safely modify this SUBREG since it is known not to be
6699 shared (it was made by the expand_expr call above). */
6701 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6702 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6704 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6705 && temp != get_last_insn ());
6706 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6708 /* Decide whether incrementing or decrementing. */
6709 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6710 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6711 this_optab = sub_optab;
6713 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6714 then we cannot just increment OP0. We must therefore contrive to
6715 increment the original value. Then, for postincrement, we can return
6716 OP0 since it is a copy of the old value. For preincrement, we want
6717 to always expand here, since this generates better or equivalent code. */
6718 if (!post || op0_is_copy)
6720 /* This is the easiest way to increment the value wherever it is.
6721 Problems with multiple evaluation of INCREMENTED are prevented
6722 because either (1) it is a component_ref or preincrement,
6723 in which case it was stabilized above, or (2) it is an array_ref
6724 with constant index in an array in a register, which is
6725 safe to reevaluate. */
6726 tree newexp = build ((this_optab == add_optab
6727 ? PLUS_EXPR : MINUS_EXPR),
6730 TREE_OPERAND (exp, 1));
6731 temp = expand_assignment (incremented, newexp, ! post, 0);
6732 return post ? op0 : temp;
6735 /* Convert decrement by a constant into a negative increment. */
6736 if (this_optab == sub_optab
6737 && GET_CODE (op1) == CONST_INT)
6739 op1 = GEN_INT (- INTVAL (op1));
6740 this_optab = add_optab;
6745 /* We have a true reference to the value in OP0.
6746 If there is an insn to add or subtract in this mode, queue it. */
6748 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6749 op0 = stabilize (op0);
/* Postincrement of a directly-addressable lvalue: if an add/sub insn in
   this mode accepts OP0 as both source and destination, queue the
   increment so it happens after the old value is used.  */
6752 icode = (int) this_optab->handlers[(int) mode].insn_code;
6753 if (icode != (int) CODE_FOR_nothing
6754 /* Make sure that OP0 is valid for operands 0 and 1
6755 of the insn we want to queue. */
6756 && (*insn_operand_predicate[icode][0]) (op0, mode)
6757 && (*insn_operand_predicate[icode][1]) (op0, mode))
6759 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6760 op1 = force_reg (mode, op1);
6762 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6766 /* Preincrement, or we can't increment with one simple insn. */
6768 /* Save a copy of the value before inc or dec, to return it later. */
6769 temp = value = copy_to_reg (op0);
6771 /* Arrange to return the incremented value. */
6772 /* Copy the rtx because expand_binop will protect from the queue,
6773 and the results of that would be invalid for us to return
6774 if our caller does emit_queue before using our result. */
6775 temp = copy_rtx (value = op0);
6777 /* Increment however we can. */
6778 op1 = expand_binop (mode, this_optab, value, op1, op0,
6779 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6780 /* Make sure the value is stored into OP0. */
6782 emit_move_insn (op0, op1);
6787 /* Expand all function calls contained within EXP, innermost ones first.
6788 But don't look within expressions that have sequence points.
6789 For each CALL_EXPR, record the rtx for its value
6790 in the CALL_EXPR_RTL field. */
/* NOTE(review): interior source lines (braces, some case labels, return
   statements) are omitted from this listing.  */
6793 preexpand_calls (exp)
6796 register int nops, i;
/* TYPE here is the tree-code *class* character ('e', '<', '1', '2', 'r'),
   not a type node.  */
6797 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6799 if (! do_preexpand_calls)
6802 /* Only expressions and references can contain calls. */
6804 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6807 switch (TREE_CODE (exp))
/* Presumably the CALL_EXPR case (label omitted).  */
6810 /* Do nothing if already expanded. */
6811 if (CALL_EXPR_RTL (exp) != 0)
6814 /* Do nothing to built-in functions. */
6815 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6816 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6817 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6818 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6823 case TRUTH_ANDIF_EXPR:
6824 case TRUTH_ORIF_EXPR:
6825 /* If we find one of these, then we can be sure
6826 the adjust will be done for it (since it makes jumps).
6827 Do it now, so that if this is inside an argument
6828 of a function, we don't get the stack adjustment
6829 after some other args have already been pushed. */
6830 do_pending_stack_adjust ();
6835 case WITH_CLEANUP_EXPR:
/* Presumably the SAVE_EXPR case: skip it if already expanded.  */
6839 if (SAVE_EXPR_RTL (exp) != 0)
/* Default: recurse into every operand that can itself contain calls.  */
6843 nops = tree_code_length[(int) TREE_CODE (exp)];
6844 for (i = 0; i < nops; i++)
6845 if (TREE_OPERAND (exp, i) != 0)
6847 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6848 if (type == 'e' || type == '<' || type == '1' || type == '2'
6850 preexpand_calls (TREE_OPERAND (exp, i));
6854 /* At the start of a function, record that we have no previously-pushed
6855 arguments waiting to be popped. */
6858 init_pending_stack_adjust ()
/* Reset the running count of stack bytes awaiting an adjust_stack.  */
6860 pending_stack_adjust = 0;
6863 /* When exiting from function, if safe, clear out any pending stack adjust
6864 so the adjustment won't get done. */
6867 clear_pending_stack_adjust ()
6869 #ifdef EXIT_IGNORE_STACK
/* Safe to skip the adjustment only when the epilogue ignores the stack
   pointer and this function will not be inlined elsewhere (inlining
   would splice this code into a context that does need the adjust).  */
6870 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6871 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6872 && ! flag_inline_functions)
6873 pending_stack_adjust = 0;
6877 /* Pop any previously-pushed arguments that have not been popped yet. */
6880 do_pending_stack_adjust ()
/* Defer entirely while inhibit_defer_pop is set (e.g. inside a
   conditional expression).  */
6882 if (inhibit_defer_pop == 0)
6884 if (pending_stack_adjust != 0)
6885 adjust_stack (GEN_INT (pending_stack_adjust));
6886 pending_stack_adjust = 0;
6890 /* Expand all cleanups up to OLD_CLEANUPS.
6891 Needed here, and also for language-dependent calls. */
6894 expand_cleanups_to (old_cleanups)
/* Pop and expand cleanup expressions from the cleanups_this_call list
   until it is restored to the caller-saved OLD_CLEANUPS tail.  */
6897 while (cleanups_this_call != old_cleanups)
6899 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6900 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6904 /* Expand conditional expressions. */
6906 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6907 LABEL is an rtx of code CODE_LABEL, in this function and all the
6911 jumpifnot (exp, label)
/* Jump-if-false: LABEL as the false target, no true target.  */
6915 do_jump (exp, label, NULL_RTX);
6918 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* NOTE(review): the function header (presumably `jumpif (exp, label)`)
   falls in omitted lines; this is its body — LABEL as the true target.  */
6925 do_jump (exp, NULL_RTX, label);
6928 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6929 the result is zero, or IF_TRUE_LABEL if the result is one.
6930 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6931 meaning fall through in that case.
6933 do_jump always does any pending stack adjust except when it does not
6934 actually perform a jump. An example where there is no jump
6935 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6937 This function is responsible for optimizing cases such as
6938 &&, || and comparison operators in EXP. */
/* NOTE(review): this text is a lossy extraction — the fused original line
   numbers jump (e.g. 6949 -> 6963), so the switch statement's braces,
   several `case' labels (INTEGER_CST, ADDR_EXPR, NOP_EXPR, MINUS_EXPR,
   BIT_AND_EXPR, COND_EXPR, the relational codes, default, etc.) and some
   declarations are missing here.  Recover them from a pristine copy
   before editing logic.  */
6941 do_jump (exp, if_false_label, if_true_label)
6943 rtx if_false_label, if_true_label;
6945 register enum tree_code code = TREE_CODE (exp);
6946 /* Some cases need to create a label to jump to
6947 in order to properly fall through.
6948 These cases set DROP_THROUGH_LABEL nonzero. */
6949 rtx drop_through_label = 0;
/* Constant truth value: pick the target statically (presumably the
   INTEGER_CST case — label lost in extraction).  */
6963 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6969 /* This is not true with #pragma weak */
6971 /* The address of something can never be zero. */
6973 emit_jump (if_true_label);
/* Conversions (NOP_EXPR/CONVERT_EXPR, presumably): component refs get
   special handling; narrowing conversions must not be skipped.  */
6978 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6979 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6980 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6983 /* If we are narrowing the operand, we have to do the compare in the
6985 if ((TYPE_PRECISION (TREE_TYPE (exp))
6986 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6988 case NON_LVALUE_EXPR:
6989 case REFERENCE_EXPR:
6994 /* These cannot change zero->non-zero or vice versa. */
6995 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6999 /* This is never less insns than evaluating the PLUS_EXPR followed by
7000 a test and can be longer if the test is eliminated. */
7002 /* Reduce to minus. */
7003 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7004 TREE_OPERAND (exp, 0),
7005 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7006 TREE_OPERAND (exp, 1))));
7007 /* Process as MINUS. */
/* MINUS_EXPR: a - b is nonzero iff a != b.  */
7011 /* Non-zero iff operands of minus differ. */
7012 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7013 TREE_OPERAND (exp, 0),
7014 TREE_OPERAND (exp, 1)),
/* BIT_AND_EXPR with a small constant mask: shrink the comparison type.  */
7019 /* If we are AND'ing with a small constant, do this comparison in the
7020 smallest type that fits. If the machine doesn't have comparisons
7021 that small, it will be converted back to the wider comparison.
7022 This helps if we are testing the sign bit of a narrower object.
7023 combine can't do this for us because it can't know whether a
7024 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7026 if (! SLOW_BYTE_ACCESS
7027 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7028 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7029 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7030 && (type = type_for_size (i + 1, 1)) != 0
7031 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7032 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7033 != CODE_FOR_nothing))
7035 do_jump (convert (type, exp), if_false_label, if_true_label);
7040 case TRUTH_NOT_EXPR:
/* Logical negation: just swap the two target labels.  */
7041 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7044 case TRUTH_ANDIF_EXPR:
/* Short-circuit &&: first operand false jumps out; invent a false
   label if the caller wanted fall-through.  */
7045 if (if_false_label == 0)
7046 if_false_label = drop_through_label = gen_label_rtx ();
7047 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7048 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7051 case TRUTH_ORIF_EXPR:
/* Short-circuit ||: mirror image of the && case.  */
7052 if (if_true_label == 0)
7053 if_true_label = drop_through_label = gen_label_rtx ();
7054 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7055 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* COMPOUND_EXPR, presumably: evaluate the first operand for its side
   effects only, then test the second.  */
7059 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7062 do_pending_stack_adjust ();
7063 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* Bit-field style references: try to test only the field's bits.  */
7070 int bitsize, bitpos, unsignedp;
7071 enum machine_mode mode;
7076 /* Get description of this reference. We don't actually care
7077 about the underlying object here. */
7078 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7079 &mode, &unsignedp, &volatilep);
7081 type = type_for_size (bitsize, unsignedp);
7082 if (! SLOW_BYTE_ACCESS
7083 && type != 0 && bitsize >= 0
7084 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7085 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7086 != CODE_FOR_nothing))
7088 do_jump (convert (type, exp), if_false_label, if_true_label);
/* COND_EXPR (?:), presumably — label lost in extraction.  */
7095 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7096 if (integer_onep (TREE_OPERAND (exp, 1))
7097 && integer_zerop (TREE_OPERAND (exp, 2)))
7098 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7100 else if (integer_zerop (TREE_OPERAND (exp, 1))
7101 && integer_onep (TREE_OPERAND (exp, 2)))
7102 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* General conditional: branch over the THEN arm into the ELSE arm.  */
7106 register rtx label1 = gen_label_rtx ();
7107 drop_through_label = gen_label_rtx ();
7108 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7109 /* Now the THEN-expression. */
7110 do_jump (TREE_OPERAND (exp, 1),
7111 if_false_label ? if_false_label : drop_through_label,
7112 if_true_label ? if_true_label : drop_through_label);
7113 /* In case the do_jump just above never jumps. */
7114 do_pending_stack_adjust ();
7115 emit_label (label1);
7116 /* Now the ELSE-expression. */
7117 do_jump (TREE_OPERAND (exp, 2),
7118 if_false_label ? if_false_label : drop_through_label,
7119 if_true_label ? if_true_label : drop_through_label);
/* EQ_EXPR: comparison with zero inverts a plain truth test; multiword
   integers too wide for one compare go word-by-word.  */
7124 if (integer_zerop (TREE_OPERAND (exp, 1)))
7125 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7126 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7129 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7130 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7132 comparison = compare (exp, EQ, EQ);
/* NE_EXPR: comparison with zero is exactly a truth test.  */
7136 if (integer_zerop (TREE_OPERAND (exp, 1)))
7137 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7138 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7141 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7142 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7144 comparison = compare (exp, NE, NE);
/* LT: a < b done by parts is b > a, hence swap == 1.  */
7148 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7150 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7151 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7153 comparison = compare (exp, LT, LTU);
/* LE: expressed as NOT (a > b) by swapping the target labels.  */
7157 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7159 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7160 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7162 comparison = compare (exp, LE, LEU);
/* GT: the direct case for the by-parts helper.  */
7166 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7168 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7169 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7171 comparison = compare (exp, GT, GTU);
/* GE: NOT (b > a): swap operands and labels.  */
7175 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7177 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7178 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7180 comparison = compare (exp, GE, GEU);
/* default: evaluate EXP and compare its value against zero.  */
7185 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7187 /* This is not needed any more and causes poor code since it causes
7188 comparisons and tests from non-SI objects to have different code
7190 /* Copy to register to avoid generating bad insns by cse
7191 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7192 if (!cse_not_expected && GET_CODE (temp) == MEM)
7193 temp = copy_to_reg (temp);
7195 do_pending_stack_adjust ();
7196 if (GET_CODE (temp) == CONST_INT)
7197 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7198 else if (GET_CODE (temp) == LABEL_REF)
7199 comparison = const_true_rtx;
7200 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7201 && !can_compare_p (GET_MODE (temp)))
7202 /* Note swapping the labels gives us not-equal. */
7203 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7204 else if (GET_MODE (temp) != VOIDmode)
7205 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7206 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7207 GET_MODE (temp), NULL_RTX, 0);
7212 /* Do any postincrements in the expression that was tested. */
7215 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7216 straight into a conditional jump instruction as the jump condition.
7217 Otherwise, all the work has been done already. */
7219 if (comparison == const_true_rtx)
7222 emit_jump (if_true_label);
7224 else if (comparison == const0_rtx)
7227 emit_jump (if_false_label);
7229 else if (comparison)
7230 do_jump_for_compare (comparison, if_false_label, if_true_label);
/* Emit the fall-through label created by &&, || or ?: above.  */
7234 if (drop_through_label)
7236 /* If do_jump produces code that might be jumped around,
7237 do any stack adjusts from that code, before the place
7238 where control merges in. */
7239 do_pending_stack_adjust ();
7240 emit_label (drop_through_label);
7244 /* Given a comparison expression EXP for values too wide to be compared
7245 with one insn, test the comparison and jump to the appropriate label.
7246 The code of EXP is ignored; we always test GT if SWAP is 0,
7247 and LT if SWAP is 1. */
/* NOTE(review): lossy extraction — numbering gaps (7250->7253, 7271->7274,
   7276->7278, ...) show that the return type, some parameter declarations,
   braces and the `else' of the endianness test were dropped.  */
7250 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7253 rtx if_false_label, if_true_label;
/* SWAP selects which operand is expanded first, effectively turning the
   GT test into LT (see the header comment above this function).  */
7255 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7256 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7257 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7258 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7259 rtx drop_through_label = 0;
7260 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* Missing labels mean "fall through"; route them to a common label
   emitted at the end.  */
7263 if (! if_true_label || ! if_false_label)
7264 drop_through_label = gen_label_rtx ();
7265 if (! if_true_label)
7266 if_true_label = drop_through_label;
7267 if (! if_false_label)
7268 if_false_label = drop_through_label;
7270 /* Compare a word at a time, high order first. */
7271 for (i = 0; i < nwords; i++)
7274 rtx op0_word, op1_word;
/* Word 0 is the high-order word on big-endian targets; otherwise index
   from the top down.  */
7276 if (WORDS_BIG_ENDIAN)
7278 op0_word = operand_subword_force (op0, i, mode);
7279 op1_word = operand_subword_force (op1, i, mode);
7283 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7284 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7287 /* All but high-order word must be compared as unsigned. */
7288 comp = compare_from_rtx (op0_word, op1_word,
7289 (unsignedp || i > 0) ? GTU : GT,
7290 unsignedp, word_mode, NULL_RTX, 0);
/* compare_from_rtx may fold to a constant; branch or fall through.  */
7291 if (comp == const_true_rtx)
7292 emit_jump (if_true_label);
7293 else if (comp != const0_rtx)
7294 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7296 /* Consider lower words only if these are equal. */
7297 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7299 if (comp == const_true_rtx)
7300 emit_jump (if_false_label);
7301 else if (comp != const0_rtx)
7302 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words compared equal: the relation GT is false.  */
7306 emit_jump (if_false_label);
7307 if (drop_through_label)
7308 emit_label (drop_through_label);
7311 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7312 with one insn, test the comparison and jump to the appropriate label. */
/* NOTE(review): lossy extraction — return type, the `tree exp;' parameter
   line, braces and the loop-index declaration were dropped (numbering
   gaps 7315->7317, 7322->7324, 7338->7342).  */
7315 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7317 rtx if_false_label, if_true_label;
7319 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7320 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7321 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7322 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7324 rtx drop_through_label = 0;
/* A null false-label means fall through on inequality; synthesize one.  */
7326 if (! if_false_label)
7327 drop_through_label = if_false_label = gen_label_rtx ();
/* Any word pair unequal => whole values unequal => jump to false.  */
7329 for (i = 0; i < nwords; i++)
7331 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7332 operand_subword_force (op1, i, mode),
7333 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7334 word_mode, NULL_RTX, 0);
/* NOTE(review): comp == const_true_rtx here means this word pair folded
   to "equal", so execution continues to the next word — the jump below
   targets if_false_label only when the EQ comparison folded false; the
   dropped lines likely held the inverted-sense plumbing — verify against
   a pristine copy.  */
7335 if (comp == const_true_rtx)
7336 emit_jump (if_false_label);
7337 else if (comp != const0_rtx)
7338 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Every word matched: the values are equal.  */
7342 emit_jump (if_true_label);
7343 if (drop_through_label)
7344 emit_label (drop_through_label);
7347 /* Jump according to whether OP0 is 0.
7348 We assume that OP0 has an integer mode that is too wide
7349 for the available compare insns. */
/* NOTE(review): lossy extraction — return type, `rtx op0;' declaration,
   braces and loop-index declaration dropped (gaps 7352->7354, 7356->7358,
   7365->7367).  */
7352 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7354 rtx if_false_label, if_true_label;
7356 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7358 rtx drop_through_label = 0;
7360 if (! if_false_label)
7361 drop_through_label = if_false_label = gen_label_rtx ();
/* Compare each word of OP0 against zero; any nonzero word means the
   whole value is nonzero.  */
7363 for (i = 0; i < nwords; i++)
7365 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7367 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7368 if (comp == const_true_rtx)
7369 emit_jump (if_false_label);
7370 else if (comp != const0_rtx)
7371 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* All words were zero: OP0 == 0.  */
7375 emit_jump (if_true_label);
7376 if (drop_through_label)
7377 emit_label (drop_through_label);
7380 /* Given a comparison expression in rtl form, output conditional branches to
7381 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
/* NOTE(review): lossy extraction — return type, braces, the
   `if (if_true_label)' / `if (if_false_label)' guards, and the
   `rtx insn; rtx branch = 0;' declarations were dropped (gaps
   7385->7389, 7390->7395, 7424->7431).  */
7384 do_jump_for_compare (comparison, if_false_label, if_true_label)
7385 rtx comparison, if_false_label, if_true_label;
/* Easy case: a branch-on-condition generator exists for this rtx code;
   branch to the true label and (if present) fall into a jump to false.  */
7389 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7390 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7395 emit_jump (if_false_label);
7397 else if (if_false_label)
/* Remember the insn before the branch we are about to emit, so we can
   find exactly what was generated.  */
7400 rtx prev = PREV_INSN (get_last_insn ());
7403 /* Output the branch with the opposite condition. Then try to invert
7404 what is generated. If more than one insn is a branch, or if the
7405 branch is not the last insn written, abort. If we can't invert
7406 the branch, emit make a true label, redirect this jump to that,
7407 emit a jump to the false label and define the true label. */
7409 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7410 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7414 /* Here we get the insn before what was just emitted.
7415 On some machines, emitting the branch can discard
7416 the previous compare insn and emit a replacement. */
7418 /* If there's only one preceding insn... */
7419 insn = get_insns ();
7421 insn = NEXT_INSN (prev);
/* Scan the newly-emitted insns; exactly one JUMP_INSN is expected.  */
7423 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7424 if (GET_CODE (insn) == JUMP_INSN)
7431 if (branch != get_last_insn ())
/* Inversion failed: fall back to redirect-through-a-new-label.  */
7434 if (! invert_jump (branch, if_false_label))
7436 if_true_label = gen_label_rtx ();
7437 redirect_jump (branch, if_true_label);
7438 emit_jump (if_false_label);
7439 emit_label (if_true_label);
7444 /* Generate code for a comparison expression EXP
7445 (including code to compute the values to be compared)
7446 and set (CC0) according to the result.
7447 SIGNED_CODE should be the rtx operation for this comparison for
7448 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7450 We force a stack adjustment unless there are currently
7451 things pushed on the stack that aren't yet used. */
/* NOTE(review): lossy extraction — the return type (rtx), `register tree
   exp;', and the `register rtx op0/op1' declarator halves are missing
   (gaps 7454->7456, 7456->7459; lines 7459/7461 begin mid-declaration).  */
7454 compare (exp, signed_code, unsigned_code)
7456 enum rtx_code signed_code, unsigned_code;
7459 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7461 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7462 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7463 register enum machine_mode mode = TYPE_MODE (type);
7464 int unsignedp = TREE_UNSIGNED (type);
/* Signedness of the operand type selects which rtx comparison to use.  */
7465 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
/* The dropped line 7468 presumably tested for BLKmode to decide whether
   a size rtx is needed — verify against a pristine copy.  */
7467 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7469 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7470 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7473 /* Like compare but expects the values to compare as two rtx's.
7474 The decision as to signed or unsigned comparison must be made by the caller.
7476 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7479 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7480 size of MODE should be used. */
/* NOTE(review): lossy extraction — return type, several parameter
   declarations (code, unsignedp, size, align), braces, the operand-swap
   temporaries and an `#if 0' block appear to be missing (gaps
   7484->7487, 7487->7493, 7497->7502, 7514->7518).  */
7483 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7484 register rtx op0, op1;
7487 enum machine_mode mode;
7493 /* If one operand is constant, make it the second one. Only do this
7494 if the other operand is not constant as well. */
7496 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7497 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
/* After swapping operands the comparison code must be reversed too.  */
7502 code = swap_condition (code);
/* With cse disabled, keep operands out of memory to avoid poor insns.  */
7507 op0 = force_not_mem (op0);
7508 op1 = force_not_mem (op1);
7511 do_pending_stack_adjust ();
/* Constant-fold the whole comparison when both operands are constants.  */
7513 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7514 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7518 /* There's no need to do this now that combine.c can eliminate lots of
7519 sign extensions. This can be less efficient in certain cases on other
7522 /* If this is a signed equality comparison, we can do it as an
7523 unsigned comparison since zero-extension is cheaper than sign
7524 extension and comparisons with zero are done as unsigned. This is
7525 the case even on machines that can do fast sign extension, since
7526 zero-extension is easier to combine with other operations than
7527 sign-extension is. If we are comparing against a constant, we must
7528 convert it to what it would look like unsigned. */
7529 if ((code == EQ || code == NE) && ! unsignedp
7530 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7532 if (GET_CODE (op1) == CONST_INT
7533 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7534 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
/* Emit the actual compare insn and return a cc0-based condition rtx
   suitable for a conditional branch.  */
7539 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7541 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7544 /* Generate code to calculate EXP using a store-flag instruction
7545 and return an rtx for the result. EXP is either a comparison
7546 or a TRUTH_NOT_EXPR whose operand is a comparison.
7548 If TARGET is nonzero, store the result there if convenient.
7550 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7553 Return zero if there is no suitable set-flag instruction
7554 available on this machine.
7556 Once expand_expr has been called on the arguments of the comparison,
7557 we are committed to doing the store flag, since it is not safe to
7558 re-evaluate the expression. We emit the store-flag insn by calling
7559 emit_store_flag, but only expand the arguments if we have a reason
7560 to believe that emit_store_flag will be successful. If we think that
7561 it will, but it isn't, we have to simulate the store-flag with a
7562 set/jump/set sequence. */
/* NOTE(review): lossy extraction — return type (rtx), the `tree exp;' and
   `rtx target;'/`int only_cheap;' parameter lines, local declarations
   (code, op0, op1, invert, unsignedp, tem), all `case'/`return' lines of
   the switch, and many braces were dropped (gaps 7565->7568, 7611->7620,
   7686->7692, 7735->7740, 7759->7762, ...).  */
7565 do_store_flag (exp, target, mode, only_cheap)
7568 enum machine_mode mode;
7572 tree arg0, arg1, type;
7574 enum machine_mode operand_mode;
7578 enum insn_code icode;
7579 rtx subtarget = target;
7580 rtx result, label, pattern, jump_pat;
7582 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7583 result at the end. We can't simply invert the test since it would
7584 have already been inverted if it were valid. This case occurs for
7585 some floating-point comparisons. */
7587 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7588 invert = 1, exp = TREE_OPERAND (exp, 0);
7590 arg0 = TREE_OPERAND (exp, 0);
7591 arg1 = TREE_OPERAND (exp, 1);
7592 type = TREE_TYPE (arg0);
7593 operand_mode = TYPE_MODE (type);
7594 unsignedp = TREE_UNSIGNED (type);
7596 /* We won't bother with BLKmode store-flag operations because it would mean
7597 passing a lot of information to emit_store_flag. */
7598 if (operand_mode == BLKmode)
7604 /* Get the rtx comparison code to use. We know that EXP is a comparison
7605 operation of some type. Some comparisons against 1 and -1 can be
7606 converted to comparisons with zero. Do so here so that the tests
7607 below will be aware that we have a comparison with zero. These
7608 tests will not catch constants in the first operand, but constants
7609 are rarely passed as the first operand. */
7611 switch (TREE_CODE (exp))
/* LT_EXPR, presumably: x < 1 becomes x <= 0.  */
7620 if (integer_onep (arg1))
7621 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7623 code = unsignedp ? LTU : LT;
/* LE_EXPR: x <= -1 becomes x < 0.  */
7626 if (integer_all_onesp (arg1))
7627 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7629 code = unsignedp ? LEU : LE;
/* GT_EXPR: x > -1 becomes x >= 0.  */
7632 if (integer_all_onesp (arg1))
7633 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7635 code = unsignedp ? GTU : GT;
/* GE_EXPR: x >= 1 becomes x > 0.  */
7638 if (integer_onep (arg1))
7639 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7641 code = unsignedp ? GEU : GE;
7647 /* Put a constant second. */
7648 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7650 tem = arg0; arg0 = arg1; arg1 = tem;
7651 code = swap_condition (code);
7654 /* If this is an equality or inequality test of a single bit, we can
7655 do this by shifting the bit being tested to the low-order bit and
7656 masking the result with the constant 1. If the condition was EQ,
7657 we xor it with 1. This does not require an scc insn and is faster
7658 than an scc insn even if we have it. */
7660 if ((code == NE || code == EQ)
7661 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7662 && integer_pow2p (TREE_OPERAND (arg0, 1))
7663 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7665 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7666 NULL_RTX, VOIDmode, 0)));
/* Only reuse SUBTARGET if it is a register of the right mode and the
   operand's expansion cannot clobber it.  */
7668 if (subtarget == 0 || GET_CODE (subtarget) != REG
7669 || GET_MODE (subtarget) != operand_mode
7670 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7673 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0.  */
7676 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7677 size_int (bitnum), target, 1);
7679 if (GET_MODE (op0) != mode)
7680 op0 = convert_to_mode (mode, op0, 1);
/* Unless the bit was the sign bit (already isolated), mask to 1 bit.  */
7682 if (bitnum != TYPE_PRECISION (type) - 1)
7683 op0 = expand_and (op0, const1_rtx, target);
/* EQ wants the complement of the bit; XOR with 1 flips it.  */
7685 if ((code == EQ && ! invert) || (code == NE && invert))
7686 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7692 /* Now see if we are likely to be able to do this. Return if not. */
7693 if (! can_compare_p (operand_mode))
7695 icode = setcc_gen_code[(int) code];
7696 if (icode == CODE_FOR_nothing
7697 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7699 /* We can only do this if it is one of the special cases that
7700 can be handled without an scc insn. */
7701 if ((code == LT && integer_zerop (arg1))
7702 || (! only_cheap && code == GE && integer_zerop (arg1)))
7704 else if (BRANCH_COST >= 0
7705 && ! only_cheap && (code == NE || code == EQ)
7706 && TREE_CODE (type) != REAL_TYPE
7707 && ((abs_optab->handlers[(int) operand_mode].insn_code
7708 != CODE_FOR_nothing)
7709 || (ffs_optab->handlers[(int) operand_mode].insn_code
7710 != CODE_FOR_nothing)))
/* Commit: expand the operands (after pre-expanding contained calls).  */
7716 preexpand_calls (exp);
7717 if (subtarget == 0 || GET_CODE (subtarget) != REG
7718 || GET_MODE (subtarget) != operand_mode
7719 || ! safe_from_p (subtarget, arg1))
7722 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7723 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7726 target = gen_reg_rtx (mode);
7728 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7729 because, if the emit_store_flag does anything it will succeed and
7730 OP0 and OP1 will not be used subsequently. */
7732 result = emit_store_flag (target, code,
7733 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7734 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7735 operand_mode, unsignedp, 1);
/* On success with INVERT set, flip the 0/1 result with XOR.  */
7740 result = expand_binop (mode, xor_optab, result, const1_rtx,
7741 result, 0, OPTAB_LIB_WIDEN);
7745 /* If this failed, we have to do this with set/compare/jump/set code. */
7746 if (target == 0 || GET_CODE (target) != REG
7747 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7748 target = gen_reg_rtx (GET_MODE (target));
/* Preload the "condition holds" value, then branch over the store of
   the opposite value when the condition is true.  */
7750 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7751 result = compare_from_rtx (op0, op1, code, unsignedp,
7752 operand_mode, NULL_RTX, 0);
7753 if (GET_CODE (result) == CONST_INT)
7754 return (((result == const0_rtx && ! invert)
7755 || (result != const0_rtx && invert))
7756 ? const0_rtx : const1_rtx);
7758 label = gen_label_rtx ();
7759 if (bcc_gen_fctn[(int) code] == 0)
7762 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7763 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7769 /* Generate a tablejump instruction (used for switch statements). */
7771 #ifdef HAVE_tablejump
7773 /* INDEX is the value being switched on, with the lowest value
7774 in the table already subtracted.
7775 MODE is its expected mode (needed if INDEX is constant).
7776 RANGE is the length of the jump table.
7777 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7779 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7780 index value is out of range. */
/* NOTE(review): lossy extraction — return type, braces and the
   emit_barrier call guarded by CASE_VECTOR_PC_RELATIVE/flag_pic were
   dropped (gaps 7785->7787, 7828 onward).  */
7783 do_tablejump (index, mode, range, table_label, default_label)
7784 rtx index, range, table_label, default_label;
7785 enum machine_mode mode;
7787 register rtx temp, vector;
7789 /* Do an unsigned comparison (in the proper mode) between the index
7790 expression and the value which represents the length of the range.
7791 Since we just finished subtracting the lower bound of the range
7792 from the index expression, this comparison allows us to simultaneously
7793 check that the original index expression value is both greater than
7794 or equal to the minimum value of the range and less than or equal to
7795 the maximum value of the range. */
/* range < index (unsigned) means index is out of bounds.  */
7797 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
7798 emit_jump_insn (gen_bltu (default_label));
7800 /* If index is in range, it must fit in Pmode.
7801 Convert to Pmode so we can index with it. */
7803 index = convert_to_mode (Pmode, index, 1);
7805 /* If flag_force_addr were to affect this address
7806 it could interfere with the tricky assumptions made
7807 about addresses that contain label-refs,
7808 which may be valid only very near the tablejump itself. */
7809 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7810 GET_MODE_SIZE, because this indicates how large insns are. The other
7811 uses should all be Pmode, because they are addresses. This code
7812 could fail if addresses and insns are not the same size. */
/* Address of table entry: table_label + index * entry_size.  */
7813 index = memory_address_noforce
7815 gen_rtx (PLUS, Pmode,
7816 gen_rtx (MULT, Pmode, index,
7817 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7818 gen_rtx (LABEL_REF, Pmode, table_label)));
7819 temp = gen_reg_rtx (CASE_VECTOR_MODE);
/* The dispatch table is constant, so mark the load unchanging.  */
7820 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7821 RTX_UNCHANGING_P (vector) = 1;
7822 convert_move (temp, vector, 0);
7824 emit_jump_insn (gen_tablejump (temp, table_label));
7826 #ifndef CASE_VECTOR_PC_RELATIVE
7827 /* If we are generating PIC code or if the table is PC-relative, the
7828 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7834 #endif /* HAVE_tablejump */