1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first.
40 They should if the stack and args grow in opposite directions, but
41 only if we have push insns. */
45 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNARD)
46 #define PUSH_ARGS_REVERSED /* If it's last to first */
51 #ifndef STACK_PUSH_CODE
52 #ifdef STACK_GROWS_DOWNWARD
53 #define STACK_PUSH_CODE PRE_DEC
55 #define STACK_PUSH_CODE PRE_INC
59 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
60 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
62 /* If this is nonzero, we do not bother generating VOLATILE
63 around volatile memory references, and we are willing to
64 output indirect addresses. If cse is to follow, we reject
65 indirect addresses so a useful potential cse is generated;
66 if it is used only once, instruction combination will produce
67 the same indirect address eventually. */
70 /* Nonzero to generate code for all the subroutines within an
71 expression before generating the upper levels of the expression.
72 Nowadays this is never zero. */
73 int do_preexpand_calls = 1;
75 /* Number of units that we should eventually pop off the stack.
76 These are the arguments to function calls that have already returned. */
77 int pending_stack_adjust;
79 /* Nonzero means stack pops must not be deferred, and deferred stack
80 pops must not be output. It is nonzero inside a function call,
81 inside a conditional expression, inside a statement expression,
82 and in other cases as well. */
83 int inhibit_defer_pop;
85 /* A list of all cleanups which belong to the arguments of
86 function calls being expanded by expand_call. */
87 tree cleanups_this_call;
89 /* Nonzero means __builtin_saveregs has already been done in this function.
90 The value is the pseudoreg containing the value __builtin_saveregs
92 static rtx saveregs_value;
94 /* This structure is used by move_by_pieces to describe the move to
106 int explicit_inc_from;
112 static rtx enqueue_insn PROTO((rtx, rtx));
113 static int queued_subexp_p PROTO((rtx));
114 static void init_queue PROTO((void));
115 static void move_by_pieces PROTO((rtx, rtx, int, int));
116 static int move_by_pieces_ninsns PROTO((unsigned int, int));
117 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
118 struct move_by_pieces *));
119 static void group_insns PROTO((rtx));
120 static void store_constructor PROTO((tree, rtx));
121 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
122 enum machine_mode, int, int, int));
123 static tree save_noncopied_parts PROTO((tree, tree));
124 static tree init_noncopied_parts PROTO((tree, tree));
125 static int safe_from_p PROTO((rtx, tree));
126 static int fixed_type_p PROTO((tree));
127 static int get_pointer_alignment PROTO((tree, unsigned));
128 static tree string_constant PROTO((tree, tree *));
129 static tree c_strlen PROTO((tree));
130 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
131 static rtx expand_increment PROTO((tree, int));
132 static void preexpand_calls PROTO((tree));
133 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
134 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
135 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
136 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
137 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
138 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
140 /* Record for each mode whether we can move a register directly to or
141 from an object of that mode in memory. If we can't, we won't try
142 to use that mode directly when accessing a field of that mode. */
144 static char direct_load[NUM_MACHINE_MODES];
145 static char direct_store[NUM_MACHINE_MODES];
147 /* MOVE_RATIO is the number of move instructions that is better than
151 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
154 /* A value of around 6 would minimize code size; infinity would minimize
156 #define MOVE_RATIO 15
160 /* This array records the insn_code of insns to perform block moves. */
161 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
163 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
165 #ifndef SLOW_UNALIGNED_ACCESS
166 #define SLOW_UNALIGNED_ACCESS 0
169 /* This is run once per compilation to set up which modes can be used
170 directly in memory and to initialize the block move optab. */
176 enum machine_mode mode;
177 /* Try indexing by frame ptr and try by stack ptr.
178 It is known that on the Convex the stack ptr isn't a valid index.
179 With luck, one or the other is valid on any machine. */
180 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
181 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
184 insn = emit_insn (gen_rtx (SET, 0, 0));
185 pat = PATTERN (insn);
187 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
188 mode = (enum machine_mode) ((int) mode + 1))
194 direct_load[(int) mode] = direct_store[(int) mode] = 0;
195 PUT_MODE (mem, mode);
196 PUT_MODE (mem1, mode);
198 /* See if there is some register that can be used in this mode and
199 directly loaded or stored from memory. */
201 if (mode != VOIDmode && mode != BLKmode)
202 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
203 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
206 if (! HARD_REGNO_MODE_OK (regno, mode))
209 reg = gen_rtx (REG, mode, regno);
212 SET_DEST (pat) = reg;
213 if (recog (pat, insn, &num_clobbers) >= 0)
214 direct_load[(int) mode] = 1;
216 SET_SRC (pat) = mem1;
217 SET_DEST (pat) = reg;
218 if (recog (pat, insn, &num_clobbers) >= 0)
219 direct_load[(int) mode] = 1;
222 SET_DEST (pat) = mem;
223 if (recog (pat, insn, &num_clobbers) >= 0)
224 direct_store[(int) mode] = 1;
227 SET_DEST (pat) = mem1;
228 if (recog (pat, insn, &num_clobbers) >= 0)
229 direct_store[(int) mode] = 1;
232 movstr_optab[(int) mode] = CODE_FOR_nothing;
239 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
243 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
247 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
251 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
255 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
259 /* This is run at the start of compiling a function. */
266 pending_stack_adjust = 0;
267 inhibit_defer_pop = 0;
268 cleanups_this_call = 0;
273 /* Save all variables describing the current status into the structure *P.
274 This is used before starting a nested function. */
280 /* Instead of saving the postincrement queue, empty it. */
283 p->pending_stack_adjust = pending_stack_adjust;
284 p->inhibit_defer_pop = inhibit_defer_pop;
285 p->cleanups_this_call = cleanups_this_call;
286 p->saveregs_value = saveregs_value;
287 p->forced_labels = forced_labels;
289 pending_stack_adjust = 0;
290 inhibit_defer_pop = 0;
291 cleanups_this_call = 0;
296 /* Restore all variables describing the current status from the structure *P.
297 This is used after a nested function. */
300 restore_expr_status (p)
303 pending_stack_adjust = p->pending_stack_adjust;
304 inhibit_defer_pop = p->inhibit_defer_pop;
305 cleanups_this_call = p->cleanups_this_call;
306 saveregs_value = p->saveregs_value;
307 forced_labels = p->forced_labels;
310 /* Manage the queue of increment instructions to be output
311 for POSTINCREMENT_EXPR expressions, etc. */
313 static rtx pending_chain;
315 /* Queue up to increment (or change) VAR later. BODY says how:
316 BODY should be the same thing you would pass to emit_insn
317 to increment right away. It will go to emit_insn later on.
319 The value is a QUEUED expression to be used in place of VAR
320 where you want to guarantee the pre-incrementation value of VAR. */
323 enqueue_insn (var, body)
326 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
327 var, NULL_RTX, NULL_RTX, body, pending_chain);
328 return pending_chain;
331 /* Use protect_from_queue to convert a QUEUED expression
332 into something that you can put immediately into an instruction.
333 If the queued incrementation has not happened yet,
334 protect_from_queue returns the variable itself.
335 If the incrementation has happened, protect_from_queue returns a temp
336 that contains a copy of the old value of the variable.
338 Any time an rtx which might possibly be a QUEUED is to be put
339 into an instruction, it must be passed through protect_from_queue first.
340 QUEUED expressions are not meaningful in instructions.
342 Do not pass a value through protect_from_queue and then hold
343 on to it for a while before putting it in an instruction!
344 If the queue is flushed in between, incorrect code will result. */
347 protect_from_queue (x, modify)
351 register RTX_CODE code = GET_CODE (x);
353 #if 0 /* A QUEUED can hang around after the queue is forced out. */
354 /* Shortcut for most common case. */
355 if (pending_chain == 0)
361 /* A special hack for read access to (MEM (QUEUED ...))
362 to facilitate use of autoincrement.
363 Make a copy of the contents of the memory location
364 rather than a copy of the address, but not
365 if the value is of mode BLKmode. */
366 if (code == MEM && GET_MODE (x) != BLKmode
367 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
369 register rtx y = XEXP (x, 0);
370 XEXP (x, 0) = QUEUED_VAR (y);
373 register rtx temp = gen_reg_rtx (GET_MODE (x));
374 emit_insn_before (gen_move_insn (temp, x),
380 /* Otherwise, recursively protect the subexpressions of all
381 the kinds of rtx's that can contain a QUEUED. */
383 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
384 else if (code == PLUS || code == MULT)
386 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
387 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
391 /* If the increment has not happened, use the variable itself. */
392 if (QUEUED_INSN (x) == 0)
393 return QUEUED_VAR (x);
394 /* If the increment has happened and a pre-increment copy exists,
396 if (QUEUED_COPY (x) != 0)
397 return QUEUED_COPY (x);
398 /* The increment has happened but we haven't set up a pre-increment copy.
399 Set one up now, and use it. */
400 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
401 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
403 return QUEUED_COPY (x);
406 /* Return nonzero if X contains a QUEUED expression:
407 if it contains anything that will be altered by a queued increment.
408 We handle only combinations of MEM, PLUS, MINUS and MULT operators
409 since memory addresses generally contain only those. */
415 register enum rtx_code code = GET_CODE (x);
421 return queued_subexp_p (XEXP (x, 0));
425 return queued_subexp_p (XEXP (x, 0))
426 || queued_subexp_p (XEXP (x, 1));
431 /* Perform all the pending incrementations. */
437 while (p = pending_chain)
439 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
440 pending_chain = QUEUED_NEXT (p);
451 /* Copy data from FROM to TO, where the machine modes are not the same.
452 Both modes may be integer, or both may be floating.
453 UNSIGNEDP should be nonzero if FROM is an unsigned type.
454 This causes zero-extension instead of sign-extension. */
457 convert_move (to, from, unsignedp)
458 register rtx to, from;
461 enum machine_mode to_mode = GET_MODE (to);
462 enum machine_mode from_mode = GET_MODE (from);
463 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
464 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
468 /* rtx code for making an equivalent value. */
469 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
471 to = protect_from_queue (to, 1);
472 from = protect_from_queue (from, 0);
474 if (to_real != from_real)
477 /* If FROM is a SUBREG that indicates that we have already done at least
478 the required extension, strip it. We don't handle such SUBREGs as
481 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
482 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
483 >= GET_MODE_SIZE (to_mode))
484 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
485 from = gen_lowpart (to_mode, from), from_mode = to_mode;
487 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
490 if (to_mode == from_mode
491 || (from_mode == VOIDmode && CONSTANT_P (from)))
493 emit_move_insn (to, from);
499 #ifdef HAVE_extendqfhf2
500 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
502 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
506 #ifdef HAVE_extendqfsf2
507 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
509 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
513 #ifdef HAVE_extendqfdf2
514 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
516 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
520 #ifdef HAVE_extendqfxf2
521 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
523 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
527 #ifdef HAVE_extendqftf2
528 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
530 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
535 #ifdef HAVE_extendhfsf2
536 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
538 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
542 #ifdef HAVE_extendhfdf2
543 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
545 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
549 #ifdef HAVE_extendhfxf2
550 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
552 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
556 #ifdef HAVE_extendhftf2
557 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
559 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
564 #ifdef HAVE_extendsfdf2
565 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
567 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
571 #ifdef HAVE_extendsfxf2
572 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
574 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
578 #ifdef HAVE_extendsftf2
579 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
581 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
585 #ifdef HAVE_extenddfxf2
586 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
588 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
592 #ifdef HAVE_extenddftf2
593 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
595 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
600 #ifdef HAVE_trunchfqf2
601 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
603 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
607 #ifdef HAVE_truncsfqf2
608 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
610 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
614 #ifdef HAVE_truncdfqf2
615 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
617 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
621 #ifdef HAVE_truncxfqf2
622 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
624 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
628 #ifdef HAVE_trunctfqf2
629 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
631 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
635 #ifdef HAVE_truncsfhf2
636 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
638 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
642 #ifdef HAVE_truncdfhf2
643 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
645 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
649 #ifdef HAVE_truncxfhf2
650 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
652 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
656 #ifdef HAVE_trunctfhf2
657 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
659 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
663 #ifdef HAVE_truncdfsf2
664 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
666 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
670 #ifdef HAVE_truncxfsf2
671 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
673 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
677 #ifdef HAVE_trunctfsf2
678 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
680 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
684 #ifdef HAVE_truncxfdf2
685 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
687 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
691 #ifdef HAVE_trunctfdf2
692 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
694 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
706 libcall = extendsfdf2_libfunc;
710 libcall = extendsfxf2_libfunc;
714 libcall = extendsftf2_libfunc;
723 libcall = truncdfsf2_libfunc;
727 libcall = extenddfxf2_libfunc;
731 libcall = extenddftf2_libfunc;
740 libcall = truncxfsf2_libfunc;
744 libcall = truncxfdf2_libfunc;
753 libcall = trunctfsf2_libfunc;
757 libcall = trunctfdf2_libfunc;
763 if (libcall == (rtx) 0)
764 /* This conversion is not implemented yet. */
767 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
768 emit_move_insn (to, hard_libcall_value (to_mode));
772 /* Now both modes are integers. */
774 /* Handle expanding beyond a word. */
775 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
776 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
783 enum machine_mode lowpart_mode;
784 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
786 /* Try converting directly if the insn is supported. */
787 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
790 /* If FROM is a SUBREG, put it into a register. Do this
791 so that we always generate the same set of insns for
792 better cse'ing; if an intermediate assignment occurred,
793 we won't be doing the operation directly on the SUBREG. */
794 if (optimize > 0 && GET_CODE (from) == SUBREG)
795 from = force_reg (from_mode, from);
796 emit_unop_insn (code, to, from, equiv_code);
799 /* Next, try converting via full word. */
800 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
801 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
802 != CODE_FOR_nothing))
804 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
805 emit_unop_insn (code, to,
806 gen_lowpart (word_mode, to), equiv_code);
810 /* No special multiword conversion insn; do it by hand. */
813 /* Get a copy of FROM widened to a word, if necessary. */
814 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
815 lowpart_mode = word_mode;
817 lowpart_mode = from_mode;
819 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
821 lowpart = gen_lowpart (lowpart_mode, to);
822 emit_move_insn (lowpart, lowfrom);
824 /* Compute the value to put in each remaining word. */
826 fill_value = const0_rtx;
831 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
832 && STORE_FLAG_VALUE == -1)
834 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
836 fill_value = gen_reg_rtx (word_mode);
837 emit_insn (gen_slt (fill_value));
843 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
844 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
846 fill_value = convert_to_mode (word_mode, fill_value, 1);
850 /* Fill the remaining words. */
851 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
853 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
854 rtx subword = operand_subword (to, index, 1, to_mode);
859 if (fill_value != subword)
860 emit_move_insn (subword, fill_value);
863 insns = get_insns ();
866 emit_no_conflict_block (insns, to, from, NULL_RTX,
867 gen_rtx (equiv_code, to_mode, from));
871 /* Truncating multi-word to a word or less. */
872 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
873 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
875 convert_move (to, gen_lowpart (word_mode, from), 0);
879 /* Handle pointer conversion */ /* SPEE 900220 */
880 if (to_mode == PSImode)
882 if (from_mode != SImode)
883 from = convert_to_mode (SImode, from, unsignedp);
885 #ifdef HAVE_truncsipsi
888 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
891 #endif /* HAVE_truncsipsi */
895 if (from_mode == PSImode)
897 if (to_mode != SImode)
899 from = convert_to_mode (SImode, from, unsignedp);
904 #ifdef HAVE_extendpsisi
905 if (HAVE_extendpsisi)
907 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
910 #endif /* HAVE_extendpsisi */
915 /* Now follow all the conversions between integers
916 no more than a word long. */
918 /* For truncation, usually we can just refer to FROM in a narrower mode. */
919 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
920 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
921 GET_MODE_BITSIZE (from_mode)))
923 if (!((GET_CODE (from) == MEM
924 && ! MEM_VOLATILE_P (from)
925 && direct_load[(int) to_mode]
926 && ! mode_dependent_address_p (XEXP (from, 0)))
927 || GET_CODE (from) == REG
928 || GET_CODE (from) == SUBREG))
929 from = force_reg (from_mode, from);
930 emit_move_insn (to, gen_lowpart (to_mode, from));
934 /* Handle extension. */
935 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
937 /* Convert directly if that works. */
938 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
941 /* If FROM is a SUBREG, put it into a register. Do this
942 so that we always generate the same set of insns for
943 better cse'ing; if an intermediate assignment occurred,
944 we won't be doing the operation directly on the SUBREG. */
945 if (optimize > 0 && GET_CODE (from) == SUBREG)
946 from = force_reg (from_mode, from);
947 emit_unop_insn (code, to, from, equiv_code);
952 enum machine_mode intermediate;
954 /* Search for a mode to convert via. */
955 for (intermediate = from_mode; intermediate != VOIDmode;
956 intermediate = GET_MODE_WIDER_MODE (intermediate))
957 if ((can_extend_p (to_mode, intermediate, unsignedp)
959 && (can_extend_p (intermediate, from_mode, unsignedp)
960 != CODE_FOR_nothing))
962 convert_move (to, convert_to_mode (intermediate, from,
963 unsignedp), unsignedp);
967 /* No suitable intermediate mode. */
972 /* Support special truncate insns for certain modes. */
974 if (from_mode == DImode && to_mode == SImode)
976 #ifdef HAVE_truncdisi2
979 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
983 convert_move (to, force_reg (from_mode, from), unsignedp);
987 if (from_mode == DImode && to_mode == HImode)
989 #ifdef HAVE_truncdihi2
992 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
996 convert_move (to, force_reg (from_mode, from), unsignedp);
1000 if (from_mode == DImode && to_mode == QImode)
1002 #ifdef HAVE_truncdiqi2
1003 if (HAVE_truncdiqi2)
1005 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1009 convert_move (to, force_reg (from_mode, from), unsignedp);
1013 if (from_mode == SImode && to_mode == HImode)
1015 #ifdef HAVE_truncsihi2
1016 if (HAVE_truncsihi2)
1018 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1022 convert_move (to, force_reg (from_mode, from), unsignedp);
1026 if (from_mode == SImode && to_mode == QImode)
1028 #ifdef HAVE_truncsiqi2
1029 if (HAVE_truncsiqi2)
1031 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1035 convert_move (to, force_reg (from_mode, from), unsignedp);
1039 if (from_mode == HImode && to_mode == QImode)
1041 #ifdef HAVE_trunchiqi2
1042 if (HAVE_trunchiqi2)
1044 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1048 convert_move (to, force_reg (from_mode, from), unsignedp);
1052 /* Handle truncation of volatile memrefs, and so on;
1053 the things that couldn't be truncated directly,
1054 and for which there was no special instruction. */
1055 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1057 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1058 emit_move_insn (to, temp);
1062 /* Mode combination is not recognized. */
1066 /* Return an rtx for a value that would result
1067 from converting X to mode MODE.
1068 Both X and MODE may be floating, or both integer.
1069 UNSIGNEDP is nonzero if X is an unsigned value.
1070 This can be done by referring to a part of X in place
1071 or by copying to a new temporary with conversion.
1073 This function *must not* call protect_from_queue
1074 except when putting X into an insn (in which case convert_move does it). */
1077 convert_to_mode (mode, x, unsignedp)
1078 enum machine_mode mode;
1084 /* If FROM is a SUBREG that indicates that we have already done at least
1085 the required extension, strip it. */
1087 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1088 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1089 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1090 x = gen_lowpart (mode, x);
1092 if (mode == GET_MODE (x))
1095 /* There is one case that we must handle specially: If we are converting
1096 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1097 we are to interpret the constant as unsigned, gen_lowpart will do
1098 the wrong if the constant appears negative. What we want to do is
1099 make the high-order word of the constant zero, not all ones. */
1101 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1102 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1103 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1104 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1106 /* We can do this with a gen_lowpart if both desired and current modes
1107 are integer, and this is either a constant integer, a register, or a
1108 non-volatile MEM. Except for the constant case, we must be narrowing
1111 if (GET_CODE (x) == CONST_INT
1112 || (GET_MODE_CLASS (mode) == MODE_INT
1113 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1114 && (GET_CODE (x) == CONST_DOUBLE
1115 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1116 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1117 && direct_load[(int) mode]
1118 || GET_CODE (x) == REG)))))
1119 return gen_lowpart (mode, x);
1121 temp = gen_reg_rtx (mode);
1122 convert_move (temp, x, unsignedp);
1126 /* Generate several move instructions to copy LEN bytes
1127 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1128 The caller must pass FROM and TO
1129 through protect_from_queue before calling.
1130 ALIGN (in bytes) is maximum alignment we can assume. */
1133 move_by_pieces (to, from, len, align)
1137 struct move_by_pieces data;
1138 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1139 int max_size = MOVE_MAX + 1;
1142 data.to_addr = to_addr;
1143 data.from_addr = from_addr;
1147 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1148 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1150 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1151 || GET_CODE (from_addr) == POST_INC
1152 || GET_CODE (from_addr) == POST_DEC);
1154 data.explicit_inc_from = 0;
1155 data.explicit_inc_to = 0;
1157 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1158 if (data.reverse) data.offset = len;
1161 /* If copying requires more than two move insns,
1162 copy addresses to registers (to make displacements shorter)
1163 and use post-increment if available. */
1164 if (!(data.autinc_from && data.autinc_to)
1165 && move_by_pieces_ninsns (len, align) > 2)
1167 #ifdef HAVE_PRE_DECREMENT
1168 if (data.reverse && ! data.autinc_from)
1170 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1171 data.autinc_from = 1;
1172 data.explicit_inc_from = -1;
1175 #ifdef HAVE_POST_INCREMENT
1176 if (! data.autinc_from)
1178 data.from_addr = copy_addr_to_reg (from_addr);
1179 data.autinc_from = 1;
1180 data.explicit_inc_from = 1;
1183 if (!data.autinc_from && CONSTANT_P (from_addr))
1184 data.from_addr = copy_addr_to_reg (from_addr);
1185 #ifdef HAVE_PRE_DECREMENT
1186 if (data.reverse && ! data.autinc_to)
1188 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1190 data.explicit_inc_to = -1;
1193 #ifdef HAVE_POST_INCREMENT
1194 if (! data.reverse && ! data.autinc_to)
1196 data.to_addr = copy_addr_to_reg (to_addr);
1198 data.explicit_inc_to = 1;
1201 if (!data.autinc_to && CONSTANT_P (to_addr))
1202 data.to_addr = copy_addr_to_reg (to_addr);
1205 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1206 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1209 /* First move what we can in the largest integer mode, then go to
1210 successively smaller modes. */
1212 while (max_size > 1)
1214 enum machine_mode mode = VOIDmode, tmode;
1215 enum insn_code icode;
1217 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1218 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1219 if (GET_MODE_SIZE (tmode) < max_size)
1222 if (mode == VOIDmode)
1225 icode = mov_optab->handlers[(int) mode].insn_code;
1226 if (icode != CODE_FOR_nothing
1227 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1228 GET_MODE_SIZE (mode)))
1229 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1231 max_size = GET_MODE_SIZE (mode);
1234 /* The code above should have handled everything. */
1239 /* Return number of insns required to move L bytes by pieces.
1240 ALIGN (in bytes) is maximum alignment we can assume. */
1243 move_by_pieces_ninsns (l, align)
1247 register int n_insns = 0;
1248 int max_size = MOVE_MAX + 1;
1250 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1251 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1254 while (max_size > 1)
1256 enum machine_mode mode = VOIDmode, tmode;
1257 enum insn_code icode;
1259 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1260 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1261 if (GET_MODE_SIZE (tmode) < max_size)
1264 if (mode == VOIDmode)
1267 icode = mov_optab->handlers[(int) mode].insn_code;
1268 if (icode != CODE_FOR_nothing
1269 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1270 GET_MODE_SIZE (mode)))
1271 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1273 max_size = GET_MODE_SIZE (mode);
1279 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1280 with move instructions for mode MODE. GENFUN is the gen_... function
1281 to make a move insn for that mode. DATA has all the other info. */
1284 move_by_pieces_1 (genfun, mode, data)
1286 enum machine_mode mode;
1287 struct move_by_pieces *data;
1289 register int size = GET_MODE_SIZE (mode);
1290 register rtx to1, from1;
1292 while (data->len >= size)
1294 if (data->reverse) data->offset -= size;
1296 to1 = (data->autinc_to
1297 ? gen_rtx (MEM, mode, data->to_addr)
1298 : change_address (data->to, mode,
1299 plus_constant (data->to_addr, data->offset)));
1302 ? gen_rtx (MEM, mode, data->from_addr)
1303 : change_address (data->from, mode,
1304 plus_constant (data->from_addr, data->offset)));
1306 #ifdef HAVE_PRE_DECREMENT
1307 if (data->explicit_inc_to < 0)
1308 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1309 if (data->explicit_inc_from < 0)
1310 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1313 emit_insn ((*genfun) (to1, from1));
1314 #ifdef HAVE_POST_INCREMENT
1315 if (data->explicit_inc_to > 0)
1316 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1317 if (data->explicit_inc_from > 0)
1318 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1321 if (! data->reverse) data->offset += size;
1327 /* Emit code to move a block Y to a block X.
1328 This may be done with string-move instructions,
1329 with multiple scalar move instructions, or with a library call.
1331 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1333 SIZE is an rtx that says how long they are.
1334 ALIGN is the maximum alignment we can assume they have,
1335 measured in bytes. */
1338 emit_block_move (x, y, size, align)
1343 if (GET_MODE (x) != BLKmode)
1346 if (GET_MODE (y) != BLKmode)
1349 x = protect_from_queue (x, 1);
1350 y = protect_from_queue (y, 0);
1351 size = protect_from_queue (size, 0);
1353 if (GET_CODE (x) != MEM)
1355 if (GET_CODE (y) != MEM)
1360 if (GET_CODE (size) == CONST_INT
1361 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1362 move_by_pieces (x, y, INTVAL (size), align);
1365 /* Try the most limited insn first, because there's no point
1366 including more than one in the machine description unless
1367 the more limited one has some advantage. */
1369 rtx opalign = GEN_INT (align);
1370 enum machine_mode mode;
1372 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1373 mode = GET_MODE_WIDER_MODE (mode))
1375 enum insn_code code = movstr_optab[(int) mode];
1377 if (code != CODE_FOR_nothing
1378 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1379 here because if SIZE is less than the mode mask, as it is
1380 returned by the macro, it will definitely be less than the
1381 actual mode mask. */
1382 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1383 && (insn_operand_predicate[(int) code][0] == 0
1384 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1385 && (insn_operand_predicate[(int) code][1] == 0
1386 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1387 && (insn_operand_predicate[(int) code][3] == 0
1388 || (*insn_operand_predicate[(int) code][3]) (opalign,
1392 rtx last = get_last_insn ();
1395 op2 = convert_to_mode (mode, size, 1);
1396 if (insn_operand_predicate[(int) code][2] != 0
1397 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1398 op2 = copy_to_mode_reg (mode, op2);
1400 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1407 delete_insns_since (last);
1411 #ifdef TARGET_MEM_FUNCTIONS
1412 emit_library_call (memcpy_libfunc, 0,
1413 VOIDmode, 3, XEXP (x, 0), Pmode,
1415 convert_to_mode (Pmode, size, 1), Pmode);
1417 emit_library_call (bcopy_libfunc, 0,
1418 VOIDmode, 3, XEXP (y, 0), Pmode,
1420 convert_to_mode (Pmode, size, 1), Pmode);
1425 /* Copy all or part of a value X into registers starting at REGNO.
1426 The number of registers to be filled is NREGS. */
1429 move_block_to_reg (regno, x, nregs, mode)
1433 enum machine_mode mode;
1438 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1439 x = validize_mem (force_const_mem (mode, x));
1441 /* See if the machine can do this with a load multiple insn. */
1442 #ifdef HAVE_load_multiple
1443 last = get_last_insn ();
1444 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1452 delete_insns_since (last);
1455 for (i = 0; i < nregs; i++)
1456 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1457 operand_subword_force (x, i, mode));
1460 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1461 The number of registers to be filled is NREGS. */
1464 move_block_from_reg (regno, x, nregs)
1472 /* See if the machine can do this with a store multiple insn. */
1473 #ifdef HAVE_store_multiple
1474 last = get_last_insn ();
1475 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1483 delete_insns_since (last);
1486 for (i = 0; i < nregs; i++)
1488 rtx tem = operand_subword (x, i, 1, BLKmode);
1493 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1497 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1500 use_regs (regno, nregs)
1506 for (i = 0; i < nregs; i++)
1507 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1510 /* Mark the instructions since PREV as a libcall block.
1511 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1520 /* Find the instructions to mark */
1522 insn_first = NEXT_INSN (prev);
1524 insn_first = get_insns ();
1526 insn_last = get_last_insn ();
1528 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1529 REG_NOTES (insn_last));
1531 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1532 REG_NOTES (insn_first));
1535 /* Write zeros through the storage of OBJECT.
1536 If OBJECT has BLKmode, SIZE is its length in bytes. */
1539 clear_storage (object, size)
1543 if (GET_MODE (object) == BLKmode)
1545 #ifdef TARGET_MEM_FUNCTIONS
1546 emit_library_call (memset_libfunc, 0,
1548 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1549 GEN_INT (size), Pmode);
1551 emit_library_call (bzero_libfunc, 0,
1553 XEXP (object, 0), Pmode,
1554 GEN_INT (size), Pmode);
1558 emit_move_insn (object, const0_rtx);
1561 /* Generate code to copy Y into X.
1562 Both Y and X must have the same mode, except that
1563 Y can be a constant with VOIDmode.
1564 This mode cannot be BLKmode; use emit_block_move for that.
1566 Return the last instruction emitted. */
1569 emit_move_insn (x, y)
1572 enum machine_mode mode = GET_MODE (x);
1573 enum machine_mode submode;
1574 enum mode_class class = GET_MODE_CLASS (mode);
1577 x = protect_from_queue (x, 1);
1578 y = protect_from_queue (y, 0);
1580 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1583 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1584 y = force_const_mem (mode, y);
1586 /* If X or Y are memory references, verify that their addresses are valid
1588 if (GET_CODE (x) == MEM
1589 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1590 && ! push_operand (x, GET_MODE (x)))
1592 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1593 x = change_address (x, VOIDmode, XEXP (x, 0));
1595 if (GET_CODE (y) == MEM
1596 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1598 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1599 y = change_address (y, VOIDmode, XEXP (y, 0));
1601 if (mode == BLKmode)
1604 return emit_move_insn_1 (x, y);
1607 /* Low level part of emit_move_insn.
1608 Called just like emit_move_insn, but assumes X and Y
1609 are basically valid. */
1612 emit_move_insn_1 (x, y)
1615 enum machine_mode mode = GET_MODE (x);
1616 enum machine_mode submode;
1617 enum mode_class class = GET_MODE_CLASS (mode);
1620 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1621 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1622 (class == MODE_COMPLEX_INT
1623 ? MODE_INT : MODE_FLOAT),
1626 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1628 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1630 /* Expand complex moves by moving real part and imag part, if possible. */
1631 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1632 && submode != BLKmode
1633 && (mov_optab->handlers[(int) submode].insn_code
1634 != CODE_FOR_nothing))
1636 /* Don't split destination if it is a stack push. */
1637 int stack = push_operand (x, GET_MODE (x));
1638 rtx prev = get_last_insn ();
1640 /* Tell flow that the whole of the destination is being set. */
1641 if (GET_CODE (x) == REG)
1642 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1644 /* If this is a stack, push the highpart first, so it
1645 will be in the argument order.
1647 In that case, change_address is used only to convert
1648 the mode, not to change the address. */
1649 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1650 ((stack ? change_address (x, submode, (rtx) 0)
1651 : gen_highpart (submode, x)),
1652 gen_highpart (submode, y)));
1653 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1654 ((stack ? change_address (x, submode, (rtx) 0)
1655 : gen_lowpart (submode, x)),
1656 gen_lowpart (submode, y)));
1660 return get_last_insn ();
1663 /* This will handle any multi-word mode that lacks a move_insn pattern.
1664 However, you will get better code if you define such patterns,
1665 even if they must turn into multiple assembler instructions. */
1666 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1669 rtx prev_insn = get_last_insn ();
1672 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1675 rtx xpart = operand_subword (x, i, 1, mode);
1676 rtx ypart = operand_subword (y, i, 1, mode);
1678 /* If we can't get a part of Y, put Y into memory if it is a
1679 constant. Otherwise, force it into a register. If we still
1680 can't get a part of Y, abort. */
1681 if (ypart == 0 && CONSTANT_P (y))
1683 y = force_const_mem (mode, y);
1684 ypart = operand_subword (y, i, 1, mode);
1686 else if (ypart == 0)
1687 ypart = operand_subword_force (y, i, mode);
1689 if (xpart == 0 || ypart == 0)
1692 last_insn = emit_move_insn (xpart, ypart);
1694 /* Mark these insns as a libcall block. */
1695 group_insns (prev_insn);
1703 /* Pushing data onto the stack. */
1705 /* Push a block of length SIZE (perhaps variable)
1706 and return an rtx to address the beginning of the block.
1707 Note that it is not possible for the value returned to be a QUEUED.
1708 The value may be virtual_outgoing_args_rtx.
1710 EXTRA is the number of bytes of padding to push in addition to SIZE.
1711 BELOW nonzero means this padding comes at low addresses;
1712 otherwise, the padding comes at high addresses. */
1715 push_block (size, extra, below)
1720 if (CONSTANT_P (size))
1721 anti_adjust_stack (plus_constant (size, extra));
1722 else if (GET_CODE (size) == REG && extra == 0)
1723 anti_adjust_stack (size);
1726 rtx temp = copy_to_mode_reg (Pmode, size);
1728 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1729 temp, 0, OPTAB_LIB_WIDEN);
1730 anti_adjust_stack (temp);
1733 #ifdef STACK_GROWS_DOWNWARD
1734 temp = virtual_outgoing_args_rtx;
1735 if (extra != 0 && below)
1736 temp = plus_constant (temp, extra);
1738 if (GET_CODE (size) == CONST_INT)
1739 temp = plus_constant (virtual_outgoing_args_rtx,
1740 - INTVAL (size) - (below ? 0 : extra));
1741 else if (extra != 0 && !below)
1742 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1743 negate_rtx (Pmode, plus_constant (size, extra)));
1745 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1746 negate_rtx (Pmode, size));
1749 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1755 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1758 /* Generate code to push X onto the stack, assuming it has mode MODE and
1760 MODE is redundant except when X is a CONST_INT (since they don't
1762 SIZE is an rtx for the size of data to be copied (in bytes),
1763 needed only if X is BLKmode.
1765 ALIGN (in bytes) is maximum alignment we can assume.
1767 If PARTIAL and REG are both nonzero, then copy that many of the first
1768 words of X into registers starting with REG, and push the rest of X.
1769 The amount of space pushed is decreased by PARTIAL words,
1770 rounded *down* to a multiple of PARM_BOUNDARY.
1771 REG must be a hard register in this case.
1772 If REG is zero but PARTIAL is not, take any all others actions for an
1773 argument partially in registers, but do not actually load any
1776 EXTRA is the amount in bytes of extra space to leave next to this arg.
1777 This is ignored if an argument block has already been allocated.
1779 On a machine that lacks real push insns, ARGS_ADDR is the address of
1780 the bottom of the argument block for this call. We use indexing off there
1781 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1782 argument block has not been preallocated.
1784 ARGS_SO_FAR is the size of args previously pushed for this call. */
1787 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1788 args_addr, args_so_far)
1790 enum machine_mode mode;
1801 enum direction stack_direction
1802 #ifdef STACK_GROWS_DOWNWARD
1808 /* Decide where to pad the argument: `downward' for below,
1809 `upward' for above, or `none' for don't pad it.
1810 Default is below for small data on big-endian machines; else above. */
1811 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1813 /* Invert direction if stack is post-update. */
1814 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1815 if (where_pad != none)
1816 where_pad = (where_pad == downward ? upward : downward);
1818 xinner = x = protect_from_queue (x, 0);
1820 if (mode == BLKmode)
1822 /* Copy a block into the stack, entirely or partially. */
1825 int used = partial * UNITS_PER_WORD;
1826 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1834 /* USED is now the # of bytes we need not copy to the stack
1835 because registers will take care of them. */
1838 xinner = change_address (xinner, BLKmode,
1839 plus_constant (XEXP (xinner, 0), used));
1841 /* If the partial register-part of the arg counts in its stack size,
1842 skip the part of stack space corresponding to the registers.
1843 Otherwise, start copying to the beginning of the stack space,
1844 by setting SKIP to 0. */
1845 #ifndef REG_PARM_STACK_SPACE
1851 #ifdef PUSH_ROUNDING
1852 /* Do it with several push insns if that doesn't take lots of insns
1853 and if there is no difficulty with push insns that skip bytes
1854 on the stack for alignment purposes. */
1856 && GET_CODE (size) == CONST_INT
1858 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1860 /* Here we avoid the case of a structure whose weak alignment
1861 forces many pushes of a small amount of data,
1862 and such small pushes do rounding that causes trouble. */
1863 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1864 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1865 || PUSH_ROUNDING (align) == align)
1866 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1868 /* Push padding now if padding above and stack grows down,
1869 or if padding below and stack grows up.
1870 But if space already allocated, this has already been done. */
1871 if (extra && args_addr == 0
1872 && where_pad != none && where_pad != stack_direction)
1873 anti_adjust_stack (GEN_INT (extra));
1875 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1876 INTVAL (size) - used, align);
1879 #endif /* PUSH_ROUNDING */
1881 /* Otherwise make space on the stack and copy the data
1882 to the address of that space. */
1884 /* Deduct words put into registers from the size we must copy. */
1887 if (GET_CODE (size) == CONST_INT)
1888 size = GEN_INT (INTVAL (size) - used);
1890 size = expand_binop (GET_MODE (size), sub_optab, size,
1891 GEN_INT (used), NULL_RTX, 0,
1895 /* Get the address of the stack space.
1896 In this case, we do not deal with EXTRA separately.
1897 A single stack adjust will do. */
1900 temp = push_block (size, extra, where_pad == downward);
1903 else if (GET_CODE (args_so_far) == CONST_INT)
1904 temp = memory_address (BLKmode,
1905 plus_constant (args_addr,
1906 skip + INTVAL (args_so_far)));
1908 temp = memory_address (BLKmode,
1909 plus_constant (gen_rtx (PLUS, Pmode,
1910 args_addr, args_so_far),
1913 /* TEMP is the address of the block. Copy the data there. */
1914 if (GET_CODE (size) == CONST_INT
1915 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1918 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1919 INTVAL (size), align);
1922 /* Try the most limited insn first, because there's no point
1923 including more than one in the machine description unless
1924 the more limited one has some advantage. */
1925 #ifdef HAVE_movstrqi
1927 && GET_CODE (size) == CONST_INT
1928 && ((unsigned) INTVAL (size)
1929 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1931 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1932 xinner, size, GEN_INT (align));
1940 #ifdef HAVE_movstrhi
1942 && GET_CODE (size) == CONST_INT
1943 && ((unsigned) INTVAL (size)
1944 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1946 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1947 xinner, size, GEN_INT (align));
1955 #ifdef HAVE_movstrsi
1958 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1959 xinner, size, GEN_INT (align));
1967 #ifdef HAVE_movstrdi
1970 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1971 xinner, size, GEN_INT (align));
1980 #ifndef ACCUMULATE_OUTGOING_ARGS
1981 /* If the source is referenced relative to the stack pointer,
1982 copy it to another register to stabilize it. We do not need
1983 to do this if we know that we won't be changing sp. */
1985 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1986 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1987 temp = copy_to_reg (temp);
1990 /* Make inhibit_defer_pop nonzero around the library call
1991 to force it to pop the bcopy-arguments right away. */
1993 #ifdef TARGET_MEM_FUNCTIONS
1994 emit_library_call (memcpy_libfunc, 0,
1995 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1998 emit_library_call (bcopy_libfunc, 0,
1999 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2005 else if (partial > 0)
2007 /* Scalar partly in registers. */
2009 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2012 /* # words of start of argument
2013 that we must make space for but need not store. */
2014 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2015 int args_offset = INTVAL (args_so_far);
2018 /* Push padding now if padding above and stack grows down,
2019 or if padding below and stack grows up.
2020 But if space already allocated, this has already been done. */
2021 if (extra && args_addr == 0
2022 && where_pad != none && where_pad != stack_direction)
2023 anti_adjust_stack (GEN_INT (extra));
2025 /* If we make space by pushing it, we might as well push
2026 the real data. Otherwise, we can leave OFFSET nonzero
2027 and leave the space uninitialized. */
2031 /* Now NOT_STACK gets the number of words that we don't need to
2032 allocate on the stack. */
2033 not_stack = partial - offset;
2035 /* If the partial register-part of the arg counts in its stack size,
2036 skip the part of stack space corresponding to the registers.
2037 Otherwise, start copying to the beginning of the stack space,
2038 by setting SKIP to 0. */
2039 #ifndef REG_PARM_STACK_SPACE
2045 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2046 x = validize_mem (force_const_mem (mode, x));
2048 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2049 SUBREGs of such registers are not allowed. */
2050 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2051 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2052 x = copy_to_reg (x);
2054 /* Loop over all the words allocated on the stack for this arg. */
2055 /* We can do it by words, because any scalar bigger than a word
2056 has a size a multiple of a word. */
2057 #ifndef PUSH_ARGS_REVERSED
2058 for (i = not_stack; i < size; i++)
2060 for (i = size - 1; i >= not_stack; i--)
2062 if (i >= not_stack + offset)
2063 emit_push_insn (operand_subword_force (x, i, mode),
2064 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2066 GEN_INT (args_offset + ((i - not_stack + skip)
2067 * UNITS_PER_WORD)));
2073 /* Push padding now if padding above and stack grows down,
2074 or if padding below and stack grows up.
2075 But if space already allocated, this has already been done. */
2076 if (extra && args_addr == 0
2077 && where_pad != none && where_pad != stack_direction)
2078 anti_adjust_stack (GEN_INT (extra));
2080 #ifdef PUSH_ROUNDING
2082 addr = gen_push_operand ();
2085 if (GET_CODE (args_so_far) == CONST_INT)
2087 = memory_address (mode,
2088 plus_constant (args_addr, INTVAL (args_so_far)));
2090 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2093 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2097 /* If part should go in registers, copy that part
2098 into the appropriate registers. Do this now, at the end,
2099 since mem-to-mem copies above may do function calls. */
2100 if (partial > 0 && reg != 0)
2101 move_block_to_reg (REGNO (reg), x, partial, mode);
2103 if (extra && args_addr == 0 && where_pad == stack_direction)
2104 anti_adjust_stack (GEN_INT (extra));
2107 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2108 (emitting the queue unless NO_QUEUE is nonzero),
2109 for a value of mode OUTMODE,
2110 with NARGS different arguments, passed as alternating rtx values
2111 and machine_modes to convert them to.
2112 The rtx values should have been passed through protect_from_queue already.
2114 NO_QUEUE will be true if and only if the library call is a `const' call
2115 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2116 to the variable is_const in expand_call.
2118 NO_QUEUE must be true for const calls, because if it isn't, then
2119 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2120 and will be lost if the libcall sequence is optimized away.
2122 NO_QUEUE must be false for non-const calls, because if it isn't, the
2123 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2124 optimized. For instance, the instruction scheduler may incorrectly
2125 move memory references across the non-const call. */
2128 emit_library_call (va_alist)
2132 /* Total size in bytes of all the stack-parms scanned so far. */
2133 struct args_size args_size;
2134 /* Size of arguments before any adjustments (such as rounding). */
2135 struct args_size original_args_size;
2136 register int argnum;
2137 enum machine_mode outmode;
2144 CUMULATIVE_ARGS args_so_far;
2145 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2146 struct args_size offset; struct args_size size; };
2148 int old_inhibit_defer_pop = inhibit_defer_pop;
2153 orgfun = fun = va_arg (p, rtx);
2154 no_queue = va_arg (p, int);
2155 outmode = va_arg (p, enum machine_mode);
2156 nargs = va_arg (p, int);
2158 /* Copy all the libcall-arguments out of the varargs data
2159 and into a vector ARGVEC.
2161 Compute how to pass each argument. We only support a very small subset
2162 of the full argument passing conventions to limit complexity here since
2163 library functions shouldn't have many args. */
2165 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2167 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2169 args_size.constant = 0;
2172 for (count = 0; count < nargs; count++)
2174 rtx val = va_arg (p, rtx);
2175 enum machine_mode mode = va_arg (p, enum machine_mode);
2177 /* We cannot convert the arg value to the mode the library wants here;
2178 must do it earlier where we know the signedness of the arg. */
2180 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2183 /* On some machines, there's no way to pass a float to a library fcn.
2184 Pass it as a double instead. */
2185 #ifdef LIBGCC_NEEDS_DOUBLE
2186 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2187 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2190 /* There's no need to call protect_from_queue, because
2191 either emit_move_insn or emit_push_insn will do that. */
2193 /* Make sure it is a reasonable operand for a move or push insn. */
2194 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2195 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2196 val = force_operand (val, NULL_RTX);
2198 argvec[count].value = val;
2199 argvec[count].mode = mode;
2201 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2202 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2206 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2207 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2209 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2210 argvec[count].partial
2211 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2213 argvec[count].partial = 0;
2216 locate_and_pad_parm (mode, NULL_TREE,
2217 argvec[count].reg && argvec[count].partial == 0,
2218 NULL_TREE, &args_size, &argvec[count].offset,
2219 &argvec[count].size);
2221 if (argvec[count].size.var)
2224 #ifndef REG_PARM_STACK_SPACE
2225 if (argvec[count].partial)
2226 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2229 if (argvec[count].reg == 0 || argvec[count].partial != 0
2230 #ifdef REG_PARM_STACK_SPACE
2234 args_size.constant += argvec[count].size.constant;
2236 #ifdef ACCUMULATE_OUTGOING_ARGS
2237 /* If this arg is actually passed on the stack, it might be
2238 clobbering something we already put there (this library call might
2239 be inside the evaluation of an argument to a function whose call
2240 requires the stack). This will only occur when the library call
2241 has sufficient args to run out of argument registers. Abort in
2242 this case; if this ever occurs, code must be added to save and
2243 restore the arg slot. */
2245 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2249 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2253 /* If this machine requires an external definition for library
2254 functions, write one out. */
2255 assemble_external_libcall (fun);
2257 original_args_size = args_size;
2258 #ifdef STACK_BOUNDARY
2259 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2260 / STACK_BYTES) * STACK_BYTES);
2263 #ifdef REG_PARM_STACK_SPACE
2264 args_size.constant = MAX (args_size.constant,
2265 REG_PARM_STACK_SPACE (NULL_TREE));
2266 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2267 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2271 #ifdef ACCUMULATE_OUTGOING_ARGS
2272 if (args_size.constant > current_function_outgoing_args_size)
2273 current_function_outgoing_args_size = args_size.constant;
2274 args_size.constant = 0;
2277 #ifndef PUSH_ROUNDING
2278 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2281 #ifdef PUSH_ARGS_REVERSED
2282 #ifdef STACK_BOUNDARY
2283 /* If we push args individually in reverse order, perform stack alignment
2284 before the first push (the last arg). */
2286 anti_adjust_stack (GEN_INT (args_size.constant
2287 - original_args_size.constant));
2291 #ifdef PUSH_ARGS_REVERSED
2299 /* Push the args that need to be pushed. */
2301 for (count = 0; count < nargs; count++, argnum += inc)
2303 register enum machine_mode mode = argvec[argnum].mode;
2304 register rtx val = argvec[argnum].value;
2305 rtx reg = argvec[argnum].reg;
2306 int partial = argvec[argnum].partial;
2308 if (! (reg != 0 && partial == 0))
2309 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2310 argblock, GEN_INT (argvec[count].offset.constant));
2314 #ifndef PUSH_ARGS_REVERSED
2315 #ifdef STACK_BOUNDARY
2316 /* If we pushed args in forward order, perform stack alignment
2317 after pushing the last arg. */
2319 anti_adjust_stack (GEN_INT (args_size.constant
2320 - original_args_size.constant));
2324 #ifdef PUSH_ARGS_REVERSED
2330 /* Now load any reg parms into their regs. */
2332 for (count = 0; count < nargs; count++, argnum += inc)
2334 register enum machine_mode mode = argvec[argnum].mode;
2335 register rtx val = argvec[argnum].value;
2336 rtx reg = argvec[argnum].reg;
2337 int partial = argvec[argnum].partial;
2339 if (reg != 0 && partial == 0)
2340 emit_move_insn (reg, val);
2344 /* For version 1.37, try deleting this entirely. */
2348 /* Any regs containing parms remain in use through the call. */
2350 for (count = 0; count < nargs; count++)
2351 if (argvec[count].reg != 0)
2352 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2354 use_insns = get_insns ();
2357 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2359 /* Don't allow popping to be deferred, since then
2360 cse'ing of library calls could delete a call and leave the pop. */
2363 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2364 will set inhibit_defer_pop to that value. */
2366 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2367 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2368 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2369 old_inhibit_defer_pop + 1, use_insns, no_queue);
2371 /* Now restore inhibit_defer_pop to its actual original value. */
2375 /* Like emit_library_call except that an extra argument, VALUE,
2376 comes second and says where to store the result.
2377 (If VALUE is zero, the result comes in the function value register.) */
2380 emit_library_call_value (va_alist)
2384 /* Total size in bytes of all the stack-parms scanned so far. */
2385 struct args_size args_size;
2386 /* Size of arguments before any adjustments (such as rounding). */
2387 struct args_size original_args_size;
2388 register int argnum;
2389 enum machine_mode outmode;
2396 CUMULATIVE_ARGS args_so_far;
2397 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2398 struct args_size offset; struct args_size size; };
2400 int old_inhibit_defer_pop = inhibit_defer_pop;
2407 orgfun = fun = va_arg (p, rtx);
2408 value = va_arg (p, rtx);
2409 no_queue = va_arg (p, int);
2410 outmode = va_arg (p, enum machine_mode);
2411 nargs = va_arg (p, int);
2413 /* If this kind of value comes back in memory,
2414 decide where in memory it should come back. */
2415 if (RETURN_IN_MEMORY (type_for_mode (outmode, 0)))
2417 if (GET_CODE (value) == MEM)
2420 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2423 /* ??? Unfinished: must pass the memory address as an argument. */
2425 /* Copy all the libcall-arguments out of the varargs data
2426 and into a vector ARGVEC.
2428 Compute how to pass each argument. We only support a very small subset
2429 of the full argument passing conventions to limit complexity here since
2430 library functions shouldn't have many args. */
2432 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2434 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2436 args_size.constant = 0;
2441 /* If there's a structure value address to be passed,
2442 either pass it in the special place, or pass it as an extra argument. */
2445 rtx addr = XEXP (mem_value, 0);
2447 if (! struct_value_rtx)
2451 /* Make sure it is a reasonable operand for a move or push insn. */
2452 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2453 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2454 addr = force_operand (addr, NULL_RTX);
2456 argvec[count].value = addr;
2457 argvec[count].mode = outmode;
2458 argvec[count].partial = 0;
2460 argvec[count].reg = FUNCTION_ARG (args_so_far, outmode, NULL_TREE, 1);
2461 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2462 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, outmode, NULL_TREE, 1))
2466 locate_and_pad_parm (outmode, NULL_TREE,
2467 argvec[count].reg && argvec[count].partial == 0,
2468 NULL_TREE, &args_size, &argvec[count].offset,
2469 &argvec[count].size);
2472 if (argvec[count].reg == 0 || argvec[count].partial != 0
2473 #ifdef REG_PARM_STACK_SPACE
2477 args_size.constant += argvec[count].size.constant;
2479 FUNCTION_ARG_ADVANCE (args_so_far, outmode, (tree)0, 1);
2483 for (; count < nargs; count++)
2485 rtx val = va_arg (p, rtx);
2486 enum machine_mode mode = va_arg (p, enum machine_mode);
2488 /* We cannot convert the arg value to the mode the library wants here;
2489 must do it earlier where we know the signedness of the arg. */
2491 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2494 /* On some machines, there's no way to pass a float to a library fcn.
2495 Pass it as a double instead. */
2496 #ifdef LIBGCC_NEEDS_DOUBLE
2497 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2498 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2501 /* There's no need to call protect_from_queue, because
2502 either emit_move_insn or emit_push_insn will do that. */
2504 /* Make sure it is a reasonable operand for a move or push insn. */
2505 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2506 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2507 val = force_operand (val, NULL_RTX);
2509 argvec[count].value = val;
2510 argvec[count].mode = mode;
2512 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2513 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2517 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2518 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2520 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2521 argvec[count].partial
2522 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2524 argvec[count].partial = 0;
2527 locate_and_pad_parm (mode, NULL_TREE,
2528 argvec[count].reg && argvec[count].partial == 0,
2529 NULL_TREE, &args_size, &argvec[count].offset,
2530 &argvec[count].size);
2532 if (argvec[count].size.var)
2535 #ifndef REG_PARM_STACK_SPACE
2536 if (argvec[count].partial)
2537 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2540 if (argvec[count].reg == 0 || argvec[count].partial != 0
2541 #ifdef REG_PARM_STACK_SPACE
2545 args_size.constant += argvec[count].size.constant;
2547 #ifdef ACCUMULATE_OUTGOING_ARGS
2548 /* If this arg is actually passed on the stack, it might be
2549 clobbering something we already put there (this library call might
2550 be inside the evaluation of an argument to a function whose call
2551 requires the stack). This will only occur when the library call
2552 has sufficient args to run out of argument registers. Abort in
2553 this case; if this ever occurs, code must be added to save and
2554 restore the arg slot. */
2556 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2560 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2564 /* If this machine requires an external definition for library
2565 functions, write one out. */
2566 assemble_external_libcall (fun);
2568 original_args_size = args_size;
2569 #ifdef STACK_BOUNDARY
2570 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2571 / STACK_BYTES) * STACK_BYTES);
2574 #ifdef REG_PARM_STACK_SPACE
2575 args_size.constant = MAX (args_size.constant,
2576 REG_PARM_STACK_SPACE (NULL_TREE));
2577 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2578 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2582 #ifdef ACCUMULATE_OUTGOING_ARGS
2583 if (args_size.constant > current_function_outgoing_args_size)
2584 current_function_outgoing_args_size = args_size.constant;
2585 args_size.constant = 0;
2588 #ifndef PUSH_ROUNDING
2589 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2592 #ifdef PUSH_ARGS_REVERSED
2593 #ifdef STACK_BOUNDARY
2594 /* If we push args individually in reverse order, perform stack alignment
2595 before the first push (the last arg). */
2597 anti_adjust_stack (GEN_INT (args_size.constant
2598 - original_args_size.constant));
2602 #ifdef PUSH_ARGS_REVERSED
2610 /* Push the args that need to be pushed. */
2612 for (count = 0; count < nargs; count++, argnum += inc)
2614 register enum machine_mode mode = argvec[argnum].mode;
2615 register rtx val = argvec[argnum].value;
2616 rtx reg = argvec[argnum].reg;
2617 int partial = argvec[argnum].partial;
2619 if (! (reg != 0 && partial == 0))
2620 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2621 argblock, GEN_INT (argvec[count].offset.constant));
2625 #ifndef PUSH_ARGS_REVERSED
2626 #ifdef STACK_BOUNDARY
2627 /* If we pushed args in forward order, perform stack alignment
2628 after pushing the last arg. */
2630 anti_adjust_stack (GEN_INT (args_size.constant
2631 - original_args_size.constant));
2635 #ifdef PUSH_ARGS_REVERSED
2641 /* Now load any reg parms into their regs. */
2643 if (mem_value != 0 && struct_value_rtx != 0)
2644 emit_move_insn (struct_value_rtx, XEXP (mem_value, 0));
2646 for (count = 0; count < nargs; count++, argnum += inc)
2648 register enum machine_mode mode = argvec[argnum].mode;
2649 register rtx val = argvec[argnum].value;
2650 rtx reg = argvec[argnum].reg;
2651 int partial = argvec[argnum].partial;
2653 if (reg != 0 && partial == 0)
2654 emit_move_insn (reg, val);
2659 /* For version 1.37, try deleting this entirely. */
2664 /* Any regs containing parms remain in use through the call. */
2666 for (count = 0; count < nargs; count++)
2667 if (argvec[count].reg != 0)
2668 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2670 use_insns = get_insns ();
2673 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2675 /* Don't allow popping to be deferred, since then
2676 cse'ing of library calls could delete a call and leave the pop. */
2679 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2680 will set inhibit_defer_pop to that value. */
2682 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2683 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2684 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2685 old_inhibit_defer_pop + 1, use_insns, no_queue);
2687 /* Now restore inhibit_defer_pop to its actual original value. */
2690 /* Copy the value to the right place. */
2691 if (outmode != VOIDmode)
2696 value = hard_libcall_value (outmode);
2697 if (value != mem_value)
2698 emit_move_insn (value, mem_value);
2700 else if (value != 0)
2701 emit_move_insn (value, hard_libcall_value (outmode));
2705 /* Expand an assignment that stores the value of FROM into TO.
2706 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2707 (This may contain a QUEUED rtx.)
2708 Otherwise, the returned value is not meaningful.
2710 SUGGEST_REG is no longer actually used.
2711 It used to mean, copy the value through a register
2712 and return that register, if that is possible.
2713 But now we do this if WANT_VALUE.
2715 If the value stored is a constant, we return the constant. */
2718 expand_assignment (to, from, want_value, suggest_reg)
2723 register rtx to_rtx = 0;
2726 /* Don't crash if the lhs of the assignment was erroneous. */
2728 if (TREE_CODE (to) == ERROR_MARK)
2729 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2731 /* Assignment of a structure component needs special treatment
2732 if the structure component's rtx is not simply a MEM.
2733 Assignment of an array element at a constant index
2734 has the same problem. */
2736 if (TREE_CODE (to) == COMPONENT_REF
2737 || TREE_CODE (to) == BIT_FIELD_REF
2738 || (TREE_CODE (to) == ARRAY_REF
2739 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2740 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2742 enum machine_mode mode1;
2748 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2749 &mode1, &unsignedp, &volatilep);
2751 /* If we are going to use store_bit_field and extract_bit_field,
2752 make sure to_rtx will be safe for multiple use. */
2754 if (mode1 == VOIDmode && want_value)
2755 tem = stabilize_reference (tem);
2757 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2760 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2762 if (GET_CODE (to_rtx) != MEM)
2764 to_rtx = change_address (to_rtx, VOIDmode,
2765 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2766 force_reg (Pmode, offset_rtx)));
2770 if (GET_CODE (to_rtx) == MEM)
2771 MEM_VOLATILE_P (to_rtx) = 1;
2772 #if 0 /* This was turned off because, when a field is volatile
2773 in an object which is not volatile, the object may be in a register,
2774 and then we would abort over here. */
2780 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2782 /* Spurious cast makes HPUX compiler happy. */
2783 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2786 /* Required alignment of containing datum. */
2787 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2788 int_size_in_bytes (TREE_TYPE (tem)));
2789 preserve_temp_slots (result);
2795 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2796 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2799 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2801 /* Don't move directly into a return register. */
2802 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2804 rtx temp = expand_expr (from, 0, VOIDmode, 0);
2805 emit_move_insn (to_rtx, temp);
2806 preserve_temp_slots (to_rtx);
2811 /* In case we are returning the contents of an object which overlaps
2812 the place the value is being stored, use a safe function when copying
2813 a value through a pointer into a structure value return block. */
2814 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2815 && current_function_returns_struct
2816 && !current_function_returns_pcc_struct)
2818 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2819 rtx size = expr_size (from);
2821 #ifdef TARGET_MEM_FUNCTIONS
2822 emit_library_call (memcpy_libfunc, 0,
2823 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2824 XEXP (from_rtx, 0), Pmode,
2827 emit_library_call (bcopy_libfunc, 0,
2828 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2829 XEXP (to_rtx, 0), Pmode,
2833 preserve_temp_slots (to_rtx);
2838 /* Compute FROM and store the value in the rtx we got. */
2840 result = store_expr (from, to_rtx, want_value);
2841 preserve_temp_slots (result);
2846 /* Generate code for computing expression EXP,
2847 and storing the value into TARGET.
2848 Returns TARGET or an equivalent value.
2849 TARGET may contain a QUEUED rtx.
2851 If SUGGEST_REG is nonzero, copy the value through a register
2852 and return that register, if that is possible.
2854 If the value stored is a constant, we return the constant. */
2857 store_expr (exp, target, suggest_reg)
2859 register rtx target;
2863 int dont_return_target = 0;
2865 if (TREE_CODE (exp) == COMPOUND_EXPR)
2867 /* Perform first part of compound expression, then assign from second
2869 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2871 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2873 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2875 /* For conditional expression, get safe form of the target. Then
2876 test the condition, doing the appropriate assignment on either
2877 side. This avoids the creation of unnecessary temporaries.
2878 For non-BLKmode, it is more efficient not to do this. */
2880 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2883 target = protect_from_queue (target, 1);
2886 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2887 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2889 emit_jump_insn (gen_jump (lab2));
2892 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2898 else if (suggest_reg && GET_CODE (target) == MEM
2899 && GET_MODE (target) != BLKmode)
2900 /* If target is in memory and caller wants value in a register instead,
2901 arrange that. Pass TARGET as target for expand_expr so that,
2902 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2903 We know expand_expr will not use the target in that case. */
2905 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2906 GET_MODE (target), 0);
2907 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2908 temp = copy_to_reg (temp);
2909 dont_return_target = 1;
2911 else if (queued_subexp_p (target))
2912 /* If target contains a postincrement, it is not safe
2913 to use as the returned value. It would access the wrong
2914 place by the time the queued increment gets output.
2915 So copy the value through a temporary and use that temp
2918 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2920 /* Expand EXP into a new pseudo. */
2921 temp = gen_reg_rtx (GET_MODE (target));
2922 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2925 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2926 dont_return_target = 1;
2928 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2929 /* If this is an scalar in a register that is stored in a wider mode
2930 than the declared mode, compute the result into its declared mode
2931 and then convert to the wider mode. Our value is the computed
2934 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2935 convert_move (SUBREG_REG (target), temp,
2936 SUBREG_PROMOTED_UNSIGNED_P (target));
2941 temp = expand_expr (exp, target, GET_MODE (target), 0);
2942 /* DO return TARGET if it's a specified hardware register.
2943 expand_return relies on this. */
2944 if (!(target && GET_CODE (target) == REG
2945 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2946 && CONSTANT_P (temp))
2947 dont_return_target = 1;
2950 /* If value was not generated in the target, store it there.
2951 Convert the value to TARGET's type first if nec. */
2953 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2955 target = protect_from_queue (target, 1);
2956 if (GET_MODE (temp) != GET_MODE (target)
2957 && GET_MODE (temp) != VOIDmode)
2959 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2960 if (dont_return_target)
2962 /* In this case, we will return TEMP,
2963 so make sure it has the proper mode.
2964 But don't forget to store the value into TARGET. */
2965 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2966 emit_move_insn (target, temp);
2969 convert_move (target, temp, unsignedp);
2972 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2974 /* Handle copying a string constant into an array.
2975 The string constant may be shorter than the array.
2976 So copy just the string's actual length, and clear the rest. */
2979 /* Get the size of the data type of the string,
2980 which is actually the size of the target. */
2981 size = expr_size (exp);
2982 if (GET_CODE (size) == CONST_INT
2983 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2984 emit_block_move (target, temp, size,
2985 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2988 /* Compute the size of the data to copy from the string. */
2990 = fold (build (MIN_EXPR, sizetype,
2991 size_binop (CEIL_DIV_EXPR,
2992 TYPE_SIZE (TREE_TYPE (exp)),
2993 size_int (BITS_PER_UNIT)),
2995 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2996 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3000 /* Copy that much. */
3001 emit_block_move (target, temp, copy_size_rtx,
3002 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3004 /* Figure out how much is left in TARGET
3005 that we have to clear. */
3006 if (GET_CODE (copy_size_rtx) == CONST_INT)
3008 temp = plus_constant (XEXP (target, 0),
3009 TREE_STRING_LENGTH (exp));
3010 size = plus_constant (size,
3011 - TREE_STRING_LENGTH (exp));
3015 enum machine_mode size_mode = Pmode;
3017 temp = force_reg (Pmode, XEXP (target, 0));
3018 temp = expand_binop (size_mode, add_optab, temp,
3019 copy_size_rtx, NULL_RTX, 0,
3022 size = expand_binop (size_mode, sub_optab, size,
3023 copy_size_rtx, NULL_RTX, 0,
3026 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3027 GET_MODE (size), 0, 0);
3028 label = gen_label_rtx ();
3029 emit_jump_insn (gen_blt (label));
3032 if (size != const0_rtx)
3034 #ifdef TARGET_MEM_FUNCTIONS
3035 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3036 temp, Pmode, const0_rtx, Pmode, size, Pmode);
3038 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3039 temp, Pmode, size, Pmode);
3046 else if (GET_MODE (temp) == BLKmode)
3047 emit_block_move (target, temp, expr_size (exp),
3048 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3050 emit_move_insn (target, temp);
3052 if (dont_return_target)
3057 /* Store the value of constructor EXP into the rtx TARGET.
3058 TARGET is either a REG or a MEM. */
3061 store_constructor (exp, target)
3065 tree type = TREE_TYPE (exp);
3067 /* We know our target cannot conflict, since safe_from_p has been called. */
3069 /* Don't try copying piece by piece into a hard register
3070 since that is vulnerable to being clobbered by EXP.
3071 Instead, construct in a pseudo register and then copy it all. */
3072 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3074 rtx temp = gen_reg_rtx (GET_MODE (target));
3075 store_constructor (exp, temp);
3076 emit_move_insn (target, temp);
3081 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
3085 /* Inform later passes that the whole union value is dead. */
3086 if (TREE_CODE (type) == UNION_TYPE)
3087 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3089 /* If we are building a static constructor into a register,
3090 set the initial value as zero so we can fold the value into
3092 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
3093 emit_move_insn (target, const0_rtx);
3095 /* If the constructor has fewer fields than the structure,
3096 clear the whole structure first. */
3097 else if (list_length (CONSTRUCTOR_ELTS (exp))
3098 != list_length (TYPE_FIELDS (type)))
3099 clear_storage (target, int_size_in_bytes (type));
3101 /* Inform later passes that the old value is dead. */
3102 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3104 /* Store each element of the constructor into
3105 the corresponding field of TARGET. */
3107 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3109 register tree field = TREE_PURPOSE (elt);
3110 register enum machine_mode mode;
3115 /* Just ignore missing fields.
3116 We cleared the whole structure, above,
3117 if any fields are missing. */
3121 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3122 unsignedp = TREE_UNSIGNED (field);
3123 mode = DECL_MODE (field);
3124 if (DECL_BIT_FIELD (field))
3127 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
3128 /* ??? This case remains to be written. */
3131 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
3133 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3134 /* The alignment of TARGET is
3135 at least what its type requires. */
3137 TYPE_ALIGN (type) / BITS_PER_UNIT,
3138 int_size_in_bytes (type));
3141 else if (TREE_CODE (type) == ARRAY_TYPE)
3145 tree domain = TYPE_DOMAIN (type);
3146 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3147 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3148 tree elttype = TREE_TYPE (type);
3150 /* If the constructor has fewer fields than the structure,
3151 clear the whole structure first. Similarly if this this is
3152 static constructor of a non-BLKmode object. */
3154 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3155 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3156 clear_storage (target, maxelt - minelt + 1);
3158 /* Inform later passes that the old value is dead. */
3159 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3161 /* Store each element of the constructor into
3162 the corresponding element of TARGET, determined
3163 by counting the elements. */
3164 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3166 elt = TREE_CHAIN (elt), i++)
3168 register enum machine_mode mode;
3173 mode = TYPE_MODE (elttype);
3174 bitsize = GET_MODE_BITSIZE (mode);
3175 unsignedp = TREE_UNSIGNED (elttype);
3177 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3179 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3180 /* The alignment of TARGET is
3181 at least what its type requires. */
3183 TYPE_ALIGN (type) / BITS_PER_UNIT,
3184 int_size_in_bytes (type));
3192 /* Store the value of EXP (an expression tree)
3193 into a subfield of TARGET which has mode MODE and occupies
3194 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3195 If MODE is VOIDmode, it means that we are storing into a bit-field.
3197 If VALUE_MODE is VOIDmode, return nothing in particular.
3198 UNSIGNEDP is not used in this case.
3200 Otherwise, return an rtx for the value stored. This rtx
3201 has mode VALUE_MODE if that is convenient to do.
3202 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3204 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3205 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3208 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3209 unsignedp, align, total_size)
3211 int bitsize, bitpos;
3212 enum machine_mode mode;
3214 enum machine_mode value_mode;
3219 HOST_WIDE_INT width_mask = 0;
3221 if (bitsize < HOST_BITS_PER_WIDE_INT)
3222 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3224 /* If we are storing into an unaligned field of an aligned union that is
3225 in a register, we may have the mode of TARGET being an integer mode but
3226 MODE == BLKmode. In that case, get an aligned object whose size and
3227 alignment are the same as TARGET and store TARGET into it (we can avoid
3228 the store if the field being stored is the entire width of TARGET). Then
3229 call ourselves recursively to store the field into a BLKmode version of
3230 that object. Finally, load from the object into TARGET. This is not
3231 very efficient in general, but should only be slightly more expensive
3232 than the otherwise-required unaligned accesses. Perhaps this can be
3233 cleaned up later. */
3236 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3238 rtx object = assign_stack_temp (GET_MODE (target),
3239 GET_MODE_SIZE (GET_MODE (target)), 0);
3240 rtx blk_object = copy_rtx (object);
3242 PUT_MODE (blk_object, BLKmode);
3244 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3245 emit_move_insn (object, target);
3247 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3250 emit_move_insn (target, object);
3255 /* If the structure is in a register or if the component
3256 is a bit field, we cannot use addressing to access it.
3257 Use bit-field techniques or SUBREG to store in it. */
3259 if (mode == VOIDmode
3260 || (mode != BLKmode && ! direct_store[(int) mode])
3261 || GET_CODE (target) == REG
3262 || GET_CODE (target) == SUBREG)
3264 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3265 /* Store the value in the bitfield. */
3266 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3267 if (value_mode != VOIDmode)
3269 /* The caller wants an rtx for the value. */
3270 /* If possible, avoid refetching from the bitfield itself. */
3272 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3275 enum machine_mode tmode;
3278 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3279 tmode = GET_MODE (temp);
3280 if (tmode == VOIDmode)
3282 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3283 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3284 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3286 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3287 NULL_RTX, value_mode, 0, align,
3294 rtx addr = XEXP (target, 0);
3297 /* If a value is wanted, it must be the lhs;
3298 so make the address stable for multiple use. */
3300 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3301 && ! CONSTANT_ADDRESS_P (addr)
3302 /* A frame-pointer reference is already stable. */
3303 && ! (GET_CODE (addr) == PLUS
3304 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3305 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3306 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3307 addr = copy_to_reg (addr);
3309 /* Now build a reference to just the desired component. */
3311 to_rtx = change_address (target, mode,
3312 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3313 MEM_IN_STRUCT_P (to_rtx) = 1;
3315 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3319 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3320 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3321 ARRAY_REFs at constant positions and find the ultimate containing object,
3324 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3325 bit position, and *PUNSIGNEDP to the signedness of the field.
3326 If the position of the field is variable, we store a tree
3327 giving the variable offset (in units) in *POFFSET.
3328 This offset is in addition to the bit position.
3329 If the position is not variable, we store 0 in *POFFSET.
3331 If any of the extraction expressions is volatile,
3332 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3334 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3335 is a mode that can be used to access the field. In that case, *PBITSIZE
3338 If the field describes a variable-sized object, *PMODE is set to
3339 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3340 this case, but the address of the object can be found. */
3343 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3344 punsignedp, pvolatilep)
3349 enum machine_mode *pmode;
3354 enum machine_mode mode = VOIDmode;
3357 if (TREE_CODE (exp) == COMPONENT_REF)
3359 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3360 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3361 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3362 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3364 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3366 size_tree = TREE_OPERAND (exp, 1);
3367 *punsignedp = TREE_UNSIGNED (exp);
3371 mode = TYPE_MODE (TREE_TYPE (exp));
3372 *pbitsize = GET_MODE_BITSIZE (mode);
3373 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3378 if (TREE_CODE (size_tree) != INTEGER_CST)
3379 mode = BLKmode, *pbitsize = -1;
3381 *pbitsize = TREE_INT_CST_LOW (size_tree);
3384 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3385 and find the ultimate containing object. */
3391 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3393 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3394 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3395 : TREE_OPERAND (exp, 2));
3397 if (TREE_CODE (pos) == PLUS_EXPR)
3400 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3402 constant = TREE_OPERAND (pos, 0);
3403 var = TREE_OPERAND (pos, 1);
3405 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3407 constant = TREE_OPERAND (pos, 1);
3408 var = TREE_OPERAND (pos, 0);
3412 *pbitpos += TREE_INT_CST_LOW (constant);
3414 offset = size_binop (PLUS_EXPR, offset,
3415 size_binop (FLOOR_DIV_EXPR, var,
3416 size_int (BITS_PER_UNIT)));
3418 offset = size_binop (FLOOR_DIV_EXPR, var,
3419 size_int (BITS_PER_UNIT));
3421 else if (TREE_CODE (pos) == INTEGER_CST)
3422 *pbitpos += TREE_INT_CST_LOW (pos);
3425 /* Assume here that the offset is a multiple of a unit.
3426 If not, there should be an explicitly added constant. */
3428 offset = size_binop (PLUS_EXPR, offset,
3429 size_binop (FLOOR_DIV_EXPR, pos,
3430 size_int (BITS_PER_UNIT)));
3432 offset = size_binop (FLOOR_DIV_EXPR, pos,
3433 size_int (BITS_PER_UNIT));
3437 else if (TREE_CODE (exp) == ARRAY_REF
3438 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3439 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
3441 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3442 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
3444 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3445 && ! ((TREE_CODE (exp) == NOP_EXPR
3446 || TREE_CODE (exp) == CONVERT_EXPR)
3447 && (TYPE_MODE (TREE_TYPE (exp))
3448 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3451 /* If any reference in the chain is volatile, the effect is volatile. */
3452 if (TREE_THIS_VOLATILE (exp))
3454 exp = TREE_OPERAND (exp, 0);
3457 /* If this was a bit-field, see if there is a mode that allows direct
3458 access in case EXP is in memory. */
3459 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
3461 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3462 if (mode == BLKmode)
3469 /* We aren't finished fixing the callers to really handle nonzero offset. */
3477 /* Given an rtx VALUE that may contain additions and multiplications,
3478 return an equivalent value that just refers to a register or memory.
3479 This is done by generating instructions to perform the arithmetic
3480 and returning a pseudo-register containing the value.
3482 The returned value may be a REG, SUBREG, MEM or constant. */
3485 force_operand (value, target)
3488 register optab binoptab = 0;
3489 /* Use a temporary to force order of execution of calls to
3493 /* Use subtarget as the target for operand 0 of a binary operation. */
3494 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3496 if (GET_CODE (value) == PLUS)
3497 binoptab = add_optab;
3498 else if (GET_CODE (value) == MINUS)
3499 binoptab = sub_optab;
3500 else if (GET_CODE (value) == MULT)
3502 op2 = XEXP (value, 1);
3503 if (!CONSTANT_P (op2)
3504 && !(GET_CODE (op2) == REG && op2 != subtarget))
3506 tmp = force_operand (XEXP (value, 0), subtarget);
3507 return expand_mult (GET_MODE (value), tmp,
3508 force_operand (op2, NULL_RTX),
3514 op2 = XEXP (value, 1);
3515 if (!CONSTANT_P (op2)
3516 && !(GET_CODE (op2) == REG && op2 != subtarget))
3518 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3520 binoptab = add_optab;
3521 op2 = negate_rtx (GET_MODE (value), op2);
3524 /* Check for an addition with OP2 a constant integer and our first
3525 operand a PLUS of a virtual register and something else. In that
3526 case, we want to emit the sum of the virtual register and the
3527 constant first and then add the other value. This allows virtual
3528 register instantiation to simply modify the constant rather than
3529 creating another one around this addition. */
3530 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3531 && GET_CODE (XEXP (value, 0)) == PLUS
3532 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3533 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3534 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3536 rtx temp = expand_binop (GET_MODE (value), binoptab,
3537 XEXP (XEXP (value, 0), 0), op2,
3538 subtarget, 0, OPTAB_LIB_WIDEN);
3539 return expand_binop (GET_MODE (value), binoptab, temp,
3540 force_operand (XEXP (XEXP (value, 0), 1), 0),
3541 target, 0, OPTAB_LIB_WIDEN);
3544 tmp = force_operand (XEXP (value, 0), subtarget);
3545 return expand_binop (GET_MODE (value), binoptab, tmp,
3546 force_operand (op2, NULL_RTX),
3547 target, 0, OPTAB_LIB_WIDEN);
3548 /* We give UNSIGNEDP = 0 to expand_binop
3549 because the only operations we are expanding here are signed ones. */
3554 /* Subroutine of expand_expr:
3555 save the non-copied parts (LIST) of an expr (LHS), and return a list
3556 which can restore these values to their previous values,
3557 should something modify their storage. */
3560 save_noncopied_parts (lhs, list)
3567 for (tail = list; tail; tail = TREE_CHAIN (tail))
3568 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3569 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3572 tree part = TREE_VALUE (tail);
3573 tree part_type = TREE_TYPE (part);
3574 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3575 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3576 int_size_in_bytes (part_type), 0);
3577 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3578 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3579 parts = tree_cons (to_be_saved,
3580 build (RTL_EXPR, part_type, NULL_TREE,
3583 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3588 /* Subroutine of expand_expr:
3589 record the non-copied parts (LIST) of an expr (LHS), and return a list
3590 which specifies the initial values of these parts. */
3593 init_noncopied_parts (lhs, list)
3600 for (tail = list; tail; tail = TREE_CHAIN (tail))
3601 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3602 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3605 tree part = TREE_VALUE (tail);
3606 tree part_type = TREE_TYPE (part);
3607 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3608 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3613 /* Subroutine of expand_expr: return nonzero iff there is no way that
3614 EXP can reference X, which is being modified. */
3617 safe_from_p (x, exp)
3627 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3628 find the underlying pseudo. */
3629 if (GET_CODE (x) == SUBREG)
3632 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3636 /* If X is a location in the outgoing argument area, it is always safe. */
3637 if (GET_CODE (x) == MEM
3638 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3639 || (GET_CODE (XEXP (x, 0)) == PLUS
3640 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3643 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3646 exp_rtl = DECL_RTL (exp);
3653 if (TREE_CODE (exp) == TREE_LIST)
3654 return ((TREE_VALUE (exp) == 0
3655 || safe_from_p (x, TREE_VALUE (exp)))
3656 && (TREE_CHAIN (exp) == 0
3657 || safe_from_p (x, TREE_CHAIN (exp))));
3662 return safe_from_p (x, TREE_OPERAND (exp, 0));
3666 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3667 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3671 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3672 the expression. If it is set, we conflict iff we are that rtx or
3673 both are in memory. Otherwise, we check all operands of the
3674 expression recursively. */
3676 switch (TREE_CODE (exp))
3679 return staticp (TREE_OPERAND (exp, 0));
3682 if (GET_CODE (x) == MEM)
3687 exp_rtl = CALL_EXPR_RTL (exp);
3690 /* Assume that the call will clobber all hard registers and
3692 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3693 || GET_CODE (x) == MEM)
3700 exp_rtl = RTL_EXPR_RTL (exp);
3702 /* We don't know what this can modify. */
3707 case WITH_CLEANUP_EXPR:
3708 exp_rtl = RTL_EXPR_RTL (exp);
3712 exp_rtl = SAVE_EXPR_RTL (exp);
3716 /* The only operand we look at is operand 1. The rest aren't
3717 part of the expression. */
3718 return safe_from_p (x, TREE_OPERAND (exp, 1));
3720 case METHOD_CALL_EXPR:
3721 /* This takes a rtx argument, but shouldn't appear here. */
3725 /* If we have an rtx, we do not need to scan our operands. */
3729 nops = tree_code_length[(int) TREE_CODE (exp)];
3730 for (i = 0; i < nops; i++)
3731 if (TREE_OPERAND (exp, i) != 0
3732 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3736 /* If we have an rtl, find any enclosed object. Then see if we conflict
3740 if (GET_CODE (exp_rtl) == SUBREG)
3742 exp_rtl = SUBREG_REG (exp_rtl);
3743 if (GET_CODE (exp_rtl) == REG
3744 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3748 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3749 are memory and EXP is not readonly. */
3750 return ! (rtx_equal_p (x, exp_rtl)
3751 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3752 && ! TREE_READONLY (exp)));
3755 /* If we reach here, it is safe. */
3759 /* Subroutine of expand_expr: return nonzero iff EXP is an
3760 expression whose type is statically determinable. */
3766 if (TREE_CODE (exp) == PARM_DECL
3767 || TREE_CODE (exp) == VAR_DECL
3768 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3769 || TREE_CODE (exp) == COMPONENT_REF
3770 || TREE_CODE (exp) == ARRAY_REF)
3775 /* expand_expr: generate code for computing expression EXP.
3776 An rtx for the computed value is returned. The value is never null.
3777 In the case of a void EXP, const0_rtx is returned.
3779 The value may be stored in TARGET if TARGET is nonzero.
3780 TARGET is just a suggestion; callers must assume that
3781 the rtx returned may not be the same as TARGET.
3783 If TARGET is CONST0_RTX, it means that the value will be ignored.
3785 If TMODE is not VOIDmode, it suggests generating the
3786 result in mode TMODE. But this is done only when convenient.
3787 Otherwise, TMODE is ignored and the value generated in its natural mode.
3788 TMODE is just a suggestion; callers must assume that
3789 the rtx returned may not have mode TMODE.
3791 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3792 with a constant address even if that address is not normally legitimate.
3793 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3795 If MODIFIER is EXPAND_SUM then when EXP is an addition
3796 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3797 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3798 products as above, or REG or MEM, or constant.
3799 Ordinarily in such cases we would output mul or add instructions
3800 and then return a pseudo reg containing the sum.
3802 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3803 it also marks a label as absolutely required (it can't be dead).
3804 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3805 This is used for outputting expressions used in initializers. */
3808 expand_expr (exp, target, tmode, modifier)
3811 enum machine_mode tmode;
3812 enum expand_modifier modifier;
3814 register rtx op0, op1, temp;
3815 tree type = TREE_TYPE (exp);
3816 int unsignedp = TREE_UNSIGNED (type);
3817 register enum machine_mode mode = TYPE_MODE (type);
3818 register enum tree_code code = TREE_CODE (exp);
3820 /* Use subtarget as the target for operand 0 of a binary operation. */
3821 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3822 rtx original_target = target;
3823 int ignore = target == const0_rtx;
3826 /* Don't use hard regs as subtargets, because the combiner
3827 can only handle pseudo regs. */
3828 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3830 /* Avoid subtargets inside loops,
3831 since they hide some invariant expressions. */
3832 if (preserve_subexpressions_p ())
3835 if (ignore) target = 0, original_target = 0;
3837 /* If will do cse, generate all results into pseudo registers
3838 since 1) that allows cse to find more things
3839 and 2) otherwise cse could produce an insn the machine
3842 if (! cse_not_expected && mode != BLKmode && target
3843 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3846 /* Ensure we reference a volatile object even if value is ignored. */
3847 if (ignore && TREE_THIS_VOLATILE (exp)
3848 && TREE_CODE (exp) != FUNCTION_DECL
3849 && mode != VOIDmode && mode != BLKmode)
3851 target = gen_reg_rtx (mode);
3852 temp = expand_expr (exp, target, VOIDmode, modifier);
3854 emit_move_insn (target, temp);
3862 tree function = decl_function_context (exp);
3863 /* Handle using a label in a containing function. */
3864 if (function != current_function_decl && function != 0)
3866 struct function *p = find_function_data (function);
3867 /* Allocate in the memory associated with the function
3868 that the label is in. */
3869 push_obstacks (p->function_obstack,
3870 p->function_maybepermanent_obstack);
3872 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3873 label_rtx (exp), p->forced_labels);
3876 else if (modifier == EXPAND_INITIALIZER)
3877 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3878 label_rtx (exp), forced_labels);
3879 temp = gen_rtx (MEM, FUNCTION_MODE,
3880 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3881 if (function != current_function_decl && function != 0)
3882 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3887 if (DECL_RTL (exp) == 0)
3889 error_with_decl (exp, "prior parameter's size depends on `%s'");
3890 return CONST0_RTX (mode);
3896 if (DECL_RTL (exp) == 0)
3898 /* Ensure variable marked as used
3899 even if it doesn't go through a parser. */
3900 TREE_USED (exp) = 1;
3901 /* Handle variables inherited from containing functions. */
3902 context = decl_function_context (exp);
3904 /* We treat inline_function_decl as an alias for the current function
3905 because that is the inline function whose vars, types, etc.
3906 are being merged into the current function.
3907 See expand_inline_function. */
3908 if (context != 0 && context != current_function_decl
3909 && context != inline_function_decl
3910 /* If var is static, we don't need a static chain to access it. */
3911 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3912 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3916 /* Mark as non-local and addressable. */
3917 DECL_NONLOCAL (exp) = 1;
3918 mark_addressable (exp);
3919 if (GET_CODE (DECL_RTL (exp)) != MEM)
3921 addr = XEXP (DECL_RTL (exp), 0);
3922 if (GET_CODE (addr) == MEM)
3923 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3925 addr = fix_lexical_addr (addr, exp);
3926 return change_address (DECL_RTL (exp), mode, addr);
3929 /* This is the case of an array whose size is to be determined
3930 from its initializer, while the initializer is still being parsed.
3932 if (GET_CODE (DECL_RTL (exp)) == MEM
3933 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3934 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3935 XEXP (DECL_RTL (exp), 0));
3936 if (GET_CODE (DECL_RTL (exp)) == MEM
3937 && modifier != EXPAND_CONST_ADDRESS
3938 && modifier != EXPAND_SUM
3939 && modifier != EXPAND_INITIALIZER)
3941 /* DECL_RTL probably contains a constant address.
3942 On RISC machines where a constant address isn't valid,
3943 make some insns to get that address into a register. */
3944 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3946 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3947 return change_address (DECL_RTL (exp), VOIDmode,
3948 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3951 /* If the mode of DECL_RTL does not match that of the decl, it
3952 must be a promoted value. We return a SUBREG of the wanted mode,
3953 but mark it so that we know that it was already extended. */
3955 if (GET_CODE (DECL_RTL (exp)) == REG
3956 && GET_MODE (DECL_RTL (exp)) != mode)
3958 enum machine_mode decl_mode = DECL_MODE (exp);
3960 /* Get the signedness used for this variable. Ensure we get the
3961 same mode we got when the variable was declared. */
3963 PROMOTE_MODE (decl_mode, unsignedp, type);
3965 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3968 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3969 SUBREG_PROMOTED_VAR_P (temp) = 1;
3970 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3974 return DECL_RTL (exp);
3977 return immed_double_const (TREE_INT_CST_LOW (exp),
3978 TREE_INT_CST_HIGH (exp),
3982 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3985 /* If optimized, generate immediate CONST_DOUBLE
3986 which will be turned into memory by reload if necessary.
3988 We used to force a register so that loop.c could see it. But
3989 this does not allow gen_* patterns to perform optimizations with
3990 the constants. It also produces two insns in cases like "x = 1.0;".
3991 On most machines, floating-point constants are not permitted in
3992 many insns, so we'd end up copying it to a register in any case.
3994 Now, we do the copying in expand_binop, if appropriate. */
3995 return immed_real_const (exp);
3999 if (! TREE_CST_RTL (exp))
4000 output_constant_def (exp);
4002 /* TREE_CST_RTL probably contains a constant address.
4003 On RISC machines where a constant address isn't valid,
4004 make some insns to get that address into a register. */
4005 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4006 && modifier != EXPAND_CONST_ADDRESS
4007 && modifier != EXPAND_INITIALIZER
4008 && modifier != EXPAND_SUM
4009 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
4010 return change_address (TREE_CST_RTL (exp), VOIDmode,
4011 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4012 return TREE_CST_RTL (exp);
4015 context = decl_function_context (exp);
4016 /* We treat inline_function_decl as an alias for the current function
4017 because that is the inline function whose vars, types, etc.
4018 are being merged into the current function.
4019 See expand_inline_function. */
4020 if (context == current_function_decl || context == inline_function_decl)
4023 /* If this is non-local, handle it. */
4026 temp = SAVE_EXPR_RTL (exp);
4027 if (temp && GET_CODE (temp) == REG)
4029 put_var_into_stack (exp);
4030 temp = SAVE_EXPR_RTL (exp);
4032 if (temp == 0 || GET_CODE (temp) != MEM)
4034 return change_address (temp, mode,
4035 fix_lexical_addr (XEXP (temp, 0), exp));
4037 if (SAVE_EXPR_RTL (exp) == 0)
4039 if (mode == BLKmode)
4041 = assign_stack_temp (mode,
4042 int_size_in_bytes (TREE_TYPE (exp)), 0);
4045 enum machine_mode var_mode = mode;
4047 if (TREE_CODE (type) == INTEGER_TYPE
4048 || TREE_CODE (type) == ENUMERAL_TYPE
4049 || TREE_CODE (type) == BOOLEAN_TYPE
4050 || TREE_CODE (type) == CHAR_TYPE
4051 || TREE_CODE (type) == REAL_TYPE
4052 || TREE_CODE (type) == POINTER_TYPE
4053 || TREE_CODE (type) == OFFSET_TYPE)
4055 PROMOTE_MODE (var_mode, unsignedp, type);
4058 temp = gen_reg_rtx (var_mode);
4061 SAVE_EXPR_RTL (exp) = temp;
4062 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4063 if (!optimize && GET_CODE (temp) == REG)
4064 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4068 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4069 must be a promoted value. We return a SUBREG of the wanted mode,
4070 but mark it so that we know that it was already extended. Note
4071 that `unsignedp' was modified above in this case. */
4073 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4074 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4076 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4077 SUBREG_PROMOTED_VAR_P (temp) = 1;
4078 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4082 return SAVE_EXPR_RTL (exp);
4085 /* Exit the current loop if the body-expression is true. */
4087 rtx label = gen_label_rtx ();
4088 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
4089 expand_exit_loop (NULL_PTR);
4095 expand_start_loop (1);
4096 expand_expr_stmt (TREE_OPERAND (exp, 0));
4103 tree vars = TREE_OPERAND (exp, 0);
4104 int vars_need_expansion = 0;
4106 /* Need to open a binding contour here because
4107 if there are any cleanups they most be contained here. */
4108 expand_start_bindings (0);
4110 /* Mark the corresponding BLOCK for output in its proper place. */
4111 if (TREE_OPERAND (exp, 2) != 0
4112 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4113 insert_block (TREE_OPERAND (exp, 2));
4115 /* If VARS have not yet been expanded, expand them now. */
4118 if (DECL_RTL (vars) == 0)
4120 vars_need_expansion = 1;
4123 expand_decl_init (vars);
4124 vars = TREE_CHAIN (vars);
4127 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4129 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4135 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4137 emit_insns (RTL_EXPR_SEQUENCE (exp));
4138 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4139 return RTL_EXPR_RTL (exp);
4142 /* All elts simple constants => refer to a constant in memory. But
4143 if this is a non-BLKmode mode, let it store a field at a time
4144 since that should make a CONST_INT or CONST_DOUBLE when we
4146 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
4148 rtx constructor = output_constant_def (exp);
4149 if (modifier != EXPAND_CONST_ADDRESS
4150 && modifier != EXPAND_INITIALIZER
4151 && modifier != EXPAND_SUM
4152 && !memory_address_p (GET_MODE (constructor),
4153 XEXP (constructor, 0)))
4154 constructor = change_address (constructor, VOIDmode,
4155 XEXP (constructor, 0));
4162 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4163 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4168 if (target == 0 || ! safe_from_p (target, exp))
4170 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4171 target = gen_reg_rtx (mode);
4174 enum tree_code c = TREE_CODE (type);
4176 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4177 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
4178 MEM_IN_STRUCT_P (target) = 1;
4181 store_constructor (exp, target);
4187 tree exp1 = TREE_OPERAND (exp, 0);
4190 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4191 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4192 This code has the same general effect as simply doing
4193 expand_expr on the save expr, except that the expression PTR
4194 is computed for use as a memory address. This means different
4195 code, suitable for indexing, may be generated. */
4196 if (TREE_CODE (exp1) == SAVE_EXPR
4197 && SAVE_EXPR_RTL (exp1) == 0
4198 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4199 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4200 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4202 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4203 VOIDmode, EXPAND_SUM);
4204 op0 = memory_address (mode, temp);
4205 op0 = copy_all_regs (op0);
4206 SAVE_EXPR_RTL (exp1) = op0;
4210 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4211 op0 = memory_address (mode, op0);
4214 temp = gen_rtx (MEM, mode, op0);
4215 /* If address was computed by addition,
4216 mark this as an element of an aggregate. */
4217 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4218 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4219 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4220 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4221 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4222 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4223 || (TREE_CODE (exp1) == ADDR_EXPR
4224 && (exp2 = TREE_OPERAND (exp1, 0))
4225 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4226 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4227 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
4228 MEM_IN_STRUCT_P (temp) = 1;
4229 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4230 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4231 a location is accessed through a pointer to const does not mean
4232 that the value there can never change. */
4233 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4239 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
4240 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4242 /* Nonconstant array index or nonconstant element size.
4243 Generate the tree for *(&array+index) and expand that,
4244 except do it in a language-independent way
4245 and don't complain about non-lvalue arrays.
4246 `mark_addressable' should already have been called
4247 for any array for which this case will be reached. */
4249 /* Don't forget the const or volatile flag from the array element. */
4250 tree variant_type = build_type_variant (type,
4251 TREE_READONLY (exp),
4252 TREE_THIS_VOLATILE (exp));
4253 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
4254 TREE_OPERAND (exp, 0));
4255 tree index = TREE_OPERAND (exp, 1);
4258 /* Convert the integer argument to a type the same size as a pointer
4259 so the multiply won't overflow spuriously. */
4260 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
4261 index = convert (type_for_size (POINTER_SIZE, 0), index);
4263 /* Don't think the address has side effects
4264 just because the array does.
4265 (In some cases the address might have side effects,
4266 and we fail to record that fact here. However, it should not
4267 matter, since expand_expr should not care.) */
4268 TREE_SIDE_EFFECTS (array_adr) = 0;
4270 elt = build1 (INDIRECT_REF, type,
4271 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
4273 fold (build (MULT_EXPR,
4274 TYPE_POINTER_TO (variant_type),
4275 index, size_in_bytes (type))))));
4277 /* Volatility, etc., of new expression is same as old expression. */
4278 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4279 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4280 TREE_READONLY (elt) = TREE_READONLY (exp);
4282 return expand_expr (elt, target, tmode, modifier);
4285 /* Fold an expression like: "foo"[2].
4286 This is not done in fold so it won't happen inside &. */
4289 tree arg0 = TREE_OPERAND (exp, 0);
4290 tree arg1 = TREE_OPERAND (exp, 1);
4292 if (TREE_CODE (arg0) == STRING_CST
4293 && TREE_CODE (arg1) == INTEGER_CST
4294 && !TREE_INT_CST_HIGH (arg1)
4295 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4297 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
4299 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
4300 TREE_TYPE (exp) = integer_type_node;
4301 return expand_expr (exp, target, tmode, modifier);
4303 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
4305 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4306 TREE_TYPE (exp) = integer_type_node;
4307 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
4312 /* If this is a constant index into a constant array,
4313 just get the value from the array. Handle both the cases when
4314 we have an explicit constructor and when our operand is a variable
4315 that was declared const. */
4317 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4318 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4320 tree index = fold (TREE_OPERAND (exp, 1));
4321 if (TREE_CODE (index) == INTEGER_CST
4322 && TREE_INT_CST_HIGH (index) == 0)
4324 int i = TREE_INT_CST_LOW (index);
4325 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4328 elem = TREE_CHAIN (elem);
4330 return expand_expr (fold (TREE_VALUE (elem)), target,
4335 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
4336 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4337 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
4338 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4339 && DECL_INITIAL (TREE_OPERAND (exp, 0))
4341 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
4344 tree index = fold (TREE_OPERAND (exp, 1));
4345 if (TREE_CODE (index) == INTEGER_CST
4346 && TREE_INT_CST_HIGH (index) == 0)
4348 int i = TREE_INT_CST_LOW (index);
4349 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
4351 if (TREE_CODE (init) == CONSTRUCTOR)
4353 tree elem = CONSTRUCTOR_ELTS (init);
4356 elem = TREE_CHAIN (elem);
4358 return expand_expr (fold (TREE_VALUE (elem)), target,
4361 else if (TREE_CODE (init) == STRING_CST
4362 && i < TREE_STRING_LENGTH (init))
4364 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4365 return convert_to_mode (mode, temp, 0);
4369 /* Treat array-ref with constant index as a component-ref. */
4373 /* If the operand is a CONSTRUCTOR, we can just extract the
4374 appropriate field if it is present. */
4375 if (code != ARRAY_REF
4376 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4380 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4381 elt = TREE_CHAIN (elt))
4382 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4383 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4387 enum machine_mode mode1;
4392 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4393 &mode1, &unsignedp, &volatilep);
4395 /* In some cases, we will be offsetting OP0's address by a constant.
4396 So get it as a sum, if possible. If we will be using it
4397 directly in an insn, we validate it. */
4398 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4400 /* If this is a constant, put it into a register if it is a
4401 legitimate constant and memory if it isn't. */
4402 if (CONSTANT_P (op0))
4404 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4405 if (LEGITIMATE_CONSTANT_P (op0))
4406 op0 = force_reg (mode, op0);
4408 op0 = validize_mem (force_const_mem (mode, op0));
4413 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4415 if (GET_CODE (op0) != MEM)
4417 op0 = change_address (op0, VOIDmode,
4418 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4419 force_reg (Pmode, offset_rtx)));
4422 /* Don't forget about volatility even if this is a bitfield. */
4423 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4425 op0 = copy_rtx (op0);
4426 MEM_VOLATILE_P (op0) = 1;
4429 if (mode1 == VOIDmode
4430 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4431 && modifier != EXPAND_CONST_ADDRESS
4432 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4433 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
4435 /* In cases where an aligned union has an unaligned object
4436 as a field, we might be extracting a BLKmode value from
4437 an integer-mode (e.g., SImode) object. Handle this case
4438 by doing the extract into an object as wide as the field
4439 (which we know to be the width of a basic mode), then
4440 storing into memory, and changing the mode to BLKmode. */
4441 enum machine_mode ext_mode = mode;
4443 if (ext_mode == BLKmode)
4444 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4446 if (ext_mode == BLKmode)
4449 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4450 unsignedp, target, ext_mode, ext_mode,
4451 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4452 int_size_in_bytes (TREE_TYPE (tem)));
4453 if (mode == BLKmode)
4455 rtx new = assign_stack_temp (ext_mode,
4456 bitsize / BITS_PER_UNIT, 0);
4458 emit_move_insn (new, op0);
4459 op0 = copy_rtx (new);
4460 PUT_MODE (op0, BLKmode);
4466 /* Get a reference to just this component. */
4467 if (modifier == EXPAND_CONST_ADDRESS
4468 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4469 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4470 (bitpos / BITS_PER_UNIT)));
4472 op0 = change_address (op0, mode1,
4473 plus_constant (XEXP (op0, 0),
4474 (bitpos / BITS_PER_UNIT)));
4475 MEM_IN_STRUCT_P (op0) = 1;
4476 MEM_VOLATILE_P (op0) |= volatilep;
4477 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4480 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4481 convert_move (target, op0, unsignedp);
4487 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
4488 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4489 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4490 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4491 MEM_IN_STRUCT_P (temp) = 1;
4492 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4493 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4494 a location is accessed through a pointer to const does not mean
4495 that the value there can never change. */
4496 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4501 /* Intended for a reference to a buffer of a file-object in Pascal.
4502 But it's not certain that a special tree code will really be
4503 necessary for these. INDIRECT_REF might work for them. */
4507 /* IN_EXPR: Inlined pascal set IN expression.
4510 rlo = set_low - (set_low%bits_per_word);
4511 the_word = set [ (index - rlo)/bits_per_word ];
4512 bit_index = index % bits_per_word;
4513 bitmask = 1 << bit_index;
4514 return !!(the_word & bitmask); */
4516 preexpand_calls (exp);
4518 tree set = TREE_OPERAND (exp, 0);
4519 tree index = TREE_OPERAND (exp, 1);
4520 tree set_type = TREE_TYPE (set);
4522 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4523 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4529 rtx diff, quo, rem, addr, bit, result;
4530 rtx setval, setaddr;
4531 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4534 target = gen_reg_rtx (mode);
4536 /* If domain is empty, answer is no. */
4537 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4540 index_val = expand_expr (index, 0, VOIDmode, 0);
4541 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4542 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4543 setval = expand_expr (set, 0, VOIDmode, 0);
4544 setaddr = XEXP (setval, 0);
4546 /* Compare index against bounds, if they are constant. */
4547 if (GET_CODE (index_val) == CONST_INT
4548 && GET_CODE (lo_r) == CONST_INT
4549 && INTVAL (index_val) < INTVAL (lo_r))
4552 if (GET_CODE (index_val) == CONST_INT
4553 && GET_CODE (hi_r) == CONST_INT
4554 && INTVAL (hi_r) < INTVAL (index_val))
4557 /* If we get here, we have to generate the code for both cases
4558 (in range and out of range). */
4560 op0 = gen_label_rtx ();
4561 op1 = gen_label_rtx ();
4563 if (! (GET_CODE (index_val) == CONST_INT
4564 && GET_CODE (lo_r) == CONST_INT))
4566 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4567 GET_MODE (index_val), 0, 0);
4568 emit_jump_insn (gen_blt (op1));
4571 if (! (GET_CODE (index_val) == CONST_INT
4572 && GET_CODE (hi_r) == CONST_INT))
4574 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4575 GET_MODE (index_val), 0, 0);
4576 emit_jump_insn (gen_bgt (op1));
4579 /* Calculate the element number of bit zero in the first word
4581 if (GET_CODE (lo_r) == CONST_INT)
4582 rlow = GEN_INT (INTVAL (lo_r)
4583 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4585 rlow = expand_binop (index_mode, and_optab, lo_r,
4586 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4587 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4589 diff = expand_binop (index_mode, sub_optab,
4590 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4592 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4593 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4594 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4595 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4596 addr = memory_address (byte_mode,
4597 expand_binop (index_mode, add_optab,
4598 diff, setaddr, NULL_RTX, 0,
4600 /* Extract the bit we want to examine */
4601 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4602 gen_rtx (MEM, byte_mode, addr),
4603 make_tree (TREE_TYPE (index), rem),
4605 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4606 GET_MODE (target) == byte_mode ? target : 0,
4607 1, OPTAB_LIB_WIDEN);
4609 if (result != target)
4610 convert_move (target, result, 1);
4612 /* Output the code to handle the out-of-range case. */
4615 emit_move_insn (target, const0_rtx);
4620 case WITH_CLEANUP_EXPR:
4621 if (RTL_EXPR_RTL (exp) == 0)
4624 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4626 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4627 /* That's it for this cleanup. */
4628 TREE_OPERAND (exp, 2) = 0;
4630 return RTL_EXPR_RTL (exp);
4633 /* Check for a built-in function. */
4634 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4635 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4636 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4637 return expand_builtin (exp, target, subtarget, tmode, ignore);
4638 /* If this call was expanded already by preexpand_calls,
4639 just return the result we got. */
4640 if (CALL_EXPR_RTL (exp) != 0)
4641 return CALL_EXPR_RTL (exp);
4642 return expand_call (exp, target, ignore);
4644 case NON_LVALUE_EXPR:
4647 case REFERENCE_EXPR:
4648 if (TREE_CODE (type) == VOID_TYPE || ignore)
4650 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4653 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4654 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4655 if (TREE_CODE (type) == UNION_TYPE)
4657 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4660 if (mode == BLKmode)
4662 if (TYPE_SIZE (type) == 0
4663 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4665 target = assign_stack_temp (BLKmode,
4666 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4667 + BITS_PER_UNIT - 1)
4668 / BITS_PER_UNIT, 0);
4671 target = gen_reg_rtx (mode);
4673 if (GET_CODE (target) == MEM)
4674 /* Store data into beginning of memory target. */
4675 store_expr (TREE_OPERAND (exp, 0),
4676 change_address (target, TYPE_MODE (valtype), 0), 0);
4678 else if (GET_CODE (target) == REG)
4679 /* Store this field into a union of the proper type. */
4680 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4681 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4683 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4687 /* Return the entire union. */
4690 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4691 if (GET_MODE (op0) == mode)
4693 /* If arg is a constant integer being extended from a narrower mode,
4694 we must really truncate to get the extended bits right. Otherwise
4695 (unsigned long) (unsigned char) ("\377"[0])
4696 would come out as ffffffff. */
4697 if (GET_MODE (op0) == VOIDmode
4698 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4699 < GET_MODE_BITSIZE (mode)))
4701 /* MODE must be narrower than HOST_BITS_PER_INT. */
4702 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4704 if (width < HOST_BITS_PER_WIDE_INT)
4706 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4707 : CONST_DOUBLE_LOW (op0));
4708 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4709 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4710 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4712 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4714 op0 = GEN_INT (val);
4718 op0 = (simplify_unary_operation
4719 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4720 ? ZERO_EXTEND : SIGN_EXTEND),
4722 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4727 if (GET_MODE (op0) == VOIDmode)
4729 if (modifier == EXPAND_INITIALIZER)
4730 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4731 if (flag_force_mem && GET_CODE (op0) == MEM)
4732 op0 = copy_to_reg (op0);
4735 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4737 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4741 /* We come here from MINUS_EXPR when the second operand is a constant. */
4743 this_optab = add_optab;
4745 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4746 something else, make sure we add the register to the constant and
4747 then to the other thing. This case can occur during strength
4748 reduction and doing it this way will produce better code if the
4749 frame pointer or argument pointer is eliminated.
4751 fold-const.c will ensure that the constant is always in the inner
4752 PLUS_EXPR, so the only case we need to do anything about is if
4753 sp, ap, or fp is our second argument, in which case we must swap
4754 the innermost first argument and our second argument. */
4756 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4757 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4758 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4759 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4760 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4761 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4763 tree t = TREE_OPERAND (exp, 1);
4765 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4766 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4769 /* If the result is to be Pmode and we are adding an integer to
4770 something, we might be forming a constant. So try to use
4771 plus_constant. If it produces a sum and we can't accept it,
4772 use force_operand. This allows P = &ARR[const] to generate
4773 efficient code on machines where a SYMBOL_REF is not a valid
4776 If this is an EXPAND_SUM call, always return the sum. */
4777 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4778 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4779 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4782 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4784 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4785 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4786 op1 = force_operand (op1, target);
4790 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4791 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4792 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4795 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4797 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4798 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4799 op0 = force_operand (op0, target);
4803 /* No sense saving up arithmetic to be done
4804 if it's all in the wrong mode to form part of an address.
4805 And force_operand won't know whether to sign-extend or
4807 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4808 || mode != Pmode) goto binop;
4810 preexpand_calls (exp);
4811 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4814 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4815 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4817 /* Make sure any term that's a sum with a constant comes last. */
4818 if (GET_CODE (op0) == PLUS
4819 && CONSTANT_P (XEXP (op0, 1)))
4825 /* If adding to a sum including a constant,
4826 associate it to put the constant outside. */
4827 if (GET_CODE (op1) == PLUS
4828 && CONSTANT_P (XEXP (op1, 1)))
4830 rtx constant_term = const0_rtx;
4832 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4835 /* Ensure that MULT comes first if there is one. */
4836 else if (GET_CODE (op0) == MULT)
4837 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4839 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4841 /* Let's also eliminate constants from op0 if possible. */
4842 op0 = eliminate_constant_term (op0, &constant_term);
4844 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4845 their sum should be a constant. Form it into OP1, since the
4846 result we want will then be OP0 + OP1. */
4848 temp = simplify_binary_operation (PLUS, mode, constant_term,
4853 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4856 /* Put a constant term last and put a multiplication first. */
4857 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4858 temp = op1, op1 = op0, op0 = temp;
4860 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4861 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4864 /* Handle difference of two symbolic constants,
4865 for the sake of an initializer. */
4866 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4867 && really_constant_p (TREE_OPERAND (exp, 0))
4868 && really_constant_p (TREE_OPERAND (exp, 1)))
4870 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4871 VOIDmode, modifier);
4872 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4873 VOIDmode, modifier);
4874 return gen_rtx (MINUS, mode, op0, op1);
4876 /* Convert A - const to A + (-const). */
4877 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4879 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4880 fold (build1 (NEGATE_EXPR, type,
4881 TREE_OPERAND (exp, 1))));
4884 this_optab = sub_optab;
4888 preexpand_calls (exp);
4889 /* If first operand is constant, swap them.
4890 Thus the following special case checks need only
4891 check the second operand. */
4892 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4894 register tree t1 = TREE_OPERAND (exp, 0);
4895 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4896 TREE_OPERAND (exp, 1) = t1;
4899 /* Attempt to return something suitable for generating an
4900 indexed address, for machines that support that. */
4902 if (modifier == EXPAND_SUM && mode == Pmode
4903 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4904 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4906 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4908 /* Apply distributive law if OP0 is x+c. */
4909 if (GET_CODE (op0) == PLUS
4910 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4911 return gen_rtx (PLUS, mode,
4912 gen_rtx (MULT, mode, XEXP (op0, 0),
4913 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4914 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4915 * INTVAL (XEXP (op0, 1))));
4917 if (GET_CODE (op0) != REG)
4918 op0 = force_operand (op0, NULL_RTX);
4919 if (GET_CODE (op0) != REG)
4920 op0 = copy_to_mode_reg (mode, op0);
4922 return gen_rtx (MULT, mode, op0,
4923 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4926 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4929 /* Check for multiplying things that have been extended
4930 from a narrower type. If this machine supports multiplying
4931 in that narrower type with a result in the desired type,
4932 do it that way, and avoid the explicit type-conversion. */
4933 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4934 && TREE_CODE (type) == INTEGER_TYPE
4935 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4936 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4937 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4938 && int_fits_type_p (TREE_OPERAND (exp, 1),
4939 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4940 /* Don't use a widening multiply if a shift will do. */
4941 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4942 > HOST_BITS_PER_WIDE_INT)
4943 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4945 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4946 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4948 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4949 /* If both operands are extended, they must either both
4950 be zero-extended or both be sign-extended. */
4951 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4953 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4955 enum machine_mode innermode
4956 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4957 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4958 ? umul_widen_optab : smul_widen_optab);
4959 if (mode == GET_MODE_WIDER_MODE (innermode)
4960 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4962 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4963 NULL_RTX, VOIDmode, 0);
4964 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4965 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4968 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4969 NULL_RTX, VOIDmode, 0);
4973 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4974 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4975 return expand_mult (mode, op0, op1, target, unsignedp);
4977 case TRUNC_DIV_EXPR:
4978 case FLOOR_DIV_EXPR:
4980 case ROUND_DIV_EXPR:
4981 case EXACT_DIV_EXPR:
4982 preexpand_calls (exp);
4983 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4985 /* Possible optimization: compute the dividend with EXPAND_SUM
4986 then if the divisor is constant can optimize the case
4987 where some terms of the dividend have coeffs divisible by it. */
4988 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4989 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4990 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4993 this_optab = flodiv_optab;
4996 case TRUNC_MOD_EXPR:
4997 case FLOOR_MOD_EXPR:
4999 case ROUND_MOD_EXPR:
5000 preexpand_calls (exp);
5001 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5003 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5004 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5005 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5007 case FIX_ROUND_EXPR:
5008 case FIX_FLOOR_EXPR:
5010 abort (); /* Not used for C. */
5012 case FIX_TRUNC_EXPR:
5013 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5015 target = gen_reg_rtx (mode);
5016 expand_fix (target, op0, unsignedp);
5020 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5022 target = gen_reg_rtx (mode);
5023 /* expand_float can't figure out what to do if FROM has VOIDmode.
5024 So give it the correct mode. With -O, cse will optimize this. */
5025 if (GET_MODE (op0) == VOIDmode)
5026 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5028 expand_float (target, op0,
5029 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5033 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5034 temp = expand_unop (mode, neg_optab, op0, target, 0);
5040 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5042 /* Handle complex values specially. */
5044 enum machine_mode opmode
5045 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5047 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5048 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5049 return expand_complex_abs (opmode, op0, target, unsignedp);
5052 /* Unsigned abs is simply the operand. Testing here means we don't
5053 risk generating incorrect code below. */
5054 if (TREE_UNSIGNED (type))
5057 /* First try to do it with a special abs instruction. */
5058 temp = expand_unop (mode, abs_optab, op0, target, 0);
5062 /* If this machine has expensive jumps, we can do integer absolute
5063 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5064 where W is the width of MODE. */
5066 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5068 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5069 size_int (GET_MODE_BITSIZE (mode) - 1),
5072 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5075 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5082 /* If that does not win, use conditional jump and negate. */
5083 target = original_target;
5084 temp = gen_label_rtx ();
5085 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5086 || (GET_CODE (target) == REG
5087 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5088 target = gen_reg_rtx (mode);
5089 emit_move_insn (target, op0);
5090 emit_cmp_insn (target,
5091 expand_expr (convert (type, integer_zero_node),
5092 NULL_RTX, VOIDmode, 0),
5093 GE, NULL_RTX, mode, 0, 0);
5095 emit_jump_insn (gen_bge (temp));
5096 op0 = expand_unop (mode, neg_optab, target, target, 0);
5098 emit_move_insn (target, op0);
5105 target = original_target;
5106 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5107 || (GET_CODE (target) == REG
5108 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5109 target = gen_reg_rtx (mode);
5110 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5111 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5113 /* First try to do it with a special MIN or MAX instruction.
5114 If that does not win, use a conditional jump to select the proper
5116 this_optab = (TREE_UNSIGNED (type)
5117 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5118 : (code == MIN_EXPR ? smin_optab : smax_optab));
5120 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5126 emit_move_insn (target, op0);
5127 op0 = gen_label_rtx ();
5128 if (code == MAX_EXPR)
5129 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5130 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5131 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5133 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5134 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5135 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5136 if (temp == const0_rtx)
5137 emit_move_insn (target, op1);
5138 else if (temp != const_true_rtx)
5140 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5141 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5144 emit_move_insn (target, op1);
5149 /* ??? Can optimize when the operand of this is a bitwise operation,
5150 by using a different bitwise operation. */
5152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5153 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5159 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5160 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5165 /* ??? Can optimize bitwise operations with one arg constant.
5166 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5167 and (a bitwise1 b) bitwise2 b (etc)
5168 but that is probably not worth while. */
5170 /* BIT_AND_EXPR is for bitwise anding.
5171 TRUTH_AND_EXPR is for anding two boolean values
5172 when we want in all cases to compute both of them.
5173 In general it is fastest to do TRUTH_AND_EXPR by
5174 computing both operands as actual zero-or-1 values
5175 and then bitwise anding. In cases where there cannot
5176 be any side effects, better code would be made by
5177 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5178 but the question is how to recognize those cases. */
5180 case TRUTH_AND_EXPR:
5182 this_optab = and_optab;
5185 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5188 this_optab = ior_optab;
5191 case TRUTH_XOR_EXPR:
5193 this_optab = xor_optab;
5200 preexpand_calls (exp);
5201 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5203 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5204 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5207 /* Could determine the answer when only additive constants differ.
5208 Also, the addition of one can be handled by changing the condition. */
5215 preexpand_calls (exp);
5216 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5219 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5220 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5222 && GET_CODE (original_target) == REG
5223 && (GET_MODE (original_target)
5224 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5226 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5227 if (temp != original_target)
5228 temp = copy_to_reg (temp);
5229 op1 = gen_label_rtx ();
5230 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5231 GET_MODE (temp), unsignedp, 0);
5232 emit_jump_insn (gen_beq (op1));
5233 emit_move_insn (temp, const1_rtx);
5237 /* If no set-flag instruction, must generate a conditional
5238 store into a temporary variable. Drop through
5239 and handle this like && and ||. */
5241 case TRUTH_ANDIF_EXPR:
5242 case TRUTH_ORIF_EXPR:
5243 if (target == 0 || ! safe_from_p (target, exp)
5244 /* Make sure we don't have a hard reg (such as function's return
5245 value) live across basic blocks, if not optimizing. */
5246 || (!optimize && GET_CODE (target) == REG
5247 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5248 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5249 emit_clr_insn (target);
5250 op1 = gen_label_rtx ();
5251 jumpifnot (exp, op1);
5252 emit_0_to_1_insn (target);
5256 case TRUTH_NOT_EXPR:
5257 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5258 /* The parser is careful to generate TRUTH_NOT_EXPR
5259 only with operands that are always zero or one. */
5260 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5261 target, 1, OPTAB_LIB_WIDEN);
5267 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5269 return expand_expr (TREE_OPERAND (exp, 1),
5270 (ignore ? const0_rtx : target),
5275 /* Note that COND_EXPRs whose type is a structure or union
5276 are required to be constructed to contain assignments of
5277 a temporary variable, so that we can evaluate them here
5278 for side effect only. If type is void, we must do likewise. */
5280 /* If an arm of the branch requires a cleanup,
5281 only that cleanup is performed. */
5284 tree binary_op = 0, unary_op = 0;
5285 tree old_cleanups = cleanups_this_call;
5286 cleanups_this_call = 0;
5288 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5289 convert it to our mode, if necessary. */
5290 if (integer_onep (TREE_OPERAND (exp, 1))
5291 && integer_zerop (TREE_OPERAND (exp, 2))
5292 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5294 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5295 if (GET_MODE (op0) == mode)
5298 target = gen_reg_rtx (mode);
5299 convert_move (target, op0, unsignedp);
5303 /* If we are not to produce a result, we have no target. Otherwise,
5304 if a target was specified use it; it will not be used as an
5305 intermediate target unless it is safe. If no target, use a
5308 if (mode == VOIDmode || ignore)
5310 else if (original_target
5311 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5312 temp = original_target;
5313 else if (mode == BLKmode)
5315 if (TYPE_SIZE (type) == 0
5316 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5318 temp = assign_stack_temp (BLKmode,
5319 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5320 + BITS_PER_UNIT - 1)
5321 / BITS_PER_UNIT, 0);
5324 temp = gen_reg_rtx (mode);
5326 /* Check for X ? A + B : A. If we have this, we can copy
5327 A to the output and conditionally add B. Similarly for unary
5328 operations. Don't do this if X has side-effects because
5329 those side effects might affect A or B and the "?" operation is
5330 a sequence point in ANSI. (We test for side effects later.) */
5332 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5333 && operand_equal_p (TREE_OPERAND (exp, 2),
5334 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5335 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5336 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5337 && operand_equal_p (TREE_OPERAND (exp, 1),
5338 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5339 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5340 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5341 && operand_equal_p (TREE_OPERAND (exp, 2),
5342 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5343 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5344 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5345 && operand_equal_p (TREE_OPERAND (exp, 1),
5346 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5347 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5349 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5350 operation, do this as A + (X != 0). Similarly for other simple
5351 binary operators. */
5352 if (singleton && binary_op
5353 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5354 && (TREE_CODE (binary_op) == PLUS_EXPR
5355 || TREE_CODE (binary_op) == MINUS_EXPR
5356 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5357 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5358 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5359 && integer_onep (TREE_OPERAND (binary_op, 1))
5360 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5363 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5364 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5365 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5366 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5369 /* If we had X ? A : A + 1, do this as A + (X == 0).
5371 We have to invert the truth value here and then put it
5372 back later if do_store_flag fails. We cannot simply copy
5373 TREE_OPERAND (exp, 0) to another variable and modify that
5374 because invert_truthvalue can modify the tree pointed to
5376 if (singleton == TREE_OPERAND (exp, 1))
5377 TREE_OPERAND (exp, 0)
5378 = invert_truthvalue (TREE_OPERAND (exp, 0));
5380 result = do_store_flag (TREE_OPERAND (exp, 0),
5381 (safe_from_p (temp, singleton)
5383 mode, BRANCH_COST <= 1);
5387 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5388 return expand_binop (mode, boptab, op1, result, temp,
5389 unsignedp, OPTAB_LIB_WIDEN);
5391 else if (singleton == TREE_OPERAND (exp, 1))
5392 TREE_OPERAND (exp, 0)
5393 = invert_truthvalue (TREE_OPERAND (exp, 0));
5397 op0 = gen_label_rtx ();
5399 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5403 /* If the target conflicts with the other operand of the
5404 binary op, we can't use it. Also, we can't use the target
5405 if it is a hard register, because evaluating the condition
5406 might clobber it. */
5408 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5409 || (GET_CODE (temp) == REG
5410 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5411 temp = gen_reg_rtx (mode);
5412 store_expr (singleton, temp, 0);
5415 expand_expr (singleton,
5416 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
5417 if (cleanups_this_call)
5419 sorry ("aggregate value in COND_EXPR");
5420 cleanups_this_call = 0;
5422 if (singleton == TREE_OPERAND (exp, 1))
5423 jumpif (TREE_OPERAND (exp, 0), op0);
5425 jumpifnot (TREE_OPERAND (exp, 0), op0);
5427 if (binary_op && temp == 0)
5428 /* Just touch the other operand. */
5429 expand_expr (TREE_OPERAND (binary_op, 1),
5430 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5432 store_expr (build (TREE_CODE (binary_op), type,
5433 make_tree (type, temp),
5434 TREE_OPERAND (binary_op, 1)),
5437 store_expr (build1 (TREE_CODE (unary_op), type,
5438 make_tree (type, temp)),
5443 /* This is now done in jump.c and is better done there because it
5444 produces shorter register lifetimes. */
5446 /* Check for both possibilities either constants or variables
5447 in registers (but not the same as the target!). If so, can
5448 save branches by assigning one, branching, and assigning the
5450 else if (temp && GET_MODE (temp) != BLKmode
5451 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5452 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5453 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5454 && DECL_RTL (TREE_OPERAND (exp, 1))
5455 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5456 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5457 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5458 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5459 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5460 && DECL_RTL (TREE_OPERAND (exp, 2))
5461 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5462 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5464 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5465 temp = gen_reg_rtx (mode);
5466 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5467 jumpifnot (TREE_OPERAND (exp, 0), op0);
5468 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5472 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5473 comparison operator. If we have one of these cases, set the
5474 output to A, branch on A (cse will merge these two references),
5475 then set the output to FOO. */
5477 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5478 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5479 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5480 TREE_OPERAND (exp, 1), 0)
5481 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5482 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5484 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5485 temp = gen_reg_rtx (mode);
5486 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5487 jumpif (TREE_OPERAND (exp, 0), op0);
5488 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5492 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5493 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5494 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5495 TREE_OPERAND (exp, 2), 0)
5496 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5497 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5499 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5500 temp = gen_reg_rtx (mode);
5501 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5502 jumpifnot (TREE_OPERAND (exp, 0), op0);
5503 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5508 op1 = gen_label_rtx ();
5509 jumpifnot (TREE_OPERAND (exp, 0), op0);
5511 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5513 expand_expr (TREE_OPERAND (exp, 1),
5514 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5515 if (cleanups_this_call)
5517 sorry ("aggregate value in COND_EXPR");
5518 cleanups_this_call = 0;
5522 emit_jump_insn (gen_jump (op1));
5526 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5528 expand_expr (TREE_OPERAND (exp, 2),
5529 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5532 if (cleanups_this_call)
5534 sorry ("aggregate value in COND_EXPR");
5535 cleanups_this_call = 0;
5541 cleanups_this_call = old_cleanups;
5547 /* Something needs to be initialized, but we didn't know
5548 where that thing was when building the tree. For example,
5549 it could be the return value of a function, or a parameter
5550 to a function which lays down in the stack, or a temporary
5551 variable which must be passed by reference.
5553 We guarantee that the expression will either be constructed
5554 or copied into our original target. */
5556 tree slot = TREE_OPERAND (exp, 0);
5559 if (TREE_CODE (slot) != VAR_DECL)
5564 if (DECL_RTL (slot) != 0)
5566 target = DECL_RTL (slot);
5567 /* If we have already expanded the slot, so don't do
5569 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5574 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5575 /* All temp slots at this level must not conflict. */
5576 preserve_temp_slots (target);
5577 DECL_RTL (slot) = target;
5581 /* I bet this needs to be done, and I bet that it needs to
5582 be above, inside the else clause. The reason is
5583 simple, how else is it going to get cleaned up? (mrs)
5585 The reason is probably did not work before, and was
5586 commented out is because this was re-expanding already
5587 expanded target_exprs (target == 0 and DECL_RTL (slot)
5588 != 0) also cleaning them up many times as well. :-( */
5590 /* Since SLOT is not known to the called function
5591 to belong to its stack frame, we must build an explicit
5592 cleanup. This case occurs when we must build up a reference
5593 to pass the reference as an argument. In this case,
5594 it is very likely that such a reference need not be
5597 if (TREE_OPERAND (exp, 2) == 0)
5598 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5599 if (TREE_OPERAND (exp, 2))
5600 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5601 cleanups_this_call);
5606 /* This case does occur, when expanding a parameter which
5607 needs to be constructed on the stack. The target
5608 is the actual stack address that we want to initialize.
5609 The function we call will perform the cleanup in this case. */
5611 DECL_RTL (slot) = target;
5614 exp1 = TREE_OPERAND (exp, 1);
5615 /* Mark it as expanded. */
5616 TREE_OPERAND (exp, 1) = NULL_TREE;
5618 return expand_expr (exp1, target, tmode, modifier);
5623 tree lhs = TREE_OPERAND (exp, 0);
5624 tree rhs = TREE_OPERAND (exp, 1);
5625 tree noncopied_parts = 0;
5626 tree lhs_type = TREE_TYPE (lhs);
5628 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5629 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5630 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5631 TYPE_NONCOPIED_PARTS (lhs_type));
5632 while (noncopied_parts != 0)
5634 expand_assignment (TREE_VALUE (noncopied_parts),
5635 TREE_PURPOSE (noncopied_parts), 0, 0);
5636 noncopied_parts = TREE_CHAIN (noncopied_parts);
5643 /* If lhs is complex, expand calls in rhs before computing it.
5644 That's so we don't compute a pointer and save it over a call.
5645 If lhs is simple, compute it first so we can give it as a
5646 target if the rhs is just a call. This avoids an extra temp and copy
5647 and that prevents a partial-subsumption which makes bad code.
5648 Actually we could treat component_ref's of vars like vars. */
5650 tree lhs = TREE_OPERAND (exp, 0);
5651 tree rhs = TREE_OPERAND (exp, 1);
5652 tree noncopied_parts = 0;
5653 tree lhs_type = TREE_TYPE (lhs);
5657 if (TREE_CODE (lhs) != VAR_DECL
5658 && TREE_CODE (lhs) != RESULT_DECL
5659 && TREE_CODE (lhs) != PARM_DECL)
5660 preexpand_calls (exp);
5662 /* Check for |= or &= of a bitfield of size one into another bitfield
5663 of size 1. In this case, (unless we need the result of the
5664 assignment) we can do this more efficiently with a
5665 test followed by an assignment, if necessary.
5667 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5668 things change so we do, this code should be enhanced to
5671 && TREE_CODE (lhs) == COMPONENT_REF
5672 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5673 || TREE_CODE (rhs) == BIT_AND_EXPR)
5674 && TREE_OPERAND (rhs, 0) == lhs
5675 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5676 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5677 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5679 rtx label = gen_label_rtx ();
5681 do_jump (TREE_OPERAND (rhs, 1),
5682 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5683 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5684 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5685 (TREE_CODE (rhs) == BIT_IOR_EXPR
5687 : integer_zero_node)),
5689 do_pending_stack_adjust ();
5694 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5695 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5696 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5697 TYPE_NONCOPIED_PARTS (lhs_type));
5699 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5700 while (noncopied_parts != 0)
5702 expand_assignment (TREE_PURPOSE (noncopied_parts),
5703 TREE_VALUE (noncopied_parts), 0, 0);
5704 noncopied_parts = TREE_CHAIN (noncopied_parts);
5709 case PREINCREMENT_EXPR:
5710 case PREDECREMENT_EXPR:
5711 return expand_increment (exp, 0);
5713 case POSTINCREMENT_EXPR:
5714 case POSTDECREMENT_EXPR:
5715 /* Faster to treat as pre-increment if result is not used. */
5716 return expand_increment (exp, ! ignore);
5719 /* Are we taking the address of a nested function? */
5720 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5721 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5723 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5724 op0 = force_operand (op0, target);
5728 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5729 (modifier == EXPAND_INITIALIZER
5730 ? modifier : EXPAND_CONST_ADDRESS));
5731 if (GET_CODE (op0) != MEM)
5734 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5735 return XEXP (op0, 0);
5736 op0 = force_operand (XEXP (op0, 0), target);
5738 if (flag_force_addr && GET_CODE (op0) != REG)
5739 return force_reg (Pmode, op0);
5742 case ENTRY_VALUE_EXPR:
5745 /* COMPLEX type for Extended Pascal & Fortran */
5748 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5752 /* Get the rtx code of the operands. */
5753 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5754 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5757 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5759 prev = get_last_insn ();
5761 /* Tell flow that the whole of the destination is being set. */
5762 if (GET_CODE (target) == REG)
5763 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5765 /* Move the real (op0) and imaginary (op1) parts to their location. */
5766 emit_move_insn (gen_realpart (mode, target), op0);
5767 emit_move_insn (gen_imagpart (mode, target), op1);
5769 /* Complex construction should appear as a single unit. */
5776 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5777 return gen_realpart (mode, op0);
5780 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5781 return gen_imagpart (mode, op0);
5785 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5789 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5792 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5794 prev = get_last_insn ();
5796 /* Tell flow that the whole of the destination is being set. */
5797 if (GET_CODE (target) == REG)
5798 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5800 /* Store the realpart and the negated imagpart to target. */
5801 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5803 imag_t = gen_imagpart (mode, target);
5804 temp = expand_unop (mode, neg_optab,
5805 gen_imagpart (mode, op0), imag_t, 0);
5807 emit_move_insn (imag_t, temp);
5809 /* Conjugate should appear as a single unit */
5819 return (*lang_expand_expr) (exp, target, tmode, modifier);
5822 /* Here to do an ordinary binary operator, generating an instruction
5823 from the optab already placed in `this_optab'. */
5825 preexpand_calls (exp);
5826 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5828 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5829 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5831 temp = expand_binop (mode, this_optab, op0, op1, target,
5832 unsignedp, OPTAB_LIB_WIDEN);
5838 /* Return the alignment in bits of EXP, a pointer valued expression.
5839 But don't return more than MAX_ALIGN no matter what.
5840 The alignment returned is, by default, the alignment of the thing that
5841 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5843 Otherwise, look at the expression to see if we can do better, i.e., if the
5844 expression is actually pointing at an object whose alignment is tighter. */
5847 get_pointer_alignment (exp, max_align)
5851 unsigned align, inner;
5853 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5856 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5857 align = MIN (align, max_align);
5861 switch (TREE_CODE (exp))
5865 case NON_LVALUE_EXPR:
5866 exp = TREE_OPERAND (exp, 0);
5867 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5869 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5870 inner = MIN (inner, max_align);
5871 align = MAX (align, inner);
5875 /* If sum of pointer + int, restrict our maximum alignment to that
5876 imposed by the integer. If not, we can't do any better than
5878 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5881 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5886 exp = TREE_OPERAND (exp, 0);
5890 /* See what we are pointing at and look at its alignment. */
5891 exp = TREE_OPERAND (exp, 0);
5892 if (TREE_CODE (exp) == FUNCTION_DECL)
5893 align = MAX (align, FUNCTION_BOUNDARY);
5894 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5895 align = MAX (align, DECL_ALIGN (exp));
5896 #ifdef CONSTANT_ALIGNMENT
5897 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5898 align = CONSTANT_ALIGNMENT (exp, align);
5900 return MIN (align, max_align);
5908 /* Return the tree node and offset if a given argument corresponds to
5909 a string constant. */
5912 string_constant (arg, ptr_offset)
5918 if (TREE_CODE (arg) == ADDR_EXPR
5919 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5921 *ptr_offset = integer_zero_node;
5922 return TREE_OPERAND (arg, 0);
5924 else if (TREE_CODE (arg) == PLUS_EXPR)
5926 tree arg0 = TREE_OPERAND (arg, 0);
5927 tree arg1 = TREE_OPERAND (arg, 1);
5932 if (TREE_CODE (arg0) == ADDR_EXPR
5933 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5936 return TREE_OPERAND (arg0, 0);
5938 else if (TREE_CODE (arg1) == ADDR_EXPR
5939 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5942 return TREE_OPERAND (arg1, 0);
5949 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5950 way, because it could contain a zero byte in the middle.
5951 TREE_STRING_LENGTH is the size of the character array, not the string.
5953 Unfortunately, string_constant can't access the values of const char
5954 arrays with initializers, so neither can we do so here. */
5964 src = string_constant (src, &offset_node);
5967 max = TREE_STRING_LENGTH (src);
5968 ptr = TREE_STRING_POINTER (src);
5969 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5971 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5972 compute the offset to the following null if we don't know where to
5973 start searching for it. */
5975 for (i = 0; i < max; i++)
5978 /* We don't know the starting offset, but we do know that the string
5979 has no internal zero bytes. We can assume that the offset falls
5980 within the bounds of the string; otherwise, the programmer deserves
5981 what he gets. Subtract the offset from the length of the string,
5983 /* This would perhaps not be valid if we were dealing with named
5984 arrays in addition to literal string constants. */
5985 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5988 /* We have a known offset into the string. Start searching there for
5989 a null character. */
5990 if (offset_node == 0)
5994 /* Did we get a long long offset? If so, punt. */
5995 if (TREE_INT_CST_HIGH (offset_node) != 0)
5997 offset = TREE_INT_CST_LOW (offset_node);
5999 /* If the offset is known to be out of bounds, warn, and call strlen at
6001 if (offset < 0 || offset > max)
6003 warning ("offset outside bounds of constant string");
6006 /* Use strlen to search for the first zero byte. Since any strings
6007 constructed with build_string will have nulls appended, we win even
6008 if we get handed something like (char[4])"abcd".
6010 Since OFFSET is our starting index into the string, no further
6011 calculation is needed. */
6012 return size_int (strlen (ptr + offset));
6015 /* Expand an expression EXP that calls a built-in function,
6016 with result going to TARGET if that's convenient
6017 (and in mode MODE if that's convenient).
6018 SUBTARGET may be used as the target for computing one of EXP's operands.
6019 IGNORE is nonzero if the value is to be ignored. */
6022 expand_builtin (exp, target, subtarget, mode, ignore)
6026 enum machine_mode mode;
6029 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6030 tree arglist = TREE_OPERAND (exp, 1);
6033 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6034 optab builtin_optab;
6036 switch (DECL_FUNCTION_CODE (fndecl))
6041 /* build_function_call changes these into ABS_EXPR. */
6046 case BUILT_IN_FSQRT:
6047 /* If not optimizing, call the library function. */
6052 /* Arg could be wrong type if user redeclared this fcn wrong. */
6053 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6054 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
6056 /* Stabilize and compute the argument. */
6057 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6058 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6060 exp = copy_node (exp);
6061 arglist = copy_node (arglist);
6062 TREE_OPERAND (exp, 1) = arglist;
6063 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6065 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6067 /* Make a suitable register to place result in. */
6068 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6073 switch (DECL_FUNCTION_CODE (fndecl))
6076 builtin_optab = sin_optab; break;
6078 builtin_optab = cos_optab; break;
6079 case BUILT_IN_FSQRT:
6080 builtin_optab = sqrt_optab; break;
6085 /* Compute into TARGET.
6086 Set TARGET to wherever the result comes back. */
6087 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6088 builtin_optab, op0, target, 0);
6090 /* If we were unable to expand via the builtin, stop the
6091 sequence (without outputting the insns) and break, causing
6092 a call the the library function. */
6099 /* Check the results by default. But if flag_fast_math is turned on,
6100 then assume sqrt will always be called with valid arguments. */
6102 if (! flag_fast_math)
6104 /* Don't define the builtin FP instructions
6105 if your machine is not IEEE. */
6106 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6109 lab1 = gen_label_rtx ();
6111 /* Test the result; if it is NaN, set errno=EDOM because
6112 the argument was not in the domain. */
6113 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6114 emit_jump_insn (gen_beq (lab1));
6118 #ifdef GEN_ERRNO_RTX
6119 rtx errno_rtx = GEN_ERRNO_RTX;
6122 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6125 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6128 /* We can't set errno=EDOM directly; let the library call do it.
6129 Pop the arguments right away in case the call gets deleted. */
6131 expand_call (exp, target, 0);
6138 /* Output the entire sequence. */
6139 insns = get_insns ();
6145 case BUILT_IN_SAVEREGS:
6146 /* Don't do __builtin_saveregs more than once in a function.
6147 Save the result of the first call and reuse it. */
6148 if (saveregs_value != 0)
6149 return saveregs_value;
6151 /* When this function is called, it means that registers must be
6152 saved on entry to this function. So we migrate the
6153 call to the first insn of this function. */
6156 rtx valreg, saved_valreg;
6158 /* Now really call the function. `expand_call' does not call
6159 expand_builtin, so there is no danger of infinite recursion here. */
6162 #ifdef EXPAND_BUILTIN_SAVEREGS
6163 /* Do whatever the machine needs done in this case. */
6164 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6166 /* The register where the function returns its value
6167 is likely to have something else in it, such as an argument.
6168 So preserve that register around the call. */
6169 if (value_mode != VOIDmode)
6171 valreg = hard_libcall_value (value_mode);
6172 saved_valreg = gen_reg_rtx (value_mode);
6173 emit_move_insn (saved_valreg, valreg);
6176 /* Generate the call, putting the value in a pseudo. */
6177 temp = expand_call (exp, target, ignore);
6179 if (value_mode != VOIDmode)
6180 emit_move_insn (valreg, saved_valreg);
6186 saveregs_value = temp;
6188 /* This won't work inside a SEQUENCE--it really has to be
6189 at the start of the function. */
6190 if (in_sequence_p ())
6192 /* Better to do this than to crash. */
6193 error ("`va_start' used within `({...})'");
6197 /* Put the sequence after the NOTE that starts the function. */
6198 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6202 /* __builtin_args_info (N) returns word N of the arg space info
6203 for the current function. The number and meanings of words
6204 is controlled by the definition of CUMULATIVE_ARGS. */
6205 case BUILT_IN_ARGS_INFO:
6207 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6209 int *word_ptr = (int *) ¤t_function_args_info;
6210 tree type, elts, result;
6212 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6213 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6214 __FILE__, __LINE__);
6218 tree arg = TREE_VALUE (arglist);
6219 if (TREE_CODE (arg) != INTEGER_CST)
6220 error ("argument of `__builtin_args_info' must be constant");
6223 int wordnum = TREE_INT_CST_LOW (arg);
6225 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6226 error ("argument of `__builtin_args_info' out of range");
6228 return GEN_INT (word_ptr[wordnum]);
6232 error ("missing argument in `__builtin_args_info'");
6237 for (i = 0; i < nwords; i++)
6238 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6240 type = build_array_type (integer_type_node,
6241 build_index_type (build_int_2 (nwords, 0)));
6242 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6243 TREE_CONSTANT (result) = 1;
6244 TREE_STATIC (result) = 1;
6245 result = build (INDIRECT_REF, build_pointer_type (type), result);
6246 TREE_CONSTANT (result) = 1;
6247 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6251 /* Return the address of the first anonymous stack arg. */
6252 case BUILT_IN_NEXT_ARG:
6254 tree fntype = TREE_TYPE (current_function_decl);
6255 if (!(TYPE_ARG_TYPES (fntype) != 0
6256 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6257 != void_type_node)))
6259 error ("`va_start' used in function with fixed args");
6264 return expand_binop (Pmode, add_optab,
6265 current_function_internal_arg_pointer,
6266 current_function_arg_offset_rtx,
6267 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6269 case BUILT_IN_CLASSIFY_TYPE:
6272 tree type = TREE_TYPE (TREE_VALUE (arglist));
6273 enum tree_code code = TREE_CODE (type);
6274 if (code == VOID_TYPE)
6275 return GEN_INT (void_type_class);
6276 if (code == INTEGER_TYPE)
6277 return GEN_INT (integer_type_class);
6278 if (code == CHAR_TYPE)
6279 return GEN_INT (char_type_class);
6280 if (code == ENUMERAL_TYPE)
6281 return GEN_INT (enumeral_type_class);
6282 if (code == BOOLEAN_TYPE)
6283 return GEN_INT (boolean_type_class);
6284 if (code == POINTER_TYPE)
6285 return GEN_INT (pointer_type_class);
6286 if (code == REFERENCE_TYPE)
6287 return GEN_INT (reference_type_class);
6288 if (code == OFFSET_TYPE)
6289 return GEN_INT (offset_type_class);
6290 if (code == REAL_TYPE)
6291 return GEN_INT (real_type_class);
6292 if (code == COMPLEX_TYPE)
6293 return GEN_INT (complex_type_class);
6294 if (code == FUNCTION_TYPE)
6295 return GEN_INT (function_type_class);
6296 if (code == METHOD_TYPE)
6297 return GEN_INT (method_type_class);
6298 if (code == RECORD_TYPE)
6299 return GEN_INT (record_type_class);
6300 if (code == UNION_TYPE)
6301 return GEN_INT (union_type_class);
6302 if (code == ARRAY_TYPE)
6303 return GEN_INT (array_type_class);
6304 if (code == STRING_TYPE)
6305 return GEN_INT (string_type_class);
6306 if (code == SET_TYPE)
6307 return GEN_INT (set_type_class);
6308 if (code == FILE_TYPE)
6309 return GEN_INT (file_type_class);
6310 if (code == LANG_TYPE)
6311 return GEN_INT (lang_type_class);
6313 return GEN_INT (no_type_class);
6315 case BUILT_IN_CONSTANT_P:
6319 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6320 ? const1_rtx : const0_rtx);
6322 case BUILT_IN_FRAME_ADDRESS:
6323 /* The argument must be a nonnegative integer constant.
6324 It counts the number of frames to scan up the stack.
6325 The value is the address of that frame. */
6326 case BUILT_IN_RETURN_ADDRESS:
6327 /* The argument must be a nonnegative integer constant.
6328 It counts the number of frames to scan up the stack.
6329 The value is the return address saved in that frame. */
6331 /* Warning about missing arg was already issued. */
6333 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6335 error ("invalid arg to `__builtin_return_address'");
6338 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6340 error ("invalid arg to `__builtin_return_address'");
6345 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6346 rtx tem = frame_pointer_rtx;
6349 /* Some machines need special handling before we can access arbitrary
6350 frames. For example, on the sparc, we must first flush all
6351 register windows to the stack. */
6352 #ifdef SETUP_FRAME_ADDRESSES
6353 SETUP_FRAME_ADDRESSES ();
6356 /* On the sparc, the return address is not in the frame, it is
6357 in a register. There is no way to access it off of the current
6358 frame pointer, but it can be accessed off the previous frame
6359 pointer by reading the value from the register window save
6361 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6362 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6366 /* Scan back COUNT frames to the specified frame. */
6367 for (i = 0; i < count; i++)
6369 /* Assume the dynamic chain pointer is in the word that
6370 the frame address points to, unless otherwise specified. */
6371 #ifdef DYNAMIC_CHAIN_ADDRESS
6372 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6374 tem = memory_address (Pmode, tem);
6375 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6378 /* For __builtin_frame_address, return what we've got. */
6379 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6382 /* For __builtin_return_address,
6383 Get the return address from that frame. */
6384 #ifdef RETURN_ADDR_RTX
6385 return RETURN_ADDR_RTX (count, tem);
6387 tem = memory_address (Pmode,
6388 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6389 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6393 case BUILT_IN_ALLOCA:
6395 /* Arg could be non-integer if user redeclared this fcn wrong. */
6396 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6398 current_function_calls_alloca = 1;
6399 /* Compute the argument. */
6400 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6402 /* Allocate the desired space. */
6403 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6405 /* Record the new stack level for nonlocal gotos. */
6406 if (nonlocal_goto_handler_slot != 0)
6407 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6411 /* If not optimizing, call the library function. */
6416 /* Arg could be non-integer if user redeclared this fcn wrong. */
6417 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6420 /* Compute the argument. */
6421 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6422 /* Compute ffs, into TARGET if possible.
6423 Set TARGET to wherever the result comes back. */
6424 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6425 ffs_optab, op0, target, 1);
6430 case BUILT_IN_STRLEN:
6431 /* If not optimizing, call the library function. */
6436 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6437 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6441 tree src = TREE_VALUE (arglist);
6442 tree len = c_strlen (src);
6445 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6447 rtx result, src_rtx, char_rtx;
6448 enum machine_mode insn_mode = value_mode, char_mode;
6449 enum insn_code icode;
6451 /* If the length is known, just return it. */
6453 return expand_expr (len, target, mode, 0);
6455 /* If SRC is not a pointer type, don't do this operation inline. */
6459 /* Call a function if we can't compute strlen in the right mode. */
6461 while (insn_mode != VOIDmode)
6463 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6464 if (icode != CODE_FOR_nothing)
6467 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6469 if (insn_mode == VOIDmode)
6472 /* Make a place to write the result of the instruction. */
6475 && GET_CODE (result) == REG
6476 && GET_MODE (result) == insn_mode
6477 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6478 result = gen_reg_rtx (insn_mode);
6480 /* Make sure the operands are acceptable to the predicates. */
6482 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6483 result = gen_reg_rtx (insn_mode);
6485 src_rtx = memory_address (BLKmode,
6486 expand_expr (src, NULL_RTX, Pmode,
6488 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6489 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6491 char_rtx = const0_rtx;
6492 char_mode = insn_operand_mode[(int)icode][2];
6493 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6494 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6496 emit_insn (GEN_FCN (icode) (result,
6497 gen_rtx (MEM, BLKmode, src_rtx),
6498 char_rtx, GEN_INT (align)));
6500 /* Return the value in the proper mode for this function. */
6501 if (GET_MODE (result) == value_mode)
6503 else if (target != 0)
6505 convert_move (target, result, 0);
6509 return convert_to_mode (value_mode, result, 0);
6512 case BUILT_IN_STRCPY:
6513 /* If not optimizing, call the library function. */
6518 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6519 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6520 || TREE_CHAIN (arglist) == 0
6521 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6525 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6530 len = size_binop (PLUS_EXPR, len, integer_one_node);
6532 chainon (arglist, build_tree_list (NULL_TREE, len));
6536 case BUILT_IN_MEMCPY:
6537 /* If not optimizing, call the library function. */
6542 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6543 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6544 || TREE_CHAIN (arglist) == 0
6545 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6546 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6547 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6551 tree dest = TREE_VALUE (arglist);
6552 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6553 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6556 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6558 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6559 rtx dest_rtx, dest_mem, src_mem;
6561 /* If either SRC or DEST is not a pointer type, don't do
6562 this operation in-line. */
6563 if (src_align == 0 || dest_align == 0)
6565 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6566 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6570 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6571 dest_mem = gen_rtx (MEM, BLKmode,
6572 memory_address (BLKmode, dest_rtx));
6573 src_mem = gen_rtx (MEM, BLKmode,
6574 memory_address (BLKmode,
6575 expand_expr (src, NULL_RTX,
6579 /* Copy word part most expediently. */
6580 emit_block_move (dest_mem, src_mem,
6581 expand_expr (len, NULL_RTX, VOIDmode, 0),
6582 MIN (src_align, dest_align));
6586 /* These comparison functions need an instruction that returns an actual
6587 index. An ordinary compare that just sets the condition codes
6589 #ifdef HAVE_cmpstrsi
6590 case BUILT_IN_STRCMP:
6591 /* If not optimizing, call the library function. */
6596 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6597 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6598 || TREE_CHAIN (arglist) == 0
6599 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6601 else if (!HAVE_cmpstrsi)
6604 tree arg1 = TREE_VALUE (arglist);
6605 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6609 len = c_strlen (arg1);
6611 len = size_binop (PLUS_EXPR, integer_one_node, len);
6612 len2 = c_strlen (arg2);
6614 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6616 /* If we don't have a constant length for the first, use the length
6617 of the second, if we know it. We don't require a constant for
6618 this case; some cost analysis could be done if both are available
6619 but neither is constant. For now, assume they're equally cheap.
6621 If both strings have constant lengths, use the smaller. This
6622 could arise if optimization results in strcpy being called with
6623 two fixed strings, or if the code was machine-generated. We should
6624 add some code to the `memcmp' handler below to deal with such
6625 situations, someday. */
6626 if (!len || TREE_CODE (len) != INTEGER_CST)
6633 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6635 if (tree_int_cst_lt (len2, len))
6639 chainon (arglist, build_tree_list (NULL_TREE, len));
6643 case BUILT_IN_MEMCMP:
6644 /* If not optimizing, call the library function. */
6649 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6650 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6651 || TREE_CHAIN (arglist) == 0
6652 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6653 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6654 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6656 else if (!HAVE_cmpstrsi)
6659 tree arg1 = TREE_VALUE (arglist);
6660 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6661 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6665 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6667 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6668 enum machine_mode insn_mode
6669 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6671 /* If we don't have POINTER_TYPE, call the function. */
6672 if (arg1_align == 0 || arg2_align == 0)
6674 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6675 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6679 /* Make a place to write the result of the instruction. */
6682 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6683 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6684 result = gen_reg_rtx (insn_mode);
6686 emit_insn (gen_cmpstrsi (result,
6687 gen_rtx (MEM, BLKmode,
6688 expand_expr (arg1, NULL_RTX, Pmode,
6690 gen_rtx (MEM, BLKmode,
6691 expand_expr (arg2, NULL_RTX, Pmode,
6693 expand_expr (len, NULL_RTX, VOIDmode, 0),
6694 GEN_INT (MIN (arg1_align, arg2_align))));
6696 /* Return the value in the proper mode for this function. */
6697 mode = TYPE_MODE (TREE_TYPE (exp));
6698 if (GET_MODE (result) == mode)
6700 else if (target != 0)
6702 convert_move (target, result, 0);
6706 return convert_to_mode (mode, result, 0);
6709 case BUILT_IN_STRCMP:
6710 case BUILT_IN_MEMCMP:
6714 default: /* just do library call, if unknown builtin */
6715 error ("built-in function `%s' not currently supported",
6716 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6719 /* The switch statement above can drop through to cause the function
6720 to be called normally. */
6722 return expand_call (exp, target, ignore);
6725 /* Expand code for a post- or pre- increment or decrement
6726 and return the RTX for the result.
6727 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6730 expand_increment (exp, post)
6734 register rtx op0, op1;
6735 register rtx temp, value;
6736 register tree incremented = TREE_OPERAND (exp, 0);
6737 optab this_optab = add_optab;
6739 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6740 int op0_is_copy = 0;
6742 /* Stabilize any component ref that might need to be
6743 evaluated more than once below. */
6745 || TREE_CODE (incremented) == BIT_FIELD_REF
6746 || (TREE_CODE (incremented) == COMPONENT_REF
6747 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6748 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6749 incremented = stabilize_reference (incremented);
6751 /* Compute the operands as RTX.
6752 Note whether OP0 is the actual lvalue or a copy of it:
6753 I believe it is a copy iff it is a register or subreg
6754 and insns were generated in computing it. */
6756 temp = get_last_insn ();
6757 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6759 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6760 in place but intead must do sign- or zero-extension during assignment,
6761 so we copy it into a new register and let the code below use it as
6764 Note that we can safely modify this SUBREG since it is know not to be
6765 shared (it was made by the expand_expr call above). */
6767 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6768 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6770 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6771 && temp != get_last_insn ());
6772 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6774 /* Decide whether incrementing or decrementing. */
6775 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6776 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6777 this_optab = sub_optab;
6779 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6780 then we cannot just increment OP0. We must therefore contrive to
6781 increment the original value. Then, for postincrement, we can return
6782 OP0 since it is a copy of the old value. For preincrement, we want
6783 to always expand here, since this generates better or equivalent code. */
6784 if (!post || op0_is_copy)
6786 /* This is the easiest way to increment the value wherever it is.
6787 Problems with multiple evaluation of INCREMENTED are prevented
6788 because either (1) it is a component_ref or preincrement,
6789 in which case it was stabilized above, or (2) it is an array_ref
6790 with constant index in an array in a register, which is
6791 safe to reevaluate. */
6792 tree newexp = build ((this_optab == add_optab
6793 ? PLUS_EXPR : MINUS_EXPR),
6796 TREE_OPERAND (exp, 1));
6797 temp = expand_assignment (incremented, newexp, ! post, 0);
6798 return post ? op0 : temp;
6801 /* Convert decrement by a constant into a negative increment. */
6802 if (this_optab == sub_optab
6803 && GET_CODE (op1) == CONST_INT)
6805 op1 = GEN_INT (- INTVAL (op1));
6806 this_optab = add_optab;
6811 /* We have a true reference to the value in OP0.
6812 If there is an insn to add or subtract in this mode, queue it. */
6814 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6815 op0 = stabilize (op0);
6818 icode = (int) this_optab->handlers[(int) mode].insn_code;
6819 if (icode != (int) CODE_FOR_nothing
6820 /* Make sure that OP0 is valid for operands 0 and 1
6821 of the insn we want to queue. */
6822 && (*insn_operand_predicate[icode][0]) (op0, mode)
6823 && (*insn_operand_predicate[icode][1]) (op0, mode))
6825 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6826 op1 = force_reg (mode, op1);
6828 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6832 /* Preincrement, or we can't increment with one simple insn. */
6834 /* Save a copy of the value before inc or dec, to return it later. */
6835 temp = value = copy_to_reg (op0);
6837 /* Arrange to return the incremented value. */
6838 /* Copy the rtx because expand_binop will protect from the queue,
6839 and the results of that would be invalid for us to return
6840 if our caller does emit_queue before using our result. */
6841 temp = copy_rtx (value = op0);
6843 /* Increment however we can. */
6844 op1 = expand_binop (mode, this_optab, value, op1, op0,
6845 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6846 /* Make sure the value is stored into OP0. */
6848 emit_move_insn (op0, op1);
6853 /* Expand all function calls contained within EXP, innermost ones first.
6854 But don't look within expressions that have sequence points.
6855 For each CALL_EXPR, record the rtx for its value
6856 in the CALL_EXPR_RTL field. */
6859 preexpand_calls (exp)
6862 register int nops, i;
6863 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6865 if (! do_preexpand_calls)
6868 /* Only expressions and references can contain calls. */
6870 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6873 switch (TREE_CODE (exp))
6876 /* Do nothing if already expanded. */
6877 if (CALL_EXPR_RTL (exp) != 0)
6880 /* Do nothing to built-in functions. */
6881 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6882 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6883 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6884 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6889 case TRUTH_ANDIF_EXPR:
6890 case TRUTH_ORIF_EXPR:
6891 /* If we find one of these, then we can be sure
6892 the adjust will be done for it (since it makes jumps).
6893 Do it now, so that if this is inside an argument
6894 of a function, we don't get the stack adjustment
6895 after some other args have already been pushed. */
6896 do_pending_stack_adjust ();
6901 case WITH_CLEANUP_EXPR:
6905 if (SAVE_EXPR_RTL (exp) != 0)
6909 nops = tree_code_length[(int) TREE_CODE (exp)];
6910 for (i = 0; i < nops; i++)
6911 if (TREE_OPERAND (exp, i) != 0)
6913 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6914 if (type == 'e' || type == '<' || type == '1' || type == '2'
6916 preexpand_calls (TREE_OPERAND (exp, i));
6920 /* At the start of a function, record that we have no previously-pushed
6921 arguments waiting to be popped. */
6924 init_pending_stack_adjust ()
6926 pending_stack_adjust = 0;
6929 /* When exiting from function, if safe, clear out any pending stack adjust
6930 so the adjustment won't get done. */
6933 clear_pending_stack_adjust ()
6935 #ifdef EXIT_IGNORE_STACK
6936 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6937 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6938 && ! flag_inline_functions)
6939 pending_stack_adjust = 0;
6943 /* Pop any previously-pushed arguments that have not been popped yet. */
6946 do_pending_stack_adjust ()
6948 if (inhibit_defer_pop == 0)
6950 if (pending_stack_adjust != 0)
6951 adjust_stack (GEN_INT (pending_stack_adjust));
6952 pending_stack_adjust = 0;
6956 /* Expand all cleanups up to OLD_CLEANUPS.
6957 Needed here, and also for language-dependent calls. */
6960 expand_cleanups_to (old_cleanups)
6963 while (cleanups_this_call != old_cleanups)
6965 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6966 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6970 /* Expand conditional expressions. */
6972 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6973 LABEL is an rtx of code CODE_LABEL, in this function and all the
6977 jumpifnot (exp, label)
6981 do_jump (exp, label, NULL_RTX);
6984 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6991 do_jump (exp, NULL_RTX, label);
6994 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6995 the result is zero, or IF_TRUE_LABEL if the result is one.
6996 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6997 meaning fall through in that case.
6999 do_jump always does any pending stack adjust except when it does not
7000 actually perform a jump. An example where there is no jump
7001 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7003 This function is responsible for optimizing cases such as
7004 &&, || and comparison operators in EXP. */
7007 do_jump (exp, if_false_label, if_true_label)
7009 rtx if_false_label, if_true_label;
7011 register enum tree_code code = TREE_CODE (exp);
7012 /* Some cases need to create a label to jump to
7013 in order to properly fall through.
7014 These cases set DROP_THROUGH_LABEL nonzero. */
7015 rtx drop_through_label = 0;
7029 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7035 /* This is not true with #pragma weak */
7037 /* The address of something can never be zero. */
7039 emit_jump (if_true_label);
7044 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7045 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7046 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7049 /* If we are narrowing the operand, we have to do the compare in the
7051 if ((TYPE_PRECISION (TREE_TYPE (exp))
7052 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7054 case NON_LVALUE_EXPR:
7055 case REFERENCE_EXPR:
7060 /* These cannot change zero->non-zero or vice versa. */
7061 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7065 /* This is never less insns than evaluating the PLUS_EXPR followed by
7066 a test and can be longer if the test is eliminated. */
7068 /* Reduce to minus. */
7069 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7070 TREE_OPERAND (exp, 0),
7071 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7072 TREE_OPERAND (exp, 1))));
7073 /* Process as MINUS. */
7077 /* Non-zero iff operands of minus differ. */
7078 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7079 TREE_OPERAND (exp, 0),
7080 TREE_OPERAND (exp, 1)),
7085 /* If we are AND'ing with a small constant, do this comparison in the
7086 smallest type that fits. If the machine doesn't have comparisons
7087 that small, it will be converted back to the wider comparison.
7088 This helps if we are testing the sign bit of a narrower object.
7089 combine can't do this for us because it can't know whether a
7090 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7092 if (! SLOW_BYTE_ACCESS
7093 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7094 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7095 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7096 && (type = type_for_size (i + 1, 1)) != 0
7097 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7098 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7099 != CODE_FOR_nothing))
7101 do_jump (convert (type, exp), if_false_label, if_true_label);
7106 case TRUTH_NOT_EXPR:
7107 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7110 case TRUTH_ANDIF_EXPR:
7111 if (if_false_label == 0)
7112 if_false_label = drop_through_label = gen_label_rtx ();
7113 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7114 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7117 case TRUTH_ORIF_EXPR:
7118 if (if_true_label == 0)
7119 if_true_label = drop_through_label = gen_label_rtx ();
7120 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7121 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7125 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7128 do_pending_stack_adjust ();
7129 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7136 int bitsize, bitpos, unsignedp;
7137 enum machine_mode mode;
7142 /* Get description of this reference. We don't actually care
7143 about the underlying object here. */
7144 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7145 &mode, &unsignedp, &volatilep);
7147 type = type_for_size (bitsize, unsignedp);
7148 if (! SLOW_BYTE_ACCESS
7149 && type != 0 && bitsize >= 0
7150 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7151 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7152 != CODE_FOR_nothing))
7154 do_jump (convert (type, exp), if_false_label, if_true_label);
7161 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7162 if (integer_onep (TREE_OPERAND (exp, 1))
7163 && integer_zerop (TREE_OPERAND (exp, 2)))
7164 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7166 else if (integer_zerop (TREE_OPERAND (exp, 1))
7167 && integer_onep (TREE_OPERAND (exp, 2)))
7168 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7172 register rtx label1 = gen_label_rtx ();
7173 drop_through_label = gen_label_rtx ();
7174 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7175 /* Now the THEN-expression. */
7176 do_jump (TREE_OPERAND (exp, 1),
7177 if_false_label ? if_false_label : drop_through_label,
7178 if_true_label ? if_true_label : drop_through_label);
7179 /* In case the do_jump just above never jumps. */
7180 do_pending_stack_adjust ();
7181 emit_label (label1);
7182 /* Now the ELSE-expression. */
7183 do_jump (TREE_OPERAND (exp, 2),
7184 if_false_label ? if_false_label : drop_through_label,
7185 if_true_label ? if_true_label : drop_through_label);
7190 if (integer_zerop (TREE_OPERAND (exp, 1)))
7191 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7192 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7195 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7196 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7198 comparison = compare (exp, EQ, EQ);
7202 if (integer_zerop (TREE_OPERAND (exp, 1)))
7203 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7204 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7207 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7208 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7210 comparison = compare (exp, NE, NE);
7214 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7216 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7217 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7219 comparison = compare (exp, LT, LTU);
7223 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7225 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7226 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7228 comparison = compare (exp, LE, LEU);
7232 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7234 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7235 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7237 comparison = compare (exp, GT, GTU);
7241 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7243 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7244 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7246 comparison = compare (exp, GE, GEU);
7251 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7253 /* This is not needed any more and causes poor code since it causes
7254 comparisons and tests from non-SI objects to have different code
7256 /* Copy to register to avoid generating bad insns by cse
7257 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7258 if (!cse_not_expected && GET_CODE (temp) == MEM)
7259 temp = copy_to_reg (temp);
7261 do_pending_stack_adjust ();
7262 if (GET_CODE (temp) == CONST_INT)
7263 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7264 else if (GET_CODE (temp) == LABEL_REF)
7265 comparison = const_true_rtx;
7266 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7267 && !can_compare_p (GET_MODE (temp)))
7268 /* Note swapping the labels gives us not-equal. */
7269 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7270 else if (GET_MODE (temp) != VOIDmode)
7271 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7272 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7273 GET_MODE (temp), NULL_RTX, 0);
7278 /* Do any postincrements in the expression that was tested. */
7281 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7282 straight into a conditional jump instruction as the jump condition.
7283 Otherwise, all the work has been done already. */
7285 if (comparison == const_true_rtx)
7288 emit_jump (if_true_label);
7290 else if (comparison == const0_rtx)
7293 emit_jump (if_false_label);
7295 else if (comparison)
7296 do_jump_for_compare (comparison, if_false_label, if_true_label);
7300 if (drop_through_label)
7302 /* If do_jump produces code that might be jumped around,
7303 do any stack adjusts from that code, before the place
7304 where control merges in. */
7305 do_pending_stack_adjust ();
7306 emit_label (drop_through_label);
7310 /* Given a comparison expression EXP for values too wide to be compared
7311 with one insn, test the comparison and jump to the appropriate label.
7312 The code of EXP is ignored; we always test GT if SWAP is 0,
7313 and LT if SWAP is 1. */
7316 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7319 rtx if_false_label, if_true_label;
7321 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7322 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7323 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7324 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7325 rtx drop_through_label = 0;
7326 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7329 if (! if_true_label || ! if_false_label)
7330 drop_through_label = gen_label_rtx ();
7331 if (! if_true_label)
7332 if_true_label = drop_through_label;
7333 if (! if_false_label)
7334 if_false_label = drop_through_label;
7336 /* Compare a word at a time, high order first. */
7337 for (i = 0; i < nwords; i++)
7340 rtx op0_word, op1_word;
7342 if (WORDS_BIG_ENDIAN)
7344 op0_word = operand_subword_force (op0, i, mode);
7345 op1_word = operand_subword_force (op1, i, mode);
7349 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7350 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7353 /* All but high-order word must be compared as unsigned. */
7354 comp = compare_from_rtx (op0_word, op1_word,
7355 (unsignedp || i > 0) ? GTU : GT,
7356 unsignedp, word_mode, NULL_RTX, 0);
7357 if (comp == const_true_rtx)
7358 emit_jump (if_true_label);
7359 else if (comp != const0_rtx)
7360 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7362 /* Consider lower words only if these are equal. */
7363 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7365 if (comp == const_true_rtx)
7366 emit_jump (if_false_label);
7367 else if (comp != const0_rtx)
7368 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7372 emit_jump (if_false_label);
7373 if (drop_through_label)
7374 emit_label (drop_through_label);
7377 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7378 with one insn, test the comparison and jump to the appropriate label. */
7381 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7383 rtx if_false_label, if_true_label;
7385 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7386 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7387 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7388 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7390 rtx drop_through_label = 0;
7392 if (! if_false_label)
7393 drop_through_label = if_false_label = gen_label_rtx ();
7395 for (i = 0; i < nwords; i++)
7397 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7398 operand_subword_force (op1, i, mode),
7399 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7400 word_mode, NULL_RTX, 0);
7401 if (comp == const_true_rtx)
7402 emit_jump (if_false_label);
7403 else if (comp != const0_rtx)
7404 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7408 emit_jump (if_true_label);
7409 if (drop_through_label)
7410 emit_label (drop_through_label);
7413 /* Jump according to whether OP0 is 0.
7414 We assume that OP0 has an integer mode that is too wide
7415 for the available compare insns. */
7418 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7420 rtx if_false_label, if_true_label;
7422 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7424 rtx drop_through_label = 0;
7426 if (! if_false_label)
7427 drop_through_label = if_false_label = gen_label_rtx ();
7429 for (i = 0; i < nwords; i++)
7431 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7433 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7434 if (comp == const_true_rtx)
7435 emit_jump (if_false_label);
7436 else if (comp != const0_rtx)
7437 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7441 emit_jump (if_true_label);
7442 if (drop_through_label)
7443 emit_label (drop_through_label);
7446 /* Given a comparison expression in rtl form, output conditional branches to
7447 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7450 do_jump_for_compare (comparison, if_false_label, if_true_label)
7451 rtx comparison, if_false_label, if_true_label;
7455 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7456 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7461 emit_jump (if_false_label);
7463 else if (if_false_label)
7466 rtx prev = PREV_INSN (get_last_insn ());
7469 /* Output the branch with the opposite condition. Then try to invert
7470 what is generated. If more than one insn is a branch, or if the
7471 branch is not the last insn written, abort. If we can't invert
7472 the branch, emit make a true label, redirect this jump to that,
7473 emit a jump to the false label and define the true label. */
7475 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7476 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7480 /* Here we get the insn before what was just emitted.
7481 On some machines, emitting the branch can discard
7482 the previous compare insn and emit a replacement. */
7484 /* If there's only one preceding insn... */
7485 insn = get_insns ();
7487 insn = NEXT_INSN (prev);
7489 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7490 if (GET_CODE (insn) == JUMP_INSN)
7497 if (branch != get_last_insn ())
7500 if (! invert_jump (branch, if_false_label))
7502 if_true_label = gen_label_rtx ();
7503 redirect_jump (branch, if_true_label);
7504 emit_jump (if_false_label);
7505 emit_label (if_true_label);
7510 /* Generate code for a comparison expression EXP
7511 (including code to compute the values to be compared)
7512 and set (CC0) according to the result.
7513 SIGNED_CODE should be the rtx operation for this comparison for
7514 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7516 We force a stack adjustment unless there are currently
7517 things pushed on the stack that aren't yet used. */
7520 compare (exp, signed_code, unsigned_code)
7522 enum rtx_code signed_code, unsigned_code;
7525 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7527 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7528 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7529 register enum machine_mode mode = TYPE_MODE (type);
7530 int unsignedp = TREE_UNSIGNED (type);
7531 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7533 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7535 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7536 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7539 /* Like compare but expects the values to compare as two rtx's.
7540 The decision as to signed or unsigned comparison must be made by the caller.
7542 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7545 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7546 size of MODE should be used. */
7549 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7550 register rtx op0, op1;
7553 enum machine_mode mode;
7559 /* If one operand is constant, make it the second one. Only do this
7560 if the other operand is not constant as well. */
7562 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7563 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7568 code = swap_condition (code);
7573 op0 = force_not_mem (op0);
7574 op1 = force_not_mem (op1);
7577 do_pending_stack_adjust ();
7579 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7580 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7584 /* There's no need to do this now that combine.c can eliminate lots of
7585 sign extensions. This can be less efficient in certain cases on other
7588 /* If this is a signed equality comparison, we can do it as an
7589 unsigned comparison since zero-extension is cheaper than sign
7590 extension and comparisons with zero are done as unsigned. This is
7591 the case even on machines that can do fast sign extension, since
7592 zero-extension is easier to combine with other operations than
7593 sign-extension is. If we are comparing against a constant, we must
7594 convert it to what it would look like unsigned. */
7595 if ((code == EQ || code == NE) && ! unsignedp
7596 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7598 if (GET_CODE (op1) == CONST_INT
7599 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7600 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7605 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7607 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7610 /* Generate code to calculate EXP using a store-flag instruction
7611 and return an rtx for the result. EXP is either a comparison
7612 or a TRUTH_NOT_EXPR whose operand is a comparison.
7614 If TARGET is nonzero, store the result there if convenient.
7616 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7619 Return zero if there is no suitable set-flag instruction
7620 available on this machine.
7622 Once expand_expr has been called on the arguments of the comparison,
7623 we are committed to doing the store flag, since it is not safe to
7624 re-evaluate the expression. We emit the store-flag insn by calling
7625 emit_store_flag, but only expand the arguments if we have a reason
7626 to believe that emit_store_flag will be successful. If we think that
7627 it will, but it isn't, we have to simulate the store-flag with a
7628 set/jump/set sequence. */
7631 do_store_flag (exp, target, mode, only_cheap)
7634 enum machine_mode mode;
7638 tree arg0, arg1, type;
7640 enum machine_mode operand_mode;
7644 enum insn_code icode;
7645 rtx subtarget = target;
7646 rtx result, label, pattern, jump_pat;
7648 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7649 result at the end. We can't simply invert the test since it would
7650 have already been inverted if it were valid. This case occurs for
7651 some floating-point comparisons. */
7653 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7654 invert = 1, exp = TREE_OPERAND (exp, 0);
7656 arg0 = TREE_OPERAND (exp, 0);
7657 arg1 = TREE_OPERAND (exp, 1);
7658 type = TREE_TYPE (arg0);
7659 operand_mode = TYPE_MODE (type);
7660 unsignedp = TREE_UNSIGNED (type);
7662 /* We won't bother with BLKmode store-flag operations because it would mean
7663 passing a lot of information to emit_store_flag. */
7664 if (operand_mode == BLKmode)
7670 /* Get the rtx comparison code to use. We know that EXP is a comparison
7671 operation of some type. Some comparisons against 1 and -1 can be
7672 converted to comparisons with zero. Do so here so that the tests
7673 below will be aware that we have a comparison with zero. These
7674 tests will not catch constants in the first operand, but constants
7675 are rarely passed as the first operand. */
7677 switch (TREE_CODE (exp))
7686 if (integer_onep (arg1))
7687 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7689 code = unsignedp ? LTU : LT;
7692 if (integer_all_onesp (arg1))
7693 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7695 code = unsignedp ? LEU : LE;
7698 if (integer_all_onesp (arg1))
7699 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7701 code = unsignedp ? GTU : GT;
7704 if (integer_onep (arg1))
7705 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7707 code = unsignedp ? GEU : GE;
7713 /* Put a constant second. */
7714 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7716 tem = arg0; arg0 = arg1; arg1 = tem;
7717 code = swap_condition (code);
7720 /* If this is an equality or inequality test of a single bit, we can
7721 do this by shifting the bit being tested to the low-order bit and
7722 masking the result with the constant 1. If the condition was EQ,
7723 we xor it with 1. This does not require an scc insn and is faster
7724 than an scc insn even if we have it. */
7726 if ((code == NE || code == EQ)
7727 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7728 && integer_pow2p (TREE_OPERAND (arg0, 1))
7729 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7731 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7732 NULL_RTX, VOIDmode, 0)));
7734 if (subtarget == 0 || GET_CODE (subtarget) != REG
7735 || GET_MODE (subtarget) != operand_mode
7736 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7739 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7742 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7743 size_int (bitnum), target, 1);
7745 if (GET_MODE (op0) != mode)
7746 op0 = convert_to_mode (mode, op0, 1);
7748 if (bitnum != TYPE_PRECISION (type) - 1)
7749 op0 = expand_and (op0, const1_rtx, target);
7751 if ((code == EQ && ! invert) || (code == NE && invert))
7752 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7758 /* Now see if we are likely to be able to do this. Return if not. */
7759 if (! can_compare_p (operand_mode))
7761 icode = setcc_gen_code[(int) code];
7762 if (icode == CODE_FOR_nothing
7763 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7765 /* We can only do this if it is one of the special cases that
7766 can be handled without an scc insn. */
7767 if ((code == LT && integer_zerop (arg1))
7768 || (! only_cheap && code == GE && integer_zerop (arg1)))
7770 else if (BRANCH_COST >= 0
7771 && ! only_cheap && (code == NE || code == EQ)
7772 && TREE_CODE (type) != REAL_TYPE
7773 && ((abs_optab->handlers[(int) operand_mode].insn_code
7774 != CODE_FOR_nothing)
7775 || (ffs_optab->handlers[(int) operand_mode].insn_code
7776 != CODE_FOR_nothing)))
7782 preexpand_calls (exp);
7783 if (subtarget == 0 || GET_CODE (subtarget) != REG
7784 || GET_MODE (subtarget) != operand_mode
7785 || ! safe_from_p (subtarget, arg1))
7788 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7789 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7792 target = gen_reg_rtx (mode);
7794 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7795 because, if the emit_store_flag does anything it will succeed and
7796 OP0 and OP1 will not be used subsequently. */
7798 result = emit_store_flag (target, code,
7799 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7800 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7801 operand_mode, unsignedp, 1);
7806 result = expand_binop (mode, xor_optab, result, const1_rtx,
7807 result, 0, OPTAB_LIB_WIDEN);
7811 /* If this failed, we have to do this with set/compare/jump/set code. */
7812 if (target == 0 || GET_CODE (target) != REG
7813 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7814 target = gen_reg_rtx (GET_MODE (target));
7816 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7817 result = compare_from_rtx (op0, op1, code, unsignedp,
7818 operand_mode, NULL_RTX, 0);
7819 if (GET_CODE (result) == CONST_INT)
7820 return (((result == const0_rtx && ! invert)
7821 || (result != const0_rtx && invert))
7822 ? const0_rtx : const1_rtx);
7824 label = gen_label_rtx ();
7825 if (bcc_gen_fctn[(int) code] == 0)
7828 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7829 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7835 /* Generate a tablejump instruction (used for switch statements). */
7837 #ifdef HAVE_tablejump
7839 /* INDEX is the value being switched on, with the lowest value
7840 in the table already subtracted.
7841 MODE is its expected mode (needed if INDEX is constant).
7842 RANGE is the length of the jump table.
7843 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7845 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7846 index value is out of range. */
7849 do_tablejump (index, mode, range, table_label, default_label)
7850 rtx index, range, table_label, default_label;
7851 enum machine_mode mode;
7853 register rtx temp, vector;
7855 /* Do an unsigned comparison (in the proper mode) between the index
7856 expression and the value which represents the length of the range.
7857 Since we just finished subtracting the lower bound of the range
7858 from the index expression, this comparison allows us to simultaneously
7859 check that the original index expression value is both greater than
7860 or equal to the minimum value of the range and less than or equal to
7861 the maximum value of the range. */
7863 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
7864 emit_jump_insn (gen_bltu (default_label));
7866 /* If index is in range, it must fit in Pmode.
7867 Convert to Pmode so we can index with it. */
7869 index = convert_to_mode (Pmode, index, 1);
7871 /* If flag_force_addr were to affect this address
7872 it could interfere with the tricky assumptions made
7873 about addresses that contain label-refs,
7874 which may be valid only very near the tablejump itself. */
7875 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7876 GET_MODE_SIZE, because this indicates how large insns are. The other
7877 uses should all be Pmode, because they are addresses. This code
7878 could fail if addresses and insns are not the same size. */
7879 index = memory_address_noforce
7881 gen_rtx (PLUS, Pmode,
7882 gen_rtx (MULT, Pmode, index,
7883 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7884 gen_rtx (LABEL_REF, Pmode, table_label)));
7885 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7886 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7887 RTX_UNCHANGING_P (vector) = 1;
7888 convert_move (temp, vector, 0);
7890 emit_jump_insn (gen_tablejump (temp, table_label));
7892 #ifndef CASE_VECTOR_PC_RELATIVE
7893 /* If we are generating PIC code or if the table is PC-relative, the
7894 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7900 #endif /* HAVE_tablejump */