1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
32 #include "typeclass.h"
34 #define CEIL(x,y) (((x) + (y) - 1) / (y))
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
50 #ifndef STACK_PUSH_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_PUSH_CODE PRE_DEC
54 #define STACK_PUSH_CODE PRE_INC
58 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
59 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
61 /* If this is nonzero, we do not bother generating VOLATILE
62 around volatile memory references, and we are willing to
63 output indirect addresses. If cse is to follow, we reject
64 indirect addresses so a useful potential cse is generated;
65 if it is used only once, instruction combination will produce
66 the same indirect address eventually. */
69 /* Nonzero to generate code for all the subroutines within an
70 expression before generating the upper levels of the expression.
71 Nowadays this is never zero. */
72 int do_preexpand_calls = 1;
74 /* Number of units that we should eventually pop off the stack.
75 These are the arguments to function calls that have already returned. */
76 int pending_stack_adjust;
78 /* Nonzero means stack pops must not be deferred, and deferred stack
79 pops must not be output. It is nonzero inside a function call,
80 inside a conditional expression, inside a statement expression,
81 and in other cases as well. */
82 int inhibit_defer_pop;
84 /* A list of all cleanups which belong to the arguments of
85 function calls being expanded by expand_call. */
86 tree cleanups_this_call;
88 /* Nonzero means __builtin_saveregs has already been done in this function.
89 The value is the pseudoreg containing the value __builtin_saveregs
91 static rtx saveregs_value;
93 /* Similarly for __builtin_apply_args. */
94 static rtx apply_args_value;
96 /* This structure is used by move_by_pieces to describe the move to
108 int explicit_inc_from;
114 static rtx enqueue_insn PROTO((rtx, rtx));
115 static int queued_subexp_p PROTO((rtx));
116 static void init_queue PROTO((void));
117 static void move_by_pieces PROTO((rtx, rtx, int, int));
118 static int move_by_pieces_ninsns PROTO((unsigned int, int));
119 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
120 struct move_by_pieces *));
121 static void group_insns PROTO((rtx));
122 static void store_constructor PROTO((tree, rtx));
123 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
124 enum machine_mode, int, int, int));
125 static tree save_noncopied_parts PROTO((tree, tree));
126 static tree init_noncopied_parts PROTO((tree, tree));
127 static int safe_from_p PROTO((rtx, tree));
128 static int fixed_type_p PROTO((tree));
129 static int get_pointer_alignment PROTO((tree, unsigned));
130 static tree string_constant PROTO((tree, tree *));
131 static tree c_strlen PROTO((tree));
132 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
133 static int apply_args_size PROTO((void));
134 static int apply_result_size PROTO((void));
135 static rtx result_vector PROTO((int, rtx));
136 static rtx expand_builtin_apply_args PROTO((void));
137 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
138 static void expand_builtin_return PROTO((rtx));
139 static rtx expand_increment PROTO((tree, int));
140 static void preexpand_calls PROTO((tree));
141 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
142 static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
143 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
144 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
145 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
146 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
147 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
149 /* Record for each mode whether we can move a register directly to or
150 from an object of that mode in memory. If we can't, we won't try
151 to use that mode directly when accessing a field of that mode. */
153 static char direct_load[NUM_MACHINE_MODES];
154 static char direct_store[NUM_MACHINE_MODES];
156 /* MOVE_RATIO is the number of move instructions that is better than
160 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
163 /* A value of around 6 would minimize code size; infinity would minimize
165 #define MOVE_RATIO 15
169 /* This array records the insn_code of insns to perform block moves. */
170 enum insn_code movstr_optab[NUM_MACHINE_MODES];
172 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
174 #ifndef SLOW_UNALIGNED_ACCESS
175 #define SLOW_UNALIGNED_ACCESS 0
178 /* Register mappings for target machines without register windows. */
179 #ifndef INCOMING_REGNO
180 #define INCOMING_REGNO(OUT) (OUT)
182 #ifndef OUTGOING_REGNO
183 #define OUTGOING_REGNO(IN) (IN)
186 /* This is run once per compilation to set up which modes can be used
187 directly in memory and to initialize the block move optab. */
193 enum machine_mode mode;
194 /* Try indexing by frame ptr and try by stack ptr.
195 It is known that on the Convex the stack ptr isn't a valid index.
196 With luck, one or the other is valid on any machine. */
197 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
198 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
201 insn = emit_insn (gen_rtx (SET, 0, 0));
202 pat = PATTERN (insn);
204 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
205 mode = (enum machine_mode) ((int) mode + 1))
211 direct_load[(int) mode] = direct_store[(int) mode] = 0;
212 PUT_MODE (mem, mode);
213 PUT_MODE (mem1, mode);
215 /* See if there is some register that can be used in this mode and
216 directly loaded or stored from memory. */
218 if (mode != VOIDmode && mode != BLKmode)
219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
220 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
223 if (! HARD_REGNO_MODE_OK (regno, mode))
226 reg = gen_rtx (REG, mode, regno);
229 SET_DEST (pat) = reg;
230 if (recog (pat, insn, &num_clobbers) >= 0)
231 direct_load[(int) mode] = 1;
233 SET_SRC (pat) = mem1;
234 SET_DEST (pat) = reg;
235 if (recog (pat, insn, &num_clobbers) >= 0)
236 direct_load[(int) mode] = 1;
239 SET_DEST (pat) = mem;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_store[(int) mode] = 1;
244 SET_DEST (pat) = mem1;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_store[(int) mode] = 1;
253 /* This is run at the start of compiling a function. */
260 pending_stack_adjust = 0;
261 inhibit_defer_pop = 0;
262 cleanups_this_call = 0;
264 apply_args_value = 0;
268 /* Save all variables describing the current status into the structure *P.
269 This is used before starting a nested function. */
275 /* Instead of saving the postincrement queue, empty it. */
278 p->pending_stack_adjust = pending_stack_adjust;
279 p->inhibit_defer_pop = inhibit_defer_pop;
280 p->cleanups_this_call = cleanups_this_call;
281 p->saveregs_value = saveregs_value;
282 p->apply_args_value = apply_args_value;
283 p->forced_labels = forced_labels;
285 pending_stack_adjust = 0;
286 inhibit_defer_pop = 0;
287 cleanups_this_call = 0;
289 apply_args_value = 0;
293 /* Restore all variables describing the current status from the structure *P.
294 This is used after a nested function. */
297 restore_expr_status (p)
300 pending_stack_adjust = p->pending_stack_adjust;
301 inhibit_defer_pop = p->inhibit_defer_pop;
302 cleanups_this_call = p->cleanups_this_call;
303 saveregs_value = p->saveregs_value;
304 apply_args_value = p->apply_args_value;
305 forced_labels = p->forced_labels;
308 /* Manage the queue of increment instructions to be output
309 for POSTINCREMENT_EXPR expressions, etc. */
311 static rtx pending_chain;
313 /* Queue up to increment (or change) VAR later. BODY says how:
314 BODY should be the same thing you would pass to emit_insn
315 to increment right away. It will go to emit_insn later on.
317 The value is a QUEUED expression to be used in place of VAR
318 where you want to guarantee the pre-incrementation value of VAR. */
321 enqueue_insn (var, body)
324 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
325 var, NULL_RTX, NULL_RTX, body, pending_chain);
326 return pending_chain;
329 /* Use protect_from_queue to convert a QUEUED expression
330 into something that you can put immediately into an instruction.
331 If the queued incrementation has not happened yet,
332 protect_from_queue returns the variable itself.
333 If the incrementation has happened, protect_from_queue returns a temp
334 that contains a copy of the old value of the variable.
336 Any time an rtx which might possibly be a QUEUED is to be put
337 into an instruction, it must be passed through protect_from_queue first.
338 QUEUED expressions are not meaningful in instructions.
340 Do not pass a value through protect_from_queue and then hold
341 on to it for a while before putting it in an instruction!
342 If the queue is flushed in between, incorrect code will result. */
345 protect_from_queue (x, modify)
349 register RTX_CODE code = GET_CODE (x);
351 #if 0 /* A QUEUED can hang around after the queue is forced out. */
352 /* Shortcut for most common case. */
353 if (pending_chain == 0)
359 /* A special hack for read access to (MEM (QUEUED ...))
360 to facilitate use of autoincrement.
361 Make a copy of the contents of the memory location
362 rather than a copy of the address, but not
363 if the value is of mode BLKmode. */
364 if (code == MEM && GET_MODE (x) != BLKmode
365 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
367 register rtx y = XEXP (x, 0);
368 XEXP (x, 0) = QUEUED_VAR (y);
371 register rtx temp = gen_reg_rtx (GET_MODE (x));
372 emit_insn_before (gen_move_insn (temp, x),
378 /* Otherwise, recursively protect the subexpressions of all
379 the kinds of rtx's that can contain a QUEUED. */
381 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
382 else if (code == PLUS || code == MULT)
384 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
385 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
389 /* If the increment has not happened, use the variable itself. */
390 if (QUEUED_INSN (x) == 0)
391 return QUEUED_VAR (x);
392 /* If the increment has happened and a pre-increment copy exists,
394 if (QUEUED_COPY (x) != 0)
395 return QUEUED_COPY (x);
396 /* The increment has happened but we haven't set up a pre-increment copy.
397 Set one up now, and use it. */
398 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
399 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
401 return QUEUED_COPY (x);
404 /* Return nonzero if X contains a QUEUED expression:
405 if it contains anything that will be altered by a queued increment.
406 We handle only combinations of MEM, PLUS, MINUS and MULT operators
407 since memory addresses generally contain only those. */
413 register enum rtx_code code = GET_CODE (x);
419 return queued_subexp_p (XEXP (x, 0));
423 return queued_subexp_p (XEXP (x, 0))
424 || queued_subexp_p (XEXP (x, 1));
429 /* Perform all the pending incrementations. */
435 while (p = pending_chain)
437 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
438 pending_chain = QUEUED_NEXT (p);
449 /* Copy data from FROM to TO, where the machine modes are not the same.
450 Both modes may be integer, or both may be floating.
451 UNSIGNEDP should be nonzero if FROM is an unsigned type.
452 This causes zero-extension instead of sign-extension. */
455 convert_move (to, from, unsignedp)
456 register rtx to, from;
459 enum machine_mode to_mode = GET_MODE (to);
460 enum machine_mode from_mode = GET_MODE (from);
461 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
462 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
466 /* rtx code for making an equivalent value. */
467 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
469 to = protect_from_queue (to, 1);
470 from = protect_from_queue (from, 0);
472 if (to_real != from_real)
475 /* If FROM is a SUBREG that indicates that we have already done at least
476 the required extension, strip it. We don't handle such SUBREGs as
479 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
480 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
481 >= GET_MODE_SIZE (to_mode))
482 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
483 from = gen_lowpart (to_mode, from), from_mode = to_mode;
485 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
488 if (to_mode == from_mode
489 || (from_mode == VOIDmode && CONSTANT_P (from)))
491 emit_move_insn (to, from);
497 #ifdef HAVE_extendqfhf2
498 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
500 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
504 #ifdef HAVE_extendqfsf2
505 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
507 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
511 #ifdef HAVE_extendqfdf2
512 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
514 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
518 #ifdef HAVE_extendqfxf2
519 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
521 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
525 #ifdef HAVE_extendqftf2
526 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
528 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
533 #ifdef HAVE_extendhfsf2
534 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
536 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
540 #ifdef HAVE_extendhfdf2
541 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
543 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
547 #ifdef HAVE_extendhfxf2
548 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
550 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
554 #ifdef HAVE_extendhftf2
555 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
557 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
562 #ifdef HAVE_extendsfdf2
563 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
565 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
569 #ifdef HAVE_extendsfxf2
570 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
572 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
576 #ifdef HAVE_extendsftf2
577 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
579 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
583 #ifdef HAVE_extenddfxf2
584 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
586 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
590 #ifdef HAVE_extenddftf2
591 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
593 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
598 #ifdef HAVE_trunchfqf2
599 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
601 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
605 #ifdef HAVE_truncsfqf2
606 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
608 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
612 #ifdef HAVE_truncdfqf2
613 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
615 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
619 #ifdef HAVE_truncxfqf2
620 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
622 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
626 #ifdef HAVE_trunctfqf2
627 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
629 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncsfhf2
634 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
636 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
640 #ifdef HAVE_truncdfhf2
641 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
643 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxfhf2
648 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
650 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctfhf2
655 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
657 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
661 #ifdef HAVE_truncdfsf2
662 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
664 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncxfsf2
669 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
671 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
675 #ifdef HAVE_trunctfsf2
676 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
678 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
682 #ifdef HAVE_truncxfdf2
683 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
685 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
689 #ifdef HAVE_trunctfdf2
690 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
692 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
704 libcall = extendsfdf2_libfunc;
708 libcall = extendsfxf2_libfunc;
712 libcall = extendsftf2_libfunc;
721 libcall = truncdfsf2_libfunc;
725 libcall = extenddfxf2_libfunc;
729 libcall = extenddftf2_libfunc;
738 libcall = truncxfsf2_libfunc;
742 libcall = truncxfdf2_libfunc;
751 libcall = trunctfsf2_libfunc;
755 libcall = trunctfdf2_libfunc;
761 if (libcall == (rtx) 0)
762 /* This conversion is not implemented yet. */
765 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
766 emit_move_insn (to, hard_libcall_value (to_mode));
770 /* Now both modes are integers. */
772 /* Handle expanding beyond a word. */
773 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
774 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
781 enum machine_mode lowpart_mode;
782 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
784 /* Try converting directly if the insn is supported. */
785 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
788 /* If FROM is a SUBREG, put it into a register. Do this
789 so that we always generate the same set of insns for
790 better cse'ing; if an intermediate assignment occurred,
791 we won't be doing the operation directly on the SUBREG. */
792 if (optimize > 0 && GET_CODE (from) == SUBREG)
793 from = force_reg (from_mode, from);
794 emit_unop_insn (code, to, from, equiv_code);
797 /* Next, try converting via full word. */
798 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
799 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
800 != CODE_FOR_nothing))
802 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
803 emit_unop_insn (code, to,
804 gen_lowpart (word_mode, to), equiv_code);
808 /* No special multiword conversion insn; do it by hand. */
811 /* Get a copy of FROM widened to a word, if necessary. */
812 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
813 lowpart_mode = word_mode;
815 lowpart_mode = from_mode;
817 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
819 lowpart = gen_lowpart (lowpart_mode, to);
820 emit_move_insn (lowpart, lowfrom);
822 /* Compute the value to put in each remaining word. */
824 fill_value = const0_rtx;
829 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
830 && STORE_FLAG_VALUE == -1)
832 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
834 fill_value = gen_reg_rtx (word_mode);
835 emit_insn (gen_slt (fill_value));
841 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
842 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
844 fill_value = convert_to_mode (word_mode, fill_value, 1);
848 /* Fill the remaining words. */
849 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
851 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
852 rtx subword = operand_subword (to, index, 1, to_mode);
857 if (fill_value != subword)
858 emit_move_insn (subword, fill_value);
861 insns = get_insns ();
864 emit_no_conflict_block (insns, to, from, NULL_RTX,
865 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
869 /* Truncating multi-word to a word or less. */
870 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
871 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
873 convert_move (to, gen_lowpart (word_mode, from), 0);
877 /* Handle pointer conversion */ /* SPEE 900220 */
878 if (to_mode == PSImode)
880 if (from_mode != SImode)
881 from = convert_to_mode (SImode, from, unsignedp);
883 #ifdef HAVE_truncsipsi
886 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
889 #endif /* HAVE_truncsipsi */
893 if (from_mode == PSImode)
895 if (to_mode != SImode)
897 from = convert_to_mode (SImode, from, unsignedp);
902 #ifdef HAVE_extendpsisi
903 if (HAVE_extendpsisi)
905 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
908 #endif /* HAVE_extendpsisi */
913 /* Now follow all the conversions between integers
914 no more than a word long. */
916 /* For truncation, usually we can just refer to FROM in a narrower mode. */
917 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
918 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
919 GET_MODE_BITSIZE (from_mode)))
921 if (!((GET_CODE (from) == MEM
922 && ! MEM_VOLATILE_P (from)
923 && direct_load[(int) to_mode]
924 && ! mode_dependent_address_p (XEXP (from, 0)))
925 || GET_CODE (from) == REG
926 || GET_CODE (from) == SUBREG))
927 from = force_reg (from_mode, from);
928 emit_move_insn (to, gen_lowpart (to_mode, from));
932 /* Handle extension. */
933 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
935 /* Convert directly if that works. */
936 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
939 /* If FROM is a SUBREG, put it into a register. Do this
940 so that we always generate the same set of insns for
941 better cse'ing; if an intermediate assignment occurred,
942 we won't be doing the operation directly on the SUBREG. */
943 if (optimize > 0 && GET_CODE (from) == SUBREG)
944 from = force_reg (from_mode, from);
945 emit_unop_insn (code, to, from, equiv_code);
950 enum machine_mode intermediate;
952 /* Search for a mode to convert via. */
953 for (intermediate = from_mode; intermediate != VOIDmode;
954 intermediate = GET_MODE_WIDER_MODE (intermediate))
955 if ((can_extend_p (to_mode, intermediate, unsignedp)
957 && (can_extend_p (intermediate, from_mode, unsignedp)
958 != CODE_FOR_nothing))
960 convert_move (to, convert_to_mode (intermediate, from,
961 unsignedp), unsignedp);
965 /* No suitable intermediate mode. */
970 /* Support special truncate insns for certain modes. */
972 if (from_mode == DImode && to_mode == SImode)
974 #ifdef HAVE_truncdisi2
977 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
981 convert_move (to, force_reg (from_mode, from), unsignedp);
985 if (from_mode == DImode && to_mode == HImode)
987 #ifdef HAVE_truncdihi2
990 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
994 convert_move (to, force_reg (from_mode, from), unsignedp);
998 if (from_mode == DImode && to_mode == QImode)
1000 #ifdef HAVE_truncdiqi2
1001 if (HAVE_truncdiqi2)
1003 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1007 convert_move (to, force_reg (from_mode, from), unsignedp);
1011 if (from_mode == SImode && to_mode == HImode)
1013 #ifdef HAVE_truncsihi2
1014 if (HAVE_truncsihi2)
1016 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1020 convert_move (to, force_reg (from_mode, from), unsignedp);
1024 if (from_mode == SImode && to_mode == QImode)
1026 #ifdef HAVE_truncsiqi2
1027 if (HAVE_truncsiqi2)
1029 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1033 convert_move (to, force_reg (from_mode, from), unsignedp);
1037 if (from_mode == HImode && to_mode == QImode)
1039 #ifdef HAVE_trunchiqi2
1040 if (HAVE_trunchiqi2)
1042 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1046 convert_move (to, force_reg (from_mode, from), unsignedp);
1050 /* Handle truncation of volatile memrefs, and so on;
1051 the things that couldn't be truncated directly,
1052 and for which there was no special instruction. */
1053 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1055 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1056 emit_move_insn (to, temp);
1060 /* Mode combination is not recognized. */
1064 /* Return an rtx for a value that would result
1065 from converting X to mode MODE.
1066 Both X and MODE may be floating, or both integer.
1067 UNSIGNEDP is nonzero if X is an unsigned value.
1068 This can be done by referring to a part of X in place
1069 or by copying to a new temporary with conversion.
1071 This function *must not* call protect_from_queue
1072 except when putting X into an insn (in which case convert_move does it). */
1075 convert_to_mode (mode, x, unsignedp)
1076 enum machine_mode mode;
1082 /* If FROM is a SUBREG that indicates that we have already done at least
1083 the required extension, strip it. */
1085 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1086 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1087 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1088 x = gen_lowpart (mode, x);
1090 if (mode == GET_MODE (x))
1093 /* There is one case that we must handle specially: If we are converting
1094 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1095 we are to interpret the constant as unsigned, gen_lowpart will do
1096 the wrong if the constant appears negative. What we want to do is
1097 make the high-order word of the constant zero, not all ones. */
1099 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1100 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1101 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1102 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1104 /* We can do this with a gen_lowpart if both desired and current modes
1105 are integer, and this is either a constant integer, a register, or a
1106 non-volatile MEM. Except for the constant case, we must be narrowing
1109 if (GET_CODE (x) == CONST_INT
1110 || (GET_MODE_CLASS (mode) == MODE_INT
1111 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1112 && (GET_CODE (x) == CONST_DOUBLE
1113 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1114 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1115 && direct_load[(int) mode]
1116 || GET_CODE (x) == REG)))))
1117 return gen_lowpart (mode, x);
1119 temp = gen_reg_rtx (mode);
1120 convert_move (temp, x, unsignedp);
1124 /* Generate several move instructions to copy LEN bytes
1125 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1126 The caller must pass FROM and TO
1127 through protect_from_queue before calling.
1128 ALIGN (in bytes) is maximum alignment we can assume. */
1131 move_by_pieces (to, from, len, align)
1135 struct move_by_pieces data;
1136 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1137 int max_size = MOVE_MAX + 1;
1140 data.to_addr = to_addr;
1141 data.from_addr = from_addr;
1145 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1146 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1148 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1149 || GET_CODE (from_addr) == POST_INC
1150 || GET_CODE (from_addr) == POST_DEC);
1152 data.explicit_inc_from = 0;
1153 data.explicit_inc_to = 0;
1155 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1156 if (data.reverse) data.offset = len;
1159 /* If copying requires more than two move insns,
1160 copy addresses to registers (to make displacements shorter)
1161 and use post-increment if available. */
1162 if (!(data.autinc_from && data.autinc_to)
1163 && move_by_pieces_ninsns (len, align) > 2)
1165 #ifdef HAVE_PRE_DECREMENT
1166 if (data.reverse && ! data.autinc_from)
1168 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1169 data.autinc_from = 1;
1170 data.explicit_inc_from = -1;
1173 #ifdef HAVE_POST_INCREMENT
1174 if (! data.autinc_from)
1176 data.from_addr = copy_addr_to_reg (from_addr);
1177 data.autinc_from = 1;
1178 data.explicit_inc_from = 1;
1181 if (!data.autinc_from && CONSTANT_P (from_addr))
1182 data.from_addr = copy_addr_to_reg (from_addr);
1183 #ifdef HAVE_PRE_DECREMENT
1184 if (data.reverse && ! data.autinc_to)
1186 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1188 data.explicit_inc_to = -1;
1191 #ifdef HAVE_POST_INCREMENT
1192 if (! data.reverse && ! data.autinc_to)
1194 data.to_addr = copy_addr_to_reg (to_addr);
1196 data.explicit_inc_to = 1;
1199 if (!data.autinc_to && CONSTANT_P (to_addr))
1200 data.to_addr = copy_addr_to_reg (to_addr);
1203 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1204 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1207 /* First move what we can in the largest integer mode, then go to
1208 successively smaller modes. */
1210 while (max_size > 1)
1212 enum machine_mode mode = VOIDmode, tmode;
1213 enum insn_code icode;
1215 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1216 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1217 if (GET_MODE_SIZE (tmode) < max_size)
1220 if (mode == VOIDmode)
1223 icode = mov_optab->handlers[(int) mode].insn_code;
1224 if (icode != CODE_FOR_nothing
1225 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1226 GET_MODE_SIZE (mode)))
1227 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1229 max_size = GET_MODE_SIZE (mode);
1232 /* The code above should have handled everything. */
1237 /* Return number of insns required to move L bytes by pieces.
1238 ALIGN (in bytes) is maximum alignment we can assume. */
1241 move_by_pieces_ninsns (l, align)
1245 register int n_insns = 0;
1246 int max_size = MOVE_MAX + 1;
1248 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1249 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1252 while (max_size > 1)
1254 enum machine_mode mode = VOIDmode, tmode;
1255 enum insn_code icode;
1257 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1258 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1259 if (GET_MODE_SIZE (tmode) < max_size)
1262 if (mode == VOIDmode)
1265 icode = mov_optab->handlers[(int) mode].insn_code;
1266 if (icode != CODE_FOR_nothing
1267 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1268 GET_MODE_SIZE (mode)))
1269 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1271 max_size = GET_MODE_SIZE (mode);
1277 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1278 with move instructions for mode MODE. GENFUN is the gen_... function
1279 to make a move insn for that mode. DATA has all the other info. */
1282 move_by_pieces_1 (genfun, mode, data)
1284 enum machine_mode mode;
1285 struct move_by_pieces *data;
1287 register int size = GET_MODE_SIZE (mode);
1288 register rtx to1, from1;
1290 while (data->len >= size)
1292 if (data->reverse) data->offset -= size;
1294 to1 = (data->autinc_to
1295 ? gen_rtx (MEM, mode, data->to_addr)
1296 : change_address (data->to, mode,
1297 plus_constant (data->to_addr, data->offset)));
1300 ? gen_rtx (MEM, mode, data->from_addr)
1301 : change_address (data->from, mode,
1302 plus_constant (data->from_addr, data->offset)));
1304 #ifdef HAVE_PRE_DECREMENT
1305 if (data->explicit_inc_to < 0)
1306 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1307 if (data->explicit_inc_from < 0)
1308 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1311 emit_insn ((*genfun) (to1, from1));
1312 #ifdef HAVE_POST_INCREMENT
1313 if (data->explicit_inc_to > 0)
1314 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1315 if (data->explicit_inc_from > 0)
1316 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1319 if (! data->reverse) data->offset += size;
1325 /* Emit code to move a block Y to a block X.
1326 This may be done with string-move instructions,
1327 with multiple scalar move instructions, or with a library call.
1329 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1331 SIZE is an rtx that says how long they are.
1332 ALIGN is the maximum alignment we can assume they have,
1333 measured in bytes. */
1336 emit_block_move (x, y, size, align)
1341 if (GET_MODE (x) != BLKmode)
1344 if (GET_MODE (y) != BLKmode)
1347 x = protect_from_queue (x, 1);
1348 y = protect_from_queue (y, 0);
1349 size = protect_from_queue (size, 0);
1351 if (GET_CODE (x) != MEM)
1353 if (GET_CODE (y) != MEM)
1358 if (GET_CODE (size) == CONST_INT
1359 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1360 move_by_pieces (x, y, INTVAL (size), align);
1363 /* Try the most limited insn first, because there's no point
1364 including more than one in the machine description unless
1365 the more limited one has some advantage. */
1367 rtx opalign = GEN_INT (align);
1368 enum machine_mode mode;
1370 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1371 mode = GET_MODE_WIDER_MODE (mode))
1373 enum insn_code code = movstr_optab[(int) mode];
1375 if (code != CODE_FOR_nothing
1376 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1377 here because if SIZE is less than the mode mask, as it is
1378 returned by the macro, it will definitely be less than the
1379 actual mode mask. */
1380 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
1381 && (insn_operand_predicate[(int) code][0] == 0
1382 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1383 && (insn_operand_predicate[(int) code][1] == 0
1384 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1385 && (insn_operand_predicate[(int) code][3] == 0
1386 || (*insn_operand_predicate[(int) code][3]) (opalign,
1390 rtx last = get_last_insn ();
1393 op2 = convert_to_mode (mode, size, 1);
1394 if (insn_operand_predicate[(int) code][2] != 0
1395 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1396 op2 = copy_to_mode_reg (mode, op2);
1398 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1405 delete_insns_since (last);
1409 #ifdef TARGET_MEM_FUNCTIONS
1410 emit_library_call (memcpy_libfunc, 0,
1411 VOIDmode, 3, XEXP (x, 0), Pmode,
1413 convert_to_mode (TYPE_MODE (sizetype), size,
1414 TREE_UNSIGNED (sizetype)),
1415 TYPE_MODE (sizetype));
1417 emit_library_call (bcopy_libfunc, 0,
1418 VOIDmode, 3, XEXP (y, 0), Pmode,
1420 convert_to_mode (TYPE_MODE (sizetype), size,
1421 TREE_UNSIGNED (sizetype)),
1422 TYPE_MODE (sizetype));
1427 /* Copy all or part of a value X into registers starting at REGNO.
1428 The number of registers to be filled is NREGS. */
1431 move_block_to_reg (regno, x, nregs, mode)
1435 enum machine_mode mode;
1440 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1441 x = validize_mem (force_const_mem (mode, x));
1443 /* See if the machine can do this with a load multiple insn. */
1444 #ifdef HAVE_load_multiple
1445 last = get_last_insn ();
1446 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1454 delete_insns_since (last);
1457 for (i = 0; i < nregs; i++)
1458 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1459 operand_subword_force (x, i, mode));
1462 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1463 The number of registers to be filled is NREGS. */
1466 move_block_from_reg (regno, x, nregs)
1474 /* See if the machine can do this with a store multiple insn. */
1475 #ifdef HAVE_store_multiple
1476 last = get_last_insn ();
1477 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1485 delete_insns_since (last);
1488 for (i = 0; i < nregs; i++)
1490 rtx tem = operand_subword (x, i, 1, BLKmode);
1495 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1499 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1502 use_regs (regno, nregs)
1508 for (i = 0; i < nregs; i++)
1509 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1512 /* Mark the instructions since PREV as a libcall block.
1513 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1522 /* Find the instructions to mark */
1524 insn_first = NEXT_INSN (prev);
1526 insn_first = get_insns ();
1528 insn_last = get_last_insn ();
1530 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1531 REG_NOTES (insn_last));
1533 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1534 REG_NOTES (insn_first));
1537 /* Write zeros through the storage of OBJECT.
1538 If OBJECT has BLKmode, SIZE is its length in bytes. */
1541 clear_storage (object, size)
1545 if (GET_MODE (object) == BLKmode)
1547 #ifdef TARGET_MEM_FUNCTIONS
1548 emit_library_call (memset_libfunc, 0,
1550 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1551 GEN_INT (size), Pmode);
1553 emit_library_call (bzero_libfunc, 0,
1555 XEXP (object, 0), Pmode,
1556 GEN_INT (size), Pmode);
1560 emit_move_insn (object, const0_rtx);
1563 /* Generate code to copy Y into X.
1564 Both Y and X must have the same mode, except that
1565 Y can be a constant with VOIDmode.
1566 This mode cannot be BLKmode; use emit_block_move for that.
1568 Return the last instruction emitted. */
1571 emit_move_insn (x, y)
1574 enum machine_mode mode = GET_MODE (x);
1575 enum machine_mode submode;
1576 enum mode_class class = GET_MODE_CLASS (mode);
1579 x = protect_from_queue (x, 1);
1580 y = protect_from_queue (y, 0);
1582 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1585 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1586 y = force_const_mem (mode, y);
1588 /* If X or Y are memory references, verify that their addresses are valid
1590 if (GET_CODE (x) == MEM
1591 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1592 && ! push_operand (x, GET_MODE (x)))
1594 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1595 x = change_address (x, VOIDmode, XEXP (x, 0));
1597 if (GET_CODE (y) == MEM
1598 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1600 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1601 y = change_address (y, VOIDmode, XEXP (y, 0));
1603 if (mode == BLKmode)
1606 return emit_move_insn_1 (x, y);
1609 /* Low level part of emit_move_insn.
1610 Called just like emit_move_insn, but assumes X and Y
1611 are basically valid. */
1614 emit_move_insn_1 (x, y)
1617 enum machine_mode mode = GET_MODE (x);
1618 enum machine_mode submode;
1619 enum mode_class class = GET_MODE_CLASS (mode);
1622 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1623 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1624 (class == MODE_COMPLEX_INT
1625 ? MODE_INT : MODE_FLOAT),
1628 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1630 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1632 /* Expand complex moves by moving real part and imag part, if possible. */
1633 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1634 && submode != BLKmode
1635 && (mov_optab->handlers[(int) submode].insn_code
1636 != CODE_FOR_nothing))
1638 /* Don't split destination if it is a stack push. */
1639 int stack = push_operand (x, GET_MODE (x));
1640 rtx prev = get_last_insn ();
1642 /* Tell flow that the whole of the destination is being set. */
1643 if (GET_CODE (x) == REG)
1644 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1646 /* If this is a stack, push the highpart first, so it
1647 will be in the argument order.
1649 In that case, change_address is used only to convert
1650 the mode, not to change the address. */
1651 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1652 ((stack ? change_address (x, submode, (rtx) 0)
1653 : gen_highpart (submode, x)),
1654 gen_highpart (submode, y)));
1655 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1656 ((stack ? change_address (x, submode, (rtx) 0)
1657 : gen_lowpart (submode, x)),
1658 gen_lowpart (submode, y)));
1662 return get_last_insn ();
1665 /* This will handle any multi-word mode that lacks a move_insn pattern.
1666 However, you will get better code if you define such patterns,
1667 even if they must turn into multiple assembler instructions. */
1668 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1671 rtx prev_insn = get_last_insn ();
1674 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1677 rtx xpart = operand_subword (x, i, 1, mode);
1678 rtx ypart = operand_subword (y, i, 1, mode);
1680 /* If we can't get a part of Y, put Y into memory if it is a
1681 constant. Otherwise, force it into a register. If we still
1682 can't get a part of Y, abort. */
1683 if (ypart == 0 && CONSTANT_P (y))
1685 y = force_const_mem (mode, y);
1686 ypart = operand_subword (y, i, 1, mode);
1688 else if (ypart == 0)
1689 ypart = operand_subword_force (y, i, mode);
1691 if (xpart == 0 || ypart == 0)
1694 last_insn = emit_move_insn (xpart, ypart);
1696 /* Mark these insns as a libcall block. */
1697 group_insns (prev_insn);
1705 /* Pushing data onto the stack. */
1707 /* Push a block of length SIZE (perhaps variable)
1708 and return an rtx to address the beginning of the block.
1709 Note that it is not possible for the value returned to be a QUEUED.
1710 The value may be virtual_outgoing_args_rtx.
1712 EXTRA is the number of bytes of padding to push in addition to SIZE.
1713 BELOW nonzero means this padding comes at low addresses;
1714 otherwise, the padding comes at high addresses. */
1717 push_block (size, extra, below)
1722 if (CONSTANT_P (size))
1723 anti_adjust_stack (plus_constant (size, extra));
1724 else if (GET_CODE (size) == REG && extra == 0)
1725 anti_adjust_stack (size);
1728 rtx temp = copy_to_mode_reg (Pmode, size);
1730 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1731 temp, 0, OPTAB_LIB_WIDEN);
1732 anti_adjust_stack (temp);
1735 #ifdef STACK_GROWS_DOWNWARD
1736 temp = virtual_outgoing_args_rtx;
1737 if (extra != 0 && below)
1738 temp = plus_constant (temp, extra);
1740 if (GET_CODE (size) == CONST_INT)
1741 temp = plus_constant (virtual_outgoing_args_rtx,
1742 - INTVAL (size) - (below ? 0 : extra));
1743 else if (extra != 0 && !below)
1744 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1745 negate_rtx (Pmode, plus_constant (size, extra)));
1747 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1748 negate_rtx (Pmode, size));
1751 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1757 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1760 /* Generate code to push X onto the stack, assuming it has mode MODE and
1762 MODE is redundant except when X is a CONST_INT (since they don't
1764 SIZE is an rtx for the size of data to be copied (in bytes),
1765 needed only if X is BLKmode.
1767 ALIGN (in bytes) is maximum alignment we can assume.
1769 If PARTIAL and REG are both nonzero, then copy that many of the first
1770 words of X into registers starting with REG, and push the rest of X.
1771 The amount of space pushed is decreased by PARTIAL words,
1772 rounded *down* to a multiple of PARM_BOUNDARY.
1773 REG must be a hard register in this case.
1774 If REG is zero but PARTIAL is not, take any all others actions for an
1775 argument partially in registers, but do not actually load any
1778 EXTRA is the amount in bytes of extra space to leave next to this arg.
1779 This is ignored if an argument block has already been allocated.
1781 On a machine that lacks real push insns, ARGS_ADDR is the address of
1782 the bottom of the argument block for this call. We use indexing off there
1783 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1784 argument block has not been preallocated.
1786 ARGS_SO_FAR is the size of args previously pushed for this call. */
1789 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1790 args_addr, args_so_far)
1792 enum machine_mode mode;
1803 enum direction stack_direction
1804 #ifdef STACK_GROWS_DOWNWARD
1810 /* Decide where to pad the argument: `downward' for below,
1811 `upward' for above, or `none' for don't pad it.
1812 Default is below for small data on big-endian machines; else above. */
1813 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1815 /* Invert direction if stack is post-update. */
1816 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1817 if (where_pad != none)
1818 where_pad = (where_pad == downward ? upward : downward);
1820 xinner = x = protect_from_queue (x, 0);
1822 if (mode == BLKmode)
1824 /* Copy a block into the stack, entirely or partially. */
1827 int used = partial * UNITS_PER_WORD;
1828 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1836 /* USED is now the # of bytes we need not copy to the stack
1837 because registers will take care of them. */
1840 xinner = change_address (xinner, BLKmode,
1841 plus_constant (XEXP (xinner, 0), used));
1843 /* If the partial register-part of the arg counts in its stack size,
1844 skip the part of stack space corresponding to the registers.
1845 Otherwise, start copying to the beginning of the stack space,
1846 by setting SKIP to 0. */
1847 #ifndef REG_PARM_STACK_SPACE
1853 #ifdef PUSH_ROUNDING
1854 /* Do it with several push insns if that doesn't take lots of insns
1855 and if there is no difficulty with push insns that skip bytes
1856 on the stack for alignment purposes. */
1858 && GET_CODE (size) == CONST_INT
1860 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1862 /* Here we avoid the case of a structure whose weak alignment
1863 forces many pushes of a small amount of data,
1864 and such small pushes do rounding that causes trouble. */
1865 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1866 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1867 || PUSH_ROUNDING (align) == align)
1868 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1870 /* Push padding now if padding above and stack grows down,
1871 or if padding below and stack grows up.
1872 But if space already allocated, this has already been done. */
1873 if (extra && args_addr == 0
1874 && where_pad != none && where_pad != stack_direction)
1875 anti_adjust_stack (GEN_INT (extra));
1877 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1878 INTVAL (size) - used, align);
1881 #endif /* PUSH_ROUNDING */
1883 /* Otherwise make space on the stack and copy the data
1884 to the address of that space. */
1886 /* Deduct words put into registers from the size we must copy. */
1889 if (GET_CODE (size) == CONST_INT)
1890 size = GEN_INT (INTVAL (size) - used);
1892 size = expand_binop (GET_MODE (size), sub_optab, size,
1893 GEN_INT (used), NULL_RTX, 0,
1897 /* Get the address of the stack space.
1898 In this case, we do not deal with EXTRA separately.
1899 A single stack adjust will do. */
1902 temp = push_block (size, extra, where_pad == downward);
1905 else if (GET_CODE (args_so_far) == CONST_INT)
1906 temp = memory_address (BLKmode,
1907 plus_constant (args_addr,
1908 skip + INTVAL (args_so_far)));
1910 temp = memory_address (BLKmode,
1911 plus_constant (gen_rtx (PLUS, Pmode,
1912 args_addr, args_so_far),
1915 /* TEMP is the address of the block. Copy the data there. */
1916 if (GET_CODE (size) == CONST_INT
1917 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1920 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1921 INTVAL (size), align);
1924 /* Try the most limited insn first, because there's no point
1925 including more than one in the machine description unless
1926 the more limited one has some advantage. */
1927 #ifdef HAVE_movstrqi
1929 && GET_CODE (size) == CONST_INT
1930 && ((unsigned) INTVAL (size)
1931 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1933 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1934 xinner, size, GEN_INT (align));
1942 #ifdef HAVE_movstrhi
1944 && GET_CODE (size) == CONST_INT
1945 && ((unsigned) INTVAL (size)
1946 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1948 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1949 xinner, size, GEN_INT (align));
1957 #ifdef HAVE_movstrsi
1960 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1961 xinner, size, GEN_INT (align));
1969 #ifdef HAVE_movstrdi
1972 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1973 xinner, size, GEN_INT (align));
1982 #ifndef ACCUMULATE_OUTGOING_ARGS
1983 /* If the source is referenced relative to the stack pointer,
1984 copy it to another register to stabilize it. We do not need
1985 to do this if we know that we won't be changing sp. */
1987 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1988 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1989 temp = copy_to_reg (temp);
1992 /* Make inhibit_defer_pop nonzero around the library call
1993 to force it to pop the bcopy-arguments right away. */
1995 #ifdef TARGET_MEM_FUNCTIONS
1996 emit_library_call (memcpy_libfunc, 0,
1997 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1998 convert_to_mode (TYPE_MODE (sizetype),
1999 size, TREE_UNSIGNED (sizetype)),
2000 TYPE_MODE (sizetype));
2002 emit_library_call (bcopy_libfunc, 0,
2003 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2004 convert_to_mode (TYPE_MODE (sizetype),
2005 size, TREE_UNSIGNED (sizetype)),
2006 TYPE_MODE (sizetype));
2011 else if (partial > 0)
2013 /* Scalar partly in registers. */
2015 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2018 /* # words of start of argument
2019 that we must make space for but need not store. */
2020 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2021 int args_offset = INTVAL (args_so_far);
2024 /* Push padding now if padding above and stack grows down,
2025 or if padding below and stack grows up.
2026 But if space already allocated, this has already been done. */
2027 if (extra && args_addr == 0
2028 && where_pad != none && where_pad != stack_direction)
2029 anti_adjust_stack (GEN_INT (extra));
2031 /* If we make space by pushing it, we might as well push
2032 the real data. Otherwise, we can leave OFFSET nonzero
2033 and leave the space uninitialized. */
2037 /* Now NOT_STACK gets the number of words that we don't need to
2038 allocate on the stack. */
2039 not_stack = partial - offset;
2041 /* If the partial register-part of the arg counts in its stack size,
2042 skip the part of stack space corresponding to the registers.
2043 Otherwise, start copying to the beginning of the stack space,
2044 by setting SKIP to 0. */
2045 #ifndef REG_PARM_STACK_SPACE
2051 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2052 x = validize_mem (force_const_mem (mode, x));
2054 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2055 SUBREGs of such registers are not allowed. */
2056 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2057 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2058 x = copy_to_reg (x);
2060 /* Loop over all the words allocated on the stack for this arg. */
2061 /* We can do it by words, because any scalar bigger than a word
2062 has a size a multiple of a word. */
2063 #ifndef PUSH_ARGS_REVERSED
2064 for (i = not_stack; i < size; i++)
2066 for (i = size - 1; i >= not_stack; i--)
2068 if (i >= not_stack + offset)
2069 emit_push_insn (operand_subword_force (x, i, mode),
2070 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2072 GEN_INT (args_offset + ((i - not_stack + skip)
2073 * UNITS_PER_WORD)));
2079 /* Push padding now if padding above and stack grows down,
2080 or if padding below and stack grows up.
2081 But if space already allocated, this has already been done. */
2082 if (extra && args_addr == 0
2083 && where_pad != none && where_pad != stack_direction)
2084 anti_adjust_stack (GEN_INT (extra));
2086 #ifdef PUSH_ROUNDING
2088 addr = gen_push_operand ();
2091 if (GET_CODE (args_so_far) == CONST_INT)
2093 = memory_address (mode,
2094 plus_constant (args_addr, INTVAL (args_so_far)));
2096 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2099 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2103 /* If part should go in registers, copy that part
2104 into the appropriate registers. Do this now, at the end,
2105 since mem-to-mem copies above may do function calls. */
2106 if (partial > 0 && reg != 0)
2107 move_block_to_reg (REGNO (reg), x, partial, mode);
2109 if (extra && args_addr == 0 && where_pad == stack_direction)
2110 anti_adjust_stack (GEN_INT (extra));
2113 /* Expand an assignment that stores the value of FROM into TO.
2114 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2115 (This may contain a QUEUED rtx.)
2116 Otherwise, the returned value is not meaningful.
2118 SUGGEST_REG is no longer actually used.
2119 It used to mean, copy the value through a register
2120 and return that register, if that is possible.
2121 But now we do this if WANT_VALUE.
2123 If the value stored is a constant, we return the constant. */
2126 expand_assignment (to, from, want_value, suggest_reg)
2131 register rtx to_rtx = 0;
2134 /* Don't crash if the lhs of the assignment was erroneous. */
2136 if (TREE_CODE (to) == ERROR_MARK)
2137 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2139 /* Assignment of a structure component needs special treatment
2140 if the structure component's rtx is not simply a MEM.
2141 Assignment of an array element at a constant index
2142 has the same problem. */
2144 if (TREE_CODE (to) == COMPONENT_REF
2145 || TREE_CODE (to) == BIT_FIELD_REF
2146 || (TREE_CODE (to) == ARRAY_REF
2147 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2148 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2150 enum machine_mode mode1;
2156 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2157 &mode1, &unsignedp, &volatilep);
2159 /* If we are going to use store_bit_field and extract_bit_field,
2160 make sure to_rtx will be safe for multiple use. */
2162 if (mode1 == VOIDmode && want_value)
2163 tem = stabilize_reference (tem);
2165 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2168 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2170 if (GET_CODE (to_rtx) != MEM)
2172 to_rtx = change_address (to_rtx, VOIDmode,
2173 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2174 force_reg (Pmode, offset_rtx)));
2178 if (GET_CODE (to_rtx) == MEM)
2179 MEM_VOLATILE_P (to_rtx) = 1;
2180 #if 0 /* This was turned off because, when a field is volatile
2181 in an object which is not volatile, the object may be in a register,
2182 and then we would abort over here. */
2188 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2190 /* Spurious cast makes HPUX compiler happy. */
2191 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2194 /* Required alignment of containing datum. */
2195 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2196 int_size_in_bytes (TREE_TYPE (tem)));
2197 preserve_temp_slots (result);
2200 /* If we aren't returning a result, just pass on what expand_expr
2201 returned; it was probably const0_rtx. Otherwise, convert RESULT
2202 to the proper mode. */
2203 return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
2204 TREE_UNSIGNED (TREE_TYPE (to)))
2208 /* If the rhs is a function call and its value is not an aggregate,
2209 call the function before we start to compute the lhs.
2210 This is needed for correct code for cases such as
2211 val = setjmp (buf) on machines where reference to val
2212 requires loading up part of an address in a separate insn. */
2213 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
2215 rtx value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2217 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2218 emit_move_insn (to_rtx, value);
2219 preserve_temp_slots (to_rtx);
2224 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2225 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2228 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2230 /* Don't move directly into a return register. */
2231 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2233 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2234 emit_move_insn (to_rtx, temp);
2235 preserve_temp_slots (to_rtx);
2240 /* In case we are returning the contents of an object which overlaps
2241 the place the value is being stored, use a safe function when copying
2242 a value through a pointer into a structure value return block. */
2243 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2244 && current_function_returns_struct
2245 && !current_function_returns_pcc_struct)
2247 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2248 rtx size = expr_size (from);
2250 #ifdef TARGET_MEM_FUNCTIONS
2251 emit_library_call (memcpy_libfunc, 0,
2252 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2253 XEXP (from_rtx, 0), Pmode,
2254 convert_to_mode (TYPE_MODE (sizetype),
2255 size, TREE_UNSIGNED (sizetype)),
2256 TYPE_MODE (sizetype));
2258 emit_library_call (bcopy_libfunc, 0,
2259 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2260 XEXP (to_rtx, 0), Pmode,
2261 convert_to_mode (TYPE_MODE (sizetype),
2262 size, TREE_UNSIGNED (sizetype)),
2263 TYPE_MODE (sizetype));
2266 preserve_temp_slots (to_rtx);
2271 /* Compute FROM and store the value in the rtx we got. */
2273 result = store_expr (from, to_rtx, want_value);
2274 preserve_temp_slots (result);
2279 /* Generate code for computing expression EXP,
2280 and storing the value into TARGET.
2281 Returns TARGET or an equivalent value.
2282 TARGET may contain a QUEUED rtx.
2284 If SUGGEST_REG is nonzero, copy the value through a register
2285 and return that register, if that is possible.
2287 If the value stored is a constant, we return the constant. */
2290 store_expr (exp, target, suggest_reg)
2292 register rtx target;
2296 int dont_return_target = 0;
2298 if (TREE_CODE (exp) == COMPOUND_EXPR)
2300 /* Perform first part of compound expression, then assign from second
2302 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2304 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2306 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2308 /* For conditional expression, get safe form of the target. Then
2309 test the condition, doing the appropriate assignment on either
2310 side. This avoids the creation of unnecessary temporaries.
2311 For non-BLKmode, it is more efficient not to do this. */
2313 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2316 target = protect_from_queue (target, 1);
2319 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2320 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2322 emit_jump_insn (gen_jump (lab2));
2325 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2331 else if (suggest_reg && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2332 && GET_MODE (target) != BLKmode)
2333 /* If target is in memory and caller wants value in a register instead,
2334 arrange that. Pass TARGET as target for expand_expr so that,
2335 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2336 We know expand_expr will not use the target in that case.
2337 Don't do this if TARGET is volatile because we are supposed
2338 to write it and then read it. */
2340 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2341 GET_MODE (target), 0);
2342 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2343 temp = copy_to_reg (temp);
2344 dont_return_target = 1;
2346 else if (queued_subexp_p (target))
2347 /* If target contains a postincrement, it is not safe
2348 to use as the returned value. It would access the wrong
2349 place by the time the queued increment gets output.
2350 So copy the value through a temporary and use that temp
2353 /* ??? There may be a bug here in the case of a target
2354 that is volatile, but I' too sleepy today to write anything
2356 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2358 /* Expand EXP into a new pseudo. */
2359 temp = gen_reg_rtx (GET_MODE (target));
2360 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2363 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2364 dont_return_target = 1;
2366 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2367 /* If this is an scalar in a register that is stored in a wider mode
2368 than the declared mode, compute the result into its declared mode
2369 and then convert to the wider mode. Our value is the computed
2372 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2373 convert_move (SUBREG_REG (target), temp,
2374 SUBREG_PROMOTED_UNSIGNED_P (target));
2379 temp = expand_expr (exp, target, GET_MODE (target), 0);
2380 /* DO return TARGET if it's a specified hardware register.
2381 expand_return relies on this.
2382 DO return TARGET if it's a volatile mem ref; ANSI requires this. */
2383 if (!(target && GET_CODE (target) == REG
2384 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2385 && CONSTANT_P (temp)
2386 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2387 dont_return_target = 1;
2390 /* If value was not generated in the target, store it there.
2391 Convert the value to TARGET's type first if nec. */
2393 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2395 target = protect_from_queue (target, 1);
2396 if (GET_MODE (temp) != GET_MODE (target)
2397 && GET_MODE (temp) != VOIDmode)
2399 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2400 if (dont_return_target)
2402 /* In this case, we will return TEMP,
2403 so make sure it has the proper mode.
2404 But don't forget to store the value into TARGET. */
2405 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2406 emit_move_insn (target, temp);
2409 convert_move (target, temp, unsignedp);
2412 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2414 /* Handle copying a string constant into an array.
2415 The string constant may be shorter than the array.
2416 So copy just the string's actual length, and clear the rest. */
2419 /* Get the size of the data type of the string,
2420 which is actually the size of the target. */
2421 size = expr_size (exp);
2422 if (GET_CODE (size) == CONST_INT
2423 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2424 emit_block_move (target, temp, size,
2425 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2428 /* Compute the size of the data to copy from the string. */
2430 = size_binop (MIN_EXPR,
2431 size_binop (CEIL_DIV_EXPR,
2432 TYPE_SIZE (TREE_TYPE (exp)),
2433 size_int (BITS_PER_UNIT)),
2435 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2436 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2440 /* Copy that much. */
2441 emit_block_move (target, temp, copy_size_rtx,
2442 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2444 /* Figure out how much is left in TARGET
2445 that we have to clear. */
2446 if (GET_CODE (copy_size_rtx) == CONST_INT)
2448 temp = plus_constant (XEXP (target, 0),
2449 TREE_STRING_LENGTH (exp));
2450 size = plus_constant (size,
2451 - TREE_STRING_LENGTH (exp));
2455 enum machine_mode size_mode = Pmode;
2457 temp = force_reg (Pmode, XEXP (target, 0));
2458 temp = expand_binop (size_mode, add_optab, temp,
2459 copy_size_rtx, NULL_RTX, 0,
2462 size = expand_binop (size_mode, sub_optab, size,
2463 copy_size_rtx, NULL_RTX, 0,
2466 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2467 GET_MODE (size), 0, 0);
2468 label = gen_label_rtx ();
2469 emit_jump_insn (gen_blt (label));
2472 if (size != const0_rtx)
2474 #ifdef TARGET_MEM_FUNCTIONS
2475 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2476 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2478 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2479 temp, Pmode, size, Pmode);
2486 else if (GET_MODE (temp) == BLKmode)
2487 emit_block_move (target, temp, expr_size (exp),
2488 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2490 emit_move_insn (target, temp);
2492 if (dont_return_target)
2497 /* Store the value of constructor EXP into the rtx TARGET.
2498 TARGET is either a REG or a MEM. */
2501 store_constructor (exp, target)
2505 tree type = TREE_TYPE (exp);
2507 /* We know our target cannot conflict, since safe_from_p has been called. */
2509 /* Don't try copying piece by piece into a hard register
2510 since that is vulnerable to being clobbered by EXP.
2511 Instead, construct in a pseudo register and then copy it all. */
2512 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2514 rtx temp = gen_reg_rtx (GET_MODE (target));
2515 store_constructor (exp, temp);
2516 emit_move_insn (target, temp);
2521 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2522 || TREE_CODE (type) == QUAL_UNION_TYPE)
2526 /* Inform later passes that the whole union value is dead. */
2527 if (TREE_CODE (type) == UNION_TYPE
2528 || TREE_CODE (type) == QUAL_UNION_TYPE)
2529 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2531 /* If we are building a static constructor into a register,
2532 set the initial value as zero so we can fold the value into
2534 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2535 emit_move_insn (target, const0_rtx);
2537 /* If the constructor has fewer fields than the structure,
2538 clear the whole structure first. */
2539 else if (list_length (CONSTRUCTOR_ELTS (exp))
2540 != list_length (TYPE_FIELDS (type)))
2541 clear_storage (target, int_size_in_bytes (type));
2543 /* Inform later passes that the old value is dead. */
2544 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2546 /* Store each element of the constructor into
2547 the corresponding field of TARGET. */
2549 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2551 register tree field = TREE_PURPOSE (elt);
2552 register enum machine_mode mode;
2557 /* Just ignore missing fields.
2558 We cleared the whole structure, above,
2559 if any fields are missing. */
2563 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2564 unsignedp = TREE_UNSIGNED (field);
2565 mode = DECL_MODE (field);
2566 if (DECL_BIT_FIELD (field))
2569 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2570 /* ??? This case remains to be written. */
2573 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2575 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2576 /* The alignment of TARGET is
2577 at least what its type requires. */
2579 TYPE_ALIGN (type) / BITS_PER_UNIT,
2580 int_size_in_bytes (type));
2583 else if (TREE_CODE (type) == ARRAY_TYPE)
2587 tree domain = TYPE_DOMAIN (type);
2588 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2589 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2590 tree elttype = TREE_TYPE (type);
2592 /* If the constructor has fewer fields than the structure,
2593 clear the whole structure first. Similarly if this this is
2594 static constructor of a non-BLKmode object. */
2596 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2597 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2598 clear_storage (target, int_size_in_bytes (type));
2600 /* Inform later passes that the old value is dead. */
2601 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2603 /* Store each element of the constructor into
2604 the corresponding element of TARGET, determined
2605 by counting the elements. */
2606 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2608 elt = TREE_CHAIN (elt), i++)
2610 register enum machine_mode mode;
2615 mode = TYPE_MODE (elttype);
2616 bitsize = GET_MODE_BITSIZE (mode);
2617 unsignedp = TREE_UNSIGNED (elttype);
2619 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2621 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2622 /* The alignment of TARGET is
2623 at least what its type requires. */
2625 TYPE_ALIGN (type) / BITS_PER_UNIT,
2626 int_size_in_bytes (type));
2634 /* Store the value of EXP (an expression tree)
2635 into a subfield of TARGET which has mode MODE and occupies
2636 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2637 If MODE is VOIDmode, it means that we are storing into a bit-field.
2639 If VALUE_MODE is VOIDmode, return nothing in particular.
2640 UNSIGNEDP is not used in this case.
2642 Otherwise, return an rtx for the value stored. This rtx
2643 has mode VALUE_MODE if that is convenient to do.
2644 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2646 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2647 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2650 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2651 unsignedp, align, total_size)
2653 int bitsize, bitpos;
2654 enum machine_mode mode;
2656 enum machine_mode value_mode;
2661 HOST_WIDE_INT width_mask = 0;
2663 if (bitsize < HOST_BITS_PER_WIDE_INT)
2664 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2666 /* If we are storing into an unaligned field of an aligned union that is
2667 in a register, we may have the mode of TARGET being an integer mode but
2668 MODE == BLKmode. In that case, get an aligned object whose size and
2669 alignment are the same as TARGET and store TARGET into it (we can avoid
2670 the store if the field being stored is the entire width of TARGET). Then
2671 call ourselves recursively to store the field into a BLKmode version of
2672 that object. Finally, load from the object into TARGET. This is not
2673 very efficient in general, but should only be slightly more expensive
2674 than the otherwise-required unaligned accesses. Perhaps this can be
2675 cleaned up later. */
2678 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2680 rtx object = assign_stack_temp (GET_MODE (target),
2681 GET_MODE_SIZE (GET_MODE (target)), 0);
2682 rtx blk_object = copy_rtx (object);
2684 PUT_MODE (blk_object, BLKmode);
2686 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2687 emit_move_insn (object, target);
2689 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2692 emit_move_insn (target, object);
2697 /* If the structure is in a register or if the component
2698 is a bit field, we cannot use addressing to access it.
2699 Use bit-field techniques or SUBREG to store in it. */
2701 if (mode == VOIDmode
2702 || (mode != BLKmode && ! direct_store[(int) mode])
2703 || GET_CODE (target) == REG
2704 || GET_CODE (target) == SUBREG
2705 /* If the field isn't aligned enough to fetch as a unit,
2706 fetch it as a bit field. */
2707 #ifdef STRICT_ALIGNMENT
2708 || align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
2709 || bitpos % GET_MODE_ALIGNMENT (mode) != 0
2713 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2714 /* Store the value in the bitfield. */
2715 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2716 if (value_mode != VOIDmode)
2718 /* The caller wants an rtx for the value. */
2719 /* If possible, avoid refetching from the bitfield itself. */
2721 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2724 enum machine_mode tmode;
2727 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2728 tmode = GET_MODE (temp);
2729 if (tmode == VOIDmode)
2731 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2732 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2733 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2735 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2736 NULL_RTX, value_mode, 0, align,
2743 rtx addr = XEXP (target, 0);
2746 /* If a value is wanted, it must be the lhs;
2747 so make the address stable for multiple use. */
2749 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2750 && ! CONSTANT_ADDRESS_P (addr)
2751 /* A frame-pointer reference is already stable. */
2752 && ! (GET_CODE (addr) == PLUS
2753 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2754 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2755 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2756 addr = copy_to_reg (addr);
2758 /* Now build a reference to just the desired component. */
2760 to_rtx = change_address (target, mode,
2761 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2762 MEM_IN_STRUCT_P (to_rtx) = 1;
2764 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2768 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2769 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2770 ARRAY_REFs and find the ultimate containing object, which we return.
2772 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2773 bit position, and *PUNSIGNEDP to the signedness of the field.
2774 If the position of the field is variable, we store a tree
2775 giving the variable offset (in units) in *POFFSET.
2776 This offset is in addition to the bit position.
2777 If the position is not variable, we store 0 in *POFFSET.
2779 If any of the extraction expressions is volatile,
2780 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2782 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2783 is a mode that can be used to access the field. In that case, *PBITSIZE
2786 If the field describes a variable-sized object, *PMODE is set to
2787 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2788 this case, but the address of the object can be found. */
2791 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2792 punsignedp, pvolatilep)
2797 enum machine_mode *pmode;
2802 enum machine_mode mode = VOIDmode;
2803 tree offset = integer_zero_node;
2805 if (TREE_CODE (exp) == COMPONENT_REF)
2807 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2808 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2809 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2810 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2812 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2814 size_tree = TREE_OPERAND (exp, 1);
2815 *punsignedp = TREE_UNSIGNED (exp);
2819 mode = TYPE_MODE (TREE_TYPE (exp));
2820 *pbitsize = GET_MODE_BITSIZE (mode);
2821 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2826 if (TREE_CODE (size_tree) != INTEGER_CST)
2827 mode = BLKmode, *pbitsize = -1;
2829 *pbitsize = TREE_INT_CST_LOW (size_tree);
2832 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2833 and find the ultimate containing object. */
2839 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2841 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2842 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2843 : TREE_OPERAND (exp, 2));
2845 /* If this field hasn't been filled in yet, don't go
2846 past it. This should only happen when folding expressions
2847 made during type construction. */
2851 if (TREE_CODE (pos) == PLUS_EXPR)
2854 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2856 constant = TREE_OPERAND (pos, 0);
2857 var = TREE_OPERAND (pos, 1);
2859 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2861 constant = TREE_OPERAND (pos, 1);
2862 var = TREE_OPERAND (pos, 0);
2867 *pbitpos += TREE_INT_CST_LOW (constant);
2868 offset = size_binop (PLUS_EXPR, offset,
2869 size_binop (FLOOR_DIV_EXPR, var,
2870 size_int (BITS_PER_UNIT)));
2872 else if (TREE_CODE (pos) == INTEGER_CST)
2873 *pbitpos += TREE_INT_CST_LOW (pos);
2876 /* Assume here that the offset is a multiple of a unit.
2877 If not, there should be an explicitly added constant. */
2878 offset = size_binop (PLUS_EXPR, offset,
2879 size_binop (FLOOR_DIV_EXPR, pos,
2880 size_int (BITS_PER_UNIT)));
2884 else if (TREE_CODE (exp) == ARRAY_REF)
2886 /* This code is based on the code in case ARRAY_REF in expand_expr
2887 below. We assume here that the size of an array element is
2888 always an integral multiple of BITS_PER_UNIT. */
2890 tree index = TREE_OPERAND (exp, 1);
2891 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2893 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2894 tree index_type = TREE_TYPE (index);
2896 if (! integer_zerop (low_bound))
2897 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2899 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2901 index = convert (type_for_size (POINTER_SIZE, 0), index);
2902 index_type = TREE_TYPE (index);
2905 index = fold (build (MULT_EXPR, index_type, index,
2906 TYPE_SIZE (TREE_TYPE (exp))));
2908 if (TREE_CODE (index) == INTEGER_CST
2909 && TREE_INT_CST_HIGH (index) == 0)
2910 *pbitpos += TREE_INT_CST_LOW (index);
2912 offset = size_binop (PLUS_EXPR, offset,
2913 size_binop (FLOOR_DIV_EXPR, index,
2914 size_int (BITS_PER_UNIT)));
2916 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2917 && ! ((TREE_CODE (exp) == NOP_EXPR
2918 || TREE_CODE (exp) == CONVERT_EXPR)
2919 && (TYPE_MODE (TREE_TYPE (exp))
2920 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2923 /* If any reference in the chain is volatile, the effect is volatile. */
2924 if (TREE_THIS_VOLATILE (exp))
2926 exp = TREE_OPERAND (exp, 0);
2929 /* If this was a bit-field, see if there is a mode that allows direct
2930 access in case EXP is in memory. */
2931 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
2933 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2934 if (mode == BLKmode)
2938 if (integer_zerop (offset))
2944 /* We aren't finished fixing the callers to really handle nonzero offset. */
2952 /* Given an rtx VALUE that may contain additions and multiplications,
2953 return an equivalent value that just refers to a register or memory.
2954 This is done by generating instructions to perform the arithmetic
2955 and returning a pseudo-register containing the value.
2957 The returned value may be a REG, SUBREG, MEM or constant. */
2960 force_operand (value, target)
2963 register optab binoptab = 0;
2964 /* Use a temporary to force order of execution of calls to
2968 /* Use subtarget as the target for operand 0 of a binary operation. */
2969 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2971 if (GET_CODE (value) == PLUS)
2972 binoptab = add_optab;
2973 else if (GET_CODE (value) == MINUS)
2974 binoptab = sub_optab;
2975 else if (GET_CODE (value) == MULT)
2977 op2 = XEXP (value, 1);
2978 if (!CONSTANT_P (op2)
2979 && !(GET_CODE (op2) == REG && op2 != subtarget))
2981 tmp = force_operand (XEXP (value, 0), subtarget);
2982 return expand_mult (GET_MODE (value), tmp,
2983 force_operand (op2, NULL_RTX),
2989 op2 = XEXP (value, 1);
2990 if (!CONSTANT_P (op2)
2991 && !(GET_CODE (op2) == REG && op2 != subtarget))
2993 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2995 binoptab = add_optab;
2996 op2 = negate_rtx (GET_MODE (value), op2);
2999 /* Check for an addition with OP2 a constant integer and our first
3000 operand a PLUS of a virtual register and something else. In that
3001 case, we want to emit the sum of the virtual register and the
3002 constant first and then add the other value. This allows virtual
3003 register instantiation to simply modify the constant rather than
3004 creating another one around this addition. */
3005 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3006 && GET_CODE (XEXP (value, 0)) == PLUS
3007 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3008 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3009 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3011 rtx temp = expand_binop (GET_MODE (value), binoptab,
3012 XEXP (XEXP (value, 0), 0), op2,
3013 subtarget, 0, OPTAB_LIB_WIDEN);
3014 return expand_binop (GET_MODE (value), binoptab, temp,
3015 force_operand (XEXP (XEXP (value, 0), 1), 0),
3016 target, 0, OPTAB_LIB_WIDEN);
3019 tmp = force_operand (XEXP (value, 0), subtarget);
3020 return expand_binop (GET_MODE (value), binoptab, tmp,
3021 force_operand (op2, NULL_RTX),
3022 target, 0, OPTAB_LIB_WIDEN);
3023 /* We give UNSIGNEDP = 0 to expand_binop
3024 because the only operations we are expanding here are signed ones. */
3029 /* Subroutine of expand_expr:
3030 save the non-copied parts (LIST) of an expr (LHS), and return a list
3031 which can restore these values to their previous values,
3032 should something modify their storage. */
3035 save_noncopied_parts (lhs, list)
3042 for (tail = list; tail; tail = TREE_CHAIN (tail))
3043 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3044 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3047 tree part = TREE_VALUE (tail);
3048 tree part_type = TREE_TYPE (part);
3049 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3050 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3051 int_size_in_bytes (part_type), 0);
3052 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3053 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3054 parts = tree_cons (to_be_saved,
3055 build (RTL_EXPR, part_type, NULL_TREE,
3058 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3063 /* Subroutine of expand_expr:
3064 record the non-copied parts (LIST) of an expr (LHS), and return a list
3065 which specifies the initial values of these parts. */
3068 init_noncopied_parts (lhs, list)
3075 for (tail = list; tail; tail = TREE_CHAIN (tail))
3076 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3077 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3080 tree part = TREE_VALUE (tail);
3081 tree part_type = TREE_TYPE (part);
3082 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3083 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3088 /* Subroutine of expand_expr: return nonzero iff there is no way that
3089 EXP can reference X, which is being modified. */
3092 safe_from_p (x, exp)
3102 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3103 find the underlying pseudo. */
3104 if (GET_CODE (x) == SUBREG)
3107 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3111 /* If X is a location in the outgoing argument area, it is always safe. */
3112 if (GET_CODE (x) == MEM
3113 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3114 || (GET_CODE (XEXP (x, 0)) == PLUS
3115 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3118 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3121 exp_rtl = DECL_RTL (exp);
3128 if (TREE_CODE (exp) == TREE_LIST)
3129 return ((TREE_VALUE (exp) == 0
3130 || safe_from_p (x, TREE_VALUE (exp)))
3131 && (TREE_CHAIN (exp) == 0
3132 || safe_from_p (x, TREE_CHAIN (exp))));
3137 return safe_from_p (x, TREE_OPERAND (exp, 0));
3141 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3142 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3146 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3147 the expression. If it is set, we conflict iff we are that rtx or
3148 both are in memory. Otherwise, we check all operands of the
3149 expression recursively. */
3151 switch (TREE_CODE (exp))
3154 return (staticp (TREE_OPERAND (exp, 0))
3155 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3158 if (GET_CODE (x) == MEM)
3163 exp_rtl = CALL_EXPR_RTL (exp);
3166 /* Assume that the call will clobber all hard registers and
3168 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3169 || GET_CODE (x) == MEM)
3176 exp_rtl = RTL_EXPR_RTL (exp);
3178 /* We don't know what this can modify. */
3183 case WITH_CLEANUP_EXPR:
3184 exp_rtl = RTL_EXPR_RTL (exp);
3188 exp_rtl = SAVE_EXPR_RTL (exp);
3192 /* The only operand we look at is operand 1. The rest aren't
3193 part of the expression. */
3194 return safe_from_p (x, TREE_OPERAND (exp, 1));
3196 case METHOD_CALL_EXPR:
3197 /* This takes a rtx argument, but shouldn't appear here. */
3201 /* If we have an rtx, we do not need to scan our operands. */
3205 nops = tree_code_length[(int) TREE_CODE (exp)];
3206 for (i = 0; i < nops; i++)
3207 if (TREE_OPERAND (exp, i) != 0
3208 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3212 /* If we have an rtl, find any enclosed object. Then see if we conflict
3216 if (GET_CODE (exp_rtl) == SUBREG)
3218 exp_rtl = SUBREG_REG (exp_rtl);
3219 if (GET_CODE (exp_rtl) == REG
3220 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3224 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3225 are memory and EXP is not readonly. */
3226 return ! (rtx_equal_p (x, exp_rtl)
3227 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3228 && ! TREE_READONLY (exp)));
3231 /* If we reach here, it is safe. */
3235 /* Subroutine of expand_expr: return nonzero iff EXP is an
3236 expression whose type is statically determinable. */
3242 if (TREE_CODE (exp) == PARM_DECL
3243 || TREE_CODE (exp) == VAR_DECL
3244 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3245 || TREE_CODE (exp) == COMPONENT_REF
3246 || TREE_CODE (exp) == ARRAY_REF)
3251 /* expand_expr: generate code for computing expression EXP.
3252 An rtx for the computed value is returned. The value is never null.
3253 In the case of a void EXP, const0_rtx is returned.
3255 The value may be stored in TARGET if TARGET is nonzero.
3256 TARGET is just a suggestion; callers must assume that
3257 the rtx returned may not be the same as TARGET.
3259 If TARGET is CONST0_RTX, it means that the value will be ignored.
3261 If TMODE is not VOIDmode, it suggests generating the
3262 result in mode TMODE. But this is done only when convenient.
3263 Otherwise, TMODE is ignored and the value generated in its natural mode.
3264 TMODE is just a suggestion; callers must assume that
3265 the rtx returned may not have mode TMODE.
3267 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3268 with a constant address even if that address is not normally legitimate.
3269 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3271 If MODIFIER is EXPAND_SUM then when EXP is an addition
3272 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3273 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3274 products as above, or REG or MEM, or constant.
3275 Ordinarily in such cases we would output mul or add instructions
3276 and then return a pseudo reg containing the sum.
3278 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3279 it also marks a label as absolutely required (it can't be dead).
3280 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3281 This is used for outputting expressions used in initializers. */
3284 expand_expr (exp, target, tmode, modifier)
3287 enum machine_mode tmode;
3288 enum expand_modifier modifier;
3290 register rtx op0, op1, temp;
3291 tree type = TREE_TYPE (exp);
3292 int unsignedp = TREE_UNSIGNED (type);
3293 register enum machine_mode mode = TYPE_MODE (type);
3294 register enum tree_code code = TREE_CODE (exp);
3296 /* Use subtarget as the target for operand 0 of a binary operation. */
3297 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3298 rtx original_target = target;
3299 int ignore = (target == const0_rtx
3300 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3301 || code == CONVERT_EXPR || code == REFERENCE_EXPR)
3302 && TREE_CODE (type) == VOID_TYPE));
3305 /* Don't use hard regs as subtargets, because the combiner
3306 can only handle pseudo regs. */
3307 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3309 /* Avoid subtargets inside loops,
3310 since they hide some invariant expressions. */
3311 if (preserve_subexpressions_p ())
3314 /* If we are going to ignore this result, we need only do something
3315 if there is a side-effect somewhere in the expression. If there
3316 is, short-circuit the most common cases here. */
3320 if (! TREE_SIDE_EFFECTS (exp))
3323 /* Ensure we reference a volatile object even if value is ignored. */
3324 if (TREE_THIS_VOLATILE (exp)
3325 && TREE_CODE (exp) != FUNCTION_DECL
3326 && mode != VOIDmode && mode != BLKmode)
3328 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3329 if (GET_CODE (temp) == MEM)
3330 temp = copy_to_reg (temp);
3334 if (TREE_CODE_CLASS (code) == '1')
3335 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3336 VOIDmode, modifier);
3337 else if (TREE_CODE_CLASS (code) == '2'
3338 || TREE_CODE_CLASS (code) == '<')
3340 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3341 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3344 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3345 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3346 /* If the second operand has no side effects, just evaluate
3348 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3349 VOIDmode, modifier);
3351 target = 0, original_target = 0;
3354 /* If will do cse, generate all results into pseudo registers
3355 since 1) that allows cse to find more things
3356 and 2) otherwise cse could produce an insn the machine
3359 if (! cse_not_expected && mode != BLKmode && target
3360 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3367 tree function = decl_function_context (exp);
3368 /* Handle using a label in a containing function. */
3369 if (function != current_function_decl && function != 0)
3371 struct function *p = find_function_data (function);
3372 /* Allocate in the memory associated with the function
3373 that the label is in. */
3374 push_obstacks (p->function_obstack,
3375 p->function_maybepermanent_obstack);
3377 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3378 label_rtx (exp), p->forced_labels);
3381 else if (modifier == EXPAND_INITIALIZER)
3382 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3383 label_rtx (exp), forced_labels);
3384 temp = gen_rtx (MEM, FUNCTION_MODE,
3385 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3386 if (function != current_function_decl && function != 0)
3387 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3392 if (DECL_RTL (exp) == 0)
3394 error_with_decl (exp, "prior parameter's size depends on `%s'");
3395 return CONST0_RTX (mode);
3401 if (DECL_RTL (exp) == 0)
3403 /* Ensure variable marked as used even if it doesn't go through
3404 a parser. If it hasn't be used yet, write out an external
3406 if (! TREE_USED (exp))
3408 assemble_external (exp);
3409 TREE_USED (exp) = 1;
3412 /* Handle variables inherited from containing functions. */
3413 context = decl_function_context (exp);
3415 /* We treat inline_function_decl as an alias for the current function
3416 because that is the inline function whose vars, types, etc.
3417 are being merged into the current function.
3418 See expand_inline_function. */
3419 if (context != 0 && context != current_function_decl
3420 && context != inline_function_decl
3421 /* If var is static, we don't need a static chain to access it. */
3422 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3423 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3427 /* Mark as non-local and addressable. */
3428 DECL_NONLOCAL (exp) = 1;
3429 mark_addressable (exp);
3430 if (GET_CODE (DECL_RTL (exp)) != MEM)
3432 addr = XEXP (DECL_RTL (exp), 0);
3433 if (GET_CODE (addr) == MEM)
3434 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3436 addr = fix_lexical_addr (addr, exp);
3437 return change_address (DECL_RTL (exp), mode, addr);
3440 /* This is the case of an array whose size is to be determined
3441 from its initializer, while the initializer is still being parsed.
3443 if (GET_CODE (DECL_RTL (exp)) == MEM
3444 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3445 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3446 XEXP (DECL_RTL (exp), 0));
3447 if (GET_CODE (DECL_RTL (exp)) == MEM
3448 && modifier != EXPAND_CONST_ADDRESS
3449 && modifier != EXPAND_SUM
3450 && modifier != EXPAND_INITIALIZER)
3452 /* DECL_RTL probably contains a constant address.
3453 On RISC machines where a constant address isn't valid,
3454 make some insns to get that address into a register. */
3455 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3457 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3458 return change_address (DECL_RTL (exp), VOIDmode,
3459 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3462 /* If the mode of DECL_RTL does not match that of the decl, it
3463 must be a promoted value. We return a SUBREG of the wanted mode,
3464 but mark it so that we know that it was already extended. */
3466 if (GET_CODE (DECL_RTL (exp)) == REG
3467 && GET_MODE (DECL_RTL (exp)) != mode)
3469 enum machine_mode decl_mode = DECL_MODE (exp);
3471 /* Get the signedness used for this variable. Ensure we get the
3472 same mode we got when the variable was declared. */
3474 PROMOTE_MODE (decl_mode, unsignedp, type);
3476 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3479 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3480 SUBREG_PROMOTED_VAR_P (temp) = 1;
3481 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3485 return DECL_RTL (exp);
3488 return immed_double_const (TREE_INT_CST_LOW (exp),
3489 TREE_INT_CST_HIGH (exp),
3493 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3496 /* If optimized, generate immediate CONST_DOUBLE
3497 which will be turned into memory by reload if necessary.
3499 We used to force a register so that loop.c could see it. But
3500 this does not allow gen_* patterns to perform optimizations with
3501 the constants. It also produces two insns in cases like "x = 1.0;".
3502 On most machines, floating-point constants are not permitted in
3503 many insns, so we'd end up copying it to a register in any case.
3505 Now, we do the copying in expand_binop, if appropriate. */
3506 return immed_real_const (exp);
3510 if (! TREE_CST_RTL (exp))
3511 output_constant_def (exp);
3513 /* TREE_CST_RTL probably contains a constant address.
3514 On RISC machines where a constant address isn't valid,
3515 make some insns to get that address into a register. */
3516 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3517 && modifier != EXPAND_CONST_ADDRESS
3518 && modifier != EXPAND_INITIALIZER
3519 && modifier != EXPAND_SUM
3520 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3521 return change_address (TREE_CST_RTL (exp), VOIDmode,
3522 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3523 return TREE_CST_RTL (exp);
3526 context = decl_function_context (exp);
3527 /* We treat inline_function_decl as an alias for the current function
3528 because that is the inline function whose vars, types, etc.
3529 are being merged into the current function.
3530 See expand_inline_function. */
3531 if (context == current_function_decl || context == inline_function_decl)
3534 /* If this is non-local, handle it. */
3537 temp = SAVE_EXPR_RTL (exp);
3538 if (temp && GET_CODE (temp) == REG)
3540 put_var_into_stack (exp);
3541 temp = SAVE_EXPR_RTL (exp);
3543 if (temp == 0 || GET_CODE (temp) != MEM)
3545 return change_address (temp, mode,
3546 fix_lexical_addr (XEXP (temp, 0), exp));
3548 if (SAVE_EXPR_RTL (exp) == 0)
3550 if (mode == BLKmode)
3552 = assign_stack_temp (mode,
3553 int_size_in_bytes (TREE_TYPE (exp)), 0);
3556 enum machine_mode var_mode = mode;
3558 if (TREE_CODE (type) == INTEGER_TYPE
3559 || TREE_CODE (type) == ENUMERAL_TYPE
3560 || TREE_CODE (type) == BOOLEAN_TYPE
3561 || TREE_CODE (type) == CHAR_TYPE
3562 || TREE_CODE (type) == REAL_TYPE
3563 || TREE_CODE (type) == POINTER_TYPE
3564 || TREE_CODE (type) == OFFSET_TYPE)
3566 PROMOTE_MODE (var_mode, unsignedp, type);
3569 temp = gen_reg_rtx (var_mode);
3572 SAVE_EXPR_RTL (exp) = temp;
3573 if (!optimize && GET_CODE (temp) == REG)
3574 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3577 /* If the mode of TEMP does not match that of the expression, it
3578 must be a promoted value. We pass store_expr a SUBREG of the
3579 wanted mode but mark it so that we know that it was already
3580 extended. Note that `unsignedp' was modified above in
3583 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3585 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3586 SUBREG_PROMOTED_VAR_P (temp) = 1;
3587 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3590 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3593 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3594 must be a promoted value. We return a SUBREG of the wanted mode,
3595 but mark it so that we know that it was already extended. Note
3596 that `unsignedp' was modified above in this case. */
3598 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3599 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3601 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3602 SUBREG_PROMOTED_VAR_P (temp) = 1;
3603 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3607 return SAVE_EXPR_RTL (exp);
3610 expand_exit_loop_if_false (NULL_PTR,
3611 invert_truthvalue (TREE_OPERAND (exp, 0)));
3615 expand_start_loop (1);
3616 expand_expr_stmt (TREE_OPERAND (exp, 0));
3623 tree vars = TREE_OPERAND (exp, 0);
3624 int vars_need_expansion = 0;
3626 /* Need to open a binding contour here because
3627 if there are any cleanups they most be contained here. */
3628 expand_start_bindings (0);
3630 /* Mark the corresponding BLOCK for output in its proper place. */
3631 if (TREE_OPERAND (exp, 2) != 0
3632 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3633 insert_block (TREE_OPERAND (exp, 2));
3635 /* If VARS have not yet been expanded, expand them now. */
3638 if (DECL_RTL (vars) == 0)
3640 vars_need_expansion = 1;
3643 expand_decl_init (vars);
3644 vars = TREE_CHAIN (vars);
3647 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3649 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3655 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3657 emit_insns (RTL_EXPR_SEQUENCE (exp));
3658 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3659 return RTL_EXPR_RTL (exp);
3662 /* If we don't need the result, just ensure we evaluate any
3667 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3668 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3671 /* All elts simple constants => refer to a constant in memory. But
3672 if this is a non-BLKmode mode, let it store a field at a time
3673 since that should make a CONST_INT or CONST_DOUBLE when we
3674 fold. If we are making an initializer and all operands are
3675 constant, put it in memory as well. */
3676 else if ((TREE_STATIC (exp)
3677 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3678 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
3680 rtx constructor = output_constant_def (exp);
3681 if (modifier != EXPAND_CONST_ADDRESS
3682 && modifier != EXPAND_INITIALIZER
3683 && modifier != EXPAND_SUM
3684 && !memory_address_p (GET_MODE (constructor),
3685 XEXP (constructor, 0)))
3686 constructor = change_address (constructor, VOIDmode,
3687 XEXP (constructor, 0));
3693 if (target == 0 || ! safe_from_p (target, exp))
3695 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3696 target = gen_reg_rtx (mode);
3699 enum tree_code c = TREE_CODE (type);
3701 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3702 if (c == RECORD_TYPE || c == UNION_TYPE
3703 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3704 MEM_IN_STRUCT_P (target) = 1;
3707 store_constructor (exp, target);
3713 tree exp1 = TREE_OPERAND (exp, 0);
3716 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3717 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3718 This code has the same general effect as simply doing
3719 expand_expr on the save expr, except that the expression PTR
3720 is computed for use as a memory address. This means different
3721 code, suitable for indexing, may be generated. */
3722 if (TREE_CODE (exp1) == SAVE_EXPR
3723 && SAVE_EXPR_RTL (exp1) == 0
3724 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3725 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3726 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3728 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3729 VOIDmode, EXPAND_SUM);
3730 op0 = memory_address (mode, temp);
3731 op0 = copy_all_regs (op0);
3732 SAVE_EXPR_RTL (exp1) = op0;
3736 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3737 op0 = memory_address (mode, op0);
3740 temp = gen_rtx (MEM, mode, op0);
3741 /* If address was computed by addition,
3742 mark this as an element of an aggregate. */
3743 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3744 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3745 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3746 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3747 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3748 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3749 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3750 || (TREE_CODE (exp1) == ADDR_EXPR
3751 && (exp2 = TREE_OPERAND (exp1, 0))
3752 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3753 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3754 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3755 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
3756 MEM_IN_STRUCT_P (temp) = 1;
3757 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3758 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3759 a location is accessed through a pointer to const does not mean
3760 that the value there can never change. */
3761 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3767 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3771 tree array = TREE_OPERAND (exp, 0);
3772 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3773 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3774 tree index = TREE_OPERAND (exp, 1);
3775 tree index_type = TREE_TYPE (index);
3778 /* Optimize the special-case of a zero lower bound. */
3779 if (! integer_zerop (low_bound))
3780 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3782 if (TREE_CODE (index) != INTEGER_CST
3783 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3785 /* Nonconstant array index or nonconstant element size.
3786 Generate the tree for *(&array+index) and expand that,
3787 except do it in a language-independent way
3788 and don't complain about non-lvalue arrays.
3789 `mark_addressable' should already have been called
3790 for any array for which this case will be reached. */
3792 /* Don't forget the const or volatile flag from the array
3794 tree variant_type = build_type_variant (type,
3795 TREE_READONLY (exp),
3796 TREE_THIS_VOLATILE (exp));
3797 tree array_adr = build1 (ADDR_EXPR,
3798 build_pointer_type (variant_type), array);
3801 /* Convert the integer argument to a type the same size as a
3802 pointer so the multiply won't overflow spuriously. */
3803 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3804 index = convert (type_for_size (POINTER_SIZE, 0), index);
3806 /* Don't think the address has side effects
3807 just because the array does.
3808 (In some cases the address might have side effects,
3809 and we fail to record that fact here. However, it should not
3810 matter, since expand_expr should not care.) */
3811 TREE_SIDE_EFFECTS (array_adr) = 0;
3813 elt = build1 (INDIRECT_REF, type,
3814 fold (build (PLUS_EXPR,
3815 TYPE_POINTER_TO (variant_type),
3817 fold (build (MULT_EXPR,
3818 TYPE_POINTER_TO (variant_type),
3820 size_in_bytes (type))))));
3822 /* Volatility, etc., of new expression is same as old
3824 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3825 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3826 TREE_READONLY (elt) = TREE_READONLY (exp);
3828 return expand_expr (elt, target, tmode, modifier);
3831 /* Fold an expression like: "foo"[2].
3832 This is not done in fold so it won't happen inside &. */
3834 if (TREE_CODE (array) == STRING_CST
3835 && TREE_CODE (index) == INTEGER_CST
3836 && !TREE_INT_CST_HIGH (index)
3837 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3839 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3841 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3842 TREE_TYPE (exp) = integer_type_node;
3843 return expand_expr (exp, target, tmode, modifier);
3845 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3847 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3848 TREE_TYPE (exp) = integer_type_node;
3849 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3851 target, tmode, modifier);
3855 /* If this is a constant index into a constant array,
3856 just get the value from the array. Handle both the cases when
3857 we have an explicit constructor and when our operand is a variable
3858 that was declared const. */
3860 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3862 if (TREE_CODE (index) == INTEGER_CST
3863 && TREE_INT_CST_HIGH (index) == 0)
3865 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3867 i = TREE_INT_CST_LOW (index);
3869 elem = TREE_CHAIN (elem);
3871 return expand_expr (fold (TREE_VALUE (elem)), target,
3876 else if (optimize >= 1
3877 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3878 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3879 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3881 if (TREE_CODE (index) == INTEGER_CST
3882 && TREE_INT_CST_HIGH (index) == 0)
3884 tree init = DECL_INITIAL (array);
3886 i = TREE_INT_CST_LOW (index);
3887 if (TREE_CODE (init) == CONSTRUCTOR)
3889 tree elem = CONSTRUCTOR_ELTS (init);
3892 elem = TREE_CHAIN (elem);
3894 return expand_expr (fold (TREE_VALUE (elem)), target,
3897 else if (TREE_CODE (init) == STRING_CST
3898 && i < TREE_STRING_LENGTH (init))
3900 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3901 return convert_to_mode (mode, temp, 0);
3907 /* Treat array-ref with constant index as a component-ref. */
3911 /* If the operand is a CONSTRUCTOR, we can just extract the
3912 appropriate field if it is present. */
3913 if (code != ARRAY_REF
3914 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3918 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3919 elt = TREE_CHAIN (elt))
3920 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3921 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3925 enum machine_mode mode1;
3930 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3931 &mode1, &unsignedp, &volatilep);
3933 /* If we got back the original object, something is wrong. Perhaps
3934 we are evaluating an expression too early. In any event, don't
3935 infinitely recurse. */
3939 /* In some cases, we will be offsetting OP0's address by a constant.
3940 So get it as a sum, if possible. If we will be using it
3941 directly in an insn, we validate it. */
3942 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3944 /* If this is a constant, put it into a register if it is a
3945 legitimate constant and memory if it isn't. */
3946 if (CONSTANT_P (op0))
3948 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3949 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3950 op0 = force_reg (mode, op0);
3952 op0 = validize_mem (force_const_mem (mode, op0));
3957 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3959 if (GET_CODE (op0) != MEM)
3961 op0 = change_address (op0, VOIDmode,
3962 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3963 force_reg (Pmode, offset_rtx)));
3966 /* Don't forget about volatility even if this is a bitfield. */
3967 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3969 op0 = copy_rtx (op0);
3970 MEM_VOLATILE_P (op0) = 1;
3973 if (mode1 == VOIDmode
3974 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3975 && modifier != EXPAND_CONST_ADDRESS
3976 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3977 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3979 /* In cases where an aligned union has an unaligned object
3980 as a field, we might be extracting a BLKmode value from
3981 an integer-mode (e.g., SImode) object. Handle this case
3982 by doing the extract into an object as wide as the field
3983 (which we know to be the width of a basic mode), then
3984 storing into memory, and changing the mode to BLKmode. */
3985 enum machine_mode ext_mode = mode;
3987 if (ext_mode == BLKmode)
3988 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3990 if (ext_mode == BLKmode)
3993 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3994 unsignedp, target, ext_mode, ext_mode,
3995 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3996 int_size_in_bytes (TREE_TYPE (tem)));
3997 if (mode == BLKmode)
3999 rtx new = assign_stack_temp (ext_mode,
4000 bitsize / BITS_PER_UNIT, 0);
4002 emit_move_insn (new, op0);
4003 op0 = copy_rtx (new);
4004 PUT_MODE (op0, BLKmode);
4010 /* Get a reference to just this component. */
4011 if (modifier == EXPAND_CONST_ADDRESS
4012 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4013 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4014 (bitpos / BITS_PER_UNIT)));
4016 op0 = change_address (op0, mode1,
4017 plus_constant (XEXP (op0, 0),
4018 (bitpos / BITS_PER_UNIT)));
4019 MEM_IN_STRUCT_P (op0) = 1;
4020 MEM_VOLATILE_P (op0) |= volatilep;
4021 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4024 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4025 convert_move (target, op0, unsignedp);
4031 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4032 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4033 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4034 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4035 MEM_IN_STRUCT_P (temp) = 1;
4036 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4037 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4038 a location is accessed through a pointer to const does not mean
4039 that the value there can never change. */
4040 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4045 /* Intended for a reference to a buffer of a file-object in Pascal.
4046 But it's not certain that a special tree code will really be
4047 necessary for these. INDIRECT_REF might work for them. */
4051 /* IN_EXPR: Inlined pascal set IN expression.
4054 rlo = set_low - (set_low%bits_per_word);
4055 the_word = set [ (index - rlo)/bits_per_word ];
4056 bit_index = index % bits_per_word;
4057 bitmask = 1 << bit_index;
4058 return !!(the_word & bitmask); */
4060 preexpand_calls (exp);
4062 tree set = TREE_OPERAND (exp, 0);
4063 tree index = TREE_OPERAND (exp, 1);
4064 tree set_type = TREE_TYPE (set);
4066 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4067 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4073 rtx diff, quo, rem, addr, bit, result;
4074 rtx setval, setaddr;
4075 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4078 target = gen_reg_rtx (mode);
4080 /* If domain is empty, answer is no. */
4081 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4084 index_val = expand_expr (index, 0, VOIDmode, 0);
4085 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4086 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4087 setval = expand_expr (set, 0, VOIDmode, 0);
4088 setaddr = XEXP (setval, 0);
4090 /* Compare index against bounds, if they are constant. */
4091 if (GET_CODE (index_val) == CONST_INT
4092 && GET_CODE (lo_r) == CONST_INT
4093 && INTVAL (index_val) < INTVAL (lo_r))
4096 if (GET_CODE (index_val) == CONST_INT
4097 && GET_CODE (hi_r) == CONST_INT
4098 && INTVAL (hi_r) < INTVAL (index_val))
4101 /* If we get here, we have to generate the code for both cases
4102 (in range and out of range). */
4104 op0 = gen_label_rtx ();
4105 op1 = gen_label_rtx ();
4107 if (! (GET_CODE (index_val) == CONST_INT
4108 && GET_CODE (lo_r) == CONST_INT))
4110 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4111 GET_MODE (index_val), 0, 0);
4112 emit_jump_insn (gen_blt (op1));
4115 if (! (GET_CODE (index_val) == CONST_INT
4116 && GET_CODE (hi_r) == CONST_INT))
4118 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4119 GET_MODE (index_val), 0, 0);
4120 emit_jump_insn (gen_bgt (op1));
4123 /* Calculate the element number of bit zero in the first word
4125 if (GET_CODE (lo_r) == CONST_INT)
4126 rlow = GEN_INT (INTVAL (lo_r)
4127 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4129 rlow = expand_binop (index_mode, and_optab, lo_r,
4130 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4131 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4133 diff = expand_binop (index_mode, sub_optab,
4134 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4136 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4137 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4138 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4139 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4140 addr = memory_address (byte_mode,
4141 expand_binop (index_mode, add_optab,
4142 diff, setaddr, NULL_RTX, 0,
4144 /* Extract the bit we want to examine */
4145 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4146 gen_rtx (MEM, byte_mode, addr),
4147 make_tree (TREE_TYPE (index), rem),
4149 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4150 GET_MODE (target) == byte_mode ? target : 0,
4151 1, OPTAB_LIB_WIDEN);
4153 if (result != target)
4154 convert_move (target, result, 1);
4156 /* Output the code to handle the out-of-range case. */
4159 emit_move_insn (target, const0_rtx);
4164 case WITH_CLEANUP_EXPR:
4165 if (RTL_EXPR_RTL (exp) == 0)
4168 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4170 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4171 /* That's it for this cleanup. */
4172 TREE_OPERAND (exp, 2) = 0;
4174 return RTL_EXPR_RTL (exp);
4177 /* Check for a built-in function. */
4178 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4179 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4180 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4181 return expand_builtin (exp, target, subtarget, tmode, ignore);
4182 /* If this call was expanded already by preexpand_calls,
4183 just return the result we got. */
4184 if (CALL_EXPR_RTL (exp) != 0)
4185 return CALL_EXPR_RTL (exp);
4186 return expand_call (exp, target, ignore);
4188 case NON_LVALUE_EXPR:
4191 case REFERENCE_EXPR:
4192 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4193 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4194 if (TREE_CODE (type) == UNION_TYPE)
4196 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4199 if (mode == BLKmode)
4201 if (TYPE_SIZE (type) == 0
4202 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4204 target = assign_stack_temp (BLKmode,
4205 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4206 + BITS_PER_UNIT - 1)
4207 / BITS_PER_UNIT, 0);
4210 target = gen_reg_rtx (mode);
4212 if (GET_CODE (target) == MEM)
4213 /* Store data into beginning of memory target. */
4214 store_expr (TREE_OPERAND (exp, 0),
4215 change_address (target, TYPE_MODE (valtype), 0), 0);
4217 else if (GET_CODE (target) == REG)
4218 /* Store this field into a union of the proper type. */
4219 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4220 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4222 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4226 /* Return the entire union. */
4229 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4230 if (GET_MODE (op0) == mode)
4232 /* If arg is a constant integer being extended from a narrower mode,
4233 we must really truncate to get the extended bits right. Otherwise
4234 (unsigned long) (unsigned char) ("\377"[0])
4235 would come out as ffffffff. */
4236 if (GET_MODE (op0) == VOIDmode
4237 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4238 < GET_MODE_BITSIZE (mode)))
4240 /* MODE must be narrower than HOST_BITS_PER_INT. */
4241 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4243 if (width < HOST_BITS_PER_WIDE_INT)
4245 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4246 : CONST_DOUBLE_LOW (op0));
4247 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4248 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4249 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4251 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4253 op0 = GEN_INT (val);
4257 op0 = (simplify_unary_operation
4258 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4259 ? ZERO_EXTEND : SIGN_EXTEND),
4261 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4266 if (GET_MODE (op0) == VOIDmode)
4268 if (modifier == EXPAND_INITIALIZER)
4269 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4270 if (flag_force_mem && GET_CODE (op0) == MEM)
4271 op0 = copy_to_reg (op0);
4274 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4276 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4280 /* We come here from MINUS_EXPR when the second operand is a constant. */
4282 this_optab = add_optab;
4284 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4285 something else, make sure we add the register to the constant and
4286 then to the other thing. This case can occur during strength
4287 reduction and doing it this way will produce better code if the
4288 frame pointer or argument pointer is eliminated.
4290 fold-const.c will ensure that the constant is always in the inner
4291 PLUS_EXPR, so the only case we need to do anything about is if
4292 sp, ap, or fp is our second argument, in which case we must swap
4293 the innermost first argument and our second argument. */
4295 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4296 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4297 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4298 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4299 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4300 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4302 tree t = TREE_OPERAND (exp, 1);
4304 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4305 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4308 /* If the result is to be Pmode and we are adding an integer to
4309 something, we might be forming a constant. So try to use
4310 plus_constant. If it produces a sum and we can't accept it,
4311 use force_operand. This allows P = &ARR[const] to generate
4312 efficient code on machines where a SYMBOL_REF is not a valid
4315 If this is an EXPAND_SUM call, always return the sum. */
4316 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4319 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4320 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4321 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4323 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4325 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4326 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4327 op1 = force_operand (op1, target);
4331 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4332 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4333 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4335 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4337 if (! CONSTANT_P (op0))
4339 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4340 VOIDmode, modifier);
4343 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4344 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4345 op0 = force_operand (op0, target);
4350 /* No sense saving up arithmetic to be done
4351 if it's all in the wrong mode to form part of an address.
4352 And force_operand won't know whether to sign-extend or
4354 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4358 preexpand_calls (exp);
4359 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4362 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4363 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4366 /* Make sure any term that's a sum with a constant comes last. */
4367 if (GET_CODE (op0) == PLUS
4368 && CONSTANT_P (XEXP (op0, 1)))
4374 /* If adding to a sum including a constant,
4375 associate it to put the constant outside. */
4376 if (GET_CODE (op1) == PLUS
4377 && CONSTANT_P (XEXP (op1, 1)))
4379 rtx constant_term = const0_rtx;
4381 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4384 /* Ensure that MULT comes first if there is one. */
4385 else if (GET_CODE (op0) == MULT)
4386 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4388 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4390 /* Let's also eliminate constants from op0 if possible. */
4391 op0 = eliminate_constant_term (op0, &constant_term);
4393 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4394 their sum should be a constant. Form it into OP1, since the
4395 result we want will then be OP0 + OP1. */
4397 temp = simplify_binary_operation (PLUS, mode, constant_term,
4402 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4405 /* Put a constant term last and put a multiplication first. */
4406 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4407 temp = op1, op1 = op0, op0 = temp;
4409 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4410 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4413 /* Handle difference of two symbolic constants,
4414 for the sake of an initializer. */
4415 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4416 && really_constant_p (TREE_OPERAND (exp, 0))
4417 && really_constant_p (TREE_OPERAND (exp, 1)))
4419 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4420 VOIDmode, modifier);
4421 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4422 VOIDmode, modifier);
4423 return gen_rtx (MINUS, mode, op0, op1);
4425 /* Convert A - const to A + (-const). */
4426 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4428 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4429 fold (build1 (NEGATE_EXPR, type,
4430 TREE_OPERAND (exp, 1))));
4433 this_optab = sub_optab;
4437 preexpand_calls (exp);
4438 /* If first operand is constant, swap them.
4439 Thus the following special case checks need only
4440 check the second operand. */
4441 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4443 register tree t1 = TREE_OPERAND (exp, 0);
4444 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4445 TREE_OPERAND (exp, 1) = t1;
4448 /* Attempt to return something suitable for generating an
4449 indexed address, for machines that support that. */
4451 if (modifier == EXPAND_SUM && mode == Pmode
4452 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4453 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4455 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4457 /* Apply distributive law if OP0 is x+c. */
4458 if (GET_CODE (op0) == PLUS
4459 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4460 return gen_rtx (PLUS, mode,
4461 gen_rtx (MULT, mode, XEXP (op0, 0),
4462 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4463 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4464 * INTVAL (XEXP (op0, 1))));
4466 if (GET_CODE (op0) != REG)
4467 op0 = force_operand (op0, NULL_RTX);
4468 if (GET_CODE (op0) != REG)
4469 op0 = copy_to_mode_reg (mode, op0);
4471 return gen_rtx (MULT, mode, op0,
4472 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4475 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4478 /* Check for multiplying things that have been extended
4479 from a narrower type. If this machine supports multiplying
4480 in that narrower type with a result in the desired type,
4481 do it that way, and avoid the explicit type-conversion. */
4482 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4483 && TREE_CODE (type) == INTEGER_TYPE
4484 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4485 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4486 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4487 && int_fits_type_p (TREE_OPERAND (exp, 1),
4488 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4489 /* Don't use a widening multiply if a shift will do. */
4490 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4491 > HOST_BITS_PER_WIDE_INT)
4492 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4494 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4495 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4497 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4498 /* If both operands are extended, they must either both
4499 be zero-extended or both be sign-extended. */
4500 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4502 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4504 enum machine_mode innermode
4505 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4506 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4507 ? umul_widen_optab : smul_widen_optab);
4508 if (mode == GET_MODE_WIDER_MODE (innermode)
4509 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4511 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4512 NULL_RTX, VOIDmode, 0);
4513 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4514 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4517 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4518 NULL_RTX, VOIDmode, 0);
4522 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4523 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4524 return expand_mult (mode, op0, op1, target, unsignedp);
4526 case TRUNC_DIV_EXPR:
4527 case FLOOR_DIV_EXPR:
4529 case ROUND_DIV_EXPR:
4530 case EXACT_DIV_EXPR:
4531 preexpand_calls (exp);
4532 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4534 /* Possible optimization: compute the dividend with EXPAND_SUM
4535 then if the divisor is constant can optimize the case
4536 where some terms of the dividend have coeffs divisible by it. */
4537 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4538 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4539 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4542 this_optab = flodiv_optab;
4545 case TRUNC_MOD_EXPR:
4546 case FLOOR_MOD_EXPR:
4548 case ROUND_MOD_EXPR:
4549 preexpand_calls (exp);
4550 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4552 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4553 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4554 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4556 case FIX_ROUND_EXPR:
4557 case FIX_FLOOR_EXPR:
4559 abort (); /* Not used for C. */
4561 case FIX_TRUNC_EXPR:
4562 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4564 target = gen_reg_rtx (mode);
4565 expand_fix (target, op0, unsignedp);
4569 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4571 target = gen_reg_rtx (mode);
4572 /* expand_float can't figure out what to do if FROM has VOIDmode.
4573 So give it the correct mode. With -O, cse will optimize this. */
4574 if (GET_MODE (op0) == VOIDmode)
4575 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4577 expand_float (target, op0,
4578 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4582 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4583 temp = expand_unop (mode, neg_optab, op0, target, 0);
4589 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4591 /* Handle complex values specially. */
4593 enum machine_mode opmode
4594 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4596 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4597 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4598 return expand_complex_abs (opmode, op0, target, unsignedp);
4601 /* Unsigned abs is simply the operand. Testing here means we don't
4602 risk generating incorrect code below. */
4603 if (TREE_UNSIGNED (type))
4606 /* First try to do it with a special abs instruction. */
4607 temp = expand_unop (mode, abs_optab, op0, target, 0);
4611 /* If this machine has expensive jumps, we can do integer absolute
4612 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4613 where W is the width of MODE. */
4615 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4617 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4618 size_int (GET_MODE_BITSIZE (mode) - 1),
4621 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4624 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4631 /* If that does not win, use conditional jump and negate. */
4632 target = original_target;
4633 temp = gen_label_rtx ();
4634 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4635 || (GET_CODE (target) == REG
4636 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4637 target = gen_reg_rtx (mode);
4638 emit_move_insn (target, op0);
4639 emit_cmp_insn (target,
4640 expand_expr (convert (type, integer_zero_node),
4641 NULL_RTX, VOIDmode, 0),
4642 GE, NULL_RTX, mode, 0, 0);
4644 emit_jump_insn (gen_bge (temp));
4645 op0 = expand_unop (mode, neg_optab, target, target, 0);
4647 emit_move_insn (target, op0);
4654 target = original_target;
4655 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4656 || (GET_CODE (target) == REG
4657 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4658 target = gen_reg_rtx (mode);
4659 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4660 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4662 /* First try to do it with a special MIN or MAX instruction.
4663 If that does not win, use a conditional jump to select the proper
4665 this_optab = (TREE_UNSIGNED (type)
4666 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4667 : (code == MIN_EXPR ? smin_optab : smax_optab));
4669 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4675 emit_move_insn (target, op0);
4676 op0 = gen_label_rtx ();
4677 /* If this mode is an integer too wide to compare properly,
4678 compare word by word. Rely on cse to optimize constant cases. */
4679 if (GET_MODE_CLASS (mode) == MODE_INT
4680 && !can_compare_p (mode))
4682 if (code == MAX_EXPR)
4683 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
4685 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4686 emit_move_insn (target, op1);
4690 if (code == MAX_EXPR)
4691 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4692 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4693 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4695 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4696 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4697 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4698 if (temp == const0_rtx)
4699 emit_move_insn (target, op1);
4700 else if (temp != const_true_rtx)
4702 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4703 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4706 emit_move_insn (target, op1);
4712 /* ??? Can optimize when the operand of this is a bitwise operation,
4713 by using a different bitwise operation. */
4715 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4716 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4722 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4723 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4728 /* ??? Can optimize bitwise operations with one arg constant.
4729 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4730 and (a bitwise1 b) bitwise2 b (etc)
4731 but that is probably not worth while. */
4733 /* BIT_AND_EXPR is for bitwise anding.
4734 TRUTH_AND_EXPR is for anding two boolean values
4735 when we want in all cases to compute both of them.
4736 In general it is fastest to do TRUTH_AND_EXPR by
4737 computing both operands as actual zero-or-1 values
4738 and then bitwise anding. In cases where there cannot
4739 be any side effects, better code would be made by
4740 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4741 but the question is how to recognize those cases. */
4743 case TRUTH_AND_EXPR:
4745 this_optab = and_optab;
4748 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4751 this_optab = ior_optab;
4754 case TRUTH_XOR_EXPR:
4756 this_optab = xor_optab;
4763 preexpand_calls (exp);
4764 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4766 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4767 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4770 /* Could determine the answer when only additive constants differ.
4771 Also, the addition of one can be handled by changing the condition. */
4778 preexpand_calls (exp);
4779 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4782 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4783 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4785 && GET_CODE (original_target) == REG
4786 && (GET_MODE (original_target)
4787 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4789 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4790 if (temp != original_target)
4791 temp = copy_to_reg (temp);
4792 op1 = gen_label_rtx ();
4793 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4794 GET_MODE (temp), unsignedp, 0);
4795 emit_jump_insn (gen_beq (op1));
4796 emit_move_insn (temp, const1_rtx);
4800 /* If no set-flag instruction, must generate a conditional
4801 store into a temporary variable. Drop through
4802 and handle this like && and ||. */
4804 case TRUTH_ANDIF_EXPR:
4805 case TRUTH_ORIF_EXPR:
4807 && (target == 0 || ! safe_from_p (target, exp)
4808 /* Make sure we don't have a hard reg (such as function's return
4809 value) live across basic blocks, if not optimizing. */
4810 || (!optimize && GET_CODE (target) == REG
4811 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
4812 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4815 emit_clr_insn (target);
4817 op1 = gen_label_rtx ();
4818 jumpifnot (exp, op1);
4821 emit_0_to_1_insn (target);
4824 return ignore ? const0_rtx : target;
4826 case TRUTH_NOT_EXPR:
4827 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4828 /* The parser is careful to generate TRUTH_NOT_EXPR
4829 only with operands that are always zero or one. */
4830 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4831 target, 1, OPTAB_LIB_WIDEN);
4837 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4839 return expand_expr (TREE_OPERAND (exp, 1),
4840 (ignore ? const0_rtx : target),
4845 /* Note that COND_EXPRs whose type is a structure or union
4846 are required to be constructed to contain assignments of
4847 a temporary variable, so that we can evaluate them here
4848 for side effect only. If type is void, we must do likewise. */
4850 /* If an arm of the branch requires a cleanup,
4851 only that cleanup is performed. */
4854 tree binary_op = 0, unary_op = 0;
4855 tree old_cleanups = cleanups_this_call;
4856 cleanups_this_call = 0;
4858 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4859 convert it to our mode, if necessary. */
4860 if (integer_onep (TREE_OPERAND (exp, 1))
4861 && integer_zerop (TREE_OPERAND (exp, 2))
4862 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4866 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4871 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4872 if (GET_MODE (op0) == mode)
4875 target = gen_reg_rtx (mode);
4876 convert_move (target, op0, unsignedp);
4880 /* If we are not to produce a result, we have no target. Otherwise,
4881 if a target was specified use it; it will not be used as an
4882 intermediate target unless it is safe. If no target, use a
4887 else if (original_target
4888 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4889 temp = original_target;
4890 else if (mode == BLKmode)
4892 if (TYPE_SIZE (type) == 0
4893 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4895 temp = assign_stack_temp (BLKmode,
4896 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4897 + BITS_PER_UNIT - 1)
4898 / BITS_PER_UNIT, 0);
4901 temp = gen_reg_rtx (mode);
4903 /* Check for X ? A + B : A. If we have this, we can copy
4904 A to the output and conditionally add B. Similarly for unary
4905 operations. Don't do this if X has side-effects because
4906 those side effects might affect A or B and the "?" operation is
4907 a sequence point in ANSI. (We test for side effects later.) */
4909 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4910 && operand_equal_p (TREE_OPERAND (exp, 2),
4911 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4912 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4913 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4914 && operand_equal_p (TREE_OPERAND (exp, 1),
4915 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4916 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4917 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4918 && operand_equal_p (TREE_OPERAND (exp, 2),
4919 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4920 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4921 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4922 && operand_equal_p (TREE_OPERAND (exp, 1),
4923 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4924 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4926 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4927 operation, do this as A + (X != 0). Similarly for other simple
4928 binary operators. */
4929 if (temp && singleton && binary_op
4930 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4931 && (TREE_CODE (binary_op) == PLUS_EXPR
4932 || TREE_CODE (binary_op) == MINUS_EXPR
4933 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4934 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4935 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4936 && integer_onep (TREE_OPERAND (binary_op, 1))
4937 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4940 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4941 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4942 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4943 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4946 /* If we had X ? A : A + 1, do this as A + (X == 0).
4948 We have to invert the truth value here and then put it
4949 back later if do_store_flag fails. We cannot simply copy
4950 TREE_OPERAND (exp, 0) to another variable and modify that
4951 because invert_truthvalue can modify the tree pointed to
4953 if (singleton == TREE_OPERAND (exp, 1))
4954 TREE_OPERAND (exp, 0)
4955 = invert_truthvalue (TREE_OPERAND (exp, 0));
4957 result = do_store_flag (TREE_OPERAND (exp, 0),
4958 (safe_from_p (temp, singleton)
4960 mode, BRANCH_COST <= 1);
4964 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4965 return expand_binop (mode, boptab, op1, result, temp,
4966 unsignedp, OPTAB_LIB_WIDEN);
4968 else if (singleton == TREE_OPERAND (exp, 1))
4969 TREE_OPERAND (exp, 0)
4970 = invert_truthvalue (TREE_OPERAND (exp, 0));
4974 op0 = gen_label_rtx ();
4976 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4980 /* If the target conflicts with the other operand of the
4981 binary op, we can't use it. Also, we can't use the target
4982 if it is a hard register, because evaluating the condition
4983 might clobber it. */
4985 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4986 || (GET_CODE (temp) == REG
4987 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4988 temp = gen_reg_rtx (mode);
4989 store_expr (singleton, temp, 0);
4992 expand_expr (singleton,
4993 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4994 if (cleanups_this_call)
4996 sorry ("aggregate value in COND_EXPR");
4997 cleanups_this_call = 0;
4999 if (singleton == TREE_OPERAND (exp, 1))
5000 jumpif (TREE_OPERAND (exp, 0), op0);
5002 jumpifnot (TREE_OPERAND (exp, 0), op0);
5004 if (binary_op && temp == 0)
5005 /* Just touch the other operand. */
5006 expand_expr (TREE_OPERAND (binary_op, 1),
5007 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5009 store_expr (build (TREE_CODE (binary_op), type,
5010 make_tree (type, temp),
5011 TREE_OPERAND (binary_op, 1)),
5014 store_expr (build1 (TREE_CODE (unary_op), type,
5015 make_tree (type, temp)),
5020 /* This is now done in jump.c and is better done there because it
5021 produces shorter register lifetimes. */
5023 /* Check for both possibilities either constants or variables
5024 in registers (but not the same as the target!). If so, can
5025 save branches by assigning one, branching, and assigning the
5027 else if (temp && GET_MODE (temp) != BLKmode
5028 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5029 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5030 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5031 && DECL_RTL (TREE_OPERAND (exp, 1))
5032 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5033 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5034 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5035 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5036 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5037 && DECL_RTL (TREE_OPERAND (exp, 2))
5038 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5039 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5041 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5042 temp = gen_reg_rtx (mode);
5043 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5044 jumpifnot (TREE_OPERAND (exp, 0), op0);
5045 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5049 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5050 comparison operator. If we have one of these cases, set the
5051 output to A, branch on A (cse will merge these two references),
5052 then set the output to FOO. */
5054 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5055 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5056 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5057 TREE_OPERAND (exp, 1), 0)
5058 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5059 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5061 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5062 temp = gen_reg_rtx (mode);
5063 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5064 jumpif (TREE_OPERAND (exp, 0), op0);
5065 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5069 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5070 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5071 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5072 TREE_OPERAND (exp, 2), 0)
5073 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5074 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5076 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5077 temp = gen_reg_rtx (mode);
5078 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5079 jumpifnot (TREE_OPERAND (exp, 0), op0);
5080 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5085 op1 = gen_label_rtx ();
5086 jumpifnot (TREE_OPERAND (exp, 0), op0);
5088 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5090 expand_expr (TREE_OPERAND (exp, 1),
5091 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5092 if (cleanups_this_call)
5094 sorry ("aggregate value in COND_EXPR");
5095 cleanups_this_call = 0;
5099 emit_jump_insn (gen_jump (op1));
5103 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5105 expand_expr (TREE_OPERAND (exp, 2),
5106 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5109 if (cleanups_this_call)
5111 sorry ("aggregate value in COND_EXPR");
5112 cleanups_this_call = 0;
5118 cleanups_this_call = old_cleanups;
5124 /* Something needs to be initialized, but we didn't know
5125 where that thing was when building the tree. For example,
5126 it could be the return value of a function, or a parameter
5127 to a function which lays down in the stack, or a temporary
5128 variable which must be passed by reference.
5130 We guarantee that the expression will either be constructed
5131 or copied into our original target. */
5133 tree slot = TREE_OPERAND (exp, 0);
5136 if (TREE_CODE (slot) != VAR_DECL)
5141 if (DECL_RTL (slot) != 0)
5143 target = DECL_RTL (slot);
5144 /* If we have already expanded the slot, so don't do
5146 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5151 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5152 /* All temp slots at this level must not conflict. */
5153 preserve_temp_slots (target);
5154 DECL_RTL (slot) = target;
5158 /* I bet this needs to be done, and I bet that it needs to
5159 be above, inside the else clause. The reason is
5160 simple, how else is it going to get cleaned up? (mrs)
5162 The reason is probably did not work before, and was
5163 commented out is because this was re-expanding already
5164 expanded target_exprs (target == 0 and DECL_RTL (slot)
5165 != 0) also cleaning them up many times as well. :-( */
5167 /* Since SLOT is not known to the called function
5168 to belong to its stack frame, we must build an explicit
5169 cleanup. This case occurs when we must build up a reference
5170 to pass the reference as an argument. In this case,
5171 it is very likely that such a reference need not be
5174 if (TREE_OPERAND (exp, 2) == 0)
5175 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5176 if (TREE_OPERAND (exp, 2))
5177 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5178 cleanups_this_call);
5183 /* This case does occur, when expanding a parameter which
5184 needs to be constructed on the stack. The target
5185 is the actual stack address that we want to initialize.
5186 The function we call will perform the cleanup in this case. */
5188 /* If we have already assigned it space, use that space,
5189 not target that we were passed in, as our target
5190 parameter is only a hint. */
5191 if (DECL_RTL (slot) != 0)
5193 target = DECL_RTL (slot);
5194 /* If we have already expanded the slot, so don't do
5196 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5200 DECL_RTL (slot) = target;
5203 exp1 = TREE_OPERAND (exp, 1);
5204 /* Mark it as expanded. */
5205 TREE_OPERAND (exp, 1) = NULL_TREE;
5207 return expand_expr (exp1, target, tmode, modifier);
5212 tree lhs = TREE_OPERAND (exp, 0);
5213 tree rhs = TREE_OPERAND (exp, 1);
5214 tree noncopied_parts = 0;
5215 tree lhs_type = TREE_TYPE (lhs);
5217 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5218 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5219 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5220 TYPE_NONCOPIED_PARTS (lhs_type));
5221 while (noncopied_parts != 0)
5223 expand_assignment (TREE_VALUE (noncopied_parts),
5224 TREE_PURPOSE (noncopied_parts), 0, 0);
5225 noncopied_parts = TREE_CHAIN (noncopied_parts);
5232 /* If lhs is complex, expand calls in rhs before computing it.
5233 That's so we don't compute a pointer and save it over a call.
5234 If lhs is simple, compute it first so we can give it as a
5235 target if the rhs is just a call. This avoids an extra temp and copy
5236 and that prevents a partial-subsumption which makes bad code.
5237 Actually we could treat component_ref's of vars like vars. */
5239 tree lhs = TREE_OPERAND (exp, 0);
5240 tree rhs = TREE_OPERAND (exp, 1);
5241 tree noncopied_parts = 0;
5242 tree lhs_type = TREE_TYPE (lhs);
5246 if (TREE_CODE (lhs) != VAR_DECL
5247 && TREE_CODE (lhs) != RESULT_DECL
5248 && TREE_CODE (lhs) != PARM_DECL)
5249 preexpand_calls (exp);
5251 /* Check for |= or &= of a bitfield of size one into another bitfield
5252 of size 1. In this case, (unless we need the result of the
5253 assignment) we can do this more efficiently with a
5254 test followed by an assignment, if necessary.
5256 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5257 things change so we do, this code should be enhanced to
5260 && TREE_CODE (lhs) == COMPONENT_REF
5261 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5262 || TREE_CODE (rhs) == BIT_AND_EXPR)
5263 && TREE_OPERAND (rhs, 0) == lhs
5264 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5265 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5266 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5268 rtx label = gen_label_rtx ();
5270 do_jump (TREE_OPERAND (rhs, 1),
5271 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5272 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5273 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5274 (TREE_CODE (rhs) == BIT_IOR_EXPR
5276 : integer_zero_node)),
5278 do_pending_stack_adjust ();
5283 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5284 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5285 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5286 TYPE_NONCOPIED_PARTS (lhs_type));
5288 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5289 while (noncopied_parts != 0)
5291 expand_assignment (TREE_PURPOSE (noncopied_parts),
5292 TREE_VALUE (noncopied_parts), 0, 0);
5293 noncopied_parts = TREE_CHAIN (noncopied_parts);
5298 case PREINCREMENT_EXPR:
5299 case PREDECREMENT_EXPR:
5300 return expand_increment (exp, 0);
5302 case POSTINCREMENT_EXPR:
5303 case POSTDECREMENT_EXPR:
5304 /* Faster to treat as pre-increment if result is not used. */
5305 return expand_increment (exp, ! ignore);
5308 /* Are we taking the address of a nested function? */
5309 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5310 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5312 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5313 op0 = force_operand (op0, target);
5317 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5318 (modifier == EXPAND_INITIALIZER
5319 ? modifier : EXPAND_CONST_ADDRESS));
5321 /* We would like the object in memory. If it is a constant,
5322 we can have it be statically allocated into memory. For
5323 a non-constant (REG or SUBREG), we need to allocate some
5324 memory and store the value into it. */
5326 if (CONSTANT_P (op0))
5327 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5330 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
5332 /* If this object is in a register, it must be not
5334 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5335 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5337 = assign_stack_temp (inner_mode,
5338 int_size_in_bytes (inner_type), 1);
5340 emit_move_insn (memloc, op0);
5344 if (GET_CODE (op0) != MEM)
5347 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5348 return XEXP (op0, 0);
5349 op0 = force_operand (XEXP (op0, 0), target);
5351 if (flag_force_addr && GET_CODE (op0) != REG)
5352 return force_reg (Pmode, op0);
5355 case ENTRY_VALUE_EXPR:
5358 /* COMPLEX type for Extended Pascal & Fortran */
5361 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5365 /* Get the rtx code of the operands. */
5366 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5367 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5370 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5372 prev = get_last_insn ();
5374 /* Tell flow that the whole of the destination is being set. */
5375 if (GET_CODE (target) == REG)
5376 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5378 /* Move the real (op0) and imaginary (op1) parts to their location. */
5379 emit_move_insn (gen_realpart (mode, target), op0);
5380 emit_move_insn (gen_imagpart (mode, target), op1);
5382 /* Complex construction should appear as a single unit. */
5389 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5390 return gen_realpart (mode, op0);
5393 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5394 return gen_imagpart (mode, op0);
5398 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5402 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5405 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5407 prev = get_last_insn ();
5409 /* Tell flow that the whole of the destination is being set. */
5410 if (GET_CODE (target) == REG)
5411 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5413 /* Store the realpart and the negated imagpart to target. */
5414 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5416 imag_t = gen_imagpart (mode, target);
5417 temp = expand_unop (mode, neg_optab,
5418 gen_imagpart (mode, op0), imag_t, 0);
5420 emit_move_insn (imag_t, temp);
5422 /* Conjugate should appear as a single unit */
5429 op0 = CONST0_RTX (tmode);
5435 return (*lang_expand_expr) (exp, target, tmode, modifier);
5438 /* Here to do an ordinary binary operator, generating an instruction
5439 from the optab already placed in `this_optab'. */
5441 preexpand_calls (exp);
5442 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5444 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5445 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5447 temp = expand_binop (mode, this_optab, op0, op1, target,
5448 unsignedp, OPTAB_LIB_WIDEN);
5454 /* Return the alignment in bits of EXP, a pointer valued expression.
5455 But don't return more than MAX_ALIGN no matter what.
5456 The alignment returned is, by default, the alignment of the thing that
5457 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5459 Otherwise, look at the expression to see if we can do better, i.e., if the
5460 expression is actually pointing at an object whose alignment is tighter. */
5463 get_pointer_alignment (exp, max_align)
5467 unsigned align, inner;
5469 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5472 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5473 align = MIN (align, max_align);
5477 switch (TREE_CODE (exp))
5481 case NON_LVALUE_EXPR:
5482 exp = TREE_OPERAND (exp, 0);
5483 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5485 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5486 inner = MIN (inner, max_align);
5487 align = MAX (align, inner);
5491 /* If sum of pointer + int, restrict our maximum alignment to that
5492 imposed by the integer. If not, we can't do any better than
5494 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5497 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5502 exp = TREE_OPERAND (exp, 0);
5506 /* See what we are pointing at and look at its alignment. */
5507 exp = TREE_OPERAND (exp, 0);
5508 if (TREE_CODE (exp) == FUNCTION_DECL)
5509 align = MAX (align, FUNCTION_BOUNDARY);
5510 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5511 align = MAX (align, DECL_ALIGN (exp));
5512 #ifdef CONSTANT_ALIGNMENT
5513 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5514 align = CONSTANT_ALIGNMENT (exp, align);
5516 return MIN (align, max_align);
5524 /* Return the tree node and offset if a given argument corresponds to
5525 a string constant. */
5528 string_constant (arg, ptr_offset)
5534 if (TREE_CODE (arg) == ADDR_EXPR
5535 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5537 *ptr_offset = integer_zero_node;
5538 return TREE_OPERAND (arg, 0);
5540 else if (TREE_CODE (arg) == PLUS_EXPR)
5542 tree arg0 = TREE_OPERAND (arg, 0);
5543 tree arg1 = TREE_OPERAND (arg, 1);
5548 if (TREE_CODE (arg0) == ADDR_EXPR
5549 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5552 return TREE_OPERAND (arg0, 0);
5554 else if (TREE_CODE (arg1) == ADDR_EXPR
5555 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5558 return TREE_OPERAND (arg1, 0);
5565 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5566 way, because it could contain a zero byte in the middle.
5567 TREE_STRING_LENGTH is the size of the character array, not the string.
5569 Unfortunately, string_constant can't access the values of const char
5570 arrays with initializers, so neither can we do so here. */
5580 src = string_constant (src, &offset_node);
5583 max = TREE_STRING_LENGTH (src);
5584 ptr = TREE_STRING_POINTER (src);
5585 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5587 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5588 compute the offset to the following null if we don't know where to
5589 start searching for it. */
5591 for (i = 0; i < max; i++)
5594 /* We don't know the starting offset, but we do know that the string
5595 has no internal zero bytes. We can assume that the offset falls
5596 within the bounds of the string; otherwise, the programmer deserves
5597 what he gets. Subtract the offset from the length of the string,
5599 /* This would perhaps not be valid if we were dealing with named
5600 arrays in addition to literal string constants. */
5601 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5604 /* We have a known offset into the string. Start searching there for
5605 a null character. */
5606 if (offset_node == 0)
5610 /* Did we get a long long offset? If so, punt. */
5611 if (TREE_INT_CST_HIGH (offset_node) != 0)
5613 offset = TREE_INT_CST_LOW (offset_node);
5615 /* If the offset is known to be out of bounds, warn, and call strlen at
5617 if (offset < 0 || offset > max)
5619 warning ("offset outside bounds of constant string");
5622 /* Use strlen to search for the first zero byte. Since any strings
5623 constructed with build_string will have nulls appended, we win even
5624 if we get handed something like (char[4])"abcd".
5626 Since OFFSET is our starting index into the string, no further
5627 calculation is needed. */
5628 return size_int (strlen (ptr + offset));
5631 /* Expand an expression EXP that calls a built-in function,
5632 with result going to TARGET if that's convenient
5633 (and in mode MODE if that's convenient).
5634 SUBTARGET may be used as the target for computing one of EXP's operands.
5635 IGNORE is nonzero if the value is to be ignored. */
5638 expand_builtin (exp, target, subtarget, mode, ignore)
5642 enum machine_mode mode;
5645 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5646 tree arglist = TREE_OPERAND (exp, 1);
5649 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5650 optab builtin_optab;
5652 switch (DECL_FUNCTION_CODE (fndecl))
5657 /* build_function_call changes these into ABS_EXPR. */
5662 case BUILT_IN_FSQRT:
5663 /* If not optimizing, call the library function. */
5668 /* Arg could be wrong type if user redeclared this fcn wrong. */
5669 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5670 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5672 /* Stabilize and compute the argument. */
5673 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5674 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5676 exp = copy_node (exp);
5677 arglist = copy_node (arglist);
5678 TREE_OPERAND (exp, 1) = arglist;
5679 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5681 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5683 /* Make a suitable register to place result in. */
5684 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5689 switch (DECL_FUNCTION_CODE (fndecl))
5692 builtin_optab = sin_optab; break;
5694 builtin_optab = cos_optab; break;
5695 case BUILT_IN_FSQRT:
5696 builtin_optab = sqrt_optab; break;
5701 /* Compute into TARGET.
5702 Set TARGET to wherever the result comes back. */
5703 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5704 builtin_optab, op0, target, 0);
5706 /* If we were unable to expand via the builtin, stop the
5707 sequence (without outputting the insns) and break, causing
5708 a call the the library function. */
5715 /* Check the results by default. But if flag_fast_math is turned on,
5716 then assume sqrt will always be called with valid arguments. */
5718 if (! flag_fast_math)
5720 /* Don't define the builtin FP instructions
5721 if your machine is not IEEE. */
5722 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5725 lab1 = gen_label_rtx ();
5727 /* Test the result; if it is NaN, set errno=EDOM because
5728 the argument was not in the domain. */
5729 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5730 emit_jump_insn (gen_beq (lab1));
5734 #ifdef GEN_ERRNO_RTX
5735 rtx errno_rtx = GEN_ERRNO_RTX;
5738 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5741 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5744 /* We can't set errno=EDOM directly; let the library call do it.
5745 Pop the arguments right away in case the call gets deleted. */
5747 expand_call (exp, target, 0);
5754 /* Output the entire sequence. */
5755 insns = get_insns ();
5761 /* __builtin_apply_args returns block of memory allocated on
5762 the stack into which is stored the arg pointer, structure
5763 value address, static chain, and all the registers that might
5764 possibly be used in performing a function call. The code is
5765 moved to the start of the function so the incoming values are
5767 case BUILT_IN_APPLY_ARGS:
5768 /* Don't do __builtin_apply_args more than once in a function.
5769 Save the result of the first call and reuse it. */
5770 if (apply_args_value != 0)
5771 return apply_args_value;
5773 /* When this function is called, it means that registers must be
5774 saved on entry to this function. So we migrate the
5775 call to the first insn of this function. */
5780 temp = expand_builtin_apply_args ();
5784 apply_args_value = temp;
5786 /* Put the sequence after the NOTE that starts the function.
5787 If this is inside a SEQUENCE, make the outer-level insn
5788 chain current, so the code is placed at the start of the
5790 push_topmost_sequence ();
5791 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5792 pop_topmost_sequence ();
5796 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5797 FUNCTION with a copy of the parameters described by
5798 ARGUMENTS, and ARGSIZE. It returns a block of memory
5799 allocated on the stack into which is stored all the registers
5800 that might possibly be used for returning the result of a
5801 function. ARGUMENTS is the value returned by
5802 __builtin_apply_args. ARGSIZE is the number of bytes of
5803 arguments that must be copied. ??? How should this value be
5804 computed? We'll also need a safe worst case value for varargs
5806 case BUILT_IN_APPLY:
5808 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5809 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5810 || TREE_CHAIN (arglist) == 0
5811 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5812 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5813 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5821 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5822 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5824 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5827 /* __builtin_return (RESULT) causes the function to return the
5828 value described by RESULT. RESULT is address of the block of
5829 memory returned by __builtin_apply. */
5830 case BUILT_IN_RETURN:
5832 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5833 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5834 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5835 NULL_RTX, VOIDmode, 0));
5838 case BUILT_IN_SAVEREGS:
5839 /* Don't do __builtin_saveregs more than once in a function.
5840 Save the result of the first call and reuse it. */
5841 if (saveregs_value != 0)
5842 return saveregs_value;
5844 /* When this function is called, it means that registers must be
5845 saved on entry to this function. So we migrate the
5846 call to the first insn of this function. */
5849 rtx valreg, saved_valreg;
5851 /* Now really call the function. `expand_call' does not call
5852 expand_builtin, so there is no danger of infinite recursion here. */
5855 #ifdef EXPAND_BUILTIN_SAVEREGS
5856 /* Do whatever the machine needs done in this case. */
5857 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5859 /* The register where the function returns its value
5860 is likely to have something else in it, such as an argument.
5861 So preserve that register around the call. */
5862 if (value_mode != VOIDmode)
5864 valreg = hard_libcall_value (value_mode);
5865 saved_valreg = gen_reg_rtx (value_mode);
5866 emit_move_insn (saved_valreg, valreg);
5869 /* Generate the call, putting the value in a pseudo. */
5870 temp = expand_call (exp, target, ignore);
5872 if (value_mode != VOIDmode)
5873 emit_move_insn (valreg, saved_valreg);
5879 saveregs_value = temp;
5881 /* Put the sequence after the NOTE that starts the function.
5882 If this is inside a SEQUENCE, make the outer-level insn
5883 chain current, so the code is placed at the start of the
5885 push_topmost_sequence ();
5886 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5887 pop_topmost_sequence ();
5891 /* __builtin_args_info (N) returns word N of the arg space info
5892 for the current function. The number and meanings of words
5893 is controlled by the definition of CUMULATIVE_ARGS. */
5894 case BUILT_IN_ARGS_INFO:
5896 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5898 int *word_ptr = (int *) ¤t_function_args_info;
5899 tree type, elts, result;
5901 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5902 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5903 __FILE__, __LINE__);
5907 tree arg = TREE_VALUE (arglist);
5908 if (TREE_CODE (arg) != INTEGER_CST)
5909 error ("argument of `__builtin_args_info' must be constant");
5912 int wordnum = TREE_INT_CST_LOW (arg);
5914 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5915 error ("argument of `__builtin_args_info' out of range");
5917 return GEN_INT (word_ptr[wordnum]);
5921 error ("missing argument in `__builtin_args_info'");
5926 for (i = 0; i < nwords; i++)
5927 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5929 type = build_array_type (integer_type_node,
5930 build_index_type (build_int_2 (nwords, 0)));
5931 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5932 TREE_CONSTANT (result) = 1;
5933 TREE_STATIC (result) = 1;
5934 result = build (INDIRECT_REF, build_pointer_type (type), result);
5935 TREE_CONSTANT (result) = 1;
5936 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5940 /* Return the address of the first anonymous stack arg. */
5941 case BUILT_IN_NEXT_ARG:
5943 tree fntype = TREE_TYPE (current_function_decl);
5944 if (!(TYPE_ARG_TYPES (fntype) != 0
5945 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5946 != void_type_node)))
5948 error ("`va_start' used in function with fixed args");
5953 return expand_binop (Pmode, add_optab,
5954 current_function_internal_arg_pointer,
5955 current_function_arg_offset_rtx,
5956 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5958 case BUILT_IN_CLASSIFY_TYPE:
5961 tree type = TREE_TYPE (TREE_VALUE (arglist));
5962 enum tree_code code = TREE_CODE (type);
5963 if (code == VOID_TYPE)
5964 return GEN_INT (void_type_class);
5965 if (code == INTEGER_TYPE)
5966 return GEN_INT (integer_type_class);
5967 if (code == CHAR_TYPE)
5968 return GEN_INT (char_type_class);
5969 if (code == ENUMERAL_TYPE)
5970 return GEN_INT (enumeral_type_class);
5971 if (code == BOOLEAN_TYPE)
5972 return GEN_INT (boolean_type_class);
5973 if (code == POINTER_TYPE)
5974 return GEN_INT (pointer_type_class);
5975 if (code == REFERENCE_TYPE)
5976 return GEN_INT (reference_type_class);
5977 if (code == OFFSET_TYPE)
5978 return GEN_INT (offset_type_class);
5979 if (code == REAL_TYPE)
5980 return GEN_INT (real_type_class);
5981 if (code == COMPLEX_TYPE)
5982 return GEN_INT (complex_type_class);
5983 if (code == FUNCTION_TYPE)
5984 return GEN_INT (function_type_class);
5985 if (code == METHOD_TYPE)
5986 return GEN_INT (method_type_class);
5987 if (code == RECORD_TYPE)
5988 return GEN_INT (record_type_class);
5989 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
5990 return GEN_INT (union_type_class);
5991 if (code == ARRAY_TYPE)
5992 return GEN_INT (array_type_class);
5993 if (code == STRING_TYPE)
5994 return GEN_INT (string_type_class);
5995 if (code == SET_TYPE)
5996 return GEN_INT (set_type_class);
5997 if (code == FILE_TYPE)
5998 return GEN_INT (file_type_class);
5999 if (code == LANG_TYPE)
6000 return GEN_INT (lang_type_class);
6002 return GEN_INT (no_type_class);
6004 case BUILT_IN_CONSTANT_P:
6008 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6009 ? const1_rtx : const0_rtx);
6011 case BUILT_IN_FRAME_ADDRESS:
6012 /* The argument must be a nonnegative integer constant.
6013 It counts the number of frames to scan up the stack.
6014 The value is the address of that frame. */
6015 case BUILT_IN_RETURN_ADDRESS:
6016 /* The argument must be a nonnegative integer constant.
6017 It counts the number of frames to scan up the stack.
6018 The value is the return address saved in that frame. */
6020 /* Warning about missing arg was already issued. */
6022 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6024 error ("invalid arg to `__builtin_return_address'");
6027 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6029 error ("invalid arg to `__builtin_return_address'");
6034 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6035 rtx tem = frame_pointer_rtx;
6038 /* Some machines need special handling before we can access arbitrary
6039 frames. For example, on the sparc, we must first flush all
6040 register windows to the stack. */
6041 #ifdef SETUP_FRAME_ADDRESSES
6042 SETUP_FRAME_ADDRESSES ();
6045 /* On the sparc, the return address is not in the frame, it is
6046 in a register. There is no way to access it off of the current
6047 frame pointer, but it can be accessed off the previous frame
6048 pointer by reading the value from the register window save
6050 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6051 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6055 /* Scan back COUNT frames to the specified frame. */
6056 for (i = 0; i < count; i++)
6058 /* Assume the dynamic chain pointer is in the word that
6059 the frame address points to, unless otherwise specified. */
6060 #ifdef DYNAMIC_CHAIN_ADDRESS
6061 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6063 tem = memory_address (Pmode, tem);
6064 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6067 /* For __builtin_frame_address, return what we've got. */
6068 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6071 /* For __builtin_return_address,
6072 Get the return address from that frame. */
6073 #ifdef RETURN_ADDR_RTX
6074 return RETURN_ADDR_RTX (count, tem);
6076 tem = memory_address (Pmode,
6077 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6078 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6082 case BUILT_IN_ALLOCA:
6084 /* Arg could be non-integer if user redeclared this fcn wrong. */
6085 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6087 current_function_calls_alloca = 1;
6088 /* Compute the argument. */
6089 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6091 /* Allocate the desired space. */
6092 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6094 /* Record the new stack level for nonlocal gotos. */
6095 if (nonlocal_goto_handler_slot != 0)
6096 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6100 /* If not optimizing, call the library function. */
6105 /* Arg could be non-integer if user redeclared this fcn wrong. */
6106 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6109 /* Compute the argument. */
6110 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6111 /* Compute ffs, into TARGET if possible.
6112 Set TARGET to wherever the result comes back. */
6113 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6114 ffs_optab, op0, target, 1);
6119 case BUILT_IN_STRLEN:
6120 /* If not optimizing, call the library function. */
6125 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6126 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6130 tree src = TREE_VALUE (arglist);
6131 tree len = c_strlen (src);
6134 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6136 rtx result, src_rtx, char_rtx;
6137 enum machine_mode insn_mode = value_mode, char_mode;
6138 enum insn_code icode;
6140 /* If the length is known, just return it. */
6142 return expand_expr (len, target, mode, 0);
6144 /* If SRC is not a pointer type, don't do this operation inline. */
6148 /* Call a function if we can't compute strlen in the right mode. */
6150 while (insn_mode != VOIDmode)
6152 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6153 if (icode != CODE_FOR_nothing)
6156 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6158 if (insn_mode == VOIDmode)
6161 /* Make a place to write the result of the instruction. */
6164 && GET_CODE (result) == REG
6165 && GET_MODE (result) == insn_mode
6166 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6167 result = gen_reg_rtx (insn_mode);
6169 /* Make sure the operands are acceptable to the predicates. */
6171 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6172 result = gen_reg_rtx (insn_mode);
6174 src_rtx = memory_address (BLKmode,
6175 expand_expr (src, NULL_RTX, Pmode,
6177 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6178 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6180 char_rtx = const0_rtx;
6181 char_mode = insn_operand_mode[(int)icode][2];
6182 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6183 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6185 emit_insn (GEN_FCN (icode) (result,
6186 gen_rtx (MEM, BLKmode, src_rtx),
6187 char_rtx, GEN_INT (align)));
6189 /* Return the value in the proper mode for this function. */
6190 if (GET_MODE (result) == value_mode)
6192 else if (target != 0)
6194 convert_move (target, result, 0);
6198 return convert_to_mode (value_mode, result, 0);
6201 case BUILT_IN_STRCPY:
6202 /* If not optimizing, call the library function. */
6207 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6208 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6209 || TREE_CHAIN (arglist) == 0
6210 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6214 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6219 len = size_binop (PLUS_EXPR, len, integer_one_node);
6221 chainon (arglist, build_tree_list (NULL_TREE, len));
6225 case BUILT_IN_MEMCPY:
6226 /* If not optimizing, call the library function. */
6231 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6232 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6233 || TREE_CHAIN (arglist) == 0
6234 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6235 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6236 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6240 tree dest = TREE_VALUE (arglist);
6241 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6242 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6245 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6247 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6248 rtx dest_rtx, dest_mem, src_mem;
6250 /* If either SRC or DEST is not a pointer type, don't do
6251 this operation in-line. */
6252 if (src_align == 0 || dest_align == 0)
6254 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6255 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6259 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6260 dest_mem = gen_rtx (MEM, BLKmode,
6261 memory_address (BLKmode, dest_rtx));
6262 src_mem = gen_rtx (MEM, BLKmode,
6263 memory_address (BLKmode,
6264 expand_expr (src, NULL_RTX,
6268 /* Copy word part most expediently. */
6269 emit_block_move (dest_mem, src_mem,
6270 expand_expr (len, NULL_RTX, VOIDmode, 0),
6271 MIN (src_align, dest_align));
6275 /* These comparison functions need an instruction that returns an actual
6276 index. An ordinary compare that just sets the condition codes
6278 #ifdef HAVE_cmpstrsi
6279 case BUILT_IN_STRCMP:
6280 /* If not optimizing, call the library function. */
6285 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6286 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6287 || TREE_CHAIN (arglist) == 0
6288 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6290 else if (!HAVE_cmpstrsi)
6293 tree arg1 = TREE_VALUE (arglist);
6294 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6298 len = c_strlen (arg1);
6300 len = size_binop (PLUS_EXPR, integer_one_node, len);
6301 len2 = c_strlen (arg2);
6303 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6305 /* If we don't have a constant length for the first, use the length
6306 of the second, if we know it. We don't require a constant for
6307 this case; some cost analysis could be done if both are available
6308 but neither is constant. For now, assume they're equally cheap.
6310 If both strings have constant lengths, use the smaller. This
6311 could arise if optimization results in strcpy being called with
6312 two fixed strings, or if the code was machine-generated. We should
6313 add some code to the `memcmp' handler below to deal with such
6314 situations, someday. */
6315 if (!len || TREE_CODE (len) != INTEGER_CST)
6322 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6324 if (tree_int_cst_lt (len2, len))
6328 chainon (arglist, build_tree_list (NULL_TREE, len));
6332 case BUILT_IN_MEMCMP:
6333 /* If not optimizing, call the library function. */
6338 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6339 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6340 || TREE_CHAIN (arglist) == 0
6341 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6342 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6343 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6345 else if (!HAVE_cmpstrsi)
6348 tree arg1 = TREE_VALUE (arglist);
6349 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6350 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6354 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6356 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6357 enum machine_mode insn_mode
6358 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6360 /* If we don't have POINTER_TYPE, call the function. */
6361 if (arg1_align == 0 || arg2_align == 0)
6363 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6364 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6368 /* Make a place to write the result of the instruction. */
6371 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6372 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6373 result = gen_reg_rtx (insn_mode);
6375 emit_insn (gen_cmpstrsi (result,
6376 gen_rtx (MEM, BLKmode,
6377 expand_expr (arg1, NULL_RTX, Pmode,
6379 gen_rtx (MEM, BLKmode,
6380 expand_expr (arg2, NULL_RTX, Pmode,
6382 expand_expr (len, NULL_RTX, VOIDmode, 0),
6383 GEN_INT (MIN (arg1_align, arg2_align))));
6385 /* Return the value in the proper mode for this function. */
6386 mode = TYPE_MODE (TREE_TYPE (exp));
6387 if (GET_MODE (result) == mode)
6389 else if (target != 0)
6391 convert_move (target, result, 0);
6395 return convert_to_mode (mode, result, 0);
6398 case BUILT_IN_STRCMP:
6399 case BUILT_IN_MEMCMP:
6403 default: /* just do library call, if unknown builtin */
6404 error ("built-in function `%s' not currently supported",
6405 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6408 /* The switch statement above can drop through to cause the function
6409 to be called normally. */
6411 return expand_call (exp, target, ignore);
6414 /* Built-in functions to perform an untyped call and return. */
6416 /* For each register that may be used for calling a function, this
6417 gives a mode used to copy the register's value. VOIDmode indicates
6418 the register is not used for calling a function. If the machine
6419 has register windows, this gives only the outbound registers.
6420 INCOMING_REGNO gives the corresponding inbound register. */
6421 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6423 /* For each register that may be used for returning values, this gives
6424 a mode used to copy the register's value. VOIDmode indicates the
6425 register is not used for returning values. If the machine has
6426 register windows, this gives only the outbound registers.
6427 INCOMING_REGNO gives the corresponding inbound register. */
6428 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6430 /* Return the size required for the block returned by __builtin_apply_args,
6431 and initialize apply_args_mode. */
6435 static int size = -1;
6437 enum machine_mode mode;
6439 /* The values computed by this function never change. */
6442 /* The first value is the incoming arg-pointer. */
6443 size = GET_MODE_SIZE (Pmode);
6445 /* The second value is the structure value address unless this is
6446 passed as an "invisible" first argument. */
6447 if (struct_value_rtx)
6448 size += GET_MODE_SIZE (Pmode);
6450 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6451 if (FUNCTION_ARG_REGNO_P (regno))
6453 /* Search for the proper mode for copying this register's
6454 value. I'm not sure this is right, but it works so far. */
6455 enum machine_mode best_mode = VOIDmode;
6457 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6459 mode = GET_MODE_WIDER_MODE (mode))
6460 if (HARD_REGNO_MODE_OK (regno, mode)
6461 && HARD_REGNO_NREGS (regno, mode) == 1)
6464 if (best_mode == VOIDmode)
6465 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6467 mode = GET_MODE_WIDER_MODE (mode))
6468 if (HARD_REGNO_MODE_OK (regno, mode)
6469 && (mov_optab->handlers[(int) mode].insn_code
6470 != CODE_FOR_nothing))
6474 if (mode == VOIDmode)
6477 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6478 if (size % align != 0)
6479 size = CEIL (size, align) * align;
6480 size += GET_MODE_SIZE (mode);
6481 apply_args_mode[regno] = mode;
6484 apply_args_mode[regno] = VOIDmode;
6489 /* Return the size required for the block returned by __builtin_apply,
6490 and initialize apply_result_mode. */
6492 apply_result_size ()
6494 static int size = -1;
6496 enum machine_mode mode;
6498 /* The values computed by this function never change. */
6503 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6504 if (FUNCTION_VALUE_REGNO_P (regno))
6506 /* Search for the proper mode for copying this register's
6507 value. I'm not sure this is right, but it works so far. */
6508 enum machine_mode best_mode = VOIDmode;
6510 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6512 mode = GET_MODE_WIDER_MODE (mode))
6513 if (HARD_REGNO_MODE_OK (regno, mode))
6516 if (best_mode == VOIDmode)
6517 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6519 mode = GET_MODE_WIDER_MODE (mode))
6520 if (HARD_REGNO_MODE_OK (regno, mode)
6521 && (mov_optab->handlers[(int) mode].insn_code
6522 != CODE_FOR_nothing))
6526 if (mode == VOIDmode)
6529 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6530 if (size % align != 0)
6531 size = CEIL (size, align) * align;
6532 size += GET_MODE_SIZE (mode);
6533 apply_result_mode[regno] = mode;
6536 apply_result_mode[regno] = VOIDmode;
6538 /* Allow targets that use untyped_call and untyped_return to override
6539 the size so that machine-specific information can be stored here. */
6540 #ifdef APPLY_RESULT_SIZE
6541 size = APPLY_RESULT_SIZE;
6547 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6548 /* Create a vector describing the result block RESULT. If SAVEP is true,
6549 the result block is used to save the values; otherwise it is used to
6550 restore the values. */
6552 result_vector (savep, result)
6556 int regno, size, align, nelts;
6557 enum machine_mode mode;
6559 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6562 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6563 if ((mode = apply_result_mode[regno]) != VOIDmode)
6565 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6566 if (size % align != 0)
6567 size = CEIL (size, align) * align;
6568 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6569 mem = change_address (result, mode,
6570 plus_constant (XEXP (result, 0), size));
6571 savevec[nelts++] = (savep
6572 ? gen_rtx (SET, VOIDmode, mem, reg)
6573 : gen_rtx (SET, VOIDmode, reg, mem));
6574 size += GET_MODE_SIZE (mode);
6576 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6578 #endif /* HAVE_untyped_call or HAVE_untyped_return */
6581 /* Save the state required to perform an untyped call with the same
6582 arguments as were passed to the current function. */
6584 expand_builtin_apply_args ()
6587 int size, align, regno;
6588 enum machine_mode mode;
6590 /* Create a block where the arg-pointer, structure value address,
6591 and argument registers can be saved. */
6592 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6594 /* Walk past the arg-pointer and structure value address. */
6595 size = GET_MODE_SIZE (Pmode);
6596 if (struct_value_rtx)
6597 size += GET_MODE_SIZE (Pmode);
6599 /* Save each register used in calling a function to the block. */
6600 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6601 if ((mode = apply_args_mode[regno]) != VOIDmode)
6603 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6604 if (size % align != 0)
6605 size = CEIL (size, align) * align;
6606 emit_move_insn (change_address (registers, mode,
6607 plus_constant (XEXP (registers, 0),
6609 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6610 size += GET_MODE_SIZE (mode);
6613 /* Save the arg pointer to the block. */
6614 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6615 copy_to_reg (virtual_incoming_args_rtx));
6616 size = GET_MODE_SIZE (Pmode);
6618 /* Save the structure value address unless this is passed as an
6619 "invisible" first argument. */
6620 if (struct_value_incoming_rtx)
6622 emit_move_insn (change_address (registers, Pmode,
6623 plus_constant (XEXP (registers, 0),
6625 copy_to_reg (struct_value_incoming_rtx));
6626 size += GET_MODE_SIZE (Pmode);
6629 /* Return the address of the block. */
6630 return copy_addr_to_reg (XEXP (registers, 0));
6633 /* Perform an untyped call and save the state required to perform an
6634 untyped return of whatever value was returned by the given function. */
6636 expand_builtin_apply (function, arguments, argsize)
6637 rtx function, arguments, argsize;
6639 int size, align, regno;
6640 enum machine_mode mode;
6641 rtx incoming_args, result, reg, dest, call_insn;
6642 rtx old_stack_level = 0;
6645 /* Create a block where the return registers can be saved. */
6646 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6648 /* ??? The argsize value should be adjusted here. */
6650 /* Fetch the arg pointer from the ARGUMENTS block. */
6651 incoming_args = gen_reg_rtx (Pmode);
6652 emit_move_insn (incoming_args,
6653 gen_rtx (MEM, Pmode, arguments));
6654 #ifndef STACK_GROWS_DOWNWARD
6655 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6656 incoming_args, 0, OPTAB_LIB_WIDEN);
6659 /* Perform postincrements before actually calling the function. */
6662 /* Push a new argument block and copy the arguments. */
6663 do_pending_stack_adjust ();
6664 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6666 /* Push a block of memory onto the stack to store the memory arguments.
6667 Save the address in a register, and copy the memory arguments. ??? I
6668 haven't figured out how the calling convention macros effect this,
6669 but it's likely that the source and/or destination addresses in
6670 the block copy will need updating in machine specific ways. */
6671 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6672 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6673 gen_rtx (MEM, BLKmode, incoming_args),
6675 PARM_BOUNDARY / BITS_PER_UNIT);
6677 /* Refer to the argument block. */
6679 arguments = gen_rtx (MEM, BLKmode, arguments);
6681 /* Walk past the arg-pointer and structure value address. */
6682 size = GET_MODE_SIZE (Pmode);
6683 if (struct_value_rtx)
6684 size += GET_MODE_SIZE (Pmode);
6686 /* Restore each of the registers previously saved. Make USE insns
6687 for each of these registers for use in making the call. */
6688 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6689 if ((mode = apply_args_mode[regno]) != VOIDmode)
6691 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6692 if (size % align != 0)
6693 size = CEIL (size, align) * align;
6694 reg = gen_rtx (REG, mode, regno);
6695 emit_move_insn (reg,
6696 change_address (arguments, mode,
6697 plus_constant (XEXP (arguments, 0),
6700 push_to_sequence (use_insns);
6701 emit_insn (gen_rtx (USE, VOIDmode, reg));
6702 use_insns = get_insns ();
6704 size += GET_MODE_SIZE (mode);
6707 /* Restore the structure value address unless this is passed as an
6708 "invisible" first argument. */
6709 size = GET_MODE_SIZE (Pmode);
6710 if (struct_value_rtx)
6712 rtx value = gen_reg_rtx (Pmode);
6713 emit_move_insn (value,
6714 change_address (arguments, Pmode,
6715 plus_constant (XEXP (arguments, 0),
6717 emit_move_insn (struct_value_rtx, value);
6718 if (GET_CODE (struct_value_rtx) == REG)
6720 push_to_sequence (use_insns);
6721 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6722 use_insns = get_insns ();
6725 size += GET_MODE_SIZE (Pmode);
6728 /* All arguments and registers used for the call are set up by now! */
6729 function = prepare_call_address (function, NULL_TREE, &use_insns);
6731 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
6732 and we don't want to load it into a register as an optimization,
6733 because prepare_call_address already did it if it should be done. */
6734 if (GET_CODE (function) != SYMBOL_REF)
6735 function = memory_address (FUNCTION_MODE, function);
6737 /* Generate the actual call instruction and save the return value. */
6738 #ifdef HAVE_untyped_call
6739 if (HAVE_untyped_call)
6740 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6741 result, result_vector (1, result)));
6744 #ifdef HAVE_call_value
6745 if (HAVE_call_value)
6749 /* Locate the unique return register. It is not possible to
6750 express a call that sets more than one return register using
6751 call_value; use untyped_call for that. In fact, untyped_call
6752 only needs to save the return registers in the given block. */
6753 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6754 if ((mode = apply_result_mode[regno]) != VOIDmode)
6757 abort (); /* HAVE_untyped_call required. */
6758 valreg = gen_rtx (REG, mode, regno);
6761 emit_call_insn (gen_call_value (valreg,
6762 gen_rtx (MEM, FUNCTION_MODE, function),
6763 const0_rtx, NULL_RTX, const0_rtx));
6765 emit_move_insn (change_address (result, GET_MODE (valreg),
6773 /* Find the CALL insn we just emitted and write the USE insns before it. */
6774 for (call_insn = get_last_insn ();
6775 call_insn && GET_CODE (call_insn) != CALL_INSN;
6776 call_insn = PREV_INSN (call_insn))
6782 /* Put the USE insns before the CALL. */
6783 emit_insns_before (use_insns, call_insn);
6785 /* Restore the stack. */
6786 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6788 /* Return the address of the result block. */
6789 return copy_addr_to_reg (XEXP (result, 0));
6792 /* Perform an untyped return. */
6794 expand_builtin_return (result)
6797 int size, align, regno;
6798 enum machine_mode mode;
6802 apply_result_size ();
6803 result = gen_rtx (MEM, BLKmode, result);
6805 #ifdef HAVE_untyped_return
6806 if (HAVE_untyped_return)
6808 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6814 /* Restore the return value and note that each value is used. */
6816 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6817 if ((mode = apply_result_mode[regno]) != VOIDmode)
6819 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6820 if (size % align != 0)
6821 size = CEIL (size, align) * align;
6822 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6823 emit_move_insn (reg,
6824 change_address (result, mode,
6825 plus_constant (XEXP (result, 0),
6828 push_to_sequence (use_insns);
6829 emit_insn (gen_rtx (USE, VOIDmode, reg));
6830 use_insns = get_insns ();
6832 size += GET_MODE_SIZE (mode);
6835 /* Put the USE insns before the return. */
6836 emit_insns (use_insns);
6838 /* Return whatever values was restored by jumping directly to the end
6840 expand_null_return ();
6843 /* Expand code for a post- or pre- increment or decrement
6844 and return the RTX for the result.
6845 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6848 expand_increment (exp, post)
6852 register rtx op0, op1;
6853 register rtx temp, value;
6854 register tree incremented = TREE_OPERAND (exp, 0);
6855 optab this_optab = add_optab;
6857 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6858 int op0_is_copy = 0;
6859 int single_insn = 0;
6861 /* Stabilize any component ref that might need to be
6862 evaluated more than once below. */
6864 || TREE_CODE (incremented) == BIT_FIELD_REF
6865 || (TREE_CODE (incremented) == COMPONENT_REF
6866 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6867 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6868 incremented = stabilize_reference (incremented);
6870 /* Compute the operands as RTX.
6871 Note whether OP0 is the actual lvalue or a copy of it:
6872 I believe it is a copy iff it is a register or subreg
6873 and insns were generated in computing it. */
6875 temp = get_last_insn ();
6876 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6878 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6879 in place but intead must do sign- or zero-extension during assignment,
6880 so we copy it into a new register and let the code below use it as
6883 Note that we can safely modify this SUBREG since it is know not to be
6884 shared (it was made by the expand_expr call above). */
6886 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6887 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6889 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6890 && temp != get_last_insn ());
6891 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6893 /* Decide whether incrementing or decrementing. */
6894 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6895 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6896 this_optab = sub_optab;
6898 /* For a preincrement, see if we can do this with a single instruction. */
6901 icode = (int) this_optab->handlers[(int) mode].insn_code;
6902 if (icode != (int) CODE_FOR_nothing
6903 /* Make sure that OP0 is valid for operands 0 and 1
6904 of the insn we want to queue. */
6905 && (*insn_operand_predicate[icode][0]) (op0, mode)
6906 && (*insn_operand_predicate[icode][1]) (op0, mode)
6907 && (*insn_operand_predicate[icode][2]) (op1, mode))
6911 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6912 then we cannot just increment OP0. We must therefore contrive to
6913 increment the original value. Then, for postincrement, we can return
6914 OP0 since it is a copy of the old value. For preincrement, expand here
6915 unless we can do it with a single insn. */
6916 if (op0_is_copy || (!post && !single_insn))
6918 /* This is the easiest way to increment the value wherever it is.
6919 Problems with multiple evaluation of INCREMENTED are prevented
6920 because either (1) it is a component_ref or preincrement,
6921 in which case it was stabilized above, or (2) it is an array_ref
6922 with constant index in an array in a register, which is
6923 safe to reevaluate. */
6924 tree newexp = build ((this_optab == add_optab
6925 ? PLUS_EXPR : MINUS_EXPR),
6928 TREE_OPERAND (exp, 1));
6929 temp = expand_assignment (incremented, newexp, ! post, 0);
6930 return post ? op0 : temp;
6933 /* Convert decrement by a constant into a negative increment. */
6934 if (this_optab == sub_optab
6935 && GET_CODE (op1) == CONST_INT)
6937 op1 = GEN_INT (- INTVAL (op1));
6938 this_optab = add_optab;
6943 /* We have a true reference to the value in OP0.
6944 If there is an insn to add or subtract in this mode, queue it. */
6946 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6947 op0 = stabilize (op0);
6950 icode = (int) this_optab->handlers[(int) mode].insn_code;
6951 if (icode != (int) CODE_FOR_nothing
6952 /* Make sure that OP0 is valid for operands 0 and 1
6953 of the insn we want to queue. */
6954 && (*insn_operand_predicate[icode][0]) (op0, mode)
6955 && (*insn_operand_predicate[icode][1]) (op0, mode))
6957 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6958 op1 = force_reg (mode, op1);
6960 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6964 /* Preincrement, or we can't increment with one simple insn. */
6966 /* Save a copy of the value before inc or dec, to return it later. */
6967 temp = value = copy_to_reg (op0);
6969 /* Arrange to return the incremented value. */
6970 /* Copy the rtx because expand_binop will protect from the queue,
6971 and the results of that would be invalid for us to return
6972 if our caller does emit_queue before using our result. */
6973 temp = copy_rtx (value = op0);
6975 /* Increment however we can. */
6976 op1 = expand_binop (mode, this_optab, value, op1, op0,
6977 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6978 /* Make sure the value is stored into OP0. */
6980 emit_move_insn (op0, op1);
6985 /* Expand all function calls contained within EXP, innermost ones first.
6986 But don't look within expressions that have sequence points.
6987 For each CALL_EXPR, record the rtx for its value
6988 in the CALL_EXPR_RTL field. */
6991 preexpand_calls (exp)
6994 register int nops, i;
6995 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6997 if (! do_preexpand_calls)
7000 /* Only expressions and references can contain calls. */
7002 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
7005 switch (TREE_CODE (exp))
7008 /* Do nothing if already expanded. */
7009 if (CALL_EXPR_RTL (exp) != 0)
7012 /* Do nothing to built-in functions. */
7013 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
7014 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
7015 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7016 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
7021 case TRUTH_ANDIF_EXPR:
7022 case TRUTH_ORIF_EXPR:
7023 /* If we find one of these, then we can be sure
7024 the adjust will be done for it (since it makes jumps).
7025 Do it now, so that if this is inside an argument
7026 of a function, we don't get the stack adjustment
7027 after some other args have already been pushed. */
7028 do_pending_stack_adjust ();
7033 case WITH_CLEANUP_EXPR:
7037 if (SAVE_EXPR_RTL (exp) != 0)
7041 nops = tree_code_length[(int) TREE_CODE (exp)];
7042 for (i = 0; i < nops; i++)
7043 if (TREE_OPERAND (exp, i) != 0)
7045 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
7046 if (type == 'e' || type == '<' || type == '1' || type == '2'
7048 preexpand_calls (TREE_OPERAND (exp, i));
7052 /* At the start of a function, record that we have no previously-pushed
7053 arguments waiting to be popped. */
7056 init_pending_stack_adjust ()
7058 pending_stack_adjust = 0;
7061 /* When exiting from function, if safe, clear out any pending stack adjust
7062 so the adjustment won't get done. */
7065 clear_pending_stack_adjust ()
7067 #ifdef EXIT_IGNORE_STACK
7068 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
7069 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
7070 && ! flag_inline_functions)
7071 pending_stack_adjust = 0;
7075 /* Pop any previously-pushed arguments that have not been popped yet. */
7078 do_pending_stack_adjust ()
7080 if (inhibit_defer_pop == 0)
7082 if (pending_stack_adjust != 0)
7083 adjust_stack (GEN_INT (pending_stack_adjust));
7084 pending_stack_adjust = 0;
7088 /* Expand all cleanups up to OLD_CLEANUPS.
7089 Needed here, and also for language-dependent calls. */
7092 expand_cleanups_to (old_cleanups)
7095 while (cleanups_this_call != old_cleanups)
7097 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
7098 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
7102 /* Expand conditional expressions. */
7104 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
7105 LABEL is an rtx of code CODE_LABEL, in this function and all the
7109 jumpifnot (exp, label)
7113 do_jump (exp, label, NULL_RTX);
7116 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7123 do_jump (exp, NULL_RTX, label);
7126 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7127 the result is zero, or IF_TRUE_LABEL if the result is one.
7128 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7129 meaning fall through in that case.
7131 do_jump always does any pending stack adjust except when it does not
7132 actually perform a jump. An example where there is no jump
7133 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7135 This function is responsible for optimizing cases such as
7136 &&, || and comparison operators in EXP. */
7139 do_jump (exp, if_false_label, if_true_label)
7141 rtx if_false_label, if_true_label;
7143 register enum tree_code code = TREE_CODE (exp);
7144 /* Some cases need to create a label to jump to
7145 in order to properly fall through.
7146 These cases set DROP_THROUGH_LABEL nonzero. */
7147 rtx drop_through_label = 0;
7161 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7167 /* This is not true with #pragma weak */
7169 /* The address of something can never be zero. */
7171 emit_jump (if_true_label);
7176 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7177 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7178 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7181 /* If we are narrowing the operand, we have to do the compare in the
7183 if ((TYPE_PRECISION (TREE_TYPE (exp))
7184 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7186 case NON_LVALUE_EXPR:
7187 case REFERENCE_EXPR:
7192 /* These cannot change zero->non-zero or vice versa. */
7193 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7197 /* This is never less insns than evaluating the PLUS_EXPR followed by
7198 a test and can be longer if the test is eliminated. */
7200 /* Reduce to minus. */
7201 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7202 TREE_OPERAND (exp, 0),
7203 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7204 TREE_OPERAND (exp, 1))));
7205 /* Process as MINUS. */
7209 /* Non-zero iff operands of minus differ. */
7210 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7211 TREE_OPERAND (exp, 0),
7212 TREE_OPERAND (exp, 1)),
7217 /* If we are AND'ing with a small constant, do this comparison in the
7218 smallest type that fits. If the machine doesn't have comparisons
7219 that small, it will be converted back to the wider comparison.
7220 This helps if we are testing the sign bit of a narrower object.
7221 combine can't do this for us because it can't know whether a
7222 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7224 if (! SLOW_BYTE_ACCESS
7225 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7226 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7227 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7228 && (type = type_for_size (i + 1, 1)) != 0
7229 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7230 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7231 != CODE_FOR_nothing))
7233 do_jump (convert (type, exp), if_false_label, if_true_label);
7238 case TRUTH_NOT_EXPR:
7239 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7242 case TRUTH_ANDIF_EXPR:
7243 if (if_false_label == 0)
7244 if_false_label = drop_through_label = gen_label_rtx ();
7245 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7246 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7249 case TRUTH_ORIF_EXPR:
7250 if (if_true_label == 0)
7251 if_true_label = drop_through_label = gen_label_rtx ();
7252 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7253 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7257 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7260 do_pending_stack_adjust ();
7261 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7268 int bitsize, bitpos, unsignedp;
7269 enum machine_mode mode;
7274 /* Get description of this reference. We don't actually care
7275 about the underlying object here. */
7276 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7277 &mode, &unsignedp, &volatilep);
7279 type = type_for_size (bitsize, unsignedp);
7280 if (! SLOW_BYTE_ACCESS
7281 && type != 0 && bitsize >= 0
7282 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7283 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7284 != CODE_FOR_nothing))
7286 do_jump (convert (type, exp), if_false_label, if_true_label);
7293 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7294 if (integer_onep (TREE_OPERAND (exp, 1))
7295 && integer_zerop (TREE_OPERAND (exp, 2)))
7296 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7298 else if (integer_zerop (TREE_OPERAND (exp, 1))
7299 && integer_onep (TREE_OPERAND (exp, 2)))
7300 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7304 register rtx label1 = gen_label_rtx ();
7305 drop_through_label = gen_label_rtx ();
7306 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7307 /* Now the THEN-expression. */
7308 do_jump (TREE_OPERAND (exp, 1),
7309 if_false_label ? if_false_label : drop_through_label,
7310 if_true_label ? if_true_label : drop_through_label);
7311 /* In case the do_jump just above never jumps. */
7312 do_pending_stack_adjust ();
7313 emit_label (label1);
7314 /* Now the ELSE-expression. */
7315 do_jump (TREE_OPERAND (exp, 2),
7316 if_false_label ? if_false_label : drop_through_label,
7317 if_true_label ? if_true_label : drop_through_label);
7322 if (integer_zerop (TREE_OPERAND (exp, 1)))
7323 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7324 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7327 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7328 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7330 comparison = compare (exp, EQ, EQ);
7334 if (integer_zerop (TREE_OPERAND (exp, 1)))
7335 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7336 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7339 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7340 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7342 comparison = compare (exp, NE, NE);
7346 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7348 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7349 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7351 comparison = compare (exp, LT, LTU);
7355 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7357 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7358 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7360 comparison = compare (exp, LE, LEU);
7364 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7366 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7367 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7369 comparison = compare (exp, GT, GTU);
7373 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7375 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7376 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7378 comparison = compare (exp, GE, GEU);
7383 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7385 /* This is not needed any more and causes poor code since it causes
7386 comparisons and tests from non-SI objects to have different code
7388 /* Copy to register to avoid generating bad insns by cse
7389 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7390 if (!cse_not_expected && GET_CODE (temp) == MEM)
7391 temp = copy_to_reg (temp);
7393 do_pending_stack_adjust ();
7394 if (GET_CODE (temp) == CONST_INT)
7395 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7396 else if (GET_CODE (temp) == LABEL_REF)
7397 comparison = const_true_rtx;
7398 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7399 && !can_compare_p (GET_MODE (temp)))
7400 /* Note swapping the labels gives us not-equal. */
7401 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7402 else if (GET_MODE (temp) != VOIDmode)
7403 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7404 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7405 GET_MODE (temp), NULL_RTX, 0);
7410 /* Do any postincrements in the expression that was tested. */
7413 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7414 straight into a conditional jump instruction as the jump condition.
7415 Otherwise, all the work has been done already. */
7417 if (comparison == const_true_rtx)
7420 emit_jump (if_true_label);
7422 else if (comparison == const0_rtx)
7425 emit_jump (if_false_label);
7427 else if (comparison)
7428 do_jump_for_compare (comparison, if_false_label, if_true_label);
7432 if (drop_through_label)
7434 /* If do_jump produces code that might be jumped around,
7435 do any stack adjusts from that code, before the place
7436 where control merges in. */
7437 do_pending_stack_adjust ();
7438 emit_label (drop_through_label);
7442 /* Given a comparison expression EXP for values too wide to be compared
7443 with one insn, test the comparison and jump to the appropriate label.
7444 The code of EXP is ignored; we always test GT if SWAP is 0,
7445 and LT if SWAP is 1. */
7448 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7451 rtx if_false_label, if_true_label;
7453 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7454 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7455 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7456 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7457 rtx drop_through_label = 0;
7458 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7461 if (! if_true_label || ! if_false_label)
7462 drop_through_label = gen_label_rtx ();
7463 if (! if_true_label)
7464 if_true_label = drop_through_label;
7465 if (! if_false_label)
7466 if_false_label = drop_through_label;
7468 /* Compare a word at a time, high order first. */
7469 for (i = 0; i < nwords; i++)
7472 rtx op0_word, op1_word;
7474 if (WORDS_BIG_ENDIAN)
7476 op0_word = operand_subword_force (op0, i, mode);
7477 op1_word = operand_subword_force (op1, i, mode);
7481 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7482 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7485 /* All but high-order word must be compared as unsigned. */
7486 comp = compare_from_rtx (op0_word, op1_word,
7487 (unsignedp || i > 0) ? GTU : GT,
7488 unsignedp, word_mode, NULL_RTX, 0);
7489 if (comp == const_true_rtx)
7490 emit_jump (if_true_label);
7491 else if (comp != const0_rtx)
7492 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7494 /* Consider lower words only if these are equal. */
7495 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7497 if (comp == const_true_rtx)
7498 emit_jump (if_false_label);
7499 else if (comp != const0_rtx)
7500 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7504 emit_jump (if_false_label);
7505 if (drop_through_label)
7506 emit_label (drop_through_label);
7509 /* Compare OP0 with OP1, word at a time, in mode MODE.
7510 UNSIGNEDP says to do unsigned comparison.
7511 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7514 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7515 enum machine_mode mode;
7518 rtx if_false_label, if_true_label;
7520 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7521 rtx drop_through_label = 0;
7524 if (! if_true_label || ! if_false_label)
7525 drop_through_label = gen_label_rtx ();
7526 if (! if_true_label)
7527 if_true_label = drop_through_label;
7528 if (! if_false_label)
7529 if_false_label = drop_through_label;
7531 /* Compare a word at a time, high order first. */
7532 for (i = 0; i < nwords; i++)
7535 rtx op0_word, op1_word;
7537 if (WORDS_BIG_ENDIAN)
7539 op0_word = operand_subword_force (op0, i, mode);
7540 op1_word = operand_subword_force (op1, i, mode);
7544 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7545 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7548 /* All but high-order word must be compared as unsigned. */
7549 comp = compare_from_rtx (op0_word, op1_word,
7550 (unsignedp || i > 0) ? GTU : GT,
7551 unsignedp, word_mode, NULL_RTX, 0);
7552 if (comp == const_true_rtx)
7553 emit_jump (if_true_label);
7554 else if (comp != const0_rtx)
7555 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7557 /* Consider lower words only if these are equal. */
7558 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7560 if (comp == const_true_rtx)
7561 emit_jump (if_false_label);
7562 else if (comp != const0_rtx)
7563 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7567 emit_jump (if_false_label);
7568 if (drop_through_label)
7569 emit_label (drop_through_label);
7572 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7573 with one insn, test the comparison and jump to the appropriate label. */
7576 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7578 rtx if_false_label, if_true_label;
7580 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7581 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7582 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7583 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7585 rtx drop_through_label = 0;
7587 if (! if_false_label)
7588 drop_through_label = if_false_label = gen_label_rtx ();
7590 for (i = 0; i < nwords; i++)
7592 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7593 operand_subword_force (op1, i, mode),
7594 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7595 word_mode, NULL_RTX, 0);
7596 if (comp == const_true_rtx)
7597 emit_jump (if_false_label);
7598 else if (comp != const0_rtx)
7599 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7603 emit_jump (if_true_label);
7604 if (drop_through_label)
7605 emit_label (drop_through_label);
7608 /* Jump according to whether OP0 is 0.
7609 We assume that OP0 has an integer mode that is too wide
7610 for the available compare insns. */
7613 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7615 rtx if_false_label, if_true_label;
7617 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7619 rtx drop_through_label = 0;
7621 if (! if_false_label)
7622 drop_through_label = if_false_label = gen_label_rtx ();
7624 for (i = 0; i < nwords; i++)
7626 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7628 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7629 if (comp == const_true_rtx)
7630 emit_jump (if_false_label);
7631 else if (comp != const0_rtx)
7632 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7636 emit_jump (if_true_label);
7637 if (drop_through_label)
7638 emit_label (drop_through_label);
7641 /* Given a comparison expression in rtl form, output conditional branches to
7642 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7645 do_jump_for_compare (comparison, if_false_label, if_true_label)
7646 rtx comparison, if_false_label, if_true_label;
7650 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7651 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7656 emit_jump (if_false_label);
7658 else if (if_false_label)
7661 rtx prev = PREV_INSN (get_last_insn ());
7664 /* Output the branch with the opposite condition. Then try to invert
7665 what is generated. If more than one insn is a branch, or if the
7666 branch is not the last insn written, abort. If we can't invert
7667 the branch, emit make a true label, redirect this jump to that,
7668 emit a jump to the false label and define the true label. */
7670 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7671 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7675 /* Here we get the insn before what was just emitted.
7676 On some machines, emitting the branch can discard
7677 the previous compare insn and emit a replacement. */
7679 /* If there's only one preceding insn... */
7680 insn = get_insns ();
7682 insn = NEXT_INSN (prev);
7684 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7685 if (GET_CODE (insn) == JUMP_INSN)
7692 if (branch != get_last_insn ())
7695 if (! invert_jump (branch, if_false_label))
7697 if_true_label = gen_label_rtx ();
7698 redirect_jump (branch, if_true_label);
7699 emit_jump (if_false_label);
7700 emit_label (if_true_label);
7705 /* Generate code for a comparison expression EXP
7706 (including code to compute the values to be compared)
7707 and set (CC0) according to the result.
7708 SIGNED_CODE should be the rtx operation for this comparison for
7709 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7711 We force a stack adjustment unless there are currently
7712 things pushed on the stack that aren't yet used. */
7715 compare (exp, signed_code, unsigned_code)
7717 enum rtx_code signed_code, unsigned_code;
7720 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7722 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7723 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7724 register enum machine_mode mode = TYPE_MODE (type);
7725 int unsignedp = TREE_UNSIGNED (type);
7726 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7728 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7730 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7731 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7734 /* Like compare but expects the values to compare as two rtx's.
7735 The decision as to signed or unsigned comparison must be made by the caller.
7737 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7740 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7741 size of MODE should be used. */
7744 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7745 register rtx op0, op1;
7748 enum machine_mode mode;
7754 /* If one operand is constant, make it the second one. Only do this
7755 if the other operand is not constant as well. */
7757 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7758 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7763 code = swap_condition (code);
7768 op0 = force_not_mem (op0);
7769 op1 = force_not_mem (op1);
7772 do_pending_stack_adjust ();
7774 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7775 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7779 /* There's no need to do this now that combine.c can eliminate lots of
7780 sign extensions. This can be less efficient in certain cases on other
7783 /* If this is a signed equality comparison, we can do it as an
7784 unsigned comparison since zero-extension is cheaper than sign
7785 extension and comparisons with zero are done as unsigned. This is
7786 the case even on machines that can do fast sign extension, since
7787 zero-extension is easier to combine with other operations than
7788 sign-extension is. If we are comparing against a constant, we must
7789 convert it to what it would look like unsigned. */
7790 if ((code == EQ || code == NE) && ! unsignedp
7791 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7793 if (GET_CODE (op1) == CONST_INT
7794 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7795 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7800 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7802 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7805 /* Generate code to calculate EXP using a store-flag instruction
7806 and return an rtx for the result. EXP is either a comparison
7807 or a TRUTH_NOT_EXPR whose operand is a comparison.
7809 If TARGET is nonzero, store the result there if convenient.
7811 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7814 Return zero if there is no suitable set-flag instruction
7815 available on this machine.
7817 Once expand_expr has been called on the arguments of the comparison,
7818 we are committed to doing the store flag, since it is not safe to
7819 re-evaluate the expression. We emit the store-flag insn by calling
7820 emit_store_flag, but only expand the arguments if we have a reason
7821 to believe that emit_store_flag will be successful. If we think that
7822 it will, but it isn't, we have to simulate the store-flag with a
7823 set/jump/set sequence. */
7826 do_store_flag (exp, target, mode, only_cheap)
7829 enum machine_mode mode;
7833 tree arg0, arg1, type;
7835 enum machine_mode operand_mode;
7839 enum insn_code icode;
7840 rtx subtarget = target;
7841 rtx result, label, pattern, jump_pat;
7843 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7844 result at the end. We can't simply invert the test since it would
7845 have already been inverted if it were valid. This case occurs for
7846 some floating-point comparisons. */
7848 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7849 invert = 1, exp = TREE_OPERAND (exp, 0);
7851 arg0 = TREE_OPERAND (exp, 0);
7852 arg1 = TREE_OPERAND (exp, 1);
7853 type = TREE_TYPE (arg0);
7854 operand_mode = TYPE_MODE (type);
7855 unsignedp = TREE_UNSIGNED (type);
7857 /* We won't bother with BLKmode store-flag operations because it would mean
7858 passing a lot of information to emit_store_flag. */
7859 if (operand_mode == BLKmode)
7865 /* Get the rtx comparison code to use. We know that EXP is a comparison
7866 operation of some type. Some comparisons against 1 and -1 can be
7867 converted to comparisons with zero. Do so here so that the tests
7868 below will be aware that we have a comparison with zero. These
7869 tests will not catch constants in the first operand, but constants
7870 are rarely passed as the first operand. */
7872 switch (TREE_CODE (exp))
7881 if (integer_onep (arg1))
7882 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7884 code = unsignedp ? LTU : LT;
7887 if (! unsignedp && integer_all_onesp (arg1))
7888 arg1 = integer_zero_node, code = LT;
7890 code = unsignedp ? LEU : LE;
7893 if (! unsignedp && integer_all_onesp (arg1))
7894 arg1 = integer_zero_node, code = GE;
7896 code = unsignedp ? GTU : GT;
7899 if (integer_onep (arg1))
7900 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7902 code = unsignedp ? GEU : GE;
7908 /* Put a constant second. */
7909 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7911 tem = arg0; arg0 = arg1; arg1 = tem;
7912 code = swap_condition (code);
7915 /* If this is an equality or inequality test of a single bit, we can
7916 do this by shifting the bit being tested to the low-order bit and
7917 masking the result with the constant 1. If the condition was EQ,
7918 we xor it with 1. This does not require an scc insn and is faster
7919 than an scc insn even if we have it. */
7921 if ((code == NE || code == EQ)
7922 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7923 && integer_pow2p (TREE_OPERAND (arg0, 1))
7924 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7926 tree inner = TREE_OPERAND (arg0, 0);
7927 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7928 NULL_RTX, VOIDmode, 0)));
7931 /* If INNER is a right shift of a constant and it plus BITNUM does
7932 not overflow, adjust BITNUM and INNER. */
7934 if (TREE_CODE (inner) == RSHIFT_EXPR
7935 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7936 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7937 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
7938 < TYPE_PRECISION (type)))
7940 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7941 inner = TREE_OPERAND (inner, 0);
7944 /* If we are going to be able to omit the AND below, we must do our
7945 operations as unsigned. If we must use the AND, we have a choice.
7946 Normally unsigned is faster, but for some machines signed is. */
7947 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
7948 #ifdef BYTE_LOADS_SIGN_EXTEND
7955 if (subtarget == 0 || GET_CODE (subtarget) != REG
7956 || GET_MODE (subtarget) != operand_mode
7957 || ! safe_from_p (subtarget, inner))
7960 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
7963 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7964 size_int (bitnum), target, ops_unsignedp);
7966 if (GET_MODE (op0) != mode)
7967 op0 = convert_to_mode (mode, op0, ops_unsignedp);
7969 if ((code == EQ && ! invert) || (code == NE && invert))
7970 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
7971 ops_unsignedp, OPTAB_LIB_WIDEN);
7973 /* Put the AND last so it can combine with more things. */
7974 if (bitnum != TYPE_PRECISION (type) - 1)
7975 op0 = expand_and (op0, const1_rtx, target);
7980 /* Now see if we are likely to be able to do this. Return if not. */
7981 if (! can_compare_p (operand_mode))
7983 icode = setcc_gen_code[(int) code];
7984 if (icode == CODE_FOR_nothing
7985 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7987 /* We can only do this if it is one of the special cases that
7988 can be handled without an scc insn. */
7989 if ((code == LT && integer_zerop (arg1))
7990 || (! only_cheap && code == GE && integer_zerop (arg1)))
7992 else if (BRANCH_COST >= 0
7993 && ! only_cheap && (code == NE || code == EQ)
7994 && TREE_CODE (type) != REAL_TYPE
7995 && ((abs_optab->handlers[(int) operand_mode].insn_code
7996 != CODE_FOR_nothing)
7997 || (ffs_optab->handlers[(int) operand_mode].insn_code
7998 != CODE_FOR_nothing)))
8004 preexpand_calls (exp);
8005 if (subtarget == 0 || GET_CODE (subtarget) != REG
8006 || GET_MODE (subtarget) != operand_mode
8007 || ! safe_from_p (subtarget, arg1))
8010 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
8011 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
8014 target = gen_reg_rtx (mode);
8016 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
8017 because, if the emit_store_flag does anything it will succeed and
8018 OP0 and OP1 will not be used subsequently. */
8020 result = emit_store_flag (target, code,
8021 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
8022 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
8023 operand_mode, unsignedp, 1);
8028 result = expand_binop (mode, xor_optab, result, const1_rtx,
8029 result, 0, OPTAB_LIB_WIDEN);
8033 /* If this failed, we have to do this with set/compare/jump/set code. */
8034 if (target == 0 || GET_CODE (target) != REG
8035 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8036 target = gen_reg_rtx (GET_MODE (target));
8038 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8039 result = compare_from_rtx (op0, op1, code, unsignedp,
8040 operand_mode, NULL_RTX, 0);
8041 if (GET_CODE (result) == CONST_INT)
8042 return (((result == const0_rtx && ! invert)
8043 || (result != const0_rtx && invert))
8044 ? const0_rtx : const1_rtx);
8046 label = gen_label_rtx ();
8047 if (bcc_gen_fctn[(int) code] == 0)
8050 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8051 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8057 /* Generate a tablejump instruction (used for switch statements). */
8059 #ifdef HAVE_tablejump
8061 /* INDEX is the value being switched on, with the lowest value
8062 in the table already subtracted.
8063 MODE is its expected mode (needed if INDEX is constant).
8064 RANGE is the length of the jump table.
8065 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8067 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8068 index value is out of range. */
8071 do_tablejump (index, mode, range, table_label, default_label)
8072 rtx index, range, table_label, default_label;
8073 enum machine_mode mode;
8075 register rtx temp, vector;
8077 /* Do an unsigned comparison (in the proper mode) between the index
8078 expression and the value which represents the length of the range.
8079 Since we just finished subtracting the lower bound of the range
8080 from the index expression, this comparison allows us to simultaneously
8081 check that the original index expression value is both greater than
8082 or equal to the minimum value of the range and less than or equal to
8083 the maximum value of the range. */
8085 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
8086 emit_jump_insn (gen_bltu (default_label));
8088 /* If index is in range, it must fit in Pmode.
8089 Convert to Pmode so we can index with it. */
8091 index = convert_to_mode (Pmode, index, 1);
8093 /* If flag_force_addr were to affect this address
8094 it could interfere with the tricky assumptions made
8095 about addresses that contain label-refs,
8096 which may be valid only very near the tablejump itself. */
8097 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8098 GET_MODE_SIZE, because this indicates how large insns are. The other
8099 uses should all be Pmode, because they are addresses. This code
8100 could fail if addresses and insns are not the same size. */
8101 index = memory_address_noforce
8103 gen_rtx (PLUS, Pmode,
8104 gen_rtx (MULT, Pmode, index,
8105 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8106 gen_rtx (LABEL_REF, Pmode, table_label)));
8107 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8108 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
8109 RTX_UNCHANGING_P (vector) = 1;
8110 convert_move (temp, vector, 0);
8112 emit_jump_insn (gen_tablejump (temp, table_label));
8114 #ifndef CASE_VECTOR_PC_RELATIVE
8115 /* If we are generating PIC code or if the table is PC-relative, the
8116 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8122 #endif /* HAVE_tablejump */