/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
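
/* For example, assuming 4-byte words, CEIL (GET_MODE_SIZE (DImode),
   UNITS_PER_WORD) is CEIL (8, 4) == 2, while CEIL (9, 4) == 3:
   adding y - 1 before dividing makes the integer division round up.  */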
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED      /* If it's last to first */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
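
/* For instance, assuming STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8,
   STACK_BYTES is 8; the stack pointer stays 8-byte aligned.  */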
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx (REG, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }
/* This is run at the start of compiling a function.  */

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  apply_args_value = 0;
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  apply_args_value = 0;
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

restore_expr_status (p)

  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     register rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
protect_from_queue (x, modify)

  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  /* A special hack for read access to (MEM (QUEUED ...))
     to facilitate use of autoincrement.
     Make a copy of the contents of the memory location
     rather than a copy of the address, but not
     if the value is of mode BLKmode.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
    {
      register rtx y = XEXP (x, 0);
      XEXP (x, 0) = QUEUED_VAR (y);
      if (QUEUED_INSN (y))
	{
	  register rtx temp = gen_reg_rtx (GET_MODE (x));
	  emit_insn_before (gen_move_insn (temp, x),
			    QUEUED_INSN (y));
	  return temp;
	}
      return x;
    }

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */
  if (code == MEM)
    XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
  else if (code == PLUS || code == MULT)
    {
      XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
    }

  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain) != 0)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
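
/* A sketch of the whole protocol, with an assumed increment body
   (illustrative, not code from elsewhere in this file):

     rtx q = enqueue_insn (var, gen_rtx (SET, VOIDmode, var,
					 plus_constant (var, 4)));
     ... q may be used where the pre-increment value of VAR is
	 needed, after passing it through protect_from_queue ...
     emit_queue ();

   emit_queue flushes the chain, emitting the queued SET and recording
   the emitted insn in QUEUED_INSN (q).  */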
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);
  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
#ifdef HAVE_extendqfhf2
  if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendqfsf2
  if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendqfdf2
  if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendqfxf2
  if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
    {
      emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendqftf2
  if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
    {
      emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
      return;
    }
#endif

#ifdef HAVE_extendhfsf2
  if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendhfdf2
  if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendhfxf2
  if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
    {
      emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendhftf2
  if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
    {
      emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
      return;
    }
#endif

#ifdef HAVE_extendsfdf2
  if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendsfxf2
  if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
    {
      emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendsftf2
  if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
    {
      emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extenddfxf2
  if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
    {
      emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extenddftf2
  if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
    {
      emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
      return;
    }
#endif

#ifdef HAVE_trunchfqf2
  if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncsfqf2
  if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdfqf2
  if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfqf2
  if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfqf2
  if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncsfhf2
  if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdfhf2
  if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfhf2
  if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfhf2
  if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
      return;
    }
#endif
  /* Conversions with no special insn are done with a library call;
     LIBCALL is selected here according to FROM_MODE and TO_MODE.  */

	libcall = extendsfdf2_libfunc;
	libcall = extendsfxf2_libfunc;
	libcall = extendsftf2_libfunc;

	libcall = truncdfsf2_libfunc;
	libcall = extenddfxf2_libfunc;
	libcall = extenddftf2_libfunc;

	libcall = truncxfsf2_libfunc;
	libcall = truncxfdf2_libfunc;

	libcall = trunctfsf2_libfunc;
	libcall = trunctfdf2_libfunc;

  if (libcall == (rtx) 0)
    /* This conversion is not implemented yet.  */
    abort ();

  emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
  emit_move_insn (to, hard_libcall_value (to_mode));
  return;
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}
      /* No special multiword conversion insn; do it by hand.  */
      else
	{
	  rtx insns;
	  rtx lowpart, lowfrom, fill_value;
	  int i;

	  start_sequence ();

	  /* Get a copy of FROM widened to a word, if necessary.  */
	  if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	    lowpart_mode = word_mode;
	  else
	    lowpart_mode = from_mode;

	  lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

	  lowpart = gen_lowpart (lowpart_mode, to);
	  emit_move_insn (lowpart, lowfrom);
	  /* Compute the value to put in each remaining word.  */
	  if (unsignedp)
	    fill_value = const0_rtx;
	  else
	    {
#ifdef HAVE_slt
	      if (HAVE_slt
		  && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
		  && STORE_FLAG_VALUE == -1)
		{
		  emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
				 lowpart_mode, 0, 0);
		  fill_value = gen_reg_rtx (word_mode);
		  emit_insn (gen_slt (fill_value));
		}
	      else
#endif
		{
		  fill_value
		    = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				    size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				    NULL_RTX, 0);
		  fill_value = convert_to_mode (word_mode, fill_value, 1);
		}
	    }

	  /* Fill the remaining words.  */
	  for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	    {
	      int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	      rtx subword = operand_subword (to, index, 1, to_mode);

	      if (fill_value != subword)
		emit_move_insn (subword, fill_value);
	    }
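
	  /* For example, sign-extending the SImode value -5 to DImode on a
	     32-bit little-endian target moves -5 into word 0 and stores the
	     shifted-in sign mask (all ones) into word 1; with UNSIGNEDP
	     nonzero, word 1 would be 0 instead.  */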
	  insns = get_insns ();
	  end_sequence ();

	  emit_no_conflict_block (insns, to, from, NULL_RTX,
				  gen_rtx (equiv_code, to_mode, copy_rtx (from)));
	  return;
	}
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi
      if (HAVE_extendpsisi)
	{
	  emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_extendpsisi */
      abort ();
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);
  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
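
  /* Concretely: assuming 32-bit HOST_WIDE_INT, converting (const_int -1)
     to an unsigned 64-bit integer mode must yield a double-word constant
     with high word 0 and low word all ones; gen_lowpart would instead
     sign-extend and make the high word all ones as well.  */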
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
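
/* Example use, sketched: widen a narrower value NARROW (an SImode rtx,
   assumed) to DImode with sign extension,

     rtx wide = convert_to_mode (DImode, narrow, 0);

   passing 1 instead of 0 for UNSIGNEDP requests zero extension.  */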
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }
  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
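
/* For instance, l == 11 with word (4-byte) alignment and QImode, HImode
   and SImode move patterns available costs 2 SImode moves + 1 HImode
   + 1 QImode, so the result is 4.  */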
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
	       ? gen_rtx (MEM, mode, data->from_addr)
	       : change_address (data->from, mode,
				 plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

emit_block_move (x, y, size, align)

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
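
/* Typical use, sketched (arguments assumed): copy a BLKmode object of
   known size with word alignment,

     emit_block_move (x, y, GEN_INT (16), UNITS_PER_WORD);

   which would here be done by move_by_pieces whenever 16 bytes take
   fewer than MOVE_RATIO move insns.  */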
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;

  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_from_reg (regno, x, nregs)

  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

use_regs (regno, nregs)

  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
/* Mark the instructions since PREV as a libcall block.
   Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn.  */

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

clear_storage (object, size)

  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
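
/* E.g., clear_storage (obj, 16) on a BLKmode OBJECT becomes a 16-byte
   memset (or bzero) library call, while a non-BLKmode OBJECT is simply
   assigned const0_rtx.  */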
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

emit_move_insn (x, y)

  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

emit_move_insn_1 (x, y)

  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
			     (class == MODE_COMPLEX_INT
			      ? MODE_INT : MODE_FLOAT),
			     0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && submode != BLKmode
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_highpart (submode, x)),
		  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_lowpart (submode, x)),
		  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }
  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();
      register int i;

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
push_block (size, extra, below)

  register rtx temp;

  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
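
/* For example, push_block (GEN_INT (32), 4, 1) adjusts the stack by
   36 bytes; on a downward-growing stack the returned address is 4 bytes
   past the new stack bottom, leaving the padding at low addresses.  */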
rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     enum machine_mode mode;

  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);
  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (used)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));
      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
      /* Otherwise make space on the stack and copy the data
	 to the address of that space.  */

      /* Deduct words put into registers from the size we must copy.  */
      if (partial != 0)
	{
	  if (GET_CODE (size) == CONST_INT)
	    size = GEN_INT (INTVAL (size) - used);
	  else
	    size = expand_binop (GET_MODE (size), sub_optab, size,
				 GEN_INT (used), NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
	}

      /* Get the address of the stack space.
	 In this case, we do not deal with EXTRA separately.
	 A single stack adjust will do.  */
      if (args_addr == 0)
	{
	  temp = push_block (size, extra, where_pad == downward);
	  extra = 0;
	}
      else if (GET_CODE (args_so_far) == CONST_INT)
	temp = memory_address (BLKmode,
			       plus_constant (args_addr,
					      skip + INTVAL (args_so_far)));
      else
	temp = memory_address (BLKmode,
			       plus_constant (gen_rtx (PLUS, Pmode,
						       args_addr, args_so_far),
					      skip));

      /* TEMP is the address of the block.  Copy the data there.  */
      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
	      < MOVE_RATIO))
	{
	  move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			  INTVAL (size), align);
	  goto ret;
	}
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
      if (HAVE_movstrqi
	  && GET_CODE (size) == CONST_INT
	  && ((unsigned) INTVAL (size)
	      < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	{
	  rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				  xinner, size, GEN_INT (align));
	  if (pat)
	    {
	      emit_insn (pat);
	      goto ret;
	    }
	}
#endif
#ifdef HAVE_movstrhi
      if (HAVE_movstrhi
	  && GET_CODE (size) == CONST_INT
	  && ((unsigned) INTVAL (size)
	      < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	{
	  rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				  xinner, size, GEN_INT (align));
	  if (pat)
	    {
	      emit_insn (pat);
	      goto ret;
	    }
	}
#endif
#ifdef HAVE_movstrsi
      if (HAVE_movstrsi)
	{
	  rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				  xinner, size, GEN_INT (align));
	  if (pat)
	    {
	      emit_insn (pat);
	      goto ret;
	    }
	}
#endif
#ifdef HAVE_movstrdi
      if (HAVE_movstrdi)
	{
	  rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				  xinner, size, GEN_INT (align));
	  if (pat)
	    {
	      emit_insn (pat);
	      goto ret;
	    }
	}
#endif
#ifndef ACCUMULATE_OUTGOING_ARGS
      /* If the source is referenced relative to the stack pointer,
	 copy it to another register to stabilize it.  We do not need
	 to do this if we know that we won't be changing sp.  */

      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	temp = copy_to_reg (temp);
#endif

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the bcopy-arguments right away.  */
      NO_DEFER_POP;

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif

      OK_DEFER_POP;
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif
      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr, INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }
 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */
expand_assignment (to, from, want_value, suggest_reg)

  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, NULL_RTX, VOIDmode, 0);
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
					    force_reg (Pmode, offset_rtx)));
	}

      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    MEM_VOLATILE_P (to_rtx) = 1;
#if 0 /* This was turned off because, when a field is volatile
	 in an object which is not volatile, the object may be in a register,
	 and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);

      /* If we aren't returning a result, just pass on what expand_expr
	 returned; it was probably const0_rtx.  Otherwise, convert RESULT
	 to the proper mode.  */
      return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
					    TREE_UNSIGNED (TREE_TYPE (to)))
	      : result);
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
    {
      rtx value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
      emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      return to_rtx;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      return to_rtx;
    }
2240 /* In case we are returning the contents of an object which overlaps
2241 the place the value is being stored, use a safe function when copying
2242 a value through a pointer into a structure value return block. */
2243 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2244 && current_function_returns_struct
2245 && !current_function_returns_pcc_struct)
2247 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2248 rtx size = expr_size (from);
2250 #ifdef TARGET_MEM_FUNCTIONS
2251 emit_library_call (memcpy_libfunc, 0,
2252 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2253 XEXP (from_rtx, 0), Pmode,
2254 convert_to_mode (TYPE_MODE (sizetype),
2255 size, TREE_UNSIGNED (sizetype)),
2256 TYPE_MODE (sizetype));
2258 emit_library_call (bcopy_libfunc, 0,
2259 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2260 XEXP (to_rtx, 0), Pmode,
2261 convert_to_mode (TYPE_MODE (sizetype),
2262 size, TREE_UNSIGNED (sizetype)),
2263 TYPE_MODE (sizetype));
2266 preserve_temp_slots (to_rtx);
2271 /* Compute FROM and store the value in the rtx we got. */
2273 result = store_expr (from, to_rtx, want_value);
2274 preserve_temp_slots (result);
2279 /* Generate code for computing expression EXP,
2280 and storing the value into TARGET.
2281 Returns TARGET or an equivalent value.
2282 TARGET may contain a QUEUED rtx.
2284 If SUGGEST_REG is nonzero, copy the value through a register
2285 and return that register, if that is possible.
2287 If the value stored is a constant, we return the constant. */
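/* For orientation (an illustrative sketch, not additional interface):
   the ordinary case of expand_assignment above reduces to

       to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
       result = store_expr (from, to_rtx, want_value);

   so everything said about TARGET here applies to TO_RTX there.  */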
2290 store_expr (exp, target, suggest_reg)
2292 register rtx target;
2296 int dont_return_target = 0;
2298 if (TREE_CODE (exp) == COMPOUND_EXPR)
2300 /* Perform first part of compound expression, then assign from second part.  */
2302 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2304 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2306 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2308 /* For conditional expression, get safe form of the target. Then
2309 test the condition, doing the appropriate assignment on either
2310 side. This avoids the creation of unnecessary temporaries.
2311 For non-BLKmode, it is more efficient not to do this. */
2313 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2316 target = protect_from_queue (target, 1);
2319 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2320 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2322 emit_jump_insn (gen_jump (lab2));
2325 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2331 else if (suggest_reg && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2332 && GET_MODE (target) != BLKmode)
2333 /* If target is in memory and caller wants value in a register instead,
2334 arrange that. Pass TARGET as target for expand_expr so that,
2335 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2336 We know expand_expr will not use the target in that case.
2337 Don't do this if TARGET is volatile because we are supposed
2338 to write it and then read it. */
2340 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2341 GET_MODE (target), 0);
2342 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2343 temp = copy_to_reg (temp);
2344 dont_return_target = 1;
2346 else if (queued_subexp_p (target))
2347 /* If target contains a postincrement, it is not safe
2348 to use as the returned value. It would access the wrong
2349 place by the time the queued increment gets output.
2350 So copy the value through a temporary and use that temp as the result.  */
2353 /* ??? There may be a bug here in the case of a target
2354 that is volatile, but I'm too sleepy today to write anything to handle it.  */
2356 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2358 /* Expand EXP into a new pseudo. */
2359 temp = gen_reg_rtx (GET_MODE (target));
2360 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2363 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2364 dont_return_target = 1;
2366 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2367 /* If this is a scalar in a register that is stored in a wider mode
2368 than the declared mode, compute the result into its declared mode
2369 and then convert to the wider mode.  Our value is the computed expression.  */
2372 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2373 convert_move (SUBREG_REG (target), temp,
2374 SUBREG_PROMOTED_UNSIGNED_P (target));
2379 temp = expand_expr (exp, target, GET_MODE (target), 0);
2380 /* DO return TARGET if it's a specified hardware register.
2381 expand_return relies on this.
2382 DO return TARGET if it's a volatile mem ref; ANSI requires this. */
2383 if (!(target && GET_CODE (target) == REG
2384 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2385 && CONSTANT_P (temp)
2386 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2387 dont_return_target = 1;
2390 /* If value was not generated in the target, store it there.
2391 Convert the value to TARGET's type first if necessary.  */
2393 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2395 target = protect_from_queue (target, 1);
2396 if (GET_MODE (temp) != GET_MODE (target)
2397 && GET_MODE (temp) != VOIDmode)
2399 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2400 if (dont_return_target)
2402 /* In this case, we will return TEMP,
2403 so make sure it has the proper mode.
2404 But don't forget to store the value into TARGET. */
2405 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2406 emit_move_insn (target, temp);
2409 convert_move (target, temp, unsignedp);
2412 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2414 /* Handle copying a string constant into an array.
2415 The string constant may be shorter than the array.
2416 So copy just the string's actual length, and clear the rest. */
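/* For example (illustrative): with `char buf[8] = "abc";' the string
   constant occupies TREE_STRING_LENGTH == 4 bytes counting the
   trailing null, so 4 bytes are block-moved and the remaining 4 are
   cleared; with `char buf[3] = "abc";' the target size 3 is smaller,
   so only those 3 bytes are copied and nothing is cleared.  */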
2419 /* Get the size of the data type of the string,
2420 which is actually the size of the target. */
2421 size = expr_size (exp);
2422 if (GET_CODE (size) == CONST_INT
2423 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2424 emit_block_move (target, temp, size,
2425 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2428 /* Compute the size of the data to copy from the string. */
2430 = size_binop (MIN_EXPR,
2431 size_binop (CEIL_DIV_EXPR,
2432 TYPE_SIZE (TREE_TYPE (exp)),
2433 size_int (BITS_PER_UNIT)),
2435 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2436 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2440 /* Copy that much. */
2441 emit_block_move (target, temp, copy_size_rtx,
2442 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2444 /* Figure out how much is left in TARGET
2445 that we have to clear. */
2446 if (GET_CODE (copy_size_rtx) == CONST_INT)
2448 temp = plus_constant (XEXP (target, 0),
2449 TREE_STRING_LENGTH (exp));
2450 size = plus_constant (size,
2451 - TREE_STRING_LENGTH (exp));
2455 enum machine_mode size_mode = Pmode;
2457 temp = force_reg (Pmode, XEXP (target, 0));
2458 temp = expand_binop (size_mode, add_optab, temp,
2459 copy_size_rtx, NULL_RTX, 0,
2462 size = expand_binop (size_mode, sub_optab, size,
2463 copy_size_rtx, NULL_RTX, 0,
2466 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2467 GET_MODE (size), 0, 0);
2468 label = gen_label_rtx ();
2469 emit_jump_insn (gen_blt (label));
2472 if (size != const0_rtx)
2474 #ifdef TARGET_MEM_FUNCTIONS
2475 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2476 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2478 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2479 temp, Pmode, size, Pmode);
2486 else if (GET_MODE (temp) == BLKmode)
2487 emit_block_move (target, temp, expr_size (exp),
2488 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2490 emit_move_insn (target, temp);
2492 if (dont_return_target)
2497 /* Store the value of constructor EXP into the rtx TARGET.
2498 TARGET is either a REG or a MEM. */
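/* For example (illustrative), expanding `struct { int a, b; } x = { 1, 2 };'
   arrives here with EXP the CONSTRUCTOR for { 1, 2 }; each element is
   handed to store_field below, and a constructor that lists fewer
   elements than the type has fields makes us clear the whole object
   first.  */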
2501 store_constructor (exp, target)
2505 tree type = TREE_TYPE (exp);
2507 /* We know our target cannot conflict, since safe_from_p has been called. */
2509 /* Don't try copying piece by piece into a hard register
2510 since that is vulnerable to being clobbered by EXP.
2511 Instead, construct in a pseudo register and then copy it all. */
2512 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2514 rtx temp = gen_reg_rtx (GET_MODE (target));
2515 store_constructor (exp, temp);
2516 emit_move_insn (target, temp);
2521 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2522 || TREE_CODE (type) == QUAL_UNION_TYPE)
2526 /* Inform later passes that the whole union value is dead. */
2527 if (TREE_CODE (type) == UNION_TYPE
2528 || TREE_CODE (type) == QUAL_UNION_TYPE)
2529 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2531 /* If we are building a static constructor into a register,
2532 set the initial value as zero so we can fold the value into a constant.  */
2534 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2535 emit_move_insn (target, const0_rtx);
2537 /* If the constructor has fewer fields than the structure,
2538 clear the whole structure first. */
2539 else if (list_length (CONSTRUCTOR_ELTS (exp))
2540 != list_length (TYPE_FIELDS (type)))
2541 clear_storage (target, int_size_in_bytes (type));
2543 /* Inform later passes that the old value is dead. */
2544 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2546 /* Store each element of the constructor into
2547 the corresponding field of TARGET. */
2549 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2551 register tree field = TREE_PURPOSE (elt);
2552 register enum machine_mode mode;
2557 /* Just ignore missing fields.
2558 We cleared the whole structure, above,
2559 if any fields are missing. */
2563 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2564 unsignedp = TREE_UNSIGNED (field);
2565 mode = DECL_MODE (field);
2566 if (DECL_BIT_FIELD (field))
2569 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2570 /* ??? This case remains to be written. */
2573 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2575 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2576 /* The alignment of TARGET is
2577 at least what its type requires. */
2579 TYPE_ALIGN (type) / BITS_PER_UNIT,
2580 int_size_in_bytes (type));
2583 else if (TREE_CODE (type) == ARRAY_TYPE)
2587 tree domain = TYPE_DOMAIN (type);
2588 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2589 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2590 tree elttype = TREE_TYPE (type);
2592 /* If the constructor has fewer fields than the structure,
2593 clear the whole structure first.  Similarly if this is a
2594 static constructor of a non-BLKmode object. */
2596 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2597 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2598 clear_storage (target, int_size_in_bytes (type));
2600 /* Inform later passes that the old value is dead. */
2601 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2603 /* Store each element of the constructor into
2604 the corresponding element of TARGET, determined
2605 by counting the elements. */
2606 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2608 elt = TREE_CHAIN (elt), i++)
2610 register enum machine_mode mode;
2615 mode = TYPE_MODE (elttype);
2616 bitsize = GET_MODE_BITSIZE (mode);
2617 unsignedp = TREE_UNSIGNED (elttype);
2619 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2621 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2622 /* The alignment of TARGET is
2623 at least what its type requires. */
2625 TYPE_ALIGN (type) / BITS_PER_UNIT,
2626 int_size_in_bytes (type));
2634 /* Store the value of EXP (an expression tree)
2635 into a subfield of TARGET which has mode MODE and occupies
2636 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2637 If MODE is VOIDmode, it means that we are storing into a bit-field.
2639 If VALUE_MODE is VOIDmode, return nothing in particular.
2640 UNSIGNEDP is not used in this case.
2642 Otherwise, return an rtx for the value stored. This rtx
2643 has mode VALUE_MODE if that is convenient to do.
2644 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2646 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2647 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
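/* As a rough illustration (exact numbers depend on the target's
   layout rules): storing to `s.i' in `struct { char c; int i; } s'
   on a typical 32-bit machine would arrive here with BITSIZE == 32,
   BITPOS == 32 (after padding), MODE == SImode, ALIGN == 4 and
   TOTAL_SIZE == 8.  */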
2650 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2651 unsignedp, align, total_size)
2653 int bitsize, bitpos;
2654 enum machine_mode mode;
2656 enum machine_mode value_mode;
2661 HOST_WIDE_INT width_mask = 0;
2663 if (bitsize < HOST_BITS_PER_WIDE_INT)
2664 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
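/* E.g. BITSIZE == 5 yields WIDTH_MASK == 0x1f; it is used below to
   mask the value handed back to the caller down to the bits that
   were actually stored.  */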
2666 /* If we are storing into an unaligned field of an aligned union that is
2667 in a register, we may have the mode of TARGET being an integer mode but
2668 MODE == BLKmode. In that case, get an aligned object whose size and
2669 alignment are the same as TARGET and store TARGET into it (we can avoid
2670 the store if the field being stored is the entire width of TARGET). Then
2671 call ourselves recursively to store the field into a BLKmode version of
2672 that object. Finally, load from the object into TARGET. This is not
2673 very efficient in general, but should only be slightly more expensive
2674 than the otherwise-required unaligned accesses. Perhaps this can be
2675 cleaned up later. */
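/* A case in point (illustrative): `union { int i; struct { char c[3]; } s; }'
   held in an SImode register.  Storing into the BLKmode member `s'
   cannot address bytes inside the register, so it takes the detour
   through the stack temporary described above.  */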
2678 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2680 rtx object = assign_stack_temp (GET_MODE (target),
2681 GET_MODE_SIZE (GET_MODE (target)), 0);
2682 rtx blk_object = copy_rtx (object);
2684 PUT_MODE (blk_object, BLKmode);
2686 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2687 emit_move_insn (object, target);
2689 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2692 emit_move_insn (target, object);
2697 /* If the structure is in a register or if the component
2698 is a bit field, we cannot use addressing to access it.
2699 Use bit-field techniques or SUBREG to store in it. */
2701 if (mode == VOIDmode
2702 || (mode != BLKmode && ! direct_store[(int) mode])
2703 || GET_CODE (target) == REG
2704 || GET_CODE (target) == SUBREG
2705 /* If the field isn't aligned enough to store as an ordinary memref,
2706 store it as a bit field. */
2707 || (STRICT_ALIGNMENT
2708 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
2709 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
2711 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2712 /* Store the value in the bitfield. */
2713 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2714 if (value_mode != VOIDmode)
2716 /* The caller wants an rtx for the value. */
2717 /* If possible, avoid refetching from the bitfield itself. */
2719 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2722 enum machine_mode tmode;
2725 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2726 tmode = GET_MODE (temp);
2727 if (tmode == VOIDmode)
2729 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2730 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2731 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2733 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2734 NULL_RTX, value_mode, 0, align,
2741 rtx addr = XEXP (target, 0);
2744 /* If a value is wanted, it must be the lhs;
2745 so make the address stable for multiple use. */
2747 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2748 && ! CONSTANT_ADDRESS_P (addr)
2749 /* A frame-pointer reference is already stable. */
2750 && ! (GET_CODE (addr) == PLUS
2751 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2752 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2753 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2754 addr = copy_to_reg (addr);
2756 /* Now build a reference to just the desired component. */
2758 to_rtx = change_address (target, mode,
2759 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2760 MEM_IN_STRUCT_P (to_rtx) = 1;
2762 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2766 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2767 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2768 ARRAY_REFs and find the ultimate containing object, which we return.
2770 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2771 bit position, and *PUNSIGNEDP to the signedness of the field.
2772 If the position of the field is variable, we store a tree
2773 giving the variable offset (in units) in *POFFSET.
2774 This offset is in addition to the bit position.
2775 If the position is not variable, we store 0 in *POFFSET.
2777 If any of the extraction expressions is volatile,
2778 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2780 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2781 is a mode that can be used to access the field.  In that case, *PBITSIZE is redundant.
2784 If the field describes a variable-sized object, *PMODE is set to
2785 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2786 this case, but the address of the object can be found. */
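/* Example (with the common struct layout): for
   `struct { unsigned a : 3, b : 5; } x;' a call on the expression
   `x.b' returns the tree for `x' and sets *PBITSIZE == 5,
   *PBITPOS == 3, *POFFSET == 0, *PUNSIGNEDP == 1, and, `b' being a
   bit-field, *PMODE == VOIDmode.  */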
2789 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2790 punsignedp, pvolatilep)
2795 enum machine_mode *pmode;
2800 enum machine_mode mode = VOIDmode;
2801 tree offset = integer_zero_node;
2803 if (TREE_CODE (exp) == COMPONENT_REF)
2805 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2806 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2807 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2808 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2810 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2812 size_tree = TREE_OPERAND (exp, 1);
2813 *punsignedp = TREE_UNSIGNED (exp);
2817 mode = TYPE_MODE (TREE_TYPE (exp));
2818 *pbitsize = GET_MODE_BITSIZE (mode);
2819 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2824 if (TREE_CODE (size_tree) != INTEGER_CST)
2825 mode = BLKmode, *pbitsize = -1;
2827 *pbitsize = TREE_INT_CST_LOW (size_tree);
2830 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2831 and find the ultimate containing object. */
2837 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2839 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2840 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2841 : TREE_OPERAND (exp, 2));
2843 /* If this field hasn't been filled in yet, don't go
2844 past it. This should only happen when folding expressions
2845 made during type construction. */
2849 if (TREE_CODE (pos) == PLUS_EXPR)
2852 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2854 constant = TREE_OPERAND (pos, 0);
2855 var = TREE_OPERAND (pos, 1);
2857 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2859 constant = TREE_OPERAND (pos, 1);
2860 var = TREE_OPERAND (pos, 0);
2865 *pbitpos += TREE_INT_CST_LOW (constant);
2866 offset = size_binop (PLUS_EXPR, offset,
2867 size_binop (FLOOR_DIV_EXPR, var,
2868 size_int (BITS_PER_UNIT)));
2870 else if (TREE_CODE (pos) == INTEGER_CST)
2871 *pbitpos += TREE_INT_CST_LOW (pos);
2874 /* Assume here that the offset is a multiple of a unit.
2875 If not, there should be an explicitly added constant. */
2876 offset = size_binop (PLUS_EXPR, offset,
2877 size_binop (FLOOR_DIV_EXPR, pos,
2878 size_int (BITS_PER_UNIT)));
2882 else if (TREE_CODE (exp) == ARRAY_REF)
2884 /* This code is based on the code in case ARRAY_REF in expand_expr
2885 below. We assume here that the size of an array element is
2886 always an integral multiple of BITS_PER_UNIT. */
2888 tree index = TREE_OPERAND (exp, 1);
2889 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2891 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2892 tree index_type = TREE_TYPE (index);
2894 if (! integer_zerop (low_bound))
2895 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2897 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2899 index = convert (type_for_size (POINTER_SIZE, 0), index);
2900 index_type = TREE_TYPE (index);
2903 index = fold (build (MULT_EXPR, index_type, index,
2904 TYPE_SIZE (TREE_TYPE (exp))));
2906 if (TREE_CODE (index) == INTEGER_CST
2907 && TREE_INT_CST_HIGH (index) == 0)
2908 *pbitpos += TREE_INT_CST_LOW (index);
2910 offset = size_binop (PLUS_EXPR, offset,
2911 size_binop (FLOOR_DIV_EXPR, index,
2912 size_int (BITS_PER_UNIT)));
2914 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2915 && ! ((TREE_CODE (exp) == NOP_EXPR
2916 || TREE_CODE (exp) == CONVERT_EXPR)
2917 && (TYPE_MODE (TREE_TYPE (exp))
2918 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2921 /* If any reference in the chain is volatile, the effect is volatile. */
2922 if (TREE_THIS_VOLATILE (exp))
2924 exp = TREE_OPERAND (exp, 0);
2927 /* If this was a bit-field, see if there is a mode that allows direct
2928 access in case EXP is in memory. */
2929 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
2931 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2932 if (mode == BLKmode)
2936 if (integer_zerop (offset))
2942 /* We aren't finished fixing the callers to really handle nonzero offset. */
2950 /* Given an rtx VALUE that may contain additions and multiplications,
2951 return an equivalent value that just refers to a register or memory.
2952 This is done by generating instructions to perform the arithmetic
2953 and returning a pseudo-register containing the value.
2955 The returned value may be a REG, SUBREG, MEM or constant. */
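/* Thus, given VALUE == (plus:SI (reg:SI 60) (const_int 4)) -- the
   register number being illustrative -- this emits an add insn and
   returns the pseudo that holds the sum, while a VALUE that is
   already a REG, MEM or constant comes back unchanged.  */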
2958 force_operand (value, target)
2961 register optab binoptab = 0;
2962 /* Use a temporary to force order of execution of calls to `force_operand'.  */
2966 /* Use subtarget as the target for operand 0 of a binary operation. */
2967 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2969 if (GET_CODE (value) == PLUS)
2970 binoptab = add_optab;
2971 else if (GET_CODE (value) == MINUS)
2972 binoptab = sub_optab;
2973 else if (GET_CODE (value) == MULT)
2975 op2 = XEXP (value, 1);
2976 if (!CONSTANT_P (op2)
2977 && !(GET_CODE (op2) == REG && op2 != subtarget))
2979 tmp = force_operand (XEXP (value, 0), subtarget);
2980 return expand_mult (GET_MODE (value), tmp,
2981 force_operand (op2, NULL_RTX),
2987 op2 = XEXP (value, 1);
2988 if (!CONSTANT_P (op2)
2989 && !(GET_CODE (op2) == REG && op2 != subtarget))
2991 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2993 binoptab = add_optab;
2994 op2 = negate_rtx (GET_MODE (value), op2);
2997 /* Check for an addition with OP2 a constant integer and our first
2998 operand a PLUS of a virtual register and something else. In that
2999 case, we want to emit the sum of the virtual register and the
3000 constant first and then add the other value. This allows virtual
3001 register instantiation to simply modify the constant rather than
3002 creating another one around this addition. */
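/* E.g. (plus (plus (reg virtual-stack-vars) (reg 66)) (const_int 4)),
   with register 66 illustrative, is computed as
   (virtual-stack-vars + 4) + (reg 66), so instantiation can later
   fold the first sum into a single frame offset.  */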
3003 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3004 && GET_CODE (XEXP (value, 0)) == PLUS
3005 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3006 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3007 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3009 rtx temp = expand_binop (GET_MODE (value), binoptab,
3010 XEXP (XEXP (value, 0), 0), op2,
3011 subtarget, 0, OPTAB_LIB_WIDEN);
3012 return expand_binop (GET_MODE (value), binoptab, temp,
3013 force_operand (XEXP (XEXP (value, 0), 1), 0),
3014 target, 0, OPTAB_LIB_WIDEN);
3017 tmp = force_operand (XEXP (value, 0), subtarget);
3018 return expand_binop (GET_MODE (value), binoptab, tmp,
3019 force_operand (op2, NULL_RTX),
3020 target, 0, OPTAB_LIB_WIDEN);
3021 /* We give UNSIGNEDP = 0 to expand_binop
3022 because the only operations we are expanding here are signed ones. */
3027 /* Subroutine of expand_expr:
3028 save the non-copied parts (LIST) of an expr (LHS), and return a list
3029 which can restore these values to their previous values,
3030 should something modify their storage. */
3033 save_noncopied_parts (lhs, list)
3040 for (tail = list; tail; tail = TREE_CHAIN (tail))
3041 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3042 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3045 tree part = TREE_VALUE (tail);
3046 tree part_type = TREE_TYPE (part);
3047 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3048 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3049 int_size_in_bytes (part_type), 0);
3050 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3051 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3052 parts = tree_cons (to_be_saved,
3053 build (RTL_EXPR, part_type, NULL_TREE,
3056 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3061 /* Subroutine of expand_expr:
3062 record the non-copied parts (LIST) of an expr (LHS), and return a list
3063 which specifies the initial values of these parts. */
3066 init_noncopied_parts (lhs, list)
3073 for (tail = list; tail; tail = TREE_CHAIN (tail))
3074 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3075 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3078 tree part = TREE_VALUE (tail);
3079 tree part_type = TREE_TYPE (part);
3080 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3081 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3086 /* Subroutine of expand_expr: return nonzero iff there is no way that
3087 EXP can reference X, which is being modified. */
3090 safe_from_p (x, exp)
3100 /* If this is a subreg of a hard register, declare it unsafe;
3101 find the underlying pseudo. */
3102 if (GET_CODE (x) == SUBREG)
3105 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3109 /* If X is a location in the outgoing argument area, it is always safe. */
3110 if (GET_CODE (x) == MEM
3111 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3112 || (GET_CODE (XEXP (x, 0)) == PLUS
3113 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3116 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3119 exp_rtl = DECL_RTL (exp);
3126 if (TREE_CODE (exp) == TREE_LIST)
3127 return ((TREE_VALUE (exp) == 0
3128 || safe_from_p (x, TREE_VALUE (exp)))
3129 && (TREE_CHAIN (exp) == 0
3130 || safe_from_p (x, TREE_CHAIN (exp))));
3135 return safe_from_p (x, TREE_OPERAND (exp, 0));
3139 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3140 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3144 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3145 the expression. If it is set, we conflict iff we are that rtx or
3146 both are in memory. Otherwise, we check all operands of the
3147 expression recursively. */
3149 switch (TREE_CODE (exp))
3152 return (staticp (TREE_OPERAND (exp, 0))
3153 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3156 if (GET_CODE (x) == MEM)
3161 exp_rtl = CALL_EXPR_RTL (exp);
3164 /* Assume that the call will clobber all hard registers and all of memory.  */
3166 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3167 || GET_CODE (x) == MEM)
3174 exp_rtl = RTL_EXPR_RTL (exp);
3176 /* We don't know what this can modify. */
3181 case WITH_CLEANUP_EXPR:
3182 exp_rtl = RTL_EXPR_RTL (exp);
3186 exp_rtl = SAVE_EXPR_RTL (exp);
3190 /* The only operand we look at is operand 1. The rest aren't
3191 part of the expression. */
3192 return safe_from_p (x, TREE_OPERAND (exp, 1));
3194 case METHOD_CALL_EXPR:
3195 /* This takes an rtx argument, but shouldn't appear here. */
3199 /* If we have an rtx, we do not need to scan our operands. */
3203 nops = tree_code_length[(int) TREE_CODE (exp)];
3204 for (i = 0; i < nops; i++)
3205 if (TREE_OPERAND (exp, i) != 0
3206 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3210 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
3214 if (GET_CODE (exp_rtl) == SUBREG)
3216 exp_rtl = SUBREG_REG (exp_rtl);
3217 if (GET_CODE (exp_rtl) == REG
3218 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3222 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3223 are memory and EXP is not readonly. */
3224 return ! (rtx_equal_p (x, exp_rtl)
3225 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3226 && ! TREE_READONLY (exp)));
3229 /* If we reach here, it is safe. */
3233 /* Subroutine of expand_expr: return nonzero iff EXP is an
3234 expression whose type is statically determinable. */
3240 if (TREE_CODE (exp) == PARM_DECL
3241 || TREE_CODE (exp) == VAR_DECL
3242 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3243 || TREE_CODE (exp) == COMPONENT_REF
3244 || TREE_CODE (exp) == ARRAY_REF)
3249 /* expand_expr: generate code for computing expression EXP.
3250 An rtx for the computed value is returned. The value is never null.
3251 In the case of a void EXP, const0_rtx is returned.
3253 The value may be stored in TARGET if TARGET is nonzero.
3254 TARGET is just a suggestion; callers must assume that
3255 the rtx returned may not be the same as TARGET.
3257 If TARGET is CONST0_RTX, it means that the value will be ignored.
3259 If TMODE is not VOIDmode, it suggests generating the
3260 result in mode TMODE. But this is done only when convenient.
3261 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3262 TMODE is just a suggestion; callers must assume that
3263 the rtx returned may not have mode TMODE.
3265 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3266 with a constant address even if that address is not normally legitimate.
3267 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3269 If MODIFIER is EXPAND_SUM then when EXP is an addition
3270 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3271 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3272 products as above, or REG or MEM, or constant.
3273 Ordinarily in such cases we would output mul or add instructions
3274 and then return a pseudo reg containing the sum.
3276 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3277 it also marks a label as absolutely required (it can't be dead).
3278 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3279 This is used for outputting expressions used in initializers. */
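/* To illustrate EXPAND_SUM: asked for the address used in `a[i]',
   this function may return something like
       (plus:SI (reg:SI 70) (mult:SI (reg:SI 71) (const_int 4)))
   (register numbers illustrative) instead of emitting the add and
   mult insns, leaving the caller to force the sum into a register
   only if it must.  */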
3282 expand_expr (exp, target, tmode, modifier)
3285 enum machine_mode tmode;
3286 enum expand_modifier modifier;
3288 register rtx op0, op1, temp;
3289 tree type = TREE_TYPE (exp);
3290 int unsignedp = TREE_UNSIGNED (type);
3291 register enum machine_mode mode = TYPE_MODE (type);
3292 register enum tree_code code = TREE_CODE (exp);
3294 /* Use subtarget as the target for operand 0 of a binary operation. */
3295 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3296 rtx original_target = target;
3297 int ignore = (target == const0_rtx
3298 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3299 || code == CONVERT_EXPR || code == REFERENCE_EXPR)
3300 && TREE_CODE (type) == VOID_TYPE));
3303 /* Don't use hard regs as subtargets, because the combiner
3304 can only handle pseudo regs. */
3305 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3307 /* Avoid subtargets inside loops,
3308 since they hide some invariant expressions. */
3309 if (preserve_subexpressions_p ())
3312 /* If we are going to ignore this result, we need only do something
3313 if there is a side-effect somewhere in the expression. If there
3314 is, short-circuit the most common cases here. */
3318 if (! TREE_SIDE_EFFECTS (exp))
3321 /* Ensure we reference a volatile object even if value is ignored. */
3322 if (TREE_THIS_VOLATILE (exp)
3323 && TREE_CODE (exp) != FUNCTION_DECL
3324 && mode != VOIDmode && mode != BLKmode)
3326 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3327 if (GET_CODE (temp) == MEM)
3328 temp = copy_to_reg (temp);
3332 if (TREE_CODE_CLASS (code) == '1')
3333 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3334 VOIDmode, modifier);
3335 else if (TREE_CODE_CLASS (code) == '2'
3336 || TREE_CODE_CLASS (code) == '<')
3338 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3339 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3342 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3343 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3344 /* If the second operand has no side effects, just evaluate the first.  */
3346 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3347 VOIDmode, modifier);
3349 target = 0, original_target = 0;
3352 /* If we will do cse, generate all results into pseudo registers
3353 since 1) that allows cse to find more things
3354 and 2) otherwise cse could produce an insn the machine cannot support.  */
3357 if (! cse_not_expected && mode != BLKmode && target
3358 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3365 tree function = decl_function_context (exp);
3366 /* Handle using a label in a containing function. */
3367 if (function != current_function_decl && function != 0)
3369 struct function *p = find_function_data (function);
3370 /* Allocate in the memory associated with the function
3371 that the label is in. */
3372 push_obstacks (p->function_obstack,
3373 p->function_maybepermanent_obstack);
3375 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3376 label_rtx (exp), p->forced_labels);
3379 else if (modifier == EXPAND_INITIALIZER)
3380 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3381 label_rtx (exp), forced_labels);
3382 temp = gen_rtx (MEM, FUNCTION_MODE,
3383 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3384 if (function != current_function_decl && function != 0)
3385 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3390 if (DECL_RTL (exp) == 0)
3392 error_with_decl (exp, "prior parameter's size depends on `%s'");
3393 return CONST0_RTX (mode);
3399 if (DECL_RTL (exp) == 0)
3401 /* Ensure variable marked as used even if it doesn't go through
3402 a parser.  If it hasn't been used yet, write out an external definition.  */
3404 if (! TREE_USED (exp))
3406 assemble_external (exp);
3407 TREE_USED (exp) = 1;
3410 /* Handle variables inherited from containing functions. */
3411 context = decl_function_context (exp);
3413 /* We treat inline_function_decl as an alias for the current function
3414 because that is the inline function whose vars, types, etc.
3415 are being merged into the current function.
3416 See expand_inline_function. */
3417 if (context != 0 && context != current_function_decl
3418 && context != inline_function_decl
3419 /* If var is static, we don't need a static chain to access it. */
3420 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3421 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3425 /* Mark as non-local and addressable. */
3426 DECL_NONLOCAL (exp) = 1;
3427 mark_addressable (exp);
3428 if (GET_CODE (DECL_RTL (exp)) != MEM)
3430 addr = XEXP (DECL_RTL (exp), 0);
3431 if (GET_CODE (addr) == MEM)
3432 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3434 addr = fix_lexical_addr (addr, exp);
3435 return change_address (DECL_RTL (exp), mode, addr);
3438 /* This is the case of an array whose size is to be determined
3439 from its initializer, while the initializer is still being parsed.  See expand_decl.  */
3441 if (GET_CODE (DECL_RTL (exp)) == MEM
3442 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3443 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3444 XEXP (DECL_RTL (exp), 0));
3445 if (GET_CODE (DECL_RTL (exp)) == MEM
3446 && modifier != EXPAND_CONST_ADDRESS
3447 && modifier != EXPAND_SUM
3448 && modifier != EXPAND_INITIALIZER)
3450 /* DECL_RTL probably contains a constant address.
3451 On RISC machines where a constant address isn't valid,
3452 make some insns to get that address into a register. */
3453 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3455 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3456 return change_address (DECL_RTL (exp), VOIDmode,
3457 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3460 /* If the mode of DECL_RTL does not match that of the decl, it
3461 must be a promoted value. We return a SUBREG of the wanted mode,
3462 but mark it so that we know that it was already extended. */
3464 if (GET_CODE (DECL_RTL (exp)) == REG
3465 && GET_MODE (DECL_RTL (exp)) != mode)
3467 enum machine_mode decl_mode = DECL_MODE (exp);
3469 /* Get the signedness used for this variable. Ensure we get the
3470 same mode we got when the variable was declared. */
3472 PROMOTE_MODE (decl_mode, unsignedp, type);
3474 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3477 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3478 SUBREG_PROMOTED_VAR_P (temp) = 1;
3479 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3483 return DECL_RTL (exp);
3486 return immed_double_const (TREE_INT_CST_LOW (exp),
3487 TREE_INT_CST_HIGH (exp),
3491 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3494 /* If optimized, generate immediate CONST_DOUBLE
3495 which will be turned into memory by reload if necessary.
3497 We used to force a register so that loop.c could see it. But
3498 this does not allow gen_* patterns to perform optimizations with
3499 the constants. It also produces two insns in cases like "x = 1.0;".
3500 On most machines, floating-point constants are not permitted in
3501 many insns, so we'd end up copying it to a register in any case.
3503 Now, we do the copying in expand_binop, if appropriate. */
3504 return immed_real_const (exp);
3508 if (! TREE_CST_RTL (exp))
3509 output_constant_def (exp);
3511 /* TREE_CST_RTL probably contains a constant address.
3512 On RISC machines where a constant address isn't valid,
3513 make some insns to get that address into a register. */
3514 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3515 && modifier != EXPAND_CONST_ADDRESS
3516 && modifier != EXPAND_INITIALIZER
3517 && modifier != EXPAND_SUM
3518 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3519 return change_address (TREE_CST_RTL (exp), VOIDmode,
3520 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3521 return TREE_CST_RTL (exp);
3524 context = decl_function_context (exp);
3525 /* We treat inline_function_decl as an alias for the current function
3526 because that is the inline function whose vars, types, etc.
3527 are being merged into the current function.
3528 See expand_inline_function. */
3529 if (context == current_function_decl || context == inline_function_decl)
3532 /* If this is non-local, handle it. */
3535 temp = SAVE_EXPR_RTL (exp);
3536 if (temp && GET_CODE (temp) == REG)
3538 put_var_into_stack (exp);
3539 temp = SAVE_EXPR_RTL (exp);
3541 if (temp == 0 || GET_CODE (temp) != MEM)
3543 return change_address (temp, mode,
3544 fix_lexical_addr (XEXP (temp, 0), exp));
3546 if (SAVE_EXPR_RTL (exp) == 0)
3548 if (mode == BLKmode)
3550 = assign_stack_temp (mode,
3551 int_size_in_bytes (TREE_TYPE (exp)), 0);
3554 enum machine_mode var_mode = mode;
3556 if (TREE_CODE (type) == INTEGER_TYPE
3557 || TREE_CODE (type) == ENUMERAL_TYPE
3558 || TREE_CODE (type) == BOOLEAN_TYPE
3559 || TREE_CODE (type) == CHAR_TYPE
3560 || TREE_CODE (type) == REAL_TYPE
3561 || TREE_CODE (type) == POINTER_TYPE
3562 || TREE_CODE (type) == OFFSET_TYPE)
3564 PROMOTE_MODE (var_mode, unsignedp, type);
3567 temp = gen_reg_rtx (var_mode);
3570 SAVE_EXPR_RTL (exp) = temp;
3571 if (!optimize && GET_CODE (temp) == REG)
3572 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3575 /* If the mode of TEMP does not match that of the expression, it
3576 must be a promoted value. We pass store_expr a SUBREG of the
3577 wanted mode but mark it so that we know that it was already
3578 extended.  Note that `unsignedp' was modified above in this case.  */
3581 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3583 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3584 SUBREG_PROMOTED_VAR_P (temp) = 1;
3585 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3588 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3591 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3592 must be a promoted value. We return a SUBREG of the wanted mode,
3593 but mark it so that we know that it was already extended. Note
3594 that `unsignedp' was modified above in this case. */
3596 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3597 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3599 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3600 SUBREG_PROMOTED_VAR_P (temp) = 1;
3601 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3605 return SAVE_EXPR_RTL (exp);
3608 expand_exit_loop_if_false (NULL_PTR,
3609 invert_truthvalue (TREE_OPERAND (exp, 0)));
3613 expand_start_loop (1);
3614 expand_expr_stmt (TREE_OPERAND (exp, 0));
3621 tree vars = TREE_OPERAND (exp, 0);
3622 int vars_need_expansion = 0;
3624 /* Need to open a binding contour here because
3625 if there are any cleanups they must be contained here.  */
3626 expand_start_bindings (0);
3628 /* Mark the corresponding BLOCK for output in its proper place. */
3629 if (TREE_OPERAND (exp, 2) != 0
3630 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3631 insert_block (TREE_OPERAND (exp, 2));
3633 /* If VARS have not yet been expanded, expand them now. */
3636 if (DECL_RTL (vars) == 0)
3638 vars_need_expansion = 1;
3641 expand_decl_init (vars);
3642 vars = TREE_CHAIN (vars);
3645 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3647 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3653 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3655 emit_insns (RTL_EXPR_SEQUENCE (exp));
3656 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3657 return RTL_EXPR_RTL (exp);
3660 /* If we don't need the result, just ensure we evaluate any subexpressions.  */
3665 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3666 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3669 /* All elts simple constants => refer to a constant in memory. But
3670 if this is a non-BLKmode mode, let it store a field at a time
3671 since that should make a CONST_INT or CONST_DOUBLE when we
3672 fold. If we are making an initializer and all operands are
3673 constant, put it in memory as well. */
3674 else if ((TREE_STATIC (exp)
3675 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3676 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
3678 rtx constructor = output_constant_def (exp);
3679 if (modifier != EXPAND_CONST_ADDRESS
3680 && modifier != EXPAND_INITIALIZER
3681 && modifier != EXPAND_SUM
3682 && !memory_address_p (GET_MODE (constructor),
3683 XEXP (constructor, 0)))
3684 constructor = change_address (constructor, VOIDmode,
3685 XEXP (constructor, 0));
3691 if (target == 0 || ! safe_from_p (target, exp))
3693 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3694 target = gen_reg_rtx (mode);
3697 enum tree_code c = TREE_CODE (type);
3699 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3700 if (c == RECORD_TYPE || c == UNION_TYPE
3701 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3702 MEM_IN_STRUCT_P (target) = 1;
3705 store_constructor (exp, target);
3711 tree exp1 = TREE_OPERAND (exp, 0);
3714 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3715 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3716 This code has the same general effect as simply doing
3717 expand_expr on the save expr, except that the expression PTR
3718 is computed for use as a memory address. This means different
3719 code, suitable for indexing, may be generated. */
3720 if (TREE_CODE (exp1) == SAVE_EXPR
3721 && SAVE_EXPR_RTL (exp1) == 0
3722 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3723 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3724 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3726 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3727 VOIDmode, EXPAND_SUM);
3728 op0 = memory_address (mode, temp);
3729 op0 = copy_all_regs (op0);
3730 SAVE_EXPR_RTL (exp1) = op0;
3734 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3735 op0 = memory_address (mode, op0);
3738 temp = gen_rtx (MEM, mode, op0);
3739 /* If address was computed by addition,
3740 mark this as an element of an aggregate. */
3741 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3742 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3743 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3744 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3745 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3746 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3747 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3748 || (TREE_CODE (exp1) == ADDR_EXPR
3749 && (exp2 = TREE_OPERAND (exp1, 0))
3750 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3751 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3752 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3753 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
3754 MEM_IN_STRUCT_P (temp) = 1;
3755 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3756 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3757 a location is accessed through a pointer to const does not mean
3758 that the value there can never change. */
3759 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3765 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3769 tree array = TREE_OPERAND (exp, 0);
3770 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3771 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3772 tree index = TREE_OPERAND (exp, 1);
3773 tree index_type = TREE_TYPE (index);
3776 /* Optimize the special-case of a zero lower bound. */
3777 if (! integer_zerop (low_bound))
3778 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3780 if (TREE_CODE (index) != INTEGER_CST
3781 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3783 /* Nonconstant array index or nonconstant element size.
3784 Generate the tree for *(&array+index) and expand that,
3785 except do it in a language-independent way
3786 and don't complain about non-lvalue arrays.
3787 `mark_addressable' should already have been called
3788 for any array for which this case will be reached. */
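/* In effect, `a[i]' with a nonconstant I is rewritten here as
   `*(&a + i * sizeof (a[0]))', the addition being done on the raw
   byte address; it is built out of trees so that every language
   front end can share this code.  */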
3790 /* Don't forget the const or volatile flag from the array element.  */
3792 tree variant_type = build_type_variant (type,
3793 TREE_READONLY (exp),
3794 TREE_THIS_VOLATILE (exp));
3795 tree array_adr = build1 (ADDR_EXPR,
3796 build_pointer_type (variant_type), array);
3799 /* Convert the integer argument to a type the same size as a
3800 pointer so the multiply won't overflow spuriously. */
3801 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3802 index = convert (type_for_size (POINTER_SIZE, 0), index);
3804 /* Don't think the address has side effects
3805 just because the array does.
3806 (In some cases the address might have side effects,
3807 and we fail to record that fact here. However, it should not
3808 matter, since expand_expr should not care.) */
3809 TREE_SIDE_EFFECTS (array_adr) = 0;
3811 elt = build1 (INDIRECT_REF, type,
3812 fold (build (PLUS_EXPR,
3813 TYPE_POINTER_TO (variant_type),
3815 fold (build (MULT_EXPR,
3816 TYPE_POINTER_TO (variant_type),
3818 size_in_bytes (type))))));
3820 /* Volatility, etc., of new expression is same as old expression.  */
3822 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3823 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3824 TREE_READONLY (elt) = TREE_READONLY (exp);
3826 return expand_expr (elt, target, tmode, modifier);
3829 /* Fold an expression like: "foo"[2].
3830 This is not done in fold so it won't happen inside &. */
3832 if (TREE_CODE (array) == STRING_CST
3833 && TREE_CODE (index) == INTEGER_CST
3834 && !TREE_INT_CST_HIGH (index)
3835 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3837 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3839 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3840 TREE_TYPE (exp) = integer_type_node;
3841 return expand_expr (exp, target, tmode, modifier);
3843 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3845 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3846 TREE_TYPE (exp) = integer_type_node;
3847 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3849 target, tmode, modifier);
3853 /* If this is a constant index into a constant array,
3854 just get the value from the array. Handle both the cases when
3855 we have an explicit constructor and when our operand is a variable
3856 that was declared const. */
3858 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3860 if (TREE_CODE (index) == INTEGER_CST
3861 && TREE_INT_CST_HIGH (index) == 0)
3863 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3865 i = TREE_INT_CST_LOW (index);
3867 elem = TREE_CHAIN (elem);
3869 return expand_expr (fold (TREE_VALUE (elem)), target,
3874 else if (optimize >= 1
3875 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3876 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3877 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3879 if (TREE_CODE (index) == INTEGER_CST
3880 && TREE_INT_CST_HIGH (index) == 0)
3882 tree init = DECL_INITIAL (array);
3884 i = TREE_INT_CST_LOW (index);
3885 if (TREE_CODE (init) == CONSTRUCTOR)
3887 tree elem = CONSTRUCTOR_ELTS (init);
3890 elem = TREE_CHAIN (elem);
3892 return expand_expr (fold (TREE_VALUE (elem)), target,
3895 else if (TREE_CODE (init) == STRING_CST
3896 && i < TREE_STRING_LENGTH (init))
3898 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3899 return convert_to_mode (mode, temp, 0);
3905 /* Treat array-ref with constant index as a component-ref. */
3909 /* If the operand is a CONSTRUCTOR, we can just extract the
3910 appropriate field if it is present. */
3911 if (code != ARRAY_REF
3912 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3916 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3917 elt = TREE_CHAIN (elt))
3918 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3919 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3923 enum machine_mode mode1;
3928 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3929 &mode1, &unsignedp, &volatilep);
3931 /* If we got back the original object, something is wrong. Perhaps
3932 we are evaluating an expression too early. In any event, don't
3933 infinitely recurse. */
3937 /* In some cases, we will be offsetting OP0's address by a constant.
3938 So get it as a sum, if possible. If we will be using it
3939 directly in an insn, we validate it. */
3940 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3942 /* If this is a constant, put it into a register if it is a
3943 legitimate constant, and into memory if it isn't.  */
3944 if (CONSTANT_P (op0))
3946 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3947 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3948 op0 = force_reg (mode, op0);
3950 op0 = validize_mem (force_const_mem (mode, op0));
3955 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3957 if (GET_CODE (op0) != MEM)
3959 op0 = change_address (op0, VOIDmode,
3960 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3961 force_reg (Pmode, offset_rtx)));
3964 /* Don't forget about volatility even if this is a bitfield. */
3965 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3967 op0 = copy_rtx (op0);
3968 MEM_VOLATILE_P (op0) = 1;
3971 /* In cases where an aligned union has an unaligned object
3972 as a field, we might be extracting a BLKmode value from
3973 an integer-mode (e.g., SImode) object. Handle this case
3974 by doing the extract into an object as wide as the field
3975 (which we know to be the width of a basic mode), then
3976 storing into memory, and changing the mode to BLKmode. */
3977 if (mode1 == VOIDmode
3978 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3979 && modifier != EXPAND_CONST_ADDRESS
3980 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3981 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
3982 /* If the field isn't aligned enough to fetch as a memref,
3983 fetch it as a bit field. */
3984 || (STRICT_ALIGNMENT
3985 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
3986 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3988 enum machine_mode ext_mode = mode;
3990 if (ext_mode == BLKmode)
3991 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3993 if (ext_mode == BLKmode)
3996 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3997 unsignedp, target, ext_mode, ext_mode,
3998 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3999 int_size_in_bytes (TREE_TYPE (tem)));
4000 if (mode == BLKmode)
4002 rtx new = assign_stack_temp (ext_mode,
4003 bitsize / BITS_PER_UNIT, 0);
4005 emit_move_insn (new, op0);
4006 op0 = copy_rtx (new);
4007 PUT_MODE (op0, BLKmode);
4013 /* Get a reference to just this component. */
4014 if (modifier == EXPAND_CONST_ADDRESS
4015 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4016 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4017 (bitpos / BITS_PER_UNIT)));
4019 op0 = change_address (op0, mode1,
4020 plus_constant (XEXP (op0, 0),
4021 (bitpos / BITS_PER_UNIT)));
4022 MEM_IN_STRUCT_P (op0) = 1;
4023 MEM_VOLATILE_P (op0) |= volatilep;
4024 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4027 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4028 convert_move (target, op0, unsignedp);
4034 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4035 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4036 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4037 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4038 MEM_IN_STRUCT_P (temp) = 1;
4039 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4040 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4041 a location is accessed through a pointer to const does not mean
4042 that the value there can never change. */
4043 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4048 /* Intended for a reference to a buffer of a file-object in Pascal.
4049 But it's not certain that a special tree code will really be
4050 necessary for these. INDIRECT_REF might work for them. */
4054 /* IN_EXPR: Inlined pascal set IN expression.
4057 rlo = set_low - (set_low%bits_per_word);
4058 the_word = set [ (index - rlo)/bits_per_word ];
4059 bit_index = index % bits_per_word;
4060 bitmask = 1 << bit_index;
4061 return !!(the_word & bitmask); */
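/* Spelled out as self-contained C, a sketch only (the rtl emitted
   below works byte by byte, i.e. with BITS_PER_UNIT playing the
   role of bits_per_word):

       static int
       in_set (unsigned char *set, int set_low, int index)
       {
         int rlo = set_low - (set_low % 8);
         unsigned char the_word = set[(index - rlo) / 8];
         return (the_word >> (index % 8)) & 1;
       }
*/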
4063 preexpand_calls (exp);
4065 tree set = TREE_OPERAND (exp, 0);
4066 tree index = TREE_OPERAND (exp, 1);
4067 tree set_type = TREE_TYPE (set);
4069 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4070 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4076 rtx diff, quo, rem, addr, bit, result;
4077 rtx setval, setaddr;
4078 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4081 target = gen_reg_rtx (mode);
4083 /* If domain is empty, answer is no. */
4084 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4087 index_val = expand_expr (index, 0, VOIDmode, 0);
4088 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4089 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4090 setval = expand_expr (set, 0, VOIDmode, 0);
4091 setaddr = XEXP (setval, 0);
4093 /* Compare index against bounds, if they are constant. */
4094 if (GET_CODE (index_val) == CONST_INT
4095 && GET_CODE (lo_r) == CONST_INT
4096 && INTVAL (index_val) < INTVAL (lo_r))
4099 if (GET_CODE (index_val) == CONST_INT
4100 && GET_CODE (hi_r) == CONST_INT
4101 && INTVAL (hi_r) < INTVAL (index_val))
4104 /* If we get here, we have to generate the code for both cases
4105 (in range and out of range). */
4107 op0 = gen_label_rtx ();
4108 op1 = gen_label_rtx ();
4110 if (! (GET_CODE (index_val) == CONST_INT
4111 && GET_CODE (lo_r) == CONST_INT))
4113 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4114 GET_MODE (index_val), 0, 0);
4115 emit_jump_insn (gen_blt (op1));
4118 if (! (GET_CODE (index_val) == CONST_INT
4119 && GET_CODE (hi_r) == CONST_INT))
4121 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4122 GET_MODE (index_val), 0, 0);
4123 emit_jump_insn (gen_bgt (op1));
4126 /* Calculate the element number of bit zero in the first word of the set.  */
4128 if (GET_CODE (lo_r) == CONST_INT)
4129 rlow = GEN_INT (INTVAL (lo_r)
4130 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4132 rlow = expand_binop (index_mode, and_optab, lo_r,
4133 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4134 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4136 diff = expand_binop (index_mode, sub_optab,
4137 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4139 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4140 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4141 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4142 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4143 addr = memory_address (byte_mode,
4144 expand_binop (index_mode, add_optab,
4145 diff, setaddr, NULL_RTX, 0,
4147 /* Extract the bit we want to examine.  */
4148 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4149 gen_rtx (MEM, byte_mode, addr),
4150 make_tree (TREE_TYPE (index), rem),
4152 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4153 GET_MODE (target) == byte_mode ? target : 0,
4154 1, OPTAB_LIB_WIDEN);
4156 if (result != target)
4157 convert_move (target, result, 1);
4159 /* Output the code to handle the out-of-range case. */
4162 emit_move_insn (target, const0_rtx);
4167 case WITH_CLEANUP_EXPR:
4168 if (RTL_EXPR_RTL (exp) == 0)
4171 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4173 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4174 /* That's it for this cleanup. */
4175 TREE_OPERAND (exp, 2) = 0;
4177 return RTL_EXPR_RTL (exp);
4180 /* Check for a built-in function. */
4181 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4182 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4183 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4184 return expand_builtin (exp, target, subtarget, tmode, ignore);
4185 /* If this call was expanded already by preexpand_calls,
4186 just return the result we got. */
4187 if (CALL_EXPR_RTL (exp) != 0)
4188 return CALL_EXPR_RTL (exp);
4189 return expand_call (exp, target, ignore);
4191 case NON_LVALUE_EXPR:
4194 case REFERENCE_EXPR:
4195 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4196 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4197 if (TREE_CODE (type) == UNION_TYPE)
4199 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4202 if (mode == BLKmode)
4204 if (TYPE_SIZE (type) == 0
4205 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4207 target = assign_stack_temp (BLKmode,
4208 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4209 + BITS_PER_UNIT - 1)
4210 / BITS_PER_UNIT, 0);
4213 target = gen_reg_rtx (mode);
4215 if (GET_CODE (target) == MEM)
4216 /* Store data into beginning of memory target. */
4217 store_expr (TREE_OPERAND (exp, 0),
4218 change_address (target, TYPE_MODE (valtype), 0), 0);
4220 else if (GET_CODE (target) == REG)
4221 /* Store this field into a union of the proper type. */
4222 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4223 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4225 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4229 /* Return the entire union. */
4232 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4233 if (GET_MODE (op0) == mode)
4235 /* If arg is a constant integer being extended from a narrower mode,
4236 we must really truncate to get the extended bits right. Otherwise
4237 (unsigned long) (unsigned char) ("\377"[0])
4238 would come out as ffffffff. */
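/* To make the example above concrete (illustrative): "\377"[0] is
   (char) -1 on a signed-char machine, read back as a sign-extended
   CONST_INT of -1; since unsigned char has width 8, masking with
   ((HOST_WIDE_INT) 1 << 8) - 1 turns it into 255, the correctly
   zero-extended value.  */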
4239 if (GET_MODE (op0) == VOIDmode
4240 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4241 < GET_MODE_BITSIZE (mode)))
4243 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4244 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4246 if (width < HOST_BITS_PER_WIDE_INT)
4248 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4249 : CONST_DOUBLE_LOW (op0));
4250 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4251 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4252 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4254 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4256 op0 = GEN_INT (val);
4260 op0 = (simplify_unary_operation
4261 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4262 ? ZERO_EXTEND : SIGN_EXTEND),
4264 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4269 if (GET_MODE (op0) == VOIDmode)
4271 if (modifier == EXPAND_INITIALIZER)
4272 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4273 if (flag_force_mem && GET_CODE (op0) == MEM)
4274 op0 = copy_to_reg (op0);
4277 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4279 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4283 /* We come here from MINUS_EXPR when the second operand is a constant. */
4285 this_optab = add_optab;
4287 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4288 something else, make sure we add the register to the constant and
4289 then to the other thing. This case can occur during strength
4290 reduction and doing it this way will produce better code if the
4291 frame pointer or argument pointer is eliminated.
4293 fold-const.c will ensure that the constant is always in the inner
4294 PLUS_EXPR, so the only case we need to do anything about is if
4295 sp, ap, or fp is our second argument, in which case we must swap
4296 the innermost first argument and our second argument. */
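/* For example (illustrative): given the tree (A + c) + fp, where c
   is the constant, the swap below produces (fp + c) + A, so that
   fp + c can fold to a single constant address once the frame
   pointer is eliminated.  */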
4298 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4299 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4300 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4301 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4302 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4303 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4305 tree t = TREE_OPERAND (exp, 1);
4307 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4308 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4311 /* If the result is to be Pmode and we are adding an integer to
4312 something, we might be forming a constant. So try to use
4313 plus_constant. If it produces a sum and we can't accept it,
4314 use force_operand. This allows P = &ARR[const] to generate
4315 efficient code on machines where a SYMBOL_REF is not a valid
4318 If this is an EXPAND_SUM call, always return the sum. */
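/* For example (illustrative): for P = &ARR[3] with 4-byte elements,
   plus_constant can fold the address to
   (const (plus (symbol_ref ARR) (const_int 12)))
   instead of emitting an explicit addition.  */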
4319 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4322 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4323 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4324 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4326 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4328 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4329 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4330 op1 = force_operand (op1, target);
4334 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4335 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4336 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4338 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4340 if (! CONSTANT_P (op0))
4342 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4343 VOIDmode, modifier);
4346 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4347 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4348 op0 = force_operand (op0, target);
4353 /* No sense saving up arithmetic to be done
4354 if it's all in the wrong mode to form part of an address.
4355 And force_operand won't know whether to sign-extend or
4357 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4361 preexpand_calls (exp);
4362 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4365 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4366 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4369 /* Make sure any term that's a sum with a constant comes last. */
4370 if (GET_CODE (op0) == PLUS
4371 && CONSTANT_P (XEXP (op0, 1)))
4377 /* If adding to a sum including a constant,
4378 associate it to put the constant outside. */
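/* For example (illustrative): op0 == (reg X) and
   op1 == (plus (reg Y) (const_int 4)) re-associate to
   (plus (plus (reg X) (reg Y)) (const_int 4)), leaving the
   constant outermost where an addressing mode can absorb it.  */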
4379 if (GET_CODE (op1) == PLUS
4380 && CONSTANT_P (XEXP (op1, 1)))
4382 rtx constant_term = const0_rtx;
4384 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4387 /* Ensure that MULT comes first if there is one. */
4388 else if (GET_CODE (op0) == MULT)
4389 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4391 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4393 /* Let's also eliminate constants from op0 if possible. */
4394 op0 = eliminate_constant_term (op0, &constant_term);
4396 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4397 their sum should be a constant. Form it into OP1, since the
4398 result we want will then be OP0 + OP1. */
4400 temp = simplify_binary_operation (PLUS, mode, constant_term,
4405 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4408 /* Put a constant term last and put a multiplication first. */
4409 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4410 temp = op1, op1 = op0, op0 = temp;
4412 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4413 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4416 /* Handle difference of two symbolic constants,
4417 for the sake of an initializer. */
4418 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4419 && really_constant_p (TREE_OPERAND (exp, 0))
4420 && really_constant_p (TREE_OPERAND (exp, 1)))
4422 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4423 VOIDmode, modifier);
4424 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4425 VOIDmode, modifier);
4426 return gen_rtx (MINUS, mode, op0, op1);
4428 /* Convert A - const to A + (-const). */
4429 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4431 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4432 fold (build1 (NEGATE_EXPR, type,
4433 TREE_OPERAND (exp, 1))));
4436 this_optab = sub_optab;
4440 preexpand_calls (exp);
4441 /* If first operand is constant, swap them.
4442 Thus the following special case checks need only
4443 check the second operand. */
4444 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4446 register tree t1 = TREE_OPERAND (exp, 0);
4447 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4448 TREE_OPERAND (exp, 1) = t1;
4451 /* Attempt to return something suitable for generating an
4452 indexed address, for machines that support that. */
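/* For example (illustrative): expanding i * 4 with EXPAND_SUM can
   return (mult (reg i) (const_int 4)) unreduced, letting the caller
   build an indexed address such as
   (plus (reg base) (mult (reg i) (const_int 4))).  */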
4454 if (modifier == EXPAND_SUM && mode == Pmode
4455 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4456 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4458 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4460 /* Apply distributive law if OP0 is x+c. */
4461 if (GET_CODE (op0) == PLUS
4462 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4463 return gen_rtx (PLUS, mode,
4464 gen_rtx (MULT, mode, XEXP (op0, 0),
4465 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4466 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4467 * INTVAL (XEXP (op0, 1))));
4469 if (GET_CODE (op0) != REG)
4470 op0 = force_operand (op0, NULL_RTX);
4471 if (GET_CODE (op0) != REG)
4472 op0 = copy_to_mode_reg (mode, op0);
4474 return gen_rtx (MULT, mode, op0,
4475 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4478 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4481 /* Check for multiplying things that have been extended
4482 from a narrower type. If this machine supports multiplying
4483 in that narrower type with a result in the desired type,
4484 do it that way, and avoid the explicit type-conversion. */
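/* For example (illustrative): (int) a * (int) b, with a and b of
   type short, can be done as a single HImode-by-HImode multiply
   yielding an SImode product, when the target defines such a
   widening-multiply pattern, rather than extending both operands
   to SImode first.  */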
4485 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4486 && TREE_CODE (type) == INTEGER_TYPE
4487 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4488 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4489 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4490 && int_fits_type_p (TREE_OPERAND (exp, 1),
4491 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4492 /* Don't use a widening multiply if a shift will do. */
4493 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4494 > HOST_BITS_PER_WIDE_INT)
4495 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4497 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4498 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4500 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4501 /* If both operands are extended, they must either both
4502 be zero-extended or both be sign-extended. */
4503 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4505 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4507 enum machine_mode innermode
4508 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4509 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4510 ? umul_widen_optab : smul_widen_optab);
4511 if (mode == GET_MODE_WIDER_MODE (innermode)
4512 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4514 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4515 NULL_RTX, VOIDmode, 0);
4516 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4517 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4520 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4521 NULL_RTX, VOIDmode, 0);
4525 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4526 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4527 return expand_mult (mode, op0, op1, target, unsignedp);
4529 case TRUNC_DIV_EXPR:
4530 case FLOOR_DIV_EXPR:
4532 case ROUND_DIV_EXPR:
4533 case EXACT_DIV_EXPR:
4534 preexpand_calls (exp);
4535 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4537 /* Possible optimization: compute the dividend with EXPAND_SUM
4538 then if the divisor is constant can optimize the case
4539 where some terms of the dividend have coeffs divisible by it. */
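/* For example (illustrative): with unsigned operands and no
   overflow, (4*i + j) / 4 could be rewritten as i + j/4,
   skipping most of the division.  */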
4540 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4541 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4542 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4545 this_optab = flodiv_optab;
4548 case TRUNC_MOD_EXPR:
4549 case FLOOR_MOD_EXPR:
4551 case ROUND_MOD_EXPR:
4552 preexpand_calls (exp);
4553 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4555 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4556 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4557 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4559 case FIX_ROUND_EXPR:
4560 case FIX_FLOOR_EXPR:
4562 abort (); /* Not used for C. */
4564 case FIX_TRUNC_EXPR:
4565 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4567 target = gen_reg_rtx (mode);
4568 expand_fix (target, op0, unsignedp);
4572 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4574 target = gen_reg_rtx (mode);
4575 /* expand_float can't figure out what to do if FROM has VOIDmode.
4576 So give it the correct mode. With -O, cse will optimize this. */
4577 if (GET_MODE (op0) == VOIDmode)
4578 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4580 expand_float (target, op0,
4581 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4585 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4586 temp = expand_unop (mode, neg_optab, op0, target, 0);
4592 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4594 /* Handle complex values specially. */
4596 enum machine_mode opmode
4597 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4599 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4600 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4601 return expand_complex_abs (opmode, op0, target, unsignedp);
4604 /* Unsigned abs is simply the operand. Testing here means we don't
4605 risk generating incorrect code below. */
4606 if (TREE_UNSIGNED (type))
4609 /* First try to do it with a special abs instruction. */
4610 temp = expand_unop (mode, abs_optab, op0, target, 0);
4614 /* If this machine has expensive jumps, we can do integer absolute
4615 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4616 where W is the width of MODE. */
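/* A worked instance (illustrative, W == 32): for x == -5,
   extended == x >> 31 == -1, (x ^ -1) == 4, and 4 - (-1) == 5;
   for x >= 0, extended == 0 and the result is x unchanged.  */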
4618 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4620 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4621 size_int (GET_MODE_BITSIZE (mode) - 1),
4624 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4627 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4634 /* If that does not win, use conditional jump and negate. */
4635 target = original_target;
4636 temp = gen_label_rtx ();
4637 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4638 || (GET_CODE (target) == REG
4639 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4640 target = gen_reg_rtx (mode);
4641 emit_move_insn (target, op0);
4642 emit_cmp_insn (target,
4643 expand_expr (convert (type, integer_zero_node),
4644 NULL_RTX, VOIDmode, 0),
4645 GE, NULL_RTX, mode, 0, 0);
4647 emit_jump_insn (gen_bge (temp));
4648 op0 = expand_unop (mode, neg_optab, target, target, 0);
4650 emit_move_insn (target, op0);
4657 target = original_target;
4658 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4659 || (GET_CODE (target) == REG
4660 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4661 target = gen_reg_rtx (mode);
4662 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4663 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4665 /* First try to do it with a special MIN or MAX instruction.
4666 If that does not win, use a conditional jump to select the proper
4668 this_optab = (TREE_UNSIGNED (type)
4669 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4670 : (code == MIN_EXPR ? smin_optab : smax_optab));
4672 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4678 emit_move_insn (target, op0);
4679 op0 = gen_label_rtx ();
4680 /* If this mode is an integer too wide to compare properly,
4681 compare word by word. Rely on cse to optimize constant cases. */
4682 if (GET_MODE_CLASS (mode) == MODE_INT
4683 && !can_compare_p (mode))
4685 if (code == MAX_EXPR)
4686 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
4688 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4689 emit_move_insn (target, op1);
4693 if (code == MAX_EXPR)
4694 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4695 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4696 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4698 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4699 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4700 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4701 if (temp == const0_rtx)
4702 emit_move_insn (target, op1);
4703 else if (temp != const_true_rtx)
4705 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4706 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4709 emit_move_insn (target, op1);
4715 /* ??? Can optimize when the operand of this is a bitwise operation,
4716 by using a different bitwise operation. */
4718 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4719 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4725 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4726 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4731 /* ??? Can optimize bitwise operations with one arg constant.
4732 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4733 and (a bitwise1 b) bitwise2 b (etc)
4734 but that is probably not worth while. */
4736 /* BIT_AND_EXPR is for bitwise anding.
4737 TRUTH_AND_EXPR is for anding two boolean values
4738 when we want in all cases to compute both of them.
4739 In general it is fastest to do TRUTH_AND_EXPR by
4740 computing both operands as actual zero-or-1 values
4741 and then bitwise anding. In cases where there cannot
4742 be any side effects, better code would be made by
4743 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4744 but the question is how to recognize those cases. */
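/* For example (illustrative): (a < b) TRUTH_AND (c < d) is
   computed by materializing each comparison as 0 or 1 and then
   bitwise anding them, usually avoiding branches.  */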
4746 case TRUTH_AND_EXPR:
4748 this_optab = and_optab;
4751 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4754 this_optab = ior_optab;
4757 case TRUTH_XOR_EXPR:
4759 this_optab = xor_optab;
4766 preexpand_calls (exp);
4767 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4769 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4770 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4773 /* Could determine the answer when only additive constants differ.
4774 Also, the addition of one can be handled by changing the condition. */
4781 preexpand_calls (exp);
4782 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4785 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4786 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4788 && GET_CODE (original_target) == REG
4789 && (GET_MODE (original_target)
4790 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4792 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4793 if (temp != original_target)
4794 temp = copy_to_reg (temp);
4795 op1 = gen_label_rtx ();
4796 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4797 GET_MODE (temp), unsignedp, 0);
4798 emit_jump_insn (gen_beq (op1));
4799 emit_move_insn (temp, const1_rtx);
4803 /* If no set-flag instruction, must generate a conditional
4804 store into a temporary variable. Drop through
4805 and handle this like && and ||. */
4807 case TRUTH_ANDIF_EXPR:
4808 case TRUTH_ORIF_EXPR:
4810 && (target == 0 || ! safe_from_p (target, exp)
4811 /* Make sure we don't have a hard reg (such as function's return
4812 value) live across basic blocks, if not optimizing. */
4813 || (!optimize && GET_CODE (target) == REG
4814 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
4815 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4818 emit_clr_insn (target);
4820 op1 = gen_label_rtx ();
4821 jumpifnot (exp, op1);
4824 emit_0_to_1_insn (target);
4827 return ignore ? const0_rtx : target;
4829 case TRUTH_NOT_EXPR:
4830 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4831 /* The parser is careful to generate TRUTH_NOT_EXPR
4832 only with operands that are always zero or one. */
4833 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4834 target, 1, OPTAB_LIB_WIDEN);
4840 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4842 return expand_expr (TREE_OPERAND (exp, 1),
4843 (ignore ? const0_rtx : target),
4848 /* Note that COND_EXPRs whose type is a structure or union
4849 are required to be constructed to contain assignments of
4850 a temporary variable, so that we can evaluate them here
4851 for side effect only. If type is void, we must do likewise. */
4853 /* If an arm of the branch requires a cleanup,
4854 only that cleanup is performed. */
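/* For example (illustrative): a structure-valued p ? f () : g () is
   built so that each arm assigns a common temporary, roughly
   p ? (t = f ()) : (t = g ()), and the COND_EXPR itself is then
   evaluated here only for those side effects.  */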
4857 tree binary_op = 0, unary_op = 0;
4858 tree old_cleanups = cleanups_this_call;
4859 cleanups_this_call = 0;
4861 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4862 convert it to our mode, if necessary. */
4863 if (integer_onep (TREE_OPERAND (exp, 1))
4864 && integer_zerop (TREE_OPERAND (exp, 2))
4865 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4869 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4874 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4875 if (GET_MODE (op0) == mode)
4878 target = gen_reg_rtx (mode);
4879 convert_move (target, op0, unsignedp);
4883 /* If we are not to produce a result, we have no target. Otherwise,
4884 if a target was specified use it; it will not be used as an
4885 intermediate target unless it is safe. If no target, use a
4890 else if (original_target
4891 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4892 temp = original_target;
4893 else if (mode == BLKmode)
4895 if (TYPE_SIZE (type) == 0
4896 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4898 temp = assign_stack_temp (BLKmode,
4899 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4900 + BITS_PER_UNIT - 1)
4901 / BITS_PER_UNIT, 0);
4904 temp = gen_reg_rtx (mode);
4906 /* Check for X ? A + B : A. If we have this, we can copy
4907 A to the output and conditionally add B. Similarly for unary
4908 operations. Don't do this if X has side-effects because
4909 those side effects might affect A or B and the "?" operation is
4910 a sequence point in ANSI. (We test for side effects later.) */
4912 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4913 && operand_equal_p (TREE_OPERAND (exp, 2),
4914 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4915 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4916 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4917 && operand_equal_p (TREE_OPERAND (exp, 1),
4918 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4919 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4920 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4921 && operand_equal_p (TREE_OPERAND (exp, 2),
4922 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4923 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4924 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4925 && operand_equal_p (TREE_OPERAND (exp, 1),
4926 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4927 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4929 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4930 operation, do this as A + (X != 0). Similarly for other simple
4931 binary operators. */
4932 if (temp && singleton && binary_op
4933 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4934 && (TREE_CODE (binary_op) == PLUS_EXPR
4935 || TREE_CODE (binary_op) == MINUS_EXPR
4936 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4937 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4938 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4939 && integer_onep (TREE_OPERAND (binary_op, 1))
4940 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4943 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4944 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4945 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4946 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4949 /* If we had X ? A : A + 1, do this as A + (X == 0).
4951 We have to invert the truth value here and then put it
4952 back later if do_store_flag fails. We cannot simply copy
4953 TREE_OPERAND (exp, 0) to another variable and modify that
4954 because invert_truthvalue can modify the tree pointed to
4956 if (singleton == TREE_OPERAND (exp, 1))
4957 TREE_OPERAND (exp, 0)
4958 = invert_truthvalue (TREE_OPERAND (exp, 0));
4960 result = do_store_flag (TREE_OPERAND (exp, 0),
4961 (safe_from_p (temp, singleton)
4963 mode, BRANCH_COST <= 1);
4967 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4968 return expand_binop (mode, boptab, op1, result, temp,
4969 unsignedp, OPTAB_LIB_WIDEN);
4971 else if (singleton == TREE_OPERAND (exp, 1))
4972 TREE_OPERAND (exp, 0)
4973 = invert_truthvalue (TREE_OPERAND (exp, 0));
4977 op0 = gen_label_rtx ();
4979 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4983 /* If the target conflicts with the other operand of the
4984 binary op, we can't use it. Also, we can't use the target
4985 if it is a hard register, because evaluating the condition
4986 might clobber it. */
4988 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4989 || (GET_CODE (temp) == REG
4990 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4991 temp = gen_reg_rtx (mode);
4992 store_expr (singleton, temp, 0);
4995 expand_expr (singleton,
4996 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4997 if (cleanups_this_call)
4999 sorry ("aggregate value in COND_EXPR");
5000 cleanups_this_call = 0;
5002 if (singleton == TREE_OPERAND (exp, 1))
5003 jumpif (TREE_OPERAND (exp, 0), op0);
5005 jumpifnot (TREE_OPERAND (exp, 0), op0);
5007 if (binary_op && temp == 0)
5008 /* Just touch the other operand. */
5009 expand_expr (TREE_OPERAND (binary_op, 1),
5010 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5012 store_expr (build (TREE_CODE (binary_op), type,
5013 make_tree (type, temp),
5014 TREE_OPERAND (binary_op, 1)),
5017 store_expr (build1 (TREE_CODE (unary_op), type,
5018 make_tree (type, temp)),
5023 /* This is now done in jump.c and is better done there because it
5024 produces shorter register lifetimes. */
5026 /* Check for both possibilities, either constants or variables
5027 in registers (but not the same as the target!). If so, can
5028 save branches by assigning one, branching, and assigning the
5030 else if (temp && GET_MODE (temp) != BLKmode
5031 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5032 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5033 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5034 && DECL_RTL (TREE_OPERAND (exp, 1))
5035 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5036 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5037 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5038 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5039 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5040 && DECL_RTL (TREE_OPERAND (exp, 2))
5041 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5042 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5044 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5045 temp = gen_reg_rtx (mode);
5046 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5047 jumpifnot (TREE_OPERAND (exp, 0), op0);
5048 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5052 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5053 comparison operator. If we have one of these cases, set the
5054 output to A, branch on A (cse will merge these two references),
5055 then set the output to FOO. */
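/* For example (illustrative): x != 0 ? x : y first stores x into
   the result, branches on x != 0 (cse merges the two uses of x),
   and stores y only on the false path.  */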
5057 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5058 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5059 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5060 TREE_OPERAND (exp, 1), 0)
5061 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5062 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5064 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5065 temp = gen_reg_rtx (mode);
5066 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5067 jumpif (TREE_OPERAND (exp, 0), op0);
5068 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5072 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5073 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5074 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5075 TREE_OPERAND (exp, 2), 0)
5076 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5077 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5079 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5080 temp = gen_reg_rtx (mode);
5081 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5082 jumpifnot (TREE_OPERAND (exp, 0), op0);
5083 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5088 op1 = gen_label_rtx ();
5089 jumpifnot (TREE_OPERAND (exp, 0), op0);
5091 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5093 expand_expr (TREE_OPERAND (exp, 1),
5094 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5095 if (cleanups_this_call)
5097 sorry ("aggregate value in COND_EXPR");
5098 cleanups_this_call = 0;
5102 emit_jump_insn (gen_jump (op1));
5106 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5108 expand_expr (TREE_OPERAND (exp, 2),
5109 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5112 if (cleanups_this_call)
5114 sorry ("aggregate value in COND_EXPR");
5115 cleanups_this_call = 0;
5121 cleanups_this_call = old_cleanups;
5127 /* Something needs to be initialized, but we didn't know
5128 where that thing was when building the tree. For example,
5129 it could be the return value of a function, or a parameter
5130 to a function which is laid out on the stack, or a temporary
5131 variable which must be passed by reference.
5133 We guarantee that the expression will either be constructed
5134 or copied into our original target. */
5136 tree slot = TREE_OPERAND (exp, 0);
5139 if (TREE_CODE (slot) != VAR_DECL)
5144 if (DECL_RTL (slot) != 0)
5146 target = DECL_RTL (slot);
5147 /* If we have already expanded the slot, don't do
5149 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5154 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5155 /* All temp slots at this level must not conflict. */
5156 preserve_temp_slots (target);
5157 DECL_RTL (slot) = target;
5161 /* I bet this needs to be done, and I bet that it needs to
5162 be above, inside the else clause. The reason is
5163 simple: how else is it going to get cleaned up? (mrs)
5165 The reason it probably did not work before, and was
5166 commented out, is that this was re-expanding already
5167 expanded target_exprs (target == 0 and DECL_RTL (slot)
5168 != 0), also cleaning them up many times as well. :-( */
5170 /* Since SLOT is not known to the called function
5171 to belong to its stack frame, we must build an explicit
5172 cleanup. This case occurs when we must build up a reference
5173 to pass the reference as an argument. In this case,
5174 it is very likely that such a reference need not be
5177 if (TREE_OPERAND (exp, 2) == 0)
5178 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5179 if (TREE_OPERAND (exp, 2))
5180 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5181 cleanups_this_call);
5186 /* This case does occur, when expanding a parameter which
5187 needs to be constructed on the stack. The target
5188 is the actual stack address that we want to initialize.
5189 The function we call will perform the cleanup in this case. */
5191 /* If we have already assigned it space, use that space,
5192 not the target that we were passed in, as our target
5193 parameter is only a hint. */
5194 if (DECL_RTL (slot) != 0)
5196 target = DECL_RTL (slot);
5197 /* If we have already expanded the slot, don't do
5199 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5203 DECL_RTL (slot) = target;
5206 exp1 = TREE_OPERAND (exp, 1);
5207 /* Mark it as expanded. */
5208 TREE_OPERAND (exp, 1) = NULL_TREE;
5210 return expand_expr (exp1, target, tmode, modifier);
5215 tree lhs = TREE_OPERAND (exp, 0);
5216 tree rhs = TREE_OPERAND (exp, 1);
5217 tree noncopied_parts = 0;
5218 tree lhs_type = TREE_TYPE (lhs);
5220 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5221 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5222 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5223 TYPE_NONCOPIED_PARTS (lhs_type));
5224 while (noncopied_parts != 0)
5226 expand_assignment (TREE_VALUE (noncopied_parts),
5227 TREE_PURPOSE (noncopied_parts), 0, 0);
5228 noncopied_parts = TREE_CHAIN (noncopied_parts);
5235 /* If lhs is complex, expand calls in rhs before computing it.
5236 That's so we don't compute a pointer and save it over a call.
5237 If lhs is simple, compute it first so we can give it as a
5238 target if the rhs is just a call. This avoids an extra temp and copy
5239 and that prevents a partial-subsumption which makes bad code.
5240 Actually we could treat component_ref's of vars like vars. */
5242 tree lhs = TREE_OPERAND (exp, 0);
5243 tree rhs = TREE_OPERAND (exp, 1);
5244 tree noncopied_parts = 0;
5245 tree lhs_type = TREE_TYPE (lhs);
5249 if (TREE_CODE (lhs) != VAR_DECL
5250 && TREE_CODE (lhs) != RESULT_DECL
5251 && TREE_CODE (lhs) != PARM_DECL)
5252 preexpand_calls (exp);
5254 /* Check for |= or &= of a bitfield of size one into another bitfield
5255 of size 1. In this case, (unless we need the result of the
5256 assignment) we can do this more efficiently with a
5257 test followed by an assignment, if necessary.
5259 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5260 things change so we do, this code should be enhanced to
5263 && TREE_CODE (lhs) == COMPONENT_REF
5264 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5265 || TREE_CODE (rhs) == BIT_AND_EXPR)
5266 && TREE_OPERAND (rhs, 0) == lhs
5267 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5268 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5269 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5271 rtx label = gen_label_rtx ();
5273 do_jump (TREE_OPERAND (rhs, 1),
5274 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5275 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5276 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5277 (TREE_CODE (rhs) == BIT_IOR_EXPR
5279 : integer_zero_node)),
5281 do_pending_stack_adjust ();
5286 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5287 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5288 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5289 TYPE_NONCOPIED_PARTS (lhs_type));
5291 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5292 while (noncopied_parts != 0)
5294 expand_assignment (TREE_PURPOSE (noncopied_parts),
5295 TREE_VALUE (noncopied_parts), 0, 0);
5296 noncopied_parts = TREE_CHAIN (noncopied_parts);
5301 case PREINCREMENT_EXPR:
5302 case PREDECREMENT_EXPR:
5303 return expand_increment (exp, 0);
5305 case POSTINCREMENT_EXPR:
5306 case POSTDECREMENT_EXPR:
5307 /* Faster to treat as pre-increment if result is not used. */
5308 return expand_increment (exp, ! ignore);
5311 /* Are we taking the address of a nested function? */
5312 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5313 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5315 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5316 op0 = force_operand (op0, target);
5320 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5321 (modifier == EXPAND_INITIALIZER
5322 ? modifier : EXPAND_CONST_ADDRESS));
5324 /* We would like the object in memory. If it is a constant,
5325 we can have it be statically allocated into memory. For
5326 a non-constant (REG or SUBREG), we need to allocate some
5327 memory and store the value into it. */
5329 if (CONSTANT_P (op0))
5330 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5333 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
5335 /* If this object is in a register, it must be not
5337 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5338 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5340 = assign_stack_temp (inner_mode,
5341 int_size_in_bytes (inner_type), 1);
5343 emit_move_insn (memloc, op0);
5347 if (GET_CODE (op0) != MEM)
5350 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5351 return XEXP (op0, 0);
5352 op0 = force_operand (XEXP (op0, 0), target);
5354 if (flag_force_addr && GET_CODE (op0) != REG)
5355 return force_reg (Pmode, op0);
5358 case ENTRY_VALUE_EXPR:
5361 /* COMPLEX type for Extended Pascal & Fortran */
5364 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5368 /* Get the rtx code of the operands. */
5369 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5370 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5373 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5375 prev = get_last_insn ();
5377 /* Tell flow that the whole of the destination is being set. */
5378 if (GET_CODE (target) == REG)
5379 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5381 /* Move the real (op0) and imaginary (op1) parts to their location. */
5382 emit_move_insn (gen_realpart (mode, target), op0);
5383 emit_move_insn (gen_imagpart (mode, target), op1);
5385 /* Complex construction should appear as a single unit. */
5392 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5393 return gen_realpart (mode, op0);
5396 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5397 return gen_imagpart (mode, op0);
5401 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5405 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5408 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5410 prev = get_last_insn ();
5412 /* Tell flow that the whole of the destination is being set. */
5413 if (GET_CODE (target) == REG)
5414 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5416 /* Store the realpart and the negated imagpart to target. */
5417 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5419 imag_t = gen_imagpart (mode, target);
5420 temp = expand_unop (mode, neg_optab,
5421 gen_imagpart (mode, op0), imag_t, 0);
5423 emit_move_insn (imag_t, temp);
5425 /* Conjugate should appear as a single unit */
5432 op0 = CONST0_RTX (tmode);
5438 return (*lang_expand_expr) (exp, target, tmode, modifier);
5441 /* Here to do an ordinary binary operator, generating an instruction
5442 from the optab already placed in `this_optab'. */
5444 preexpand_calls (exp);
5445 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5447 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5448 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5450 temp = expand_binop (mode, this_optab, op0, op1, target,
5451 unsignedp, OPTAB_LIB_WIDEN);
5457 /* Return the alignment in bits of EXP, a pointer valued expression.
5458 But don't return more than MAX_ALIGN no matter what.
5459 The alignment returned is, by default, the alignment of the thing that
5460 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5462 Otherwise, look at the expression to see if we can do better, i.e., if the
5463 expression is actually pointing at an object whose alignment is tighter. */
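/* For example (illustrative): for the expression &d, where d is a
   double declared with DECL_ALIGN of 64, the ADDR_EXPR case below
   reports 64 bits even though the pointer type alone would only
   promise the alignment of its target type.  */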
5466 get_pointer_alignment (exp, max_align)
5470 unsigned align, inner;
5472 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5475 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5476 align = MIN (align, max_align);
5480 switch (TREE_CODE (exp))
5484 case NON_LVALUE_EXPR:
5485 exp = TREE_OPERAND (exp, 0);
5486 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5488 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5489 inner = MIN (inner, max_align);
5490 align = MAX (align, inner);
5494 /* If sum of pointer + int, restrict our maximum alignment to that
5495 imposed by the integer. If not, we can't do any better than
5497 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5500 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5505 exp = TREE_OPERAND (exp, 0);
5509 /* See what we are pointing at and look at its alignment. */
5510 exp = TREE_OPERAND (exp, 0);
5511 if (TREE_CODE (exp) == FUNCTION_DECL)
5512 align = MAX (align, FUNCTION_BOUNDARY);
5513 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5514 align = MAX (align, DECL_ALIGN (exp));
5515 #ifdef CONSTANT_ALIGNMENT
5516 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5517 align = CONSTANT_ALIGNMENT (exp, align);
5519 return MIN (align, max_align);
5527 /* Return the tree node and offset if a given argument corresponds to
5528 a string constant. */
5531 string_constant (arg, ptr_offset)
5537 if (TREE_CODE (arg) == ADDR_EXPR
5538 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5540 *ptr_offset = integer_zero_node;
5541 return TREE_OPERAND (arg, 0);
5543 else if (TREE_CODE (arg) == PLUS_EXPR)
5545 tree arg0 = TREE_OPERAND (arg, 0);
5546 tree arg1 = TREE_OPERAND (arg, 1);
5551 if (TREE_CODE (arg0) == ADDR_EXPR
5552 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5555 return TREE_OPERAND (arg0, 0);
5557 else if (TREE_CODE (arg1) == ADDR_EXPR
5558 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5561 return TREE_OPERAND (arg1, 0);
5568 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5569 way, because it could contain a zero byte in the middle.
5570 TREE_STRING_LENGTH is the size of the character array, not the string.
5572 Unfortunately, string_constant can't access the values of const char
5573 arrays with initializers, so neither can we do so here. */
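/* For example (illustrative): c_strlen of "hello" yields 5 and
   c_strlen of "hello" + 2 yields 3, but c_strlen of
   "foo\0bar" + i with non-constant i must fail, since the answer
   depends on where the search starts.  */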
5583 src = string_constant (src, &offset_node);
5586 max = TREE_STRING_LENGTH (src);
5587 ptr = TREE_STRING_POINTER (src);
5588 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5590 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5591 compute the offset to the following null if we don't know where to
5592 start searching for it. */
5594 for (i = 0; i < max; i++)
5597 /* We don't know the starting offset, but we do know that the string
5598 has no internal zero bytes. We can assume that the offset falls
5599 within the bounds of the string; otherwise, the programmer deserves
5600 what he gets. Subtract the offset from the length of the string,
5602 /* This would perhaps not be valid if we were dealing with named
5603 arrays in addition to literal string constants. */
5604 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5607 /* We have a known offset into the string. Start searching there for
5608 a null character. */
5609 if (offset_node == 0)
5613 /* Did we get a long long offset? If so, punt. */
5614 if (TREE_INT_CST_HIGH (offset_node) != 0)
5616 offset = TREE_INT_CST_LOW (offset_node);
5618 /* If the offset is known to be out of bounds, warn, and call strlen at
5620 if (offset < 0 || offset > max)
5622 warning ("offset outside bounds of constant string");
5625 /* Use strlen to search for the first zero byte. Since any strings
5626 constructed with build_string will have nulls appended, we win even
5627 if we get handed something like (char[4])"abcd".
5629 Since OFFSET is our starting index into the string, no further
5630 calculation is needed. */
5631 return size_int (strlen (ptr + offset));
5634 /* Expand an expression EXP that calls a built-in function,
5635 with result going to TARGET if that's convenient
5636 (and in mode MODE if that's convenient).
5637 SUBTARGET may be used as the target for computing one of EXP's operands.
5638 IGNORE is nonzero if the value is to be ignored. */
5641 expand_builtin (exp, target, subtarget, mode, ignore)
5645 enum machine_mode mode;
5648 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5649 tree arglist = TREE_OPERAND (exp, 1);
5652 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5653 optab builtin_optab;
5655 switch (DECL_FUNCTION_CODE (fndecl))
5660 /* build_function_call changes these into ABS_EXPR. */
5665 case BUILT_IN_FSQRT:
5666 /* If not optimizing, call the library function. */
5671 /* Arg could be wrong type if user redeclared this fcn wrong. */
5672 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5673 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5675 /* Stabilize and compute the argument. */
5676 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5677 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5679 exp = copy_node (exp);
5680 arglist = copy_node (arglist);
5681 TREE_OPERAND (exp, 1) = arglist;
5682 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5684 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5686 /* Make a suitable register to place result in. */
5687 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5692 switch (DECL_FUNCTION_CODE (fndecl))
5695 builtin_optab = sin_optab; break;
5697 builtin_optab = cos_optab; break;
5698 case BUILT_IN_FSQRT:
5699 builtin_optab = sqrt_optab; break;
5704 /* Compute into TARGET.
5705 Set TARGET to wherever the result comes back. */
5706 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5707 builtin_optab, op0, target, 0);
5709 /* If we were unable to expand via the builtin, stop the
5710 sequence (without outputting the insns) and break, causing
5711 a call to the library function. */
5718 /* Check the results by default. But if flag_fast_math is turned on,
5719 then assume sqrt will always be called with valid arguments. */
5721 if (! flag_fast_math)
5723 /* Don't define the builtin FP instructions
5724 if your machine is not IEEE. */
5725 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5728 lab1 = gen_label_rtx ();
5730 /* Test the result; if it is NaN, set errno=EDOM because
5731 the argument was not in the domain. */
5732 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5733 emit_jump_insn (gen_beq (lab1));
5737 #ifdef GEN_ERRNO_RTX
5738 rtx errno_rtx = GEN_ERRNO_RTX;
5741 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5744 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5747 /* We can't set errno=EDOM directly; let the library call do it.
5748 Pop the arguments right away in case the call gets deleted. */
5750 expand_call (exp, target, 0);
5757 /* Output the entire sequence. */
5758 insns = get_insns ();
5764 /* __builtin_apply_args returns block of memory allocated on
5765 the stack into which is stored the arg pointer, structure
5766 value address, static chain, and all the registers that might
5767 possibly be used in performing a function call. The code is
5768 moved to the start of the function so the incoming values are
5770 case BUILT_IN_APPLY_ARGS:
5771 /* Don't do __builtin_apply_args more than once in a function.
5772 Save the result of the first call and reuse it. */
5773 if (apply_args_value != 0)
5774 return apply_args_value;
5776 /* When this function is called, it means that registers must be
5777 saved on entry to this function. So we migrate the
5778 call to the first insn of this function. */
5783 temp = expand_builtin_apply_args ();
5787 apply_args_value = temp;
5789 /* Put the sequence after the NOTE that starts the function.
5790 If this is inside a SEQUENCE, make the outer-level insn
5791 chain current, so the code is placed at the start of the
5793 push_topmost_sequence ();
5794 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5795 pop_topmost_sequence ();
5799 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5800 FUNCTION with a copy of the parameters described by
5801 ARGUMENTS, and ARGSIZE. It returns a block of memory
5802 allocated on the stack into which is stored all the registers
5803 that might possibly be used for returning the result of a
5804 function. ARGUMENTS is the value returned by
5805 __builtin_apply_args. ARGSIZE is the number of bytes of
5806 arguments that must be copied. ??? How should this value be
5807 computed? We'll also need a safe worst case value for varargs
5809 case BUILT_IN_APPLY:
5811 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5812 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5813 || TREE_CHAIN (arglist) == 0
5814 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5815 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5816 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5824 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5825 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5827 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5830 /* __builtin_return (RESULT) causes the function to return the
5831 value described by RESULT. RESULT is address of the block of
5832 memory returned by __builtin_apply. */
5833 case BUILT_IN_RETURN:
5835 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5836 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5837 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5838 NULL_RTX, VOIDmode, 0));
5841 case BUILT_IN_SAVEREGS:
5842 /* Don't do __builtin_saveregs more than once in a function.
5843 Save the result of the first call and reuse it. */
5844 if (saveregs_value != 0)
5845 return saveregs_value;
5847 /* When this function is called, it means that registers must be
5848 saved on entry to this function. So we migrate the
5849 call to the first insn of this function. */
5852 rtx valreg, saved_valreg;
5854 /* Now really call the function. `expand_call' does not call
5855 expand_builtin, so there is no danger of infinite recursion here. */
5858 #ifdef EXPAND_BUILTIN_SAVEREGS
5859 /* Do whatever the machine needs done in this case. */
5860 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5862 /* The register where the function returns its value
5863 is likely to have something else in it, such as an argument.
5864 So preserve that register around the call. */
5865 if (value_mode != VOIDmode)
5867 valreg = hard_libcall_value (value_mode);
5868 saved_valreg = gen_reg_rtx (value_mode);
5869 emit_move_insn (saved_valreg, valreg);
5872 /* Generate the call, putting the value in a pseudo. */
5873 temp = expand_call (exp, target, ignore);
5875 if (value_mode != VOIDmode)
5876 emit_move_insn (valreg, saved_valreg);
5882 saveregs_value = temp;
5884 /* Put the sequence after the NOTE that starts the function.
5885 If this is inside a SEQUENCE, make the outer-level insn
5886 chain current, so the code is placed at the start of the
5888 push_topmost_sequence ();
5889 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5890 pop_topmost_sequence ();
5894 /* __builtin_args_info (N) returns word N of the arg space info
5895 for the current function. The number and meanings of words
5896 is controlled by the definition of CUMULATIVE_ARGS. */
5897 case BUILT_IN_ARGS_INFO:
5899 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5901 int *word_ptr = (int *) &current_function_args_info;
5902 tree type, elts, result;
5904 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5905 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5906 __FILE__, __LINE__);
5910 tree arg = TREE_VALUE (arglist);
5911 if (TREE_CODE (arg) != INTEGER_CST)
5912 error ("argument of `__builtin_args_info' must be constant");
5915 int wordnum = TREE_INT_CST_LOW (arg);
5917 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5918 error ("argument of `__builtin_args_info' out of range");
5920 return GEN_INT (word_ptr[wordnum]);
5924 error ("missing argument in `__builtin_args_info'");
5929 for (i = 0; i < nwords; i++)
5930 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
5932 type = build_array_type (integer_type_node,
5933 build_index_type (build_int_2 (nwords, 0)));
5934 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5935 TREE_CONSTANT (result) = 1;
5936 TREE_STATIC (result) = 1;
5937 result = build (INDIRECT_REF, build_pointer_type (type), result);
5938 TREE_CONSTANT (result) = 1;
5939 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5943 /* Return the address of the first anonymous stack arg. */
5944 case BUILT_IN_NEXT_ARG:
5946 tree fntype = TREE_TYPE (current_function_decl);
5947 if (!(TYPE_ARG_TYPES (fntype) != 0
5948 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5949 != void_type_node)))
5951 error ("`va_start' used in function with fixed args");
5956 return expand_binop (Pmode, add_optab,
5957 current_function_internal_arg_pointer,
5958 current_function_arg_offset_rtx,
5959 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5961 case BUILT_IN_CLASSIFY_TYPE:
5964 tree type = TREE_TYPE (TREE_VALUE (arglist));
5965 enum tree_code code = TREE_CODE (type);
5966 if (code == VOID_TYPE)
5967 return GEN_INT (void_type_class);
5968 if (code == INTEGER_TYPE)
5969 return GEN_INT (integer_type_class);
5970 if (code == CHAR_TYPE)
5971 return GEN_INT (char_type_class);
5972 if (code == ENUMERAL_TYPE)
5973 return GEN_INT (enumeral_type_class);
5974 if (code == BOOLEAN_TYPE)
5975 return GEN_INT (boolean_type_class);
5976 if (code == POINTER_TYPE)
5977 return GEN_INT (pointer_type_class);
5978 if (code == REFERENCE_TYPE)
5979 return GEN_INT (reference_type_class);
5980 if (code == OFFSET_TYPE)
5981 return GEN_INT (offset_type_class);
5982 if (code == REAL_TYPE)
5983 return GEN_INT (real_type_class);
5984 if (code == COMPLEX_TYPE)
5985 return GEN_INT (complex_type_class);
5986 if (code == FUNCTION_TYPE)
5987 return GEN_INT (function_type_class);
5988 if (code == METHOD_TYPE)
5989 return GEN_INT (method_type_class);
5990 if (code == RECORD_TYPE)
5991 return GEN_INT (record_type_class);
5992 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
5993 return GEN_INT (union_type_class);
5994 if (code == ARRAY_TYPE)
5995 return GEN_INT (array_type_class);
5996 if (code == STRING_TYPE)
5997 return GEN_INT (string_type_class);
5998 if (code == SET_TYPE)
5999 return GEN_INT (set_type_class);
6000 if (code == FILE_TYPE)
6001 return GEN_INT (file_type_class);
6002 if (code == LANG_TYPE)
6003 return GEN_INT (lang_type_class);
6005 return GEN_INT (no_type_class);
6007 case BUILT_IN_CONSTANT_P:
6011 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6012 ? const1_rtx : const0_rtx);
6014 case BUILT_IN_FRAME_ADDRESS:
6015 /* The argument must be a nonnegative integer constant.
6016 It counts the number of frames to scan up the stack.
6017 The value is the address of that frame. */
6018 case BUILT_IN_RETURN_ADDRESS:
6019 /* The argument must be a nonnegative integer constant.
6020 It counts the number of frames to scan up the stack.
6021 The value is the return address saved in that frame. */
6023 /* Warning about missing arg was already issued. */
6025 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6027 error ("invalid arg to `__builtin_return_address'");
6030 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6032 error ("invalid arg to `__builtin_return_address'");
6037 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6038 rtx tem = frame_pointer_rtx;
6041 /* Some machines need special handling before we can access arbitrary
6042 frames. For example, on the sparc, we must first flush all
6043 register windows to the stack. */
6044 #ifdef SETUP_FRAME_ADDRESSES
6045 SETUP_FRAME_ADDRESSES ();
6048 /* On the sparc, the return address is not in the frame, it is
6049 in a register. There is no way to access it off of the current
6050 frame pointer, but it can be accessed off the previous frame
6051 pointer by reading the value from the register window save
6053 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6054 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6058 /* Scan back COUNT frames to the specified frame. */
6059 for (i = 0; i < count; i++)
6061 /* Assume the dynamic chain pointer is in the word that
6062 the frame address points to, unless otherwise specified. */
6063 #ifdef DYNAMIC_CHAIN_ADDRESS
6064 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6066 tem = memory_address (Pmode, tem);
6067 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6070 /* For __builtin_frame_address, return what we've got. */
6071 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6074 /* For __builtin_return_address,
6075 Get the return address from that frame. */
6076 #ifdef RETURN_ADDR_RTX
6077 return RETURN_ADDR_RTX (count, tem);
6079 tem = memory_address (Pmode,
6080 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6081 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6085 case BUILT_IN_ALLOCA:
6087 /* Arg could be non-integer if user redeclared this fcn wrong. */
6088 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6090 current_function_calls_alloca = 1;
6091 /* Compute the argument. */
6092 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6094 /* Allocate the desired space. */
6095 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6097 /* Record the new stack level for nonlocal gotos. */
6098 if (nonlocal_goto_handler_slot != 0)
6099 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6103 /* If not optimizing, call the library function. */
6108 /* Arg could be non-integer if user redeclared this fcn wrong. */
6109 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6112 /* Compute the argument. */
6113 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6114 /* Compute ffs, into TARGET if possible.
6115 Set TARGET to wherever the result comes back. */
6116 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6117 ffs_optab, op0, target, 1);
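/* As a reminder (illustrative): ffs returns one plus the index of the
   least significant set bit, or zero for a zero argument, so
   ffs (0) == 0, ffs (1) == 1, and ffs (12) == 3. */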
6122 case BUILT_IN_STRLEN:
6123 /* If not optimizing, call the library function. */
6128 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6129 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6133 tree src = TREE_VALUE (arglist);
6134 tree len = c_strlen (src);
6137 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6139 rtx result, src_rtx, char_rtx;
6140 enum machine_mode insn_mode = value_mode, char_mode;
6141 enum insn_code icode;
6143 /* If the length is known, just return it. */
6145 return expand_expr (len, target, mode, 0);
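/* E.g. strlen ("hello") reaches this point with LEN being the
   INTEGER_CST 5, so the call is replaced by the constant and no code
   is emitted for it at all (illustrative, assuming c_strlen could
   fold the argument). */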
6147 /* If SRC is not a pointer type, don't do this operation inline. */
6151 /* Call a function if we can't compute strlen in the right mode. */
6153 while (insn_mode != VOIDmode)
6155 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6156 if (icode != CODE_FOR_nothing)
6159 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6161 if (insn_mode == VOIDmode)
6164 /* Make a place to write the result of the instruction. */
6167 && GET_CODE (result) == REG
6168 && GET_MODE (result) == insn_mode
6169 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6170 result = gen_reg_rtx (insn_mode);
6172 /* Make sure the operands are acceptable to the predicates. */
6174 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6175 result = gen_reg_rtx (insn_mode);
6177 src_rtx = memory_address (BLKmode,
6178 expand_expr (src, NULL_RTX, Pmode,
6180 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6181 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6183 char_rtx = const0_rtx;
6184 char_mode = insn_operand_mode[(int)icode][2];
6185 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6186 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6188 emit_insn (GEN_FCN (icode) (result,
6189 gen_rtx (MEM, BLKmode, src_rtx),
6190 char_rtx, GEN_INT (align)));
6192 /* Return the value in the proper mode for this function. */
6193 if (GET_MODE (result) == value_mode)
6195 else if (target != 0)
6197 convert_move (target, result, 0);
6201 return convert_to_mode (value_mode, result, 0);
6204 case BUILT_IN_STRCPY:
6205 /* If not optimizing, call the library function. */
6210 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6211 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6212 || TREE_CHAIN (arglist) == 0
6213 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6217 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6222 len = size_binop (PLUS_EXPR, len, integer_one_node);
6224 chainon (arglist, build_tree_list (NULL_TREE, len));
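/* E.g. strcpy (d, "abc") is handled here as memcpy (d, "abc", 4): the
   known source length 3 plus one byte for the terminating null
   (illustrative; this applies only when c_strlen can compute the
   length, otherwise the library strcpy is called). */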
6228 case BUILT_IN_MEMCPY:
6229 /* If not optimizing, call the library function. */
6234 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6235 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6236 || TREE_CHAIN (arglist) == 0
6237 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6238 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6239 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6243 tree dest = TREE_VALUE (arglist);
6244 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6245 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6248 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6250 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6251 rtx dest_rtx, dest_mem, src_mem;
6253 /* If either SRC or DEST is not a pointer type, don't do
6254 this operation in-line. */
6255 if (src_align == 0 || dest_align == 0)
6257 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6258 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6262 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6263 dest_mem = gen_rtx (MEM, BLKmode,
6264 memory_address (BLKmode, dest_rtx));
6265 src_mem = gen_rtx (MEM, BLKmode,
6266 memory_address (BLKmode,
6267 expand_expr (src, NULL_RTX,
6271 /* Copy word part most expediently. */
6272 emit_block_move (dest_mem, src_mem,
6273 expand_expr (len, NULL_RTX, VOIDmode, 0),
6274 MIN (src_align, dest_align));
6278 /* These comparison functions need an instruction that returns an actual
6279 index. An ordinary compare that just sets the condition codes is not enough. */
6281 #ifdef HAVE_cmpstrsi
6282 case BUILT_IN_STRCMP:
6283 /* If not optimizing, call the library function. */
6288 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6289 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6290 || TREE_CHAIN (arglist) == 0
6291 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6293 else if (!HAVE_cmpstrsi)
6296 tree arg1 = TREE_VALUE (arglist);
6297 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6301 len = c_strlen (arg1);
6303 len = size_binop (PLUS_EXPR, integer_one_node, len);
6304 len2 = c_strlen (arg2);
6306 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6308 /* If we don't have a constant length for the first, use the length
6309 of the second, if we know it. We don't require a constant for
6310 this case; some cost analysis could be done if both are available
6311 but neither is constant. For now, assume they're equally cheap.
6313 If both strings have constant lengths, use the smaller. This
6314 could arise if optimization results in strcmp being called with
6315 two fixed strings, or if the code was machine-generated. We should
6316 add some code to the `memcmp' handler below to deal with such
6317 situations, someday. */
6318 if (!len || TREE_CODE (len) != INTEGER_CST)
6325 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6327 if (tree_int_cst_lt (len2, len))
6331 chainon (arglist, build_tree_list (NULL_TREE, len));
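/* E.g. strcmp ("abcd", "xy") would use LEN = 3 here, the length of the
   shorter string plus its null, since the comparison cannot meaningfully
   continue past the first terminating null (illustrative). */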
6335 case BUILT_IN_MEMCMP:
6336 /* If not optimizing, call the library function. */
6341 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6342 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6343 || TREE_CHAIN (arglist) == 0
6344 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6345 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6346 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6348 else if (!HAVE_cmpstrsi)
6351 tree arg1 = TREE_VALUE (arglist);
6352 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6353 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6357 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6359 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6360 enum machine_mode insn_mode
6361 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6363 /* If either arg is not a pointer, call the library function. */
6364 if (arg1_align == 0 || arg2_align == 0)
6366 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6367 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6371 /* Make a place to write the result of the instruction. */
6374 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6375 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6376 result = gen_reg_rtx (insn_mode);
6378 emit_insn (gen_cmpstrsi (result,
6379 gen_rtx (MEM, BLKmode,
6380 expand_expr (arg1, NULL_RTX, Pmode,
6382 gen_rtx (MEM, BLKmode,
6383 expand_expr (arg2, NULL_RTX, Pmode,
6385 expand_expr (len, NULL_RTX, VOIDmode, 0),
6386 GEN_INT (MIN (arg1_align, arg2_align))));
6388 /* Return the value in the proper mode for this function. */
6389 mode = TYPE_MODE (TREE_TYPE (exp));
6390 if (GET_MODE (result) == mode)
6392 else if (target != 0)
6394 convert_move (target, result, 0);
6398 return convert_to_mode (mode, result, 0);
6401 case BUILT_IN_STRCMP:
6402 case BUILT_IN_MEMCMP:
6406 default: /* just do library call, if unknown builtin */
6407 error ("built-in function `%s' not currently supported",
6408 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6411 /* The switch statement above can drop through to cause the function
6412 to be called normally. */
6414 return expand_call (exp, target, ignore);
6417 /* Built-in functions to perform an untyped call and return. */
6419 /* For each register that may be used for calling a function, this
6420 gives a mode used to copy the register's value. VOIDmode indicates
6421 the register is not used for calling a function. If the machine
6422 has register windows, this gives only the outbound registers.
6423 INCOMING_REGNO gives the corresponding inbound register. */
6424 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6426 /* For each register that may be used for returning values, this gives
6427 a mode used to copy the register's value. VOIDmode indicates the
6428 register is not used for returning values. If the machine has
6429 register windows, this gives only the outbound registers.
6430 INCOMING_REGNO gives the corresponding inbound register. */
6431 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6433 /* Return the size required for the block returned by __builtin_apply_args,
6434 and initialize apply_args_mode. */
6438 static int size = -1;
6440 enum machine_mode mode;
6442 /* The values computed by this function never change. */
6445 /* The first value is the incoming arg-pointer. */
6446 size = GET_MODE_SIZE (Pmode);
6448 /* The second value is the structure value address unless this is
6449 passed as an "invisible" first argument. */
6450 if (struct_value_rtx)
6451 size += GET_MODE_SIZE (Pmode);
6453 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6454 if (FUNCTION_ARG_REGNO_P (regno))
6456 /* Search for the proper mode for copying this register's
6457 value. I'm not sure this is right, but it works so far. */
6458 enum machine_mode best_mode = VOIDmode;
6460 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6462 mode = GET_MODE_WIDER_MODE (mode))
6463 if (HARD_REGNO_MODE_OK (regno, mode)
6464 && HARD_REGNO_NREGS (regno, mode) == 1)
6467 if (best_mode == VOIDmode)
6468 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6470 mode = GET_MODE_WIDER_MODE (mode))
6471 if (HARD_REGNO_MODE_OK (regno, mode)
6472 && (mov_optab->handlers[(int) mode].insn_code
6473 != CODE_FOR_nothing))
6477 if (mode == VOIDmode)
6480 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6481 if (size % align != 0)
6482 size = CEIL (size, align) * align;
6483 size += GET_MODE_SIZE (mode);
6484 apply_args_mode[regno] = mode;
6487 apply_args_mode[regno] = VOIDmode;
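/* A hypothetical layout, purely for illustration: on a machine where
   Pmode is 4 bytes, with a structure value address and two 4-byte
   argument registers, the block would hold

	bytes  0-3	incoming arg pointer
	bytes  4-7	structure value address
	bytes  8-11	first argument register
	bytes 12-15	second argument register

   for a total size of 16 (assumed figures, not a real target). */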
6492 /* Return the size required for the block returned by __builtin_apply,
6493 and initialize apply_result_mode. */
6495 apply_result_size ()
6497 static int size = -1;
6499 enum machine_mode mode;
6501 /* The values computed by this function never change. */
6506 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6507 if (FUNCTION_VALUE_REGNO_P (regno))
6509 /* Search for the proper mode for copying this register's
6510 value. I'm not sure this is right, but it works so far. */
6511 enum machine_mode best_mode = VOIDmode;
6513 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6515 mode = GET_MODE_WIDER_MODE (mode))
6516 if (HARD_REGNO_MODE_OK (regno, mode))
6519 if (best_mode == VOIDmode)
6520 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6522 mode = GET_MODE_WIDER_MODE (mode))
6523 if (HARD_REGNO_MODE_OK (regno, mode)
6524 && (mov_optab->handlers[(int) mode].insn_code
6525 != CODE_FOR_nothing))
6529 if (mode == VOIDmode)
6532 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6533 if (size % align != 0)
6534 size = CEIL (size, align) * align;
6535 size += GET_MODE_SIZE (mode);
6536 apply_result_mode[regno] = mode;
6539 apply_result_mode[regno] = VOIDmode;
6541 /* Allow targets that use untyped_call and untyped_return to override
6542 the size so that machine-specific information can be stored here. */
6543 #ifdef APPLY_RESULT_SIZE
6544 size = APPLY_RESULT_SIZE;
6550 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6551 /* Create a vector describing the result block RESULT. If SAVEP is true,
6552 the result block is used to save the values; otherwise it is used to
6553 restore the values. */
6555 result_vector (savep, result)
6559 int regno, size, align, nelts;
6560 enum machine_mode mode;
6562 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6565 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6566 if ((mode = apply_result_mode[regno]) != VOIDmode)
6568 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6569 if (size % align != 0)
6570 size = CEIL (size, align) * align;
6571 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6572 mem = change_address (result, mode,
6573 plus_constant (XEXP (result, 0), size));
6574 savevec[nelts++] = (savep
6575 ? gen_rtx (SET, VOIDmode, mem, reg)
6576 : gen_rtx (SET, VOIDmode, reg, mem));
6577 size += GET_MODE_SIZE (mode);
6579 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
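/* Illustration (a sketch, not from any particular target): if SImode
   register 0 were the only live value register, SAVEP nonzero would
   produce a PARALLEL whose single element is
	(set (mem:SI <slot in RESULT>) (reg:SI 0))
   and SAVEP zero would produce the SET in the opposite direction. */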
6581 #endif /* HAVE_untyped_call or HAVE_untyped_return */
6584 /* Save the state required to perform an untyped call with the same
6585 arguments as were passed to the current function. */
6587 expand_builtin_apply_args ()
6590 int size, align, regno;
6591 enum machine_mode mode;
6593 /* Create a block where the arg-pointer, structure value address,
6594 and argument registers can be saved. */
6595 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6597 /* Walk past the arg-pointer and structure value address. */
6598 size = GET_MODE_SIZE (Pmode);
6599 if (struct_value_rtx)
6600 size += GET_MODE_SIZE (Pmode);
6602 /* Save each register used in calling a function to the block. */
6603 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6604 if ((mode = apply_args_mode[regno]) != VOIDmode)
6606 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6607 if (size % align != 0)
6608 size = CEIL (size, align) * align;
6609 emit_move_insn (change_address (registers, mode,
6610 plus_constant (XEXP (registers, 0),
6612 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6613 size += GET_MODE_SIZE (mode);
6616 /* Save the arg pointer to the block. */
6617 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6618 copy_to_reg (virtual_incoming_args_rtx));
6619 size = GET_MODE_SIZE (Pmode);
6621 /* Save the structure value address unless this is passed as an
6622 "invisible" first argument. */
6623 if (struct_value_incoming_rtx)
6625 emit_move_insn (change_address (registers, Pmode,
6626 plus_constant (XEXP (registers, 0),
6628 copy_to_reg (struct_value_incoming_rtx));
6629 size += GET_MODE_SIZE (Pmode);
6632 /* Return the address of the block. */
6633 return copy_addr_to_reg (XEXP (registers, 0));
6636 /* Perform an untyped call and save the state required to perform an
6637 untyped return of whatever value was returned by the given function. */
6639 expand_builtin_apply (function, arguments, argsize)
6640 rtx function, arguments, argsize;
6642 int size, align, regno;
6643 enum machine_mode mode;
6644 rtx incoming_args, result, reg, dest, call_insn;
6645 rtx old_stack_level = 0;
6648 /* Create a block where the return registers can be saved. */
6649 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6651 /* ??? The argsize value should be adjusted here. */
6653 /* Fetch the arg pointer from the ARGUMENTS block. */
6654 incoming_args = gen_reg_rtx (Pmode);
6655 emit_move_insn (incoming_args,
6656 gen_rtx (MEM, Pmode, arguments));
6657 #ifndef STACK_GROWS_DOWNWARD
6658 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6659 incoming_args, 0, OPTAB_LIB_WIDEN);
6662 /* Perform postincrements before actually calling the function. */
6665 /* Push a new argument block and copy the arguments. */
6666 do_pending_stack_adjust ();
6667 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6669 /* Push a block of memory onto the stack to store the memory arguments.
6670 Save the address in a register, and copy the memory arguments. ??? I
6671 haven't figured out how the calling convention macros affect this,
6672 but it's likely that the source and/or destination addresses in
6673 the block copy will need updating in machine-specific ways. */
6674 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6675 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6676 gen_rtx (MEM, BLKmode, incoming_args),
6678 PARM_BOUNDARY / BITS_PER_UNIT);
6680 /* Refer to the argument block. */
6682 arguments = gen_rtx (MEM, BLKmode, arguments);
6684 /* Walk past the arg-pointer and structure value address. */
6685 size = GET_MODE_SIZE (Pmode);
6686 if (struct_value_rtx)
6687 size += GET_MODE_SIZE (Pmode);
6689 /* Restore each of the registers previously saved. Make USE insns
6690 for each of these registers for use in making the call. */
6691 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6692 if ((mode = apply_args_mode[regno]) != VOIDmode)
6694 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6695 if (size % align != 0)
6696 size = CEIL (size, align) * align;
6697 reg = gen_rtx (REG, mode, regno);
6698 emit_move_insn (reg,
6699 change_address (arguments, mode,
6700 plus_constant (XEXP (arguments, 0),
6703 push_to_sequence (use_insns);
6704 emit_insn (gen_rtx (USE, VOIDmode, reg));
6705 use_insns = get_insns ();
6707 size += GET_MODE_SIZE (mode);
6710 /* Restore the structure value address unless this is passed as an
6711 "invisible" first argument. */
6712 size = GET_MODE_SIZE (Pmode);
6713 if (struct_value_rtx)
6715 rtx value = gen_reg_rtx (Pmode);
6716 emit_move_insn (value,
6717 change_address (arguments, Pmode,
6718 plus_constant (XEXP (arguments, 0),
6720 emit_move_insn (struct_value_rtx, value);
6721 if (GET_CODE (struct_value_rtx) == REG)
6723 push_to_sequence (use_insns);
6724 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6725 use_insns = get_insns ();
6728 size += GET_MODE_SIZE (Pmode);
6731 /* All arguments and registers used for the call are set up by now! */
6732 function = prepare_call_address (function, NULL_TREE, &use_insns);
6734 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there
6735 is no need, and we don't want to load it into a register as an optimization,
6736 because prepare_call_address already did that if it should be done. */
6737 if (GET_CODE (function) != SYMBOL_REF)
6738 function = memory_address (FUNCTION_MODE, function);
6740 /* Generate the actual call instruction and save the return value. */
6741 #ifdef HAVE_untyped_call
6742 if (HAVE_untyped_call)
6743 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6744 result, result_vector (1, result)));
6747 #ifdef HAVE_call_value
6748 if (HAVE_call_value)
6752 /* Locate the unique return register. It is not possible to
6753 express a call that sets more than one return register using
6754 call_value; use untyped_call for that. In fact, untyped_call
6755 only needs to save the return registers in the given block. */
6756 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6757 if ((mode = apply_result_mode[regno]) != VOIDmode)
6760 abort (); /* HAVE_untyped_call required. */
6761 valreg = gen_rtx (REG, mode, regno);
6764 emit_call_insn (gen_call_value (valreg,
6765 gen_rtx (MEM, FUNCTION_MODE, function),
6766 const0_rtx, NULL_RTX, const0_rtx));
6768 emit_move_insn (change_address (result, GET_MODE (valreg),
6776 /* Find the CALL insn we just emitted and write the USE insns before it. */
6777 for (call_insn = get_last_insn ();
6778 call_insn && GET_CODE (call_insn) != CALL_INSN;
6779 call_insn = PREV_INSN (call_insn))
6785 /* Put the USE insns before the CALL. */
6786 emit_insns_before (use_insns, call_insn);
6788 /* Restore the stack. */
6789 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6791 /* Return the address of the result block. */
6792 return copy_addr_to_reg (XEXP (result, 0));
6795 /* Perform an untyped return. */
6797 expand_builtin_return (result)
6800 int size, align, regno;
6801 enum machine_mode mode;
6805 apply_result_size ();
6806 result = gen_rtx (MEM, BLKmode, result);
6808 #ifdef HAVE_untyped_return
6809 if (HAVE_untyped_return)
6811 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6817 /* Restore the return value and note that each value is used. */
6819 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6820 if ((mode = apply_result_mode[regno]) != VOIDmode)
6822 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6823 if (size % align != 0)
6824 size = CEIL (size, align) * align;
6825 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6826 emit_move_insn (reg,
6827 change_address (result, mode,
6828 plus_constant (XEXP (result, 0),
6831 push_to_sequence (use_insns);
6832 emit_insn (gen_rtx (USE, VOIDmode, reg));
6833 use_insns = get_insns ();
6835 size += GET_MODE_SIZE (mode);
6838 /* Put the USE insns before the return. */
6839 emit_insns (use_insns);
6841 /* Return whatever value was restored by jumping directly to the end of the function. */
6843 expand_null_return ();
6846 /* Expand code for a post- or pre- increment or decrement
6847 and return the RTX for the result.
6848 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
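/* For example (illustrative): for `i++' (POST is 1) the caller receives
   a copy of the old value of I, while for `++i' the incremented value
   itself is returned; in both cases the new value ends up stored back
   into I by the code below. */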
6851 expand_increment (exp, post)
6855 register rtx op0, op1;
6856 register rtx temp, value;
6857 register tree incremented = TREE_OPERAND (exp, 0);
6858 optab this_optab = add_optab;
6860 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6861 int op0_is_copy = 0;
6862 int single_insn = 0;
6864 /* Stabilize any component ref that might need to be
6865 evaluated more than once below. */
6866 if (!post
6867 || TREE_CODE (incremented) == BIT_FIELD_REF
6868 || (TREE_CODE (incremented) == COMPONENT_REF
6869 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6870 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6871 incremented = stabilize_reference (incremented);
6873 /* Compute the operands as RTX.
6874 Note whether OP0 is the actual lvalue or a copy of it:
6875 I believe it is a copy iff it is a register or subreg
6876 and insns were generated in computing it. */
6878 temp = get_last_insn ();
6879 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6881 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6882 in place but instead must do sign- or zero-extension during assignment,
6883 so we copy it into a new register and let the code below use it as a copy.
6886 Note that we can safely modify this SUBREG since it is known not to be
6887 shared (it was made by the expand_expr call above). */
6889 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6890 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6892 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6893 && temp != get_last_insn ());
6894 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6896 /* Decide whether incrementing or decrementing. */
6897 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6898 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6899 this_optab = sub_optab;
6901 /* For a preincrement, see if we can do this with a single instruction. */
6904 icode = (int) this_optab->handlers[(int) mode].insn_code;
6905 if (icode != (int) CODE_FOR_nothing
6906 /* Make sure that OP0 is valid for operands 0 and 1
6907 of the insn we want to queue. */
6908 && (*insn_operand_predicate[icode][0]) (op0, mode)
6909 && (*insn_operand_predicate[icode][1]) (op0, mode)
6910 && (*insn_operand_predicate[icode][2]) (op1, mode))
6914 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6915 then we cannot just increment OP0. We must therefore contrive to
6916 increment the original value. Then, for postincrement, we can return
6917 OP0 since it is a copy of the old value. For preincrement, expand here
6918 unless we can do it with a single insn. */
6919 if (op0_is_copy || (!post && !single_insn))
6921 /* This is the easiest way to increment the value wherever it is.
6922 Problems with multiple evaluation of INCREMENTED are prevented
6923 because either (1) it is a component_ref or preincrement,
6924 in which case it was stabilized above, or (2) it is an array_ref
6925 with constant index in an array in a register, which is
6926 safe to reevaluate. */
6927 tree newexp = build ((this_optab == add_optab
6928 ? PLUS_EXPR : MINUS_EXPR),
6931 TREE_OPERAND (exp, 1));
6932 temp = expand_assignment (incremented, newexp, ! post, 0);
6933 return post ? op0 : temp;
6936 /* Convert decrement by a constant into a negative increment. */
6937 if (this_optab == sub_optab
6938 && GET_CODE (op1) == CONST_INT)
6940 op1 = GEN_INT (- INTVAL (op1));
6941 this_optab = add_optab;
6946 /* We have a true reference to the value in OP0.
6947 If there is an insn to add or subtract in this mode, queue it. */
6949 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6950 op0 = stabilize (op0);
6953 icode = (int) this_optab->handlers[(int) mode].insn_code;
6954 if (icode != (int) CODE_FOR_nothing
6955 /* Make sure that OP0 is valid for operands 0 and 1
6956 of the insn we want to queue. */
6957 && (*insn_operand_predicate[icode][0]) (op0, mode)
6958 && (*insn_operand_predicate[icode][1]) (op0, mode))
6960 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6961 op1 = force_reg (mode, op1);
6963 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6967 /* Preincrement, or we can't increment with one simple insn. */
6968 if (post)
6969 /* Save a copy of the value before inc or dec, to return it later. */
6970 temp = value = copy_to_reg (op0);
6971 else
6972 /* Arrange to return the incremented value. */
6973 /* Copy the rtx because expand_binop will protect from the queue,
6974 and the results of that would be invalid for us to return
6975 if our caller does emit_queue before using our result. */
6976 temp = copy_rtx (value = op0);
6978 /* Increment however we can. */
6979 op1 = expand_binop (mode, this_optab, value, op1, op0,
6980 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6981 /* Make sure the value is stored into OP0. */
6982 if (op1 != op0)
6983 emit_move_insn (op0, op1);
6988 /* Expand all function calls contained within EXP, innermost ones first.
6989 But don't look within expressions that have sequence points.
6990 For each CALL_EXPR, record the rtx for its value
6991 in the CALL_EXPR_RTL field. */
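/* E.g. in `f (g (x), h (y))', the calls to G and H are expanded here
   first, so that their stack adjustments are settled before any of F's
   arguments are pushed (illustrative). */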
6994 preexpand_calls (exp)
6997 register int nops, i;
6998 int type = TREE_CODE_CLASS (TREE_CODE (exp));
7000 if (! do_preexpand_calls)
7003 /* Only expressions and references can contain calls. */
7005 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
7008 switch (TREE_CODE (exp))
7011 /* Do nothing if already expanded. */
7012 if (CALL_EXPR_RTL (exp) != 0)
7015 /* Do nothing to built-in functions. */
7016 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
7017 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
7018 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7019 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
7024 case TRUTH_ANDIF_EXPR:
7025 case TRUTH_ORIF_EXPR:
7026 /* If we find one of these, then we can be sure
7027 the adjust will be done for it (since it makes jumps).
7028 Do it now, so that if this is inside an argument
7029 of a function, we don't get the stack adjustment
7030 after some other args have already been pushed. */
7031 do_pending_stack_adjust ();
7036 case WITH_CLEANUP_EXPR:
7040 if (SAVE_EXPR_RTL (exp) != 0)
7044 nops = tree_code_length[(int) TREE_CODE (exp)];
7045 for (i = 0; i < nops; i++)
7046 if (TREE_OPERAND (exp, i) != 0)
7048 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
7049 if (type == 'e' || type == '<' || type == '1' || type == '2'
7051 preexpand_calls (TREE_OPERAND (exp, i));
7055 /* At the start of a function, record that we have no previously-pushed
7056 arguments waiting to be popped. */
7059 init_pending_stack_adjust ()
7061 pending_stack_adjust = 0;
7064 /* When exiting from a function, if safe, clear out any pending stack adjust
7065 so the adjustment won't get done. */
7068 clear_pending_stack_adjust ()
7070 #ifdef EXIT_IGNORE_STACK
7071 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
7072 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
7073 && ! flag_inline_functions)
7074 pending_stack_adjust = 0;
7078 /* Pop any previously-pushed arguments that have not been popped yet. */
7081 do_pending_stack_adjust ()
7083 if (inhibit_defer_pop == 0)
7085 if (pending_stack_adjust != 0)
7086 adjust_stack (GEN_INT (pending_stack_adjust));
7087 pending_stack_adjust = 0;
7091 /* Expand all cleanups up to OLD_CLEANUPS.
7092 Needed here, and also for language-dependent calls. */
7095 expand_cleanups_to (old_cleanups)
7098 while (cleanups_this_call != old_cleanups)
7100 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
7101 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
7105 /* Expand conditional expressions. */
7107 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
7108 LABEL is an rtx of code CODE_LABEL, in this function and all the
7112 jumpifnot (exp, label)
7116 do_jump (exp, label, NULL_RTX);
7119 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7126 do_jump (exp, NULL_RTX, label);
7129 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7130 the result is zero, or IF_TRUE_LABEL if the result is one.
7131 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7132 meaning fall through in that case.
7134 do_jump always does any pending stack adjust except when it does not
7135 actually perform a jump. An example where there is no jump
7136 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7138 This function is responsible for optimizing cases such as
7139 &&, || and comparison operators in EXP. */
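/* As an illustration, for `if (a && b) ...' this emits roughly

	jump to the false label if A is zero
	jump to the false label if B is zero
	... fall through to the THEN-code ...

   without ever materializing the boolean value of `a && b'. */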
7142 do_jump (exp, if_false_label, if_true_label)
7144 rtx if_false_label, if_true_label;
7146 register enum tree_code code = TREE_CODE (exp);
7147 /* Some cases need to create a label to jump to
7148 in order to properly fall through.
7149 These cases set DROP_THROUGH_LABEL nonzero. */
7150 rtx drop_through_label = 0;
7164 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7170 /* This is not true with #pragma weak */
7172 /* The address of something can never be zero. */
7174 emit_jump (if_true_label);
7179 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7180 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7181 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7184 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
7186 if ((TYPE_PRECISION (TREE_TYPE (exp))
7187 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7189 case NON_LVALUE_EXPR:
7190 case REFERENCE_EXPR:
7195 /* These cannot change zero->non-zero or vice versa. */
7196 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7200 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
7201 a test and can be longer if the test is eliminated. */
7203 /* Reduce to minus. */
7204 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7205 TREE_OPERAND (exp, 0),
7206 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7207 TREE_OPERAND (exp, 1))));
7208 /* Process as MINUS. */
7212 /* Non-zero iff operands of minus differ. */
7213 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7214 TREE_OPERAND (exp, 0),
7215 TREE_OPERAND (exp, 1)),
7220 /* If we are AND'ing with a small constant, do this comparison in the
7221 smallest type that fits. If the machine doesn't have comparisons
7222 that small, it will be converted back to the wider comparison.
7223 This helps if we are testing the sign bit of a narrower object.
7224 combine can't do this for us because it can't know whether a
7225 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7227 if (! SLOW_BYTE_ACCESS
7228 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7229 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7230 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7231 && (type = type_for_size (i + 1, 1)) != 0
7232 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7233 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7234 != CODE_FOR_nothing))
7236 do_jump (convert (type, exp), if_false_label, if_true_label);
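/* E.g. a test of `x & 0x80' on a 32-bit X can be narrowed to an 8-bit
   comparison here, since floor_log2 (0x80) is 7 and an 8-bit unsigned
   type covers the mask (illustrative). */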
7241 case TRUTH_NOT_EXPR:
7242 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7245 case TRUTH_ANDIF_EXPR:
7246 if (if_false_label == 0)
7247 if_false_label = drop_through_label = gen_label_rtx ();
7248 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7249 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7252 case TRUTH_ORIF_EXPR:
7253 if (if_true_label == 0)
7254 if_true_label = drop_through_label = gen_label_rtx ();
7255 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7256 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7260 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7263 do_pending_stack_adjust ();
7264 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7271 int bitsize, bitpos, unsignedp;
7272 enum machine_mode mode;
7277 /* Get description of this reference. We don't actually care
7278 about the underlying object here. */
7279 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7280 &mode, &unsignedp, &volatilep);
7282 type = type_for_size (bitsize, unsignedp);
7283 if (! SLOW_BYTE_ACCESS
7284 && type != 0 && bitsize >= 0
7285 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7286 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7287 != CODE_FOR_nothing))
7289 do_jump (convert (type, exp), if_false_label, if_true_label);
7296 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7297 if (integer_onep (TREE_OPERAND (exp, 1))
7298 && integer_zerop (TREE_OPERAND (exp, 2)))
7299 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7301 else if (integer_zerop (TREE_OPERAND (exp, 1))
7302 && integer_onep (TREE_OPERAND (exp, 2)))
7303 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7307 register rtx label1 = gen_label_rtx ();
7308 drop_through_label = gen_label_rtx ();
7309 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7310 /* Now the THEN-expression. */
7311 do_jump (TREE_OPERAND (exp, 1),
7312 if_false_label ? if_false_label : drop_through_label,
7313 if_true_label ? if_true_label : drop_through_label);
7314 /* In case the do_jump just above never jumps. */
7315 do_pending_stack_adjust ();
7316 emit_label (label1);
7317 /* Now the ELSE-expression. */
7318 do_jump (TREE_OPERAND (exp, 2),
7319 if_false_label ? if_false_label : drop_through_label,
7320 if_true_label ? if_true_label : drop_through_label);
7325 if (integer_zerop (TREE_OPERAND (exp, 1)))
7326 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7327 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7330 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7331 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7333 comparison = compare (exp, EQ, EQ);
7337 if (integer_zerop (TREE_OPERAND (exp, 1)))
7338 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7339 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7342 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7343 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7345 comparison = compare (exp, NE, NE);
7349 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7351 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7352 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7354 comparison = compare (exp, LT, LTU);
7358 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7360 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7361 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7363 comparison = compare (exp, LE, LEU);
7367 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7369 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7370 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7372 comparison = compare (exp, GT, GTU);
7376 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7378 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7379 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7381 comparison = compare (exp, GE, GEU);
7386 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7388 /* This is not needed any more and causes poor code since it causes
7389 comparisons and tests from non-SI objects to have different code sequences. */
7391 /* Copy to register to avoid generating bad insns by cse
7392 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7393 if (!cse_not_expected && GET_CODE (temp) == MEM)
7394 temp = copy_to_reg (temp);
7396 do_pending_stack_adjust ();
7397 if (GET_CODE (temp) == CONST_INT)
7398 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7399 else if (GET_CODE (temp) == LABEL_REF)
7400 comparison = const_true_rtx;
7401 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7402 && !can_compare_p (GET_MODE (temp)))
7403 /* Note swapping the labels gives us not-equal. */
7404 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7405 else if (GET_MODE (temp) != VOIDmode)
7406 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7407 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7408 GET_MODE (temp), NULL_RTX, 0);
7413 /* Do any postincrements in the expression that was tested. */
7416 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7417 straight into a conditional jump instruction as the jump condition.
7418 Otherwise, all the work has been done already. */
7420 if (comparison == const_true_rtx)
7423 emit_jump (if_true_label);
7425 else if (comparison == const0_rtx)
7428 emit_jump (if_false_label);
7430 else if (comparison)
7431 do_jump_for_compare (comparison, if_false_label, if_true_label);
7435 if (drop_through_label)
7437 /* If do_jump produces code that might be jumped around,
7438 do any stack adjusts from that code, before the place
7439 where control merges in. */
7440 do_pending_stack_adjust ();
7441 emit_label (drop_through_label);
7445 /* Given a comparison expression EXP for values too wide to be compared
7446 with one insn, test the comparison and jump to the appropriate label.
7447 The code of EXP is ignored; we always test GT if SWAP is 0,
7448 and LT if SWAP is 1. */
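/* Illustration: with 32-bit words, comparing two DImode values tests
   the high-order words first (signed or unsigned as requested); the
   low-order words are examined only when the high-order words compare
   equal, and they are always compared unsigned. */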
7451 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7454 rtx if_false_label, if_true_label;
7456 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7457 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7458 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7459 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7460 rtx drop_through_label = 0;
7461 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7464 if (! if_true_label || ! if_false_label)
7465 drop_through_label = gen_label_rtx ();
7466 if (! if_true_label)
7467 if_true_label = drop_through_label;
7468 if (! if_false_label)
7469 if_false_label = drop_through_label;
7471 /* Compare a word at a time, high order first. */
7472 for (i = 0; i < nwords; i++)
7475 rtx op0_word, op1_word;
7477 if (WORDS_BIG_ENDIAN)
7479 op0_word = operand_subword_force (op0, i, mode);
7480 op1_word = operand_subword_force (op1, i, mode);
7484 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7485 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7488 /* All but the high-order word must be compared as unsigned. */
7489 comp = compare_from_rtx (op0_word, op1_word,
7490 (unsignedp || i > 0) ? GTU : GT,
7491 unsignedp, word_mode, NULL_RTX, 0);
7492 if (comp == const_true_rtx)
7493 emit_jump (if_true_label);
7494 else if (comp != const0_rtx)
7495 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7497 /* Consider lower words only if these are equal. */
7498 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7500 if (comp == const_true_rtx)
7501 emit_jump (if_false_label);
7502 else if (comp != const0_rtx)
7503 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7507 emit_jump (if_false_label);
7508 if (drop_through_label)
7509 emit_label (drop_through_label);
7512 /* Compare OP0 with OP1, word at a time, in mode MODE.
7513 UNSIGNEDP says to do unsigned comparison.
7514 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7517 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7518 enum machine_mode mode;
7521 rtx if_false_label, if_true_label;
7523 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7524 rtx drop_through_label = 0;
7527 if (! if_true_label || ! if_false_label)
7528 drop_through_label = gen_label_rtx ();
7529 if (! if_true_label)
7530 if_true_label = drop_through_label;
7531 if (! if_false_label)
7532 if_false_label = drop_through_label;
7534 /* Compare a word at a time, high order first. */
7535 for (i = 0; i < nwords; i++)
7538 rtx op0_word, op1_word;
7540 if (WORDS_BIG_ENDIAN)
7542 op0_word = operand_subword_force (op0, i, mode);
7543 op1_word = operand_subword_force (op1, i, mode);
7547 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7548 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7551 /* All but the high-order word must be compared as unsigned. */
7552 comp = compare_from_rtx (op0_word, op1_word,
7553 (unsignedp || i > 0) ? GTU : GT,
7554 unsignedp, word_mode, NULL_RTX, 0);
7555 if (comp == const_true_rtx)
7556 emit_jump (if_true_label);
7557 else if (comp != const0_rtx)
7558 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7560 /* Consider lower words only if these are equal. */
7561 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7563 if (comp == const_true_rtx)
7564 emit_jump (if_false_label);
7565 else if (comp != const0_rtx)
7566 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7570 emit_jump (if_false_label);
7571 if (drop_through_label)
7572 emit_label (drop_through_label);
7575 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7576 with one insn, test the comparison and jump to the appropriate label. */
7579 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7581 rtx if_false_label, if_true_label;
7583 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7584 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7585 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7586 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7588 rtx drop_through_label = 0;
7590 if (! if_false_label)
7591 drop_through_label = if_false_label = gen_label_rtx ();
7593 for (i = 0; i < nwords; i++)
7595 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7596 operand_subword_force (op1, i, mode),
7597 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7598 word_mode, NULL_RTX, 0);
7599 if (comp == const_true_rtx)
7600 emit_jump (if_false_label);
7601 else if (comp != const0_rtx)
7602 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7606 emit_jump (if_true_label);
7607 if (drop_through_label)
7608 emit_label (drop_through_label);
7611 /* Jump according to whether OP0 is 0.
7612 We assume that OP0 has an integer mode that is too wide
7613 for the available compare insns. */
7616 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7618 rtx if_false_label, if_true_label;
7620 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7622 rtx drop_through_label = 0;
7624 if (! if_false_label)
7625 drop_through_label = if_false_label = gen_label_rtx ();
7627 for (i = 0; i < nwords; i++)
7629 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7631 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7632 if (comp == const_true_rtx)
7633 emit_jump (if_false_label);
7634 else if (comp != const0_rtx)
7635 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7639 emit_jump (if_true_label);
7640 if (drop_through_label)
7641 emit_label (drop_through_label);
7644 /* Given a comparison expression in rtl form, output conditional branches to
7645 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7648 do_jump_for_compare (comparison, if_false_label, if_true_label)
7649 rtx comparison, if_false_label, if_true_label;
7653 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7654 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7659 emit_jump (if_false_label);
7661 else if (if_false_label)
7664 rtx prev = PREV_INSN (get_last_insn ());
7667 /* Output the branch with the opposite condition. Then try to invert
7668 what is generated. If more than one insn is a branch, or if the
7669 branch is not the last insn written, abort. If we can't invert
7670 the branch, make a true label, redirect this jump to that,
7671 emit a jump to the false label and define the true label. */
7673 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7674 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7678 /* Here we get the insn before what was just emitted.
7679 On some machines, emitting the branch can discard
7680 the previous compare insn and emit a replacement. */
7682 /* If there's only one preceding insn... */
7683 insn = get_insns ();
7685 insn = NEXT_INSN (prev);
7687 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7688 if (GET_CODE (insn) == JUMP_INSN)
7695 if (branch != get_last_insn ())
7698 if (! invert_jump (branch, if_false_label))
7700 if_true_label = gen_label_rtx ();
7701 redirect_jump (branch, if_true_label);
7702 emit_jump (if_false_label);
7703 emit_label (if_true_label);
7708 /* Generate code for a comparison expression EXP
7709 (including code to compute the values to be compared)
7710 and set (CC0) according to the result.
7711 SIGNED_CODE should be the rtx operation for this comparison for
7712 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7714 We force a stack adjustment unless there are currently
7715 things pushed on the stack that aren't yet used. */
7718 compare (exp, signed_code, unsigned_code)
7720 enum rtx_code signed_code, unsigned_code;
7723 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7725 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7726 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7727 register enum machine_mode mode = TYPE_MODE (type);
7728 int unsignedp = TREE_UNSIGNED (type);
7729 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7731 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7733 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7734 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7737 /* Like compare but expects the values to compare as two rtx's.
7738 The decision as to signed or unsigned comparison must be made by the caller.
7740 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.
7743 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7744 size of MODE should be used. */
7747 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7748 register rtx op0, op1;
7751 enum machine_mode mode;
7757 /* If one operand is constant, make it the second one. Only do this
7758 if the other operand is not constant as well. */
7760 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7761 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7766 code = swap_condition (code);
7771 op0 = force_not_mem (op0);
7772 op1 = force_not_mem (op1);
7775 do_pending_stack_adjust ();
7777 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7778 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7782 /* There's no need to do this now that combine.c can eliminate lots of
7783 sign extensions. This can be less efficient in certain cases on other machines. */
7786 /* If this is a signed equality comparison, we can do it as an
7787 unsigned comparison since zero-extension is cheaper than sign
7788 extension and comparisons with zero are done as unsigned. This is
7789 the case even on machines that can do fast sign extension, since
7790 zero-extension is easier to combine with other operations than
7791 sign-extension is. If we are comparing against a constant, we must
7792 convert it to what it would look like unsigned. */
7793 if ((code == EQ || code == NE) && ! unsignedp
7794 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7796 if (GET_CODE (op1) == CONST_INT
7797 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7798 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7803 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7805 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7808 /* Generate code to calculate EXP using a store-flag instruction
7809 and return an rtx for the result. EXP is either a comparison
7810 or a TRUTH_NOT_EXPR whose operand is a comparison.
7812 If TARGET is nonzero, store the result there if convenient.
7814 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
7817 Return zero if there is no suitable set-flag instruction
7818 available on this machine.
7820 Once expand_expr has been called on the arguments of the comparison,
7821 we are committed to doing the store flag, since it is not safe to
7822 re-evaluate the expression. We emit the store-flag insn by calling
7823 emit_store_flag, but only expand the arguments if we have a reason
7824 to believe that emit_store_flag will be successful. If we think that
7825 it will, but it isn't, we have to simulate the store-flag with a
7826 set/jump/set sequence. */
7829 do_store_flag (exp, target, mode, only_cheap)
7832 enum machine_mode mode;
7836 tree arg0, arg1, type;
7838 enum machine_mode operand_mode;
7842 enum insn_code icode;
7843 rtx subtarget = target;
7844 rtx result, label, pattern, jump_pat;
7846 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7847 result at the end. We can't simply invert the test since it would
7848 have already been inverted if it were valid. This case occurs for
7849 some floating-point comparisons. */
7851 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7852 invert = 1, exp = TREE_OPERAND (exp, 0);
7854 arg0 = TREE_OPERAND (exp, 0);
7855 arg1 = TREE_OPERAND (exp, 1);
7856 type = TREE_TYPE (arg0);
7857 operand_mode = TYPE_MODE (type);
7858 unsignedp = TREE_UNSIGNED (type);
7860 /* We won't bother with BLKmode store-flag operations because it would mean
7861 passing a lot of information to emit_store_flag. */
7862 if (operand_mode == BLKmode)
7868 /* Get the rtx comparison code to use. We know that EXP is a comparison
7869 operation of some type. Some comparisons against 1 and -1 can be
7870 converted to comparisons with zero. Do so here so that the tests
7871 below will be aware that we have a comparison with zero. These
7872 tests will not catch constants in the first operand, but constants
7873 are rarely passed as the first operand. */
7875 switch (TREE_CODE (exp))
7884 if (integer_onep (arg1))
7885 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7887 code = unsignedp ? LTU : LT;
7890 if (! unsignedp && integer_all_onesp (arg1))
7891 arg1 = integer_zero_node, code = LT;
7893 code = unsignedp ? LEU : LE;
7896 if (! unsignedp && integer_all_onesp (arg1))
7897 arg1 = integer_zero_node, code = GE;
7899 code = unsignedp ? GTU : GT;
7902 if (integer_onep (arg1))
7903 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7905 code = unsignedp ? GEU : GE;
7911 /* Put a constant second. */
7912 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7914 tem = arg0; arg0 = arg1; arg1 = tem;
7915 code = swap_condition (code);
7918 /* If this is an equality or inequality test of a single bit, we can
7919 do this by shifting the bit being tested to the low-order bit and
7920 masking the result with the constant 1. If the condition was EQ,
7921 we xor it with 1. This does not require an scc insn and is faster
7922 than an scc insn even if we have it. */
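/* E.g. `(x & 4) != 0' can be computed as `(x >> 2) & 1', and the EQ
   form `(x & 4) == 0' as `((x >> 2) & 1) ^ 1'; the shift count 2 is
   the log2 of the power-of-two mask (illustrative). */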
7924 if ((code == NE || code == EQ)
7925 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7926 && integer_pow2p (TREE_OPERAND (arg0, 1))
7927 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7929 tree inner = TREE_OPERAND (arg0, 0);
7930 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7931 NULL_RTX, VOIDmode, 0)));
7934 /* If INNER is a right shift of a constant and it plus BITNUM does
7935 not overflow, adjust BITNUM and INNER. */
7937 if (TREE_CODE (inner) == RSHIFT_EXPR
7938 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7939 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7940 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
7941 < TYPE_PRECISION (type)))
7943 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7944 inner = TREE_OPERAND (inner, 0);
7947 /* If we are going to be able to omit the AND below, we must do our
7948 operations as unsigned. If we must use the AND, we have a choice.
7949 Normally unsigned is faster, but for some machines signed is. */
7950 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
7951 #ifdef BYTE_LOADS_SIGN_EXTEND
7958 if (subtarget == 0 || GET_CODE (subtarget) != REG
7959 || GET_MODE (subtarget) != operand_mode
7960 || ! safe_from_p (subtarget, inner))
7963 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
7966 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7967 size_int (bitnum), target, ops_unsignedp);
7969 if (GET_MODE (op0) != mode)
7970 op0 = convert_to_mode (mode, op0, ops_unsignedp);
7972 if ((code == EQ && ! invert) || (code == NE && invert))
7973 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
7974 ops_unsignedp, OPTAB_LIB_WIDEN);
7976 /* Put the AND last so it can combine with more things. */
7977 if (bitnum != TYPE_PRECISION (type) - 1)
7978 op0 = expand_and (op0, const1_rtx, target);
7983 /* Now see if we are likely to be able to do this. Return if not. */
7984 if (! can_compare_p (operand_mode))
7986 icode = setcc_gen_code[(int) code];
7987 if (icode == CODE_FOR_nothing
7988 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7990 /* We can only do this if it is one of the special cases that
7991 can be handled without an scc insn. */
7992 if ((code == LT && integer_zerop (arg1))
7993 || (! only_cheap && code == GE && integer_zerop (arg1)))
7995 else if (BRANCH_COST >= 0
7996 && ! only_cheap && (code == NE || code == EQ)
7997 && TREE_CODE (type) != REAL_TYPE
7998 && ((abs_optab->handlers[(int) operand_mode].insn_code
7999 != CODE_FOR_nothing)
8000 || (ffs_optab->handlers[(int) operand_mode].insn_code
8001 != CODE_FOR_nothing)))
8007 preexpand_calls (exp);
8008 if (subtarget == 0 || GET_CODE (subtarget) != REG
8009 || GET_MODE (subtarget) != operand_mode
8010 || ! safe_from_p (subtarget, arg1))
8013 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
8014 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
8017 target = gen_reg_rtx (mode);
8019 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
8020 because, if emit_store_flag does anything, it will succeed and
8021 OP0 and OP1 will not be used subsequently. */
8023 result = emit_store_flag (target, code,
8024 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
8025 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
8026 operand_mode, unsignedp, 1);
8031 result = expand_binop (mode, xor_optab, result, const1_rtx,
8032 result, 0, OPTAB_LIB_WIDEN);
8036 /* If this failed, we have to do this with set/compare/jump/set code. */
8037 if (target == 0 || GET_CODE (target) != REG
8038 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8039 target = gen_reg_rtx (GET_MODE (target));
8041 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8042 result = compare_from_rtx (op0, op1, code, unsignedp,
8043 operand_mode, NULL_RTX, 0);
8044 if (GET_CODE (result) == CONST_INT)
8045 return (((result == const0_rtx && ! invert)
8046 || (result != const0_rtx && invert))
8047 ? const0_rtx : const1_rtx);
8049 label = gen_label_rtx ();
8050 if (bcc_gen_fctn[(int) code] == 0)
8053 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8054 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8060 /* Generate a tablejump instruction (used for switch statements). */
8062 #ifdef HAVE_tablejump
8064 /* INDEX is the value being switched on, with the lowest value
8065 in the table already subtracted.
8066 MODE is its expected mode (needed if INDEX is constant).
8067 RANGE is the length of the jump table.
8068 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8070 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8071 index value is out of range. */
8074 do_tablejump (index, mode, range, table_label, default_label)
8075 rtx index, range, table_label, default_label;
8076 enum machine_mode mode;
8078 register rtx temp, vector;
8080 /* Do an unsigned comparison (in the proper mode) between the index
8081 expression and the value which represents the length of the range.
8082 Since we just finished subtracting the lower bound of the range
8083 from the index expression, this comparison allows us to simultaneously
8084 check that the original index expression value is both greater than
8085 or equal to the minimum value of the range and less than or equal to
8086 the maximum value of the range. */
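/* E.g. for `switch (i)' with case labels 3 through 10, the caller
   passes INDEX = i - 3 and RANGE = 7; the single unsigned test
   `INDEX > 7' then rejects both i < 3 and i > 10 at once
   (illustrative). */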
8088 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
8089 emit_jump_insn (gen_bltu (default_label));
8091 /* If index is in range, it must fit in Pmode.
8092 Convert to Pmode so we can index with it. */
8094 index = convert_to_mode (Pmode, index, 1);
8096 /* If flag_force_addr were to affect this address
8097 it could interfere with the tricky assumptions made
8098 about addresses that contain label-refs,
8099 which may be valid only very near the tablejump itself. */
8100 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8101 GET_MODE_SIZE, because this indicates how large insns are. The other
8102 uses should all be Pmode, because they are addresses. This code
8103 could fail if addresses and insns are not the same size. */
8104 index = memory_address_noforce
8106 gen_rtx (PLUS, Pmode,
8107 gen_rtx (MULT, Pmode, index,
8108 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8109 gen_rtx (LABEL_REF, Pmode, table_label)));
8110 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8111 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
8112 RTX_UNCHANGING_P (vector) = 1;
8113 convert_move (temp, vector, 0);
8115 emit_jump_insn (gen_tablejump (temp, table_label));
8117 #ifndef CASE_VECTOR_PC_RELATIVE
8118 /* If we are generating PIC code or if the table is PC-relative, the
8119 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8125 #endif /* HAVE_tablejump */