/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
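/* Editorial note: e.g. CEIL (10, 4) == 3 -- the number of 4-byte units
   needed to cover 10 bytes.  */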
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

  int explicit_inc_from;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
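/* Editorial sketch (not part of the original source): the tables filled
   in above are consulted elsewhere in this file roughly like this, when
   deciding whether a MEM may be referred to directly in a narrower mode:

	if (GET_CODE (from) == MEM
	    && ! MEM_VOLATILE_P (from)
	    && direct_load[(int) to_mode]
	    && ! mode_dependent_address_p (XEXP (from, 0)))
	  ... refer to FROM directly in TO_MODE ...
*/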
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
	{
	  XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
	  XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
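/* Editorial sketch (hypothetical caller, not part of the original
   source): any rtx that might contain a QUEUED goes through
   protect_from_queue before being used in an insn, and emit_queue
   flushes the pending increments afterward.  */
#if 0
  op0 = protect_from_queue (op0, 0);	/* read access */
  dst = protect_from_queue (dst, 1);	/* write access */
  emit_insn (gen_move_insn (dst, op0));
  emit_queue ();			/* output queued increments */
#endif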
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
#ifdef HAVE_extendqfhf2
  if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendqfsf2
  if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendqfdf2
  if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendqfxf2
  if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
    {
      emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendqftf2
  if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
    {
      emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendhfsf2
  if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendhfdf2
  if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendhfxf2
  if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
    {
      emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendhftf2
  if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
    {
      emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
      return;
    }
#endif

#ifdef HAVE_extendsfdf2
  if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendsfxf2
  if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
    {
      emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extendsftf2
  if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
    {
      emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extenddfxf2
  if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
    {
      emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_extenddftf2
  if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
    {
      emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
      return;
    }
#endif

#ifdef HAVE_trunchfqf2
  if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncsfqf2
  if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdfqf2
  if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfqf2
  if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfqf2
  if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncsfhf2
  if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdfhf2
  if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfhf2
  if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfhf2
  if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
      return;
    }
#endif
  switch (from_mode)
    {
    case SFmode:
      switch (to_mode)
	{
	case DFmode:
	  libcall = extendsfdf2_libfunc;
	  break;

	case XFmode:
	  libcall = extendsfxf2_libfunc;
	  break;

	case TFmode:
	  libcall = extendsftf2_libfunc;
	  break;
	}
      break;

    case DFmode:
      switch (to_mode)
	{
	case SFmode:
	  libcall = truncdfsf2_libfunc;
	  break;

	case XFmode:
	  libcall = extenddfxf2_libfunc;
	  break;

	case TFmode:
	  libcall = extenddftf2_libfunc;
	  break;
	}
      break;

    case XFmode:
      switch (to_mode)
	{
	case SFmode:
	  libcall = truncxfsf2_libfunc;
	  break;

	case DFmode:
	  libcall = truncxfdf2_libfunc;
	  break;
	}
      break;

    case TFmode:
      switch (to_mode)
	{
	case SFmode:
	  libcall = trunctfsf2_libfunc;
	  break;

	case DFmode:
	  libcall = trunctfdf2_libfunc;
	  break;
	}
      break;
    }

  if (libcall == (rtx) 0)
    /* This conversion is not implemented yet.  */
    abort ();

  emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
  emit_move_insn (to, hard_libcall_value (to_mode));
  return;
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */

      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}

#ifdef HAVE_extendpsisi
      if (HAVE_extendpsisi)
	{
	  emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_extendpsisi */
      abort ();
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
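/* Editorial sketch (hypothetical, not part of the original source):
   a typical use of convert_move, widening a SImode value into a fresh
   DImode pseudo.  UNSIGNEDP selects zero- versus sign-extension.  */
#if 0
  rtx wide = gen_reg_rtx (DImode);
  convert_move (wide, narrow, 0);	/* 0 => sign extend */
#endif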
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
		      && direct_load[(int) mode]
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
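/* Editorial sketch (hypothetical): convert_to_mode is the
   value-returning counterpart of convert_move; it may return X itself,
   a lowpart of X, or a fresh pseudo holding the converted value.  */
#if 0
  rtx op = convert_to_mode (word_mode, x, unsignedp);
#endif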
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
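/* Editorial note (worked example, not in the original): with word
   alignment on a 32-bit target, a 7-byte copy is emitted above as one
   SImode move, then one HImode move, then one QImode move.  */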
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
	       ? gen_rtx (MEM, mode, data->from_addr)
	       : change_address (data->from, mode,
				 plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
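/* Editorial sketch (hypothetical caller, not part of the original
   source): copying a 16-byte word-aligned BLKmode object; X and Y must
   be BLKmode MEMs.  */
#if 0
  emit_block_move (x, y, GEN_INT (16), UNITS_PER_WORD);
#endif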
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block.
   Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}
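/* Editorial note (sketch of the result, not in the original): after
   group_insns, the sequence is bracketed by notes

	insn_first:  REG_NOTES = (insn_list REG_LIBCALL insn_last ...)
	insn_last:   REG_NOTES = (insn_list REG_RETVAL insn_first ...)

   so later passes can treat the whole group as one libcall.  */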
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
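/* Editorial sketch (hypothetical): zeroing a 32-byte BLKmode object;
   for non-BLK modes clear_storage reduces to a single
   emit_move_insn (object, const0_rtx).  */
#if 0
  clear_storage (object, 32);
#endif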
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
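/* Editorial sketch (hypothetical): emit_move_insn legitimizes constants
   and addresses itself, so a caller can simply write:  */
#if 0
  rtx temp = gen_reg_rtx (SImode);
  emit_move_insn (temp, GEN_INT (42));
#endif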
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
			     (class == MODE_COMPLEX_INT
			      ? MODE_INT : MODE_FLOAT),
			     0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && submode != BLKmode
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_highpart (submode, x)),
		  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_lowpart (submode, x)),
		  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
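/* Editorial sketch (hypothetical): reserving 64 bytes of stack and
   obtaining an address for the block, versus the (pre_dec (reg sp))
   style operand a true push insn uses.  */
#if 0
  rtx block = push_block (GEN_INT (64), 0, 0);
  rtx push = gen_rtx (MEM, SImode, gen_push_operand ());
#endif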
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (GET_CODE (size) == CONST_INT)
	    size = GEN_INT (INTVAL (size) - used);
	  else
	    size = expand_binop (GET_MODE (size), sub_optab, size,
				 GEN_INT (used), NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr, args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }

	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      register rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr, INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
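/* Editorial sketch (hypothetical caller, argument order as in the
   definition above): pushing a word-sized scalar with no partial
   registers, no preallocated argument block, and no padding.  */
#if 0
  emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
		  GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx);
#endif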
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */
rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, NULL_RTX, VOIDmode, 0);

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
					    force_reg (Pmode, offset_rtx)));
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    MEM_VOLATILE_P (to_rtx) = 1;
#if 0 /* This was turned off because, when a field is volatile
	 in an object which is not volatile, the object may be in a register,
	 and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();

      /* If we aren't returning a result, just pass on what expand_expr
	 returned; it was probably const0_rtx.  Otherwise, convert RESULT
	 to the proper mode.  */
      return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
					    TREE_UNSIGNED (TREE_TYPE (to)))
	      : result);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
    {
      rtx value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
      emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
      rtx size = expr_size (from);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  return result;
}
2279 /* Generate code for computing expression EXP,
2280 and storing the value into TARGET.
2281 Returns TARGET or an equivalent value.
2282 TARGET may contain a QUEUED rtx.
2284 If SUGGEST_REG is nonzero, copy the value through a register
2285 and return that register, if that is possible.
2287 If the value stored is a constant, we return the constant. */
2290 store_expr (exp, target, suggest_reg)
2292 register rtx target;
2296 int dont_return_target = 0;
2298 if (TREE_CODE (exp) == COMPOUND_EXPR)
2300 /* Perform first part of compound expression, then assign from second
2302 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2304 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2306 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2308 /* For conditional expression, get safe form of the target. Then
2309 test the condition, doing the appropriate assignment on either
2310 side. This avoids the creation of unnecessary temporaries.
2311 For non-BLKmode, it is more efficient not to do this. */
2313 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2316 target = protect_from_queue (target, 1);
2319 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2320 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2322 emit_jump_insn (gen_jump (lab2));
2325 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2331 else if (suggest_reg && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2332 && GET_MODE (target) != BLKmode)
2333 /* If target is in memory and caller wants value in a register instead,
2334 arrange that. Pass TARGET as target for expand_expr so that,
2335 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2336 We know expand_expr will not use the target in that case.
2337 Don't do this if TARGET is volatile because we are supposed
2338 to write it and then read it. */
2340 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2341 GET_MODE (target), 0);
2342 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2343 temp = copy_to_reg (temp);
2344 dont_return_target = 1;
2346 else if (queued_subexp_p (target))
2347 /* If target contains a postincrement, it is not safe
2348 to use as the returned value. It would access the wrong
2349 place by the time the queued increment gets output.
2350 So copy the value through a temporary and use that temp as the result. */
2353 /* ??? There may be a bug here in the case of a target
2354 that is volatile, but I'm too sleepy today to write anything to handle it. */
2356 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2358 /* Expand EXP into a new pseudo. */
2359 temp = gen_reg_rtx (GET_MODE (target));
2360 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2363 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2364 dont_return_target = 1;
2366 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2367 /* If this is a scalar in a register that is stored in a wider mode
2368 than the declared mode, compute the result into its declared mode
2369 and then convert to the wider mode. Our value is the computed expression. */
2372 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2373 convert_move (SUBREG_REG (target), temp,
2374 SUBREG_PROMOTED_UNSIGNED_P (target));
2379 temp = expand_expr (exp, target, GET_MODE (target), 0);
2380 /* DO return TARGET if it's a specified hardware register.
2381 expand_return relies on this.
2382 DO return TARGET if it's a volatile mem ref; ANSI requires this. */
2383 if (!(target && GET_CODE (target) == REG
2384 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2385 && CONSTANT_P (temp)
2386 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2387 dont_return_target = 1;
2390 /* If value was not generated in the target, store it there.
2391 Convert the value to TARGET's type first if necessary. */
2393 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2395 target = protect_from_queue (target, 1);
2396 if (GET_MODE (temp) != GET_MODE (target)
2397 && GET_MODE (temp) != VOIDmode)
2399 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2400 if (dont_return_target)
2402 /* In this case, we will return TEMP,
2403 so make sure it has the proper mode.
2404 But don't forget to store the value into TARGET. */
2405 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2406 emit_move_insn (target, temp);
2409 convert_move (target, temp, unsignedp);
2412 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2414 /* Handle copying a string constant into an array.
2415 The string constant may be shorter than the array.
2416 So copy just the string's actual length, and clear the rest. */
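/* Worked case (added illustration):

       char buf[8] = "ab";

   The STRING_CST supplies 3 bytes ("a", "b" and the trailing null),
   so we block-move 3 bytes into BUF and then clear the remaining 5.  */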
2419 /* Get the size of the data type of the string,
2420 which is actually the size of the target. */
2421 size = expr_size (exp);
2422 if (GET_CODE (size) == CONST_INT
2423 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2424 emit_block_move (target, temp, size,
2425 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2428 /* Compute the size of the data to copy from the string. */
2430 = size_binop (MIN_EXPR,
2431 size_binop (CEIL_DIV_EXPR,
2432 TYPE_SIZE (TREE_TYPE (exp)),
2433 size_int (BITS_PER_UNIT)),
2435 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2436 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2440 /* Copy that much. */
2441 emit_block_move (target, temp, copy_size_rtx,
2442 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2444 /* Figure out how much is left in TARGET
2445 that we have to clear. */
2446 if (GET_CODE (copy_size_rtx) == CONST_INT)
2448 temp = plus_constant (XEXP (target, 0),
2449 TREE_STRING_LENGTH (exp));
2450 size = plus_constant (size,
2451 - TREE_STRING_LENGTH (exp));
2455 enum machine_mode size_mode = Pmode;
2457 temp = force_reg (Pmode, XEXP (target, 0));
2458 temp = expand_binop (size_mode, add_optab, temp,
2459 copy_size_rtx, NULL_RTX, 0,
2462 size = expand_binop (size_mode, sub_optab, size,
2463 copy_size_rtx, NULL_RTX, 0,
2466 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2467 GET_MODE (size), 0, 0);
2468 label = gen_label_rtx ();
2469 emit_jump_insn (gen_blt (label));
2472 if (size != const0_rtx)
2474 #ifdef TARGET_MEM_FUNCTIONS
2475 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2476 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2478 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2479 temp, Pmode, size, Pmode);
2486 else if (GET_MODE (temp) == BLKmode)
2487 emit_block_move (target, temp, expr_size (exp),
2488 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2490 emit_move_insn (target, temp);
2492 if (dont_return_target)
2497 /* Store the value of constructor EXP into the rtx TARGET.
2498 TARGET is either a REG or a MEM. */
2501 store_constructor (exp, target)
2505 tree type = TREE_TYPE (exp);
2507 /* We know our target cannot conflict, since safe_from_p has been called. */
2509 /* Don't try copying piece by piece into a hard register
2510 since that is vulnerable to being clobbered by EXP.
2511 Instead, construct in a pseudo register and then copy it all. */
2512 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2514 rtx temp = gen_reg_rtx (GET_MODE (target));
2515 store_constructor (exp, temp);
2516 emit_move_insn (target, temp);
2521 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2525 /* Inform later passes that the whole union value is dead. */
2526 if (TREE_CODE (type) == UNION_TYPE)
2527 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2529 /* If we are building a static constructor into a register,
2530 set the initial value as zero so we can fold the value into
2532 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2533 emit_move_insn (target, const0_rtx);
2535 /* If the constructor has fewer fields than the structure,
2536 clear the whole structure first. */
2537 else if (list_length (CONSTRUCTOR_ELTS (exp))
2538 != list_length (TYPE_FIELDS (type)))
2539 clear_storage (target, int_size_in_bytes (type));
2541 /* Inform later passes that the old value is dead. */
2542 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2544 /* Store each element of the constructor into
2545 the corresponding field of TARGET. */
2547 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2549 register tree field = TREE_PURPOSE (elt);
2550 register enum machine_mode mode;
2555 /* Just ignore missing fields.
2556 We cleared the whole structure, above,
2557 if any fields are missing. */
2561 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2562 unsignedp = TREE_UNSIGNED (field);
2563 mode = DECL_MODE (field);
2564 if (DECL_BIT_FIELD (field))
2567 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2568 /* ??? This case remains to be written. */
2571 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2573 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2574 /* The alignment of TARGET is
2575 at least what its type requires. */
2577 TYPE_ALIGN (type) / BITS_PER_UNIT,
2578 int_size_in_bytes (type));
2581 else if (TREE_CODE (type) == ARRAY_TYPE)
2585 tree domain = TYPE_DOMAIN (type);
2586 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2587 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2588 tree elttype = TREE_TYPE (type);
2590 /* If the constructor has fewer fields than the structure,
2591 clear the whole structure first. Similarly if this is a
2592 static constructor of a non-BLKmode object. */
2594 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2595 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2596 clear_storage (target, int_size_in_bytes (type));
2598 /* Inform later passes that the old value is dead. */
2599 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2601 /* Store each element of the constructor into
2602 the corresponding element of TARGET, determined
2603 by counting the elements. */
2604 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2606 elt = TREE_CHAIN (elt), i++)
2608 register enum machine_mode mode;
2613 mode = TYPE_MODE (elttype);
2614 bitsize = GET_MODE_BITSIZE (mode);
2615 unsignedp = TREE_UNSIGNED (elttype);
2617 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2619 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2620 /* The alignment of TARGET is
2621 at least what its type requires. */
2623 TYPE_ALIGN (type) / BITS_PER_UNIT,
2624 int_size_in_bytes (type));
2632 /* Store the value of EXP (an expression tree)
2633 into a subfield of TARGET which has mode MODE and occupies
2634 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2635 If MODE is VOIDmode, it means that we are storing into a bit-field.
2637 If VALUE_MODE is VOIDmode, return nothing in particular.
2638 UNSIGNEDP is not used in this case.
2640 Otherwise, return an rtx for the value stored. This rtx
2641 has mode VALUE_MODE if that is convenient to do.
2642 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2644 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2645 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
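/* Hypothetical call (added to illustrate the interface): storing EXP
   into a 3-bit field that begins 5 bits into a 4-byte, int-aligned
   structure, with no value wanted back:

       store_field (target, 3, 5, VOIDmode, exp, VOIDmode, 0, 4, 4);

   MODE == VOIDmode marks the store as a bit-field store.  */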
2648 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2649 unsignedp, align, total_size)
2651 int bitsize, bitpos;
2652 enum machine_mode mode;
2654 enum machine_mode value_mode;
2659 HOST_WIDE_INT width_mask = 0;
2661 if (bitsize < HOST_BITS_PER_WIDE_INT)
2662 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2664 /* If we are storing into an unaligned field of an aligned union that is
2665 in a register, we may have the mode of TARGET being an integer mode but
2666 MODE == BLKmode. In that case, get an aligned object whose size and
2667 alignment are the same as TARGET and store TARGET into it (we can avoid
2668 the store if the field being stored is the entire width of TARGET). Then
2669 call ourselves recursively to store the field into a BLKmode version of
2670 that object. Finally, load from the object into TARGET. This is not
2671 very efficient in general, but should only be slightly more expensive
2672 than the otherwise-required unaligned accesses. Perhaps this can be
2673 cleaned up later. */
2676 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2678 rtx object = assign_stack_temp (GET_MODE (target),
2679 GET_MODE_SIZE (GET_MODE (target)), 0);
2680 rtx blk_object = copy_rtx (object);
2682 PUT_MODE (blk_object, BLKmode);
2684 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2685 emit_move_insn (object, target);
2687 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2690 emit_move_insn (target, object);
2695 /* If the structure is in a register or if the component
2696 is a bit field, we cannot use addressing to access it.
2697 Use bit-field techniques or SUBREG to store in it. */
2699 if (mode == VOIDmode
2700 || (mode != BLKmode && ! direct_store[(int) mode])
2701 || GET_CODE (target) == REG
2702 || GET_CODE (target) == SUBREG)
2704 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2705 /* Store the value in the bitfield. */
2706 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2707 if (value_mode != VOIDmode)
2709 /* The caller wants an rtx for the value. */
2710 /* If possible, avoid refetching from the bitfield itself. */
2712 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2715 enum machine_mode tmode;
2718 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2719 tmode = GET_MODE (temp);
2720 if (tmode == VOIDmode)
2722 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2723 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2724 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2726 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2727 NULL_RTX, value_mode, 0, align,
2734 rtx addr = XEXP (target, 0);
2737 /* If a value is wanted, it must be the lhs;
2738 so make the address stable for multiple use. */
2740 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2741 && ! CONSTANT_ADDRESS_P (addr)
2742 /* A frame-pointer reference is already stable. */
2743 && ! (GET_CODE (addr) == PLUS
2744 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2745 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2746 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2747 addr = copy_to_reg (addr);
2749 /* Now build a reference to just the desired component. */
2751 to_rtx = change_address (target, mode,
2752 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2753 MEM_IN_STRUCT_P (to_rtx) = 1;
2755 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2759 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2760 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2761 ARRAY_REFs and find the ultimate containing object, which we return.
2763 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2764 bit position, and *PUNSIGNEDP to the signedness of the field.
2765 If the position of the field is variable, we store a tree
2766 giving the variable offset (in units) in *POFFSET.
2767 This offset is in addition to the bit position.
2768 If the position is not variable, we store 0 in *POFFSET.
2770 If any of the extraction expressions is volatile,
2771 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2773 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2774 is a mode that can be used to access the field. In that case, *PBITSIZE
2777 If the field describes a variable-sized object, *PMODE is set to
2778 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2779 this case, but the address of the object can be found. */
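/* Added illustration (assuming a 32-bit target): for

       struct s { int a; unsigned int b : 4; } v;

   calling get_inner_reference on the expression v.b returns V as the
   ultimate containing object and sets *PBITSIZE = 4, *PBITPOS = 32,
   *POFFSET = 0 and *PMODE = VOIDmode, since B is a bit-field.  */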
2782 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2783 punsignedp, pvolatilep)
2788 enum machine_mode *pmode;
2793 enum machine_mode mode = VOIDmode;
2794 tree offset = integer_zero_node;
2796 if (TREE_CODE (exp) == COMPONENT_REF)
2798 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2799 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2800 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2801 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2803 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2805 size_tree = TREE_OPERAND (exp, 1);
2806 *punsignedp = TREE_UNSIGNED (exp);
2810 mode = TYPE_MODE (TREE_TYPE (exp));
2811 *pbitsize = GET_MODE_BITSIZE (mode);
2812 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2817 if (TREE_CODE (size_tree) != INTEGER_CST)
2818 mode = BLKmode, *pbitsize = -1;
2820 *pbitsize = TREE_INT_CST_LOW (size_tree);
2823 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2824 and find the ultimate containing object. */
2830 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2832 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2833 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2834 : TREE_OPERAND (exp, 2));
2836 /* If this field hasn't been filled in yet, don't go
2837 past it. This should only happen when folding expressions
2838 made during type construction. */
2842 if (TREE_CODE (pos) == PLUS_EXPR)
2845 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2847 constant = TREE_OPERAND (pos, 0);
2848 var = TREE_OPERAND (pos, 1);
2850 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2852 constant = TREE_OPERAND (pos, 1);
2853 var = TREE_OPERAND (pos, 0);
2858 *pbitpos += TREE_INT_CST_LOW (constant);
2859 offset = size_binop (PLUS_EXPR, offset,
2860 size_binop (FLOOR_DIV_EXPR, var,
2861 size_int (BITS_PER_UNIT)));
2863 else if (TREE_CODE (pos) == INTEGER_CST)
2864 *pbitpos += TREE_INT_CST_LOW (pos);
2867 /* Assume here that the offset is a multiple of a unit.
2868 If not, there should be an explicitly added constant. */
2869 offset = size_binop (PLUS_EXPR, offset,
2870 size_binop (FLOOR_DIV_EXPR, pos,
2871 size_int (BITS_PER_UNIT)));
2875 else if (TREE_CODE (exp) == ARRAY_REF)
2877 /* This code is based on the code in case ARRAY_REF in expand_expr
2878 below. We assume here that the size of an array element is
2879 always an integral multiple of BITS_PER_UNIT. */
2881 tree index = TREE_OPERAND (exp, 1);
2882 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2884 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2885 tree index_type = TREE_TYPE (index);
2887 if (! integer_zerop (low_bound))
2888 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2890 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2892 index = convert (type_for_size (POINTER_SIZE, 0), index);
2893 index_type = TREE_TYPE (index);
2896 index = fold (build (MULT_EXPR, index_type, index,
2897 TYPE_SIZE (TREE_TYPE (exp))));
2899 if (TREE_CODE (index) == INTEGER_CST
2900 && TREE_INT_CST_HIGH (index) == 0)
2901 *pbitpos += TREE_INT_CST_LOW (index);
2903 offset = size_binop (PLUS_EXPR, offset,
2904 size_binop (FLOOR_DIV_EXPR, index,
2905 size_int (BITS_PER_UNIT)));
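/* Worked instance (added illustration, assuming 32-bit int elements):
   a constant index of 3 contributes 3 * 32 == 96 bits to *PBITPOS,
   while a variable index I contributes the tree (I * 32) / 8, that
   is, I * 4 bytes, to OFFSET.  */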
2907 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2908 && ! ((TREE_CODE (exp) == NOP_EXPR
2909 || TREE_CODE (exp) == CONVERT_EXPR)
2910 && (TYPE_MODE (TREE_TYPE (exp))
2911 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2914 /* If any reference in the chain is volatile, the effect is volatile. */
2915 if (TREE_THIS_VOLATILE (exp))
2917 exp = TREE_OPERAND (exp, 0);
2920 /* If this was a bit-field, see if there is a mode that allows direct
2921 access in case EXP is in memory. */
2922 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
2924 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2925 if (mode == BLKmode)
2929 if (integer_zerop (offset))
2935 /* We aren't finished fixing the callers to really handle nonzero offset. */
2943 /* Given an rtx VALUE that may contain additions and multiplications,
2944 return an equivalent value that just refers to a register or memory.
2945 This is done by generating instructions to perform the arithmetic
2946 and returning a pseudo-register containing the value.
2948 The returned value may be a REG, SUBREG, MEM or constant. */
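/* Added illustration (hypothetical rtl): given

       (plus:SI (mult:SI (reg:SI 66) (const_int 4)) (reg:SI 67))

   force_operand emits the multiply and the add and returns a pseudo
   holding reg 66 * 4 + reg 67.  */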
2951 force_operand (value, target)
2954 register optab binoptab = 0;
2955 /* Use a temporary to force order of execution of calls to `force_operand'. */
2959 /* Use subtarget as the target for operand 0 of a binary operation. */
2960 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2962 if (GET_CODE (value) == PLUS)
2963 binoptab = add_optab;
2964 else if (GET_CODE (value) == MINUS)
2965 binoptab = sub_optab;
2966 else if (GET_CODE (value) == MULT)
2968 op2 = XEXP (value, 1);
2969 if (!CONSTANT_P (op2)
2970 && !(GET_CODE (op2) == REG && op2 != subtarget))
2972 tmp = force_operand (XEXP (value, 0), subtarget);
2973 return expand_mult (GET_MODE (value), tmp,
2974 force_operand (op2, NULL_RTX),
2980 op2 = XEXP (value, 1);
2981 if (!CONSTANT_P (op2)
2982 && !(GET_CODE (op2) == REG && op2 != subtarget))
2984 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2986 binoptab = add_optab;
2987 op2 = negate_rtx (GET_MODE (value), op2);
2990 /* Check for an addition with OP2 a constant integer and our first
2991 operand a PLUS of a virtual register and something else. In that
2992 case, we want to emit the sum of the virtual register and the
2993 constant first and then add the other value. This allows virtual
2994 register instantiation to simply modify the constant rather than
2995 creating another one around this addition. */
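/* E.g. (added illustration): for

       (plus (plus (reg virtual-stack-vars) (reg 66)) (const_int 8))

   we first form virtual-stack-vars + 8, which instantiation can later
   fold into a single frame offset, and only then add reg 66.  */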
2996 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2997 && GET_CODE (XEXP (value, 0)) == PLUS
2998 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2999 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3000 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3002 rtx temp = expand_binop (GET_MODE (value), binoptab,
3003 XEXP (XEXP (value, 0), 0), op2,
3004 subtarget, 0, OPTAB_LIB_WIDEN);
3005 return expand_binop (GET_MODE (value), binoptab, temp,
3006 force_operand (XEXP (XEXP (value, 0), 1), 0),
3007 target, 0, OPTAB_LIB_WIDEN);
3010 tmp = force_operand (XEXP (value, 0), subtarget);
3011 return expand_binop (GET_MODE (value), binoptab, tmp,
3012 force_operand (op2, NULL_RTX),
3013 target, 0, OPTAB_LIB_WIDEN);
3014 /* We give UNSIGNEDP = 0 to expand_binop
3015 because the only operations we are expanding here are signed ones. */
3020 /* Subroutine of expand_expr:
3021 save the non-copied parts (LIST) of an expr (LHS), and return a list
3022 which can restore these values to their previous values,
3023 should something modify their storage. */
3026 save_noncopied_parts (lhs, list)
3033 for (tail = list; tail; tail = TREE_CHAIN (tail))
3034 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3035 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3038 tree part = TREE_VALUE (tail);
3039 tree part_type = TREE_TYPE (part);
3040 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3041 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3042 int_size_in_bytes (part_type), 0);
3043 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3044 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3045 parts = tree_cons (to_be_saved,
3046 build (RTL_EXPR, part_type, NULL_TREE,
3049 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3054 /* Subroutine of expand_expr:
3055 record the non-copied parts (LIST) of an expr (LHS), and return a list
3056 which specifies the initial values of these parts. */
3059 init_noncopied_parts (lhs, list)
3066 for (tail = list; tail; tail = TREE_CHAIN (tail))
3067 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3068 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3071 tree part = TREE_VALUE (tail);
3072 tree part_type = TREE_TYPE (part);
3073 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3074 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3079 /* Subroutine of expand_expr: return nonzero iff there is no way that
3080 EXP can reference X, which is being modified. */
3083 safe_from_p (x, exp)
3093 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3094 find the underlying pseudo. */
3095 if (GET_CODE (x) == SUBREG)
3098 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3102 /* If X is a location in the outgoing argument area, it is always safe. */
3103 if (GET_CODE (x) == MEM
3104 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3105 || (GET_CODE (XEXP (x, 0)) == PLUS
3106 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3109 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3112 exp_rtl = DECL_RTL (exp);
3119 if (TREE_CODE (exp) == TREE_LIST)
3120 return ((TREE_VALUE (exp) == 0
3121 || safe_from_p (x, TREE_VALUE (exp)))
3122 && (TREE_CHAIN (exp) == 0
3123 || safe_from_p (x, TREE_CHAIN (exp))));
3128 return safe_from_p (x, TREE_OPERAND (exp, 0));
3132 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3133 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3137 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3138 the expression. If it is set, we conflict iff we are that rtx or
3139 both are in memory. Otherwise, we check all operands of the
3140 expression recursively. */
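/* E.g. (added illustration): if X is (reg 66) and EXP is a SAVE_EXPR
   whose SAVE_EXPR_RTL is that same register, rtx_equal_p reports the
   conflict; two distinct MEMs are likewise assumed to conflict unless
   EXP is readonly.  */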
3142 switch (TREE_CODE (exp))
3145 return staticp (TREE_OPERAND (exp, 0));
3148 if (GET_CODE (x) == MEM)
3153 exp_rtl = CALL_EXPR_RTL (exp);
3156 /* Assume that the call will clobber all hard registers and all of memory. */
3158 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3159 || GET_CODE (x) == MEM)
3166 exp_rtl = RTL_EXPR_RTL (exp);
3168 /* We don't know what this can modify. */
3173 case WITH_CLEANUP_EXPR:
3174 exp_rtl = RTL_EXPR_RTL (exp);
3178 exp_rtl = SAVE_EXPR_RTL (exp);
3182 /* The only operand we look at is operand 1. The rest aren't
3183 part of the expression. */
3184 return safe_from_p (x, TREE_OPERAND (exp, 1));
3186 case METHOD_CALL_EXPR:
3187 /* This takes an rtx argument, but shouldn't appear here. */
3191 /* If we have an rtx, we do not need to scan our operands. */
3195 nops = tree_code_length[(int) TREE_CODE (exp)];
3196 for (i = 0; i < nops; i++)
3197 if (TREE_OPERAND (exp, i) != 0
3198 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3202 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
3206 if (GET_CODE (exp_rtl) == SUBREG)
3208 exp_rtl = SUBREG_REG (exp_rtl);
3209 if (GET_CODE (exp_rtl) == REG
3210 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3214 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3215 are memory and EXP is not readonly. */
3216 return ! (rtx_equal_p (x, exp_rtl)
3217 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3218 && ! TREE_READONLY (exp)));
3221 /* If we reach here, it is safe. */
3225 /* Subroutine of expand_expr: return nonzero iff EXP is an
3226 expression whose type is statically determinable. */
3232 if (TREE_CODE (exp) == PARM_DECL
3233 || TREE_CODE (exp) == VAR_DECL
3234 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3235 || TREE_CODE (exp) == COMPONENT_REF
3236 || TREE_CODE (exp) == ARRAY_REF)
3241 /* expand_expr: generate code for computing expression EXP.
3242 An rtx for the computed value is returned. The value is never null.
3243 In the case of a void EXP, const0_rtx is returned.
3245 The value may be stored in TARGET if TARGET is nonzero.
3246 TARGET is just a suggestion; callers must assume that
3247 the rtx returned may not be the same as TARGET.
3249 If TARGET is CONST0_RTX, it means that the value will be ignored.
3251 If TMODE is not VOIDmode, it suggests generating the
3252 result in mode TMODE. But this is done only when convenient.
3253 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3254 TMODE is just a suggestion; callers must assume that
3255 the rtx returned may not have mode TMODE.
3257 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3258 with a constant address even if that address is not normally legitimate.
3259 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3261 If MODIFIER is EXPAND_SUM then when EXP is an addition
3262 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3263 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3264 products as above, or REG or MEM, or constant.
3265 Ordinarily in such cases we would output mul or add instructions
3266 and then return a pseudo reg containing the sum.
3268 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3269 it also marks a label as absolutely required (it can't be dead).
3270 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3271 This is used for outputting expressions used in initializers. */
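/* Added illustration (hypothetical rtl): with MODIFIER == EXPAND_SUM,
   expanding the address of a[i] for a global array of 4-byte elements
   may legitimately return

       (plus:SI (mult:SI (reg:SI 66) (const_int 4)) (symbol_ref:SI "a"))

   rather than a pseudo holding the fully computed sum.  */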
3274 expand_expr (exp, target, tmode, modifier)
3277 enum machine_mode tmode;
3278 enum expand_modifier modifier;
3280 register rtx op0, op1, temp;
3281 tree type = TREE_TYPE (exp);
3282 int unsignedp = TREE_UNSIGNED (type);
3283 register enum machine_mode mode = TYPE_MODE (type);
3284 register enum tree_code code = TREE_CODE (exp);
3286 /* Use subtarget as the target for operand 0 of a binary operation. */
3287 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3288 rtx original_target = target;
3289 int ignore = (target == const0_rtx
3290 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3291 || code == CONVERT_EXPR || code == REFERENCE_EXPR)
3292 && TREE_CODE (type) == VOID_TYPE));
3295 /* Don't use hard regs as subtargets, because the combiner
3296 can only handle pseudo regs. */
3297 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3299 /* Avoid subtargets inside loops,
3300 since they hide some invariant expressions. */
3301 if (preserve_subexpressions_p ())
3304 /* If we are going to ignore this result, we need only do something
3305 if there is a side-effect somewhere in the expression. If there
3306 is, short-circuit the most common cases here. */
3310 if (! TREE_SIDE_EFFECTS (exp))
3313 /* Ensure we reference a volatile object even if value is ignored. */
3314 if (TREE_THIS_VOLATILE (exp)
3315 && TREE_CODE (exp) != FUNCTION_DECL
3316 && mode != VOIDmode && mode != BLKmode)
3318 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3319 if (GET_CODE (temp) == MEM)
3320 temp = copy_to_reg (temp);
3324 if (TREE_CODE_CLASS (code) == '1')
3325 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3326 VOIDmode, modifier);
3327 else if (TREE_CODE_CLASS (code) == '2'
3328 || TREE_CODE_CLASS (code) == '<')
3330 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3331 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3334 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3335 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3336 /* If the second operand has no side effects, just evaluate the first. */
3338 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3339 VOIDmode, modifier);
3340 /* If we will do cse, generate all results into pseudo registers
3341 since 1) that allows cse to find more things
3342 and 2) otherwise cse could produce an insn the machine cannot support. */
3345 target = 0, original_target = 0;
3348 if (! cse_not_expected && mode != BLKmode && target
3349 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3356 tree function = decl_function_context (exp);
3357 /* Handle using a label in a containing function. */
3358 if (function != current_function_decl && function != 0)
3360 struct function *p = find_function_data (function);
3361 /* Allocate in the memory associated with the function
3362 that the label is in. */
3363 push_obstacks (p->function_obstack,
3364 p->function_maybepermanent_obstack);
3366 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3367 label_rtx (exp), p->forced_labels);
3370 else if (modifier == EXPAND_INITIALIZER)
3371 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3372 label_rtx (exp), forced_labels);
3373 temp = gen_rtx (MEM, FUNCTION_MODE,
3374 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3375 if (function != current_function_decl && function != 0)
3376 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3381 if (DECL_RTL (exp) == 0)
3383 error_with_decl (exp, "prior parameter's size depends on `%s'");
3384 return CONST0_RTX (mode);
3390 if (DECL_RTL (exp) == 0)
3392 /* Ensure variable marked as used
3393 even if it doesn't go through a parser. */
3394 TREE_USED (exp) = 1;
3395 /* Handle variables inherited from containing functions. */
3396 context = decl_function_context (exp);
3398 /* We treat inline_function_decl as an alias for the current function
3399 because that is the inline function whose vars, types, etc.
3400 are being merged into the current function.
3401 See expand_inline_function. */
3402 if (context != 0 && context != current_function_decl
3403 && context != inline_function_decl
3404 /* If var is static, we don't need a static chain to access it. */
3405 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3406 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3410 /* Mark as non-local and addressable. */
3411 DECL_NONLOCAL (exp) = 1;
3412 mark_addressable (exp);
3413 if (GET_CODE (DECL_RTL (exp)) != MEM)
3415 addr = XEXP (DECL_RTL (exp), 0);
3416 if (GET_CODE (addr) == MEM)
3417 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3419 addr = fix_lexical_addr (addr, exp);
3420 return change_address (DECL_RTL (exp), mode, addr);
3423 /* This is the case of an array whose size is to be determined
3424 from its initializer, while the initializer is still being parsed.
3426 if (GET_CODE (DECL_RTL (exp)) == MEM
3427 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3428 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3429 XEXP (DECL_RTL (exp), 0));
3430 if (GET_CODE (DECL_RTL (exp)) == MEM
3431 && modifier != EXPAND_CONST_ADDRESS
3432 && modifier != EXPAND_SUM
3433 && modifier != EXPAND_INITIALIZER)
3435 /* DECL_RTL probably contains a constant address.
3436 On RISC machines where a constant address isn't valid,
3437 make some insns to get that address into a register. */
3438 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3440 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3441 return change_address (DECL_RTL (exp), VOIDmode,
3442 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3445 /* If the mode of DECL_RTL does not match that of the decl, it
3446 must be a promoted value. We return a SUBREG of the wanted mode,
3447 but mark it so that we know that it was already extended. */
3449 if (GET_CODE (DECL_RTL (exp)) == REG
3450 && GET_MODE (DECL_RTL (exp)) != mode)
3452 enum machine_mode decl_mode = DECL_MODE (exp);
3454 /* Get the signedness used for this variable. Ensure we get the
3455 same mode we got when the variable was declared. */
3457 PROMOTE_MODE (decl_mode, unsignedp, type);
3459 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3462 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3463 SUBREG_PROMOTED_VAR_P (temp) = 1;
3464 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3468 return DECL_RTL (exp);
3471 return immed_double_const (TREE_INT_CST_LOW (exp),
3472 TREE_INT_CST_HIGH (exp),
3476 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3479 /* If optimized, generate immediate CONST_DOUBLE
3480 which will be turned into memory by reload if necessary.
3482 We used to force a register so that loop.c could see it. But
3483 this does not allow gen_* patterns to perform optimizations with
3484 the constants. It also produces two insns in cases like "x = 1.0;".
3485 On most machines, floating-point constants are not permitted in
3486 many insns, so we'd end up copying it to a register in any case.
3488 Now, we do the copying in expand_binop, if appropriate. */
3489 return immed_real_const (exp);
3493 if (! TREE_CST_RTL (exp))
3494 output_constant_def (exp);
3496 /* TREE_CST_RTL probably contains a constant address.
3497 On RISC machines where a constant address isn't valid,
3498 make some insns to get that address into a register. */
3499 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3500 && modifier != EXPAND_CONST_ADDRESS
3501 && modifier != EXPAND_INITIALIZER
3502 && modifier != EXPAND_SUM
3503 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3504 return change_address (TREE_CST_RTL (exp), VOIDmode,
3505 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3506 return TREE_CST_RTL (exp);
3509 context = decl_function_context (exp);
3510 /* We treat inline_function_decl as an alias for the current function
3511 because that is the inline function whose vars, types, etc.
3512 are being merged into the current function.
3513 See expand_inline_function. */
3514 if (context == current_function_decl || context == inline_function_decl)
3517 /* If this is non-local, handle it. */
3520 temp = SAVE_EXPR_RTL (exp);
3521 if (temp && GET_CODE (temp) == REG)
3523 put_var_into_stack (exp);
3524 temp = SAVE_EXPR_RTL (exp);
3526 if (temp == 0 || GET_CODE (temp) != MEM)
3528 return change_address (temp, mode,
3529 fix_lexical_addr (XEXP (temp, 0), exp));
3531 if (SAVE_EXPR_RTL (exp) == 0)
3533 if (mode == BLKmode)
3535 = assign_stack_temp (mode,
3536 int_size_in_bytes (TREE_TYPE (exp)), 0);
3539 enum machine_mode var_mode = mode;
3541 if (TREE_CODE (type) == INTEGER_TYPE
3542 || TREE_CODE (type) == ENUMERAL_TYPE
3543 || TREE_CODE (type) == BOOLEAN_TYPE
3544 || TREE_CODE (type) == CHAR_TYPE
3545 || TREE_CODE (type) == REAL_TYPE
3546 || TREE_CODE (type) == POINTER_TYPE
3547 || TREE_CODE (type) == OFFSET_TYPE)
3549 PROMOTE_MODE (var_mode, unsignedp, type);
3552 temp = gen_reg_rtx (var_mode);
3555 SAVE_EXPR_RTL (exp) = temp;
3556 if (!optimize && GET_CODE (temp) == REG)
3557 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3560 /* If the mode of TEMP does not match that of the expression, it
3561 must be a promoted value. We pass store_expr a SUBREG of the
3562 wanted mode but mark it so that we know that it was already
3563 extended. Note that `unsignedp' was modified above in this case. */
3566 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3568 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3569 SUBREG_PROMOTED_VAR_P (temp) = 1;
3570 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3573 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3576 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3577 must be a promoted value. We return a SUBREG of the wanted mode,
3578 but mark it so that we know that it was already extended. Note
3579 that `unsignedp' was modified above in this case. */
3581 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3582 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3584 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3585 SUBREG_PROMOTED_VAR_P (temp) = 1;
3586 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3590 return SAVE_EXPR_RTL (exp);
3593 /* Exit the current loop if the body-expression is true. */
3595 rtx label = gen_label_rtx ();
3596 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3597 expand_exit_loop (NULL_PTR);
3603 expand_start_loop (1);
3604 expand_expr_stmt (TREE_OPERAND (exp, 0));
3611 tree vars = TREE_OPERAND (exp, 0);
3612 int vars_need_expansion = 0;
3614 /* Need to open a binding contour here because
3615 if there are any cleanups they must be contained here. */
3616 expand_start_bindings (0);
3618 /* Mark the corresponding BLOCK for output in its proper place. */
3619 if (TREE_OPERAND (exp, 2) != 0
3620 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3621 insert_block (TREE_OPERAND (exp, 2));
3623 /* If VARS have not yet been expanded, expand them now. */
3626 if (DECL_RTL (vars) == 0)
3628 vars_need_expansion = 1;
3631 expand_decl_init (vars);
3632 vars = TREE_CHAIN (vars);
3635 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3637 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3643 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3645 emit_insns (RTL_EXPR_SEQUENCE (exp));
3646 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3647 return RTL_EXPR_RTL (exp);
3650 /* If we don't need the result, just ensure we evaluate any subexpressions. */
3655 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3656 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3659 /* All elts simple constants => refer to a constant in memory. But
3660 if this is a non-BLKmode mode, let it store a field at a time
3661 since that should make a CONST_INT or CONST_DOUBLE when we
3662 fold. If we are making an initializer and all operands are
3663 constant, put it in memory as well. */
3664 else if ((TREE_STATIC (exp)
3665 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3666 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
3668 rtx constructor = output_constant_def (exp);
3669 if (modifier != EXPAND_CONST_ADDRESS
3670 && modifier != EXPAND_INITIALIZER
3671 && modifier != EXPAND_SUM
3672 && !memory_address_p (GET_MODE (constructor),
3673 XEXP (constructor, 0)))
3674 constructor = change_address (constructor, VOIDmode,
3675 XEXP (constructor, 0));
3681 if (target == 0 || ! safe_from_p (target, exp))
3683 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3684 target = gen_reg_rtx (mode);
3687 enum tree_code c = TREE_CODE (type);
3689 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3690 if (c == RECORD_TYPE || c == UNION_TYPE
3691 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3692 MEM_IN_STRUCT_P (target) = 1;
3695 store_constructor (exp, target);
3701 tree exp1 = TREE_OPERAND (exp, 0);
3704 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3705 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3706 This code has the same general effect as simply doing
3707 expand_expr on the save expr, except that the expression PTR
3708 is computed for use as a memory address. This means different
3709 code, suitable for indexing, may be generated. */
3710 if (TREE_CODE (exp1) == SAVE_EXPR
3711 && SAVE_EXPR_RTL (exp1) == 0
3712 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3713 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3714 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3716 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3717 VOIDmode, EXPAND_SUM);
3718 op0 = memory_address (mode, temp);
3719 op0 = copy_all_regs (op0);
3720 SAVE_EXPR_RTL (exp1) = op0;
3724 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3725 op0 = memory_address (mode, op0);
3728 temp = gen_rtx (MEM, mode, op0);
3729 /* If address was computed by addition,
3730 mark this as an element of an aggregate. */
3731 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3732 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3733 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3734 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3735 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3736 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3737 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3738 || (TREE_CODE (exp1) == ADDR_EXPR
3739 && (exp2 = TREE_OPERAND (exp1, 0))
3740 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3741 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3742 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3743 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
3744 MEM_IN_STRUCT_P (temp) = 1;
3745 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3746 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3747 a location is accessed through a pointer to const does not mean
3748 that the value there can never change. */
3749 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3755 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3759 tree array = TREE_OPERAND (exp, 0);
3760 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3761 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3762 tree index = TREE_OPERAND (exp, 1);
3763 tree index_type = TREE_TYPE (index);
3766 /* Optimize the special-case of a zero lower bound. */
3767 if (! integer_zerop (low_bound))
3768 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3770 if (TREE_CODE (index) != INTEGER_CST
3771 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3773 /* Nonconstant array index or nonconstant element size.
3774 Generate the tree for *(&array+index) and expand that,
3775 except do it in a language-independent way
3776 and don't complain about non-lvalue arrays.
3777 `mark_addressable' should already have been called
3778 for any array for which this case will be reached. */
3780 /* Don't forget the const or volatile flag from the array element. */
3782 tree variant_type = build_type_variant (type,
3783 TREE_READONLY (exp),
3784 TREE_THIS_VOLATILE (exp));
3785 tree array_adr = build1 (ADDR_EXPR,
3786 build_pointer_type (variant_type), array);
3789 /* Convert the integer argument to a type the same size as a
3790 pointer so the multiply won't overflow spuriously. */
3791 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3792 index = convert (type_for_size (POINTER_SIZE, 0), index);
3794 /* Don't think the address has side effects
3795 just because the array does.
3796 (In some cases the address might have side effects,
3797 and we fail to record that fact here. However, it should not
3798 matter, since expand_expr should not care.) */
3799 TREE_SIDE_EFFECTS (array_adr) = 0;
3801 elt = build1 (INDIRECT_REF, type,
3802 fold (build (PLUS_EXPR,
3803 TYPE_POINTER_TO (variant_type),
3805 fold (build (MULT_EXPR,
3806 TYPE_POINTER_TO (variant_type),
3808 size_in_bytes (type))))));
3810 /* Volatility, etc., of the new expression is the same as for the old one. */
3812 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3813 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3814 TREE_READONLY (elt) = TREE_READONLY (exp);
3816 return expand_expr (elt, target, tmode, modifier);
3819 /* Fold an expression like: "foo"[2].
3820 This is not done in fold so it won't happen inside &. */
3822 if (TREE_CODE (array) == STRING_CST
3823 && TREE_CODE (index) == INTEGER_CST
3824 && !TREE_INT_CST_HIGH (index)
3825 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3827 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3829 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3830 TREE_TYPE (exp) = integer_type_node;
3831 return expand_expr (exp, target, tmode, modifier);
3833 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3835 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3836 TREE_TYPE (exp) = integer_type_node;
3837 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3839 target, tmode, modifier);
3843 /* If this is a constant index into a constant array,
3844 just get the value from the array. Handle both the cases when
3845 we have an explicit constructor and when our operand is a variable
3846 that was declared const. */
3848 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3850 if (TREE_CODE (index) == INTEGER_CST
3851 && TREE_INT_CST_HIGH (index) == 0)
3853 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3855 i = TREE_INT_CST_LOW (index);
3857 elem = TREE_CHAIN (elem);
3859 return expand_expr (fold (TREE_VALUE (elem)), target,
3864 else if (optimize >= 1
3865 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3866 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3867 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3869 if (TREE_CODE (index) == INTEGER_CST
3870 && TREE_INT_CST_HIGH (index) == 0)
3872 tree init = DECL_INITIAL (array);
3874 i = TREE_INT_CST_LOW (index);
3875 if (TREE_CODE (init) == CONSTRUCTOR)
3877 tree elem = CONSTRUCTOR_ELTS (init);
3880 elem = TREE_CHAIN (elem);
3882 return expand_expr (fold (TREE_VALUE (elem)), target,
3885 else if (TREE_CODE (init) == STRING_CST
3886 && i < TREE_STRING_LENGTH (init))
3888 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3889 return convert_to_mode (mode, temp, 0);
3895 /* Treat array-ref with constant index as a component-ref. */
3899 /* If the operand is a CONSTRUCTOR, we can just extract the
3900 appropriate field if it is present. */
3901 if (code != ARRAY_REF
3902 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3906 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3907 elt = TREE_CHAIN (elt))
3908 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3909 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3913 enum machine_mode mode1;
3918 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3919 &mode1, &unsignedp, &volatilep);
3921 /* If we got back the original object, something is wrong. Perhaps
3922 we are evaluating an expression too early. In any event, don't
3923 infinitely recurse. */
3927 /* In some cases, we will be offsetting OP0's address by a constant.
3928 So get it as a sum, if possible. If we will be using it
3929 directly in an insn, we validate it. */
3930 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3932 /* If this is a constant, put it into a register if it is a
3933 legitimate constant and memory if it isn't. */
3934 if (CONSTANT_P (op0))
3936 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3937 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3938 op0 = force_reg (mode, op0);
3940 op0 = validize_mem (force_const_mem (mode, op0));
3945 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3947 if (GET_CODE (op0) != MEM)
3949 op0 = change_address (op0, VOIDmode,
3950 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3951 force_reg (Pmode, offset_rtx)));
3954 /* Don't forget about volatility even if this is a bitfield. */
3955 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3957 op0 = copy_rtx (op0);
3958 MEM_VOLATILE_P (op0) = 1;
3961 if (mode1 == VOIDmode
3962 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3963 && modifier != EXPAND_CONST_ADDRESS
3964 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3965 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3967 /* In cases where an aligned union has an unaligned object
3968 as a field, we might be extracting a BLKmode value from
3969 an integer-mode (e.g., SImode) object. Handle this case
3970 by doing the extract into an object as wide as the field
3971 (which we know to be the width of a basic mode), then
3972 storing into memory, and changing the mode to BLKmode. */
3973 enum machine_mode ext_mode = mode;
3975 if (ext_mode == BLKmode)
3976 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3978 if (ext_mode == BLKmode)
3981 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3982 unsignedp, target, ext_mode, ext_mode,
3983 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3984 int_size_in_bytes (TREE_TYPE (tem)));
3985 if (mode == BLKmode)
3987 rtx new = assign_stack_temp (ext_mode,
3988 bitsize / BITS_PER_UNIT, 0);
3990 emit_move_insn (new, op0);
3991 op0 = copy_rtx (new);
3992 PUT_MODE (op0, BLKmode);
3998 /* Get a reference to just this component. */
3999 if (modifier == EXPAND_CONST_ADDRESS
4000 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4001 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4002 (bitpos / BITS_PER_UNIT)));
4004 op0 = change_address (op0, mode1,
4005 plus_constant (XEXP (op0, 0),
4006 (bitpos / BITS_PER_UNIT)));
4007 MEM_IN_STRUCT_P (op0) = 1;
4008 MEM_VOLATILE_P (op0) |= volatilep;
4009 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4012 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4013 convert_move (target, op0, unsignedp);
4019 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4020 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4021 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4022 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4023 MEM_IN_STRUCT_P (temp) = 1;
4024 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4025 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4026 a location is accessed through a pointer to const does not mean
4027 that the value there can never change. */
4028 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4033 /* Intended for a reference to a buffer of a file-object in Pascal.
4034 But it's not certain that a special tree code will really be
4035 necessary for these. INDIRECT_REF might work for them. */
4039 /* IN_EXPR: Inlined pascal set IN expression.
4042 rlo = set_low - (set_low%bits_per_word);
4043 the_word = set [ (index - rlo)/bits_per_word ];
4044 bit_index = index % bits_per_word;
4045 bitmask = 1 << bit_index;
4046 return !!(the_word & bitmask); */
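/* C-level sketch of the test the code below emits (added
   illustration; note that it works in BITS_PER_UNIT-wide chunks
   rather than the full words of the comment above):

       diff      = index - rlow;
       the_byte  = *(set_addr + diff);
       bit_index = index % BITS_PER_UNIT;
       result    = (the_byte >> bit_index) & 1;

   preceded by range checks against the set bounds whenever those are
   not compile-time constants.  */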
4048 preexpand_calls (exp);
4050 tree set = TREE_OPERAND (exp, 0);
4051 tree index = TREE_OPERAND (exp, 1);
4052 tree set_type = TREE_TYPE (set);
4054 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4055 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4061 rtx diff, quo, rem, addr, bit, result;
4062 rtx setval, setaddr;
4063 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4066 target = gen_reg_rtx (mode);
4068 /* If domain is empty, answer is no. */
4069 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4072 index_val = expand_expr (index, 0, VOIDmode, 0);
4073 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4074 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4075 setval = expand_expr (set, 0, VOIDmode, 0);
4076 setaddr = XEXP (setval, 0);
4078 /* Compare index against bounds, if they are constant. */
4079 if (GET_CODE (index_val) == CONST_INT
4080 && GET_CODE (lo_r) == CONST_INT
4081 && INTVAL (index_val) < INTVAL (lo_r))
4084 if (GET_CODE (index_val) == CONST_INT
4085 && GET_CODE (hi_r) == CONST_INT
4086 && INTVAL (hi_r) < INTVAL (index_val))
4089 /* If we get here, we have to generate the code for both cases
4090 (in range and out of range). */
4092 op0 = gen_label_rtx ();
4093 op1 = gen_label_rtx ();
4095 if (! (GET_CODE (index_val) == CONST_INT
4096 && GET_CODE (lo_r) == CONST_INT))
4098 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4099 GET_MODE (index_val), 0, 0);
4100 emit_jump_insn (gen_blt (op1));
4103 if (! (GET_CODE (index_val) == CONST_INT
4104 && GET_CODE (hi_r) == CONST_INT))
4106 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4107 GET_MODE (index_val), 0, 0);
4108 emit_jump_insn (gen_bgt (op1));
4111 /* Calculate the element number of bit zero in the first word of the set. */
4113 if (GET_CODE (lo_r) == CONST_INT)
4114 rlow = GEN_INT (INTVAL (lo_r)
4115 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4117 rlow = expand_binop (index_mode, and_optab, lo_r,
4118 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4119 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4121 diff = expand_binop (index_mode, sub_optab,
4122 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4124 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4125 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4126 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4127 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
        /* ADDR is the address of the byte containing the bit, so add the
           byte offset QUO, not the raw bit offset DIFF, to SETADDR.  */
        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab,
                                             quo, setaddr, NULL_RTX, 0,
                                             OPTAB_LIB_WIDEN));

        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx (MEM, byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
        {
          RTL_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
          cleanups_this_call
            = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 2) = 0;
        }
      return RTL_EXPR_RTL (exp);

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        return expand_builtin (exp, target, subtarget, tmode, ignore);
      /* If this call was expanded already by preexpand_calls,
         just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
        return CALL_EXPR_RTL (exp);
      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
          if (target == 0)
            {
              if (mode == BLKmode)
                {
                  if (TYPE_SIZE (type) == 0
                      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
                    abort ();
                  target = assign_stack_temp (BLKmode,
                                              (TREE_INT_CST_LOW (TYPE_SIZE (type))
                                               + BITS_PER_UNIT - 1)
                                              / BITS_PER_UNIT, 0);
                }
              else
                target = gen_reg_rtx (mode);
            }

          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        change_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
                         TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));

          /* Return the entire union.  */
          return target;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If arg is a constant integer being extended from a narrower mode,
         we must really truncate to get the extended bits right.  Otherwise
         (unsigned long) (unsigned char) ("\377"[0])
         would come out as ffffffff.  */
      if (GET_MODE (op0) == VOIDmode
          && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
              < GET_MODE_BITSIZE (mode)))
        {
          /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
          int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));

          if (width < HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
                                   : CONST_DOUBLE_LOW (op0));
              if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
                  || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
                val &= ((HOST_WIDE_INT) 1 << width) - 1;
              else
                val |= ~(((HOST_WIDE_INT) 1 << width) - 1);

              op0 = GEN_INT (val);
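              /* Example (illustrative): for
                 (unsigned long) (unsigned char) ("\377"[0]),
                 OP0 arrives as the VOIDmode constant -1 with WIDTH == 8;
                 the inner type is unsigned, so VAL is masked down to 0xff
                 and OP0 becomes 255 rather than all-ones.  */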
            }
          else
            op0 = (simplify_unary_operation
                   ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
                     ? ZERO_EXTEND : SIGN_EXTEND),
                    mode, op0,
                    TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));

          return op0;
        }

      if (GET_MODE (op0) == VOIDmode)
        if (modifier == EXPAND_INITIALIZER)
          return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (flag_force_mem && GET_CODE (op0) == MEM)
        op0 = copy_to_reg (op0);

      if (target == 0)
        return convert_to_mode (mode, op0,
                                TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */
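      /* For example (illustrative), (x + 4) + FP is rearranged below to
         (FP + 4) + x, so that the constant 4 can be absorbed into the
         offset when the frame pointer is eliminated.  */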
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be Pmode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
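      /* For example (illustrative), with int arr[10] and 4-byte ints,
         &arr[5] folds via plus_constant into the single constant
         (symbol_ref "arr") plus 20 instead of an explicit add insn.  */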
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
              || mode == Pmode))
        {
          op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                             EXPAND_SUM);
          op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
          if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
            op1 = force_operand (op1, target);
          return op1;
        }

      else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
                   || mode == Pmode))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);
          op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
          if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
            op0 = force_operand (op0, target);
          return op0;
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != Pmode)
        goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx (PLUS, mode, op0, op1);

    case MINUS_EXPR:
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
                                 VOIDmode, modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                 VOIDmode, modifier);
          return gen_rtx (MINUS, mode, op0, op1);
        }
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
                       fold (build1 (NEGATE_EXPR, type,
                                     TREE_OPERAND (exp, 1))));
          goto plus_expr;
        }
      this_optab = sub_optab;
      goto binop;

    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          register tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == Pmode
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);

          /* Apply distributive law if OP0 is x+c.  */
          if (GET_CODE (op0) == PLUS
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            return gen_rtx (PLUS, mode,
                            gen_rtx (MULT, mode, XEXP (op0, 0),
                                     GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
                            GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
                                     * INTVAL (XEXP (op0, 1))));
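          /* For example (illustrative), (x + 2) * 12 becomes
             x * 12 + 24 here, a shape that fits base + index*scale +
             displacement addressing modes directly.  */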
          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return gen_rtx (MULT, mode, op0,
                          GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
        }

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
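      /* For example (illustrative), on a machine with an HImode-to-SImode
         widening multiply insn, (int) s1 * (int) s2 with short operands
         can use that single insn instead of two sign extensions followed
         by a full SImode multiply.  */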
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              || (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
                  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
                  /* If both operands are extended, they must either both
                     be zero-extended or both be sign-extended.  */
                  && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                      == TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode)
              && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
            {
              op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                 NULL_RTX, VOIDmode, 0);
              if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                   VOIDmode, 0);
              else
                op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                   NULL_RTX, VOIDmode, 0);
              goto binop2;
            }
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM;
         then, if the divisor is constant, we can optimize the case
         where some terms of the dividend have coefficients divisible
         by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      {
        enum machine_mode opmode
          = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
            || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
          return expand_complex_abs (opmode, op0, target, unsignedp);
      }

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      /* First try to do it with a special abs instruction.  */
      temp = expand_unop (mode, abs_optab, op0, target, 0);
      if (temp != 0)
        return temp;

      /* If this machine has expensive jumps, we can do integer absolute
         value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
         where W is the width of MODE.  */
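      /* Worked example (illustrative) with W == 32: for x == -5,
         x >> 31 is -1, (-1 ^ -5) is 4, and 4 - (-1) is 5; for x >= 0
         the shift yields 0 and x passes through unchanged.  */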
      if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
        {
          rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
                                       size_int (GET_MODE_BITSIZE (mode) - 1),
                                       NULL_RTX, 0);

          temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
                               OPTAB_LIB_WIDEN);
          if (temp != 0)
            temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
                                 OPTAB_LIB_WIDEN);

          if (temp != 0)
            return temp;
        }

      /* If that does not win, use conditional jump and negate.  */
      target = original_target;
      temp = gen_label_rtx ();
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      emit_move_insn (target, op0);
      emit_cmp_insn (target,
                     expand_expr (convert (type, integer_zero_node),
                                  NULL_RTX, VOIDmode, 0),
                     GE, NULL_RTX, mode, 0, 0);

      emit_jump_insn (gen_bge (temp));
      op0 = expand_unop (mode, neg_optab, target, target, 0);
      if (op0 != target)
        emit_move_insn (target, op0);
      emit_label (temp);
      return target;

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      if (target != op0)
        emit_move_insn (target, op0);
      op0 = gen_label_rtx ();
      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && !can_compare_p (mode))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          target, op1, NULL, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          op1, target, NULL, op0);
          emit_move_insn (target, op1);
        }
      else
        {
          if (code == MAX_EXPR)
            temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
                    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
                    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
          else
            temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
                    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
                    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
          if (temp == const0_rtx)
            emit_move_insn (target, op1);
          else if (temp != const_true_rtx)
            {
              if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
                emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
              else
                abort ();
              emit_move_insn (target, op1);
            }
        }
      emit_label (op0);
      return target;

      /* ??? Can optimize when the operand of this is a bitwise operation,
         by using a different bitwise operation.  */
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.
         TRUTH_AND_EXPR is for anding two boolean values
         when we want in all cases to compute both of them.
         In general it is fastest to do TRUTH_AND_EXPR by
         computing both operands as actual zero-or-1 values
         and then bitwise anding.  In cases where there cannot
         be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
         but the question is how to recognize those cases.  */
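      /* For example (illustrative), (a < b) && (c < d) expanded as
         TRUTH_AND_EXPR computes both comparisons as 0-or-1 values and
         ANDs them, trading a conditional branch for straight-line
         code.  */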
    case BIT_AND_EXPR:
    case TRUTH_AND_EXPR:
      this_optab = and_optab;
      goto binop;

      /* See comment above about TRUTH_AND_EXPR; it applies here too.  */
    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case BIT_XOR_EXPR:
    case TRUTH_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);

      /* Could determine the answer when only additive constants differ.
         Also, the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
          if (temp != original_target)
            temp = copy_to_reg (temp);
          op1 = gen_label_rtx ();
          emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
                         GET_MODE (temp), unsignedp, 0);
          emit_jump_insn (gen_beq (op1));
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }

      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (target == 0 || ! safe_from_p (target, exp)
          /* Make sure we don't have a hard reg (such as function's return
             value) live across basic blocks, if not optimizing.  */
          || (!optimize && GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
      emit_clr_insn (target);
      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);
      emit_0_to_1_insn (target);
      emit_label (op1);
      return target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, modifier);

    case COND_EXPR:
      /* Note that COND_EXPRs whose type is a structure or union
         are required to be constructed to contain assignments of
         a temporary variable, so that we can evaluate them here
         for side effect only.  If type is void, we must do likewise.  */

      /* If an arm of the branch requires a cleanup,
         only that cleanup is performed.  */

      {
        tree singleton = 0;
        tree binary_op = 0, unary_op = 0;
        tree old_cleanups = cleanups_this_call;
        cleanups_this_call = 0;

        /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
           convert it to our mode, if necessary.  */
        if (integer_onep (TREE_OPERAND (exp, 1))
            && integer_zerop (TREE_OPERAND (exp, 2))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            if (ignore)
              {
                expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                             modifier);
                return const0_rtx;
              }

            op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
            if (GET_MODE (op0) == mode)
              return op0;
            if (target == 0)
              target = gen_reg_rtx (mode);
            convert_move (target, op0, unsignedp);
            return target;
          }

        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary.  */

        if (ignore)
          temp = 0;
        else if (original_target
                 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
          temp = original_target;
        else if (mode == BLKmode)
          {
            if (TYPE_SIZE (type) == 0
                || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
              abort ();
            temp = assign_stack_temp (BLKmode,
                                      (TREE_INT_CST_LOW (TYPE_SIZE (type))
                                       + BITS_PER_UNIT - 1)
                                      / BITS_PER_UNIT, 0);
          }
        else
          temp = gen_reg_rtx (mode);

        /* Check for X ? A + B : A.  If we have this, we can copy
           A to the output and conditionally add B.  Similarly for unary
           operations.  Don't do this if X has side-effects because
           those side effects might affect A or B and the "?" operation is
           a sequence point in ANSI.  (We test for side effects later.)  */
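        /* For example (illustrative), x ? a | 8 : a matches the first
           test below, with SINGLETON == a and BINARY_OP == a | 8.  */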
        if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
            && operand_equal_p (TREE_OPERAND (exp, 2),
                                TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 2),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

        /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
           operation, do this as A + (X != 0).  Similarly for other simple
           binary operators.  */
        if (temp && singleton && binary_op
            && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
            && (TREE_CODE (binary_op) == PLUS_EXPR
                || TREE_CODE (binary_op) == MINUS_EXPR
                || TREE_CODE (binary_op) == BIT_IOR_EXPR
                || TREE_CODE (binary_op) == BIT_XOR_EXPR
                || TREE_CODE (binary_op) == BIT_AND_EXPR)
            && integer_onep (TREE_OPERAND (binary_op, 1))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            rtx result;
            optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
                            : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
                            : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
                            : and_optab);

            /* If we had X ? A : A + 1, do this as A + (X == 0).

               We have to invert the truth value here and then put it
               back later if do_store_flag fails.  We cannot simply copy
               TREE_OPERAND (exp, 0) to another variable and modify that
               because invert_truthvalue can modify the tree pointed to
               by its argument.  */
            if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));

            result = do_store_flag (TREE_OPERAND (exp, 0),
                                    (safe_from_p (temp, singleton)
                                     ? temp : NULL_RTX),
                                    mode, BRANCH_COST <= 1);

            if (result)
              {
                op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
                return expand_binop (mode, boptab, op1, result, temp,
                                     unsignedp, OPTAB_LIB_WIDEN);
              }
            else if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));
          }

        op0 = gen_label_rtx ();

        if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
          {
            if (temp != 0)
              {
                /* If the target conflicts with the other operand of the
                   binary op, we can't use it.  Also, we can't use the target
                   if it is a hard register, because evaluating the condition
                   might clobber it.  */
                if ((binary_op
                     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
                    || (GET_CODE (temp) == REG
                        && REGNO (temp) < FIRST_PSEUDO_REGISTER))
                  temp = gen_reg_rtx (mode);
                store_expr (singleton, temp, 0);
              }
            else
              expand_expr (singleton,
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            if (cleanups_this_call)
              {
                sorry ("aggregate value in COND_EXPR");
                cleanups_this_call = 0;
              }
            if (singleton == TREE_OPERAND (exp, 1))
              jumpif (TREE_OPERAND (exp, 0), op0);
            else
              jumpifnot (TREE_OPERAND (exp, 0), op0);

            if (binary_op && temp == 0)
              /* Just touch the other operand.  */
              expand_expr (TREE_OPERAND (binary_op, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            else if (binary_op)
              store_expr (build (TREE_CODE (binary_op), type,
                                 make_tree (type, temp),
                                 TREE_OPERAND (binary_op, 1)),
                          temp, 0);
            else
              store_expr (build1 (TREE_CODE (unary_op), type,
                                  make_tree (type, temp)),
                          temp, 0);
            op1 = op0;
          }
        /* This is now done in jump.c and is better done there because it
           produces shorter register lifetimes.

           Check for both possibilities, either constants or variables
           in registers (but not the same as the target!).  If so, we can
           save branches by assigning one, branching, and assigning the
           other.  */
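        /* For example (illustrative), x > 0 ? x : y first stores x into
           TEMP, branches on x > 0 (reusing the same reference to x so
           cse can merge the two), and only on fall-through stores y.  */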
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1), 0)
                 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            jumpif (TREE_OPERAND (exp, 0), op0);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            op1 = op0;
          }
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 2), 0)
                 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            jumpifnot (TREE_OPERAND (exp, 0), op0);
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            op1 = op0;
          }
        else
          {
            op1 = gen_label_rtx ();
            jumpifnot (TREE_OPERAND (exp, 0), op0);
            if (temp != 0)
              store_expr (TREE_OPERAND (exp, 1), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            if (cleanups_this_call)
              {
                sorry ("aggregate value in COND_EXPR");
                cleanups_this_call = 0;
              }

            emit_queue ();
            emit_jump_insn (gen_jump (op1));
            emit_barrier ();
            emit_label (op0);
            if (temp != 0)
              store_expr (TREE_OPERAND (exp, 2), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 2),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
          }

        if (cleanups_this_call)
          {
            sorry ("aggregate value in COND_EXPR");
            cleanups_this_call = 0;
          }

        emit_queue ();
        emit_label (op1);
        cleanups_this_call = old_cleanups;
        return temp;
      }

    case TARGET_EXPR:
      {
        /* Something needs to be initialized, but we didn't know
           where that thing was when building the tree.  For example,
           it could be the return value of a function, or a parameter
           to a function which is laid out on the stack, or a temporary
           variable which must be passed by reference.

           We guarantee that the expression will either be constructed
           or copied into our original target.  */

        tree slot = TREE_OPERAND (exp, 0);
        tree exp1;

        if (TREE_CODE (slot) != VAR_DECL)
          abort ();

        if (target == 0)
          {
            if (DECL_RTL (slot) != 0)
              {
                target = DECL_RTL (slot);
                /* We have already expanded the slot, so don't do
                   it again.  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
                /* All temp slots at this level must not conflict.  */
                preserve_temp_slots (target);
                DECL_RTL (slot) = target;

                /* I bet this needs to be done, and I bet that it needs to
                   be above, inside the else clause.  The reason is
                   simple, how else is it going to get cleaned up? (mrs)

                   The reason it probably did not work before, and was
                   commented out, is that this was re-expanding already
                   expanded target_exprs (target == 0 and DECL_RTL (slot)
                   != 0), also cleaning them up many times as well.  :-( */

                /* Since SLOT is not known to the called function
                   to belong to its stack frame, we must build an explicit
                   cleanup.  This case occurs when we must build up a reference
                   to pass the reference as an argument.  In this case,
                   it is very likely that such a reference need not be
                   built here.  */

                if (TREE_OPERAND (exp, 2) == 0)
                  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
                if (TREE_OPERAND (exp, 2))
                  cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
                                                  cleanups_this_call);
              }
          }
        else
          {
            /* This case does occur, when expanding a parameter which
               needs to be constructed on the stack.  The target
               is the actual stack address that we want to initialize.
               The function we call will perform the cleanup in this case.  */

            /* If we have already assigned it space, use that space,
               not the target that we were passed in, as our target
               parameter is only a hint.  */
            if (DECL_RTL (slot) != 0)
              {
                target = DECL_RTL (slot);
                /* We have already expanded the slot, so don't do
                   it again.  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }

            DECL_RTL (slot) = target;
          }

        exp1 = TREE_OPERAND (exp, 1);
        /* Mark it as expanded.  */
        TREE_OPERAND (exp, 1) = NULL_TREE;

        return expand_expr (exp1, target, tmode, modifier);
      }

    case INIT_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
          noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_VALUE (noncopied_parts),
                               TREE_PURPOSE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }

    case MODIFY_EXPR:
      {
        /* If lhs is complex, expand calls in rhs before computing it.
           That's so we don't compute a pointer and save it over a call.
           If lhs is simple, compute it first so we can give it as a
           target if the rhs is just a call.  This avoids an extra temp and copy
           and that prevents a partial-subsumption which makes bad code.
           Actually we could treat component_ref's of vars like vars.  */

        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        if (TREE_CODE (lhs) != VAR_DECL
            && TREE_CODE (lhs) != RESULT_DECL
            && TREE_CODE (lhs) != PARM_DECL)
          preexpand_calls (exp);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size one.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
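        /* For example (illustrative), s.a |= s.b with one-bit fields
           expands to: jump past the store when s.b is 0, otherwise
           store 1 into s.a; no read-modify-write of s.a is needed.  */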
        if (ignore
            && TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
            && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)),
                               0, 0);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
            && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
          noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_PURPOSE (noncopied_parts),
                               TREE_VALUE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore);

    case ADDR_EXPR:
      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
          && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
        {
          op0 = trampoline_address (TREE_OPERAND (exp, 0));
          op0 = force_operand (op0, target);
        }
      else
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
                             (modifier == EXPAND_INITIALIZER
                              ? modifier : EXPAND_CONST_ADDRESS));

          /* We would like the object in memory.  If it is a constant,
             we can have it be statically allocated into memory.  For
             a non-constant (REG or SUBREG), we need to allocate some
             memory and store the value into it.  */

          if (CONSTANT_P (op0))
            op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                   op0);

          if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
            {
              /* If this object is in a register, it must not stay there:
                 copy it into a stack temporary so that we have a memory
                 address to hand back.  */
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              enum machine_mode inner_mode = TYPE_MODE (inner_type);
              rtx memloc
                = assign_stack_temp (inner_mode,
                                     int_size_in_bytes (inner_type), 1);

              emit_move_insn (memloc, op0);
              op0 = memloc;
            }

          if (GET_CODE (op0) != MEM)
            abort ();

          if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
            return XEXP (op0, 0);
          op0 = force_operand (XEXP (op0, 0), target);
        }
      if (flag_force_addr && GET_CODE (op0) != REG)
        return force_reg (Pmode, op0);
      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx prev;

        /* Get the rtx code of the operands.  */
        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
        op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

        if (target == 0)
          target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

        prev = get_last_insn ();

        /* Tell flow that the whole of the destination is being set.  */
        if (GET_CODE (target) == REG)
          emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

        /* Move the real (op0) and imaginary (op1) parts to their location.  */
        emit_move_insn (gen_realpart (mode, target), op0);
        emit_move_insn (gen_imagpart (mode, target), op1);

        /* Complex construction should appear as a single unit.  */
        group_insns (prev);

        return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx imag_t;
        rtx prev;

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        if (target == 0)
          target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

        prev = get_last_insn ();

        /* Tell flow that the whole of the destination is being set.  */
        if (GET_CODE (target) == REG)
          emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

        /* Store the realpart and the negated imagpart to target.  */
        emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));

        imag_t = gen_imagpart (mode, target);
        temp = expand_unop (mode, neg_optab,
                            gen_imagpart (mode, op0), imag_t, 0);
        if (temp != imag_t)
          emit_move_insn (imag_t, temp);

        /* Conjugate should appear as a single unit.  */
        group_insns (prev);

        return target;
      }

    case ERROR_MARK:
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
        return op0;
      return const0_rtx;

    default:
      return (*lang_expand_expr) (exp, target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */
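/* For example (illustrative): for &buf[1] where BUF is a 4-byte-aligned
   char array, the PLUS_EXPR case below caps the result at the largest
   power of two dividing the one-byte offset, i.e. 8 bits.  */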
static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case NON_LVALUE_EXPR:
          exp = TREE_OPERAND (exp, 0);
          if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
            return align;
          inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
          inner = MIN (inner, max_align);
          align = MAX (align, inner);
          break;

        case PLUS_EXPR:
          /* If sum of pointer + int, restrict our maximum alignment to that
             imposed by the integer.  If not, we can't do any better than
             ALIGN.  */
          if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
            return align;
          while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
                  & (max_align - 1)) != 0)
            max_align >>= 1;
          exp = TREE_OPERAND (exp, 0);
          break;

        case ADDR_EXPR:
          /* See what we are pointing at and look at its alignment.  */
          exp = TREE_OPERAND (exp, 0);
          if (TREE_CODE (exp) == FUNCTION_DECL)
            align = MAX (align, FUNCTION_BOUNDARY);
          else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
            align = MAX (align, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
          else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
            align = CONSTANT_ALIGNMENT (exp, align);
#endif
          return MIN (align, max_align);

        default:
          return align;
        }
    }
}

/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = arg1;
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = arg0;
          return TREE_OPERAND (arg1, 0);
        }
    }
  return 0;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
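/* For example (illustrative): for the constant "foo\0bar" with a known
   offset of 4, c_strlen returns size_int (3), the length of "bar";
   with an unknown offset it must give up because of the embedded
   zero byte.  */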
static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;
  int i;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return 0;
      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  */
      /* This would perhaps not be valid if we were dealing with named
         arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
        return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }
  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }
  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
        break;

      if (arglist == 0
          /* Arg could be wrong type if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
        return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
          && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
        {
          exp = copy_node (exp);
          arglist = copy_node (arglist);
          TREE_OPERAND (exp, 1) = arglist;
          TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
        }
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_SIN:
          builtin_optab = sin_optab; break;
        case BUILT_IN_COS:
          builtin_optab = cos_optab; break;
        case BUILT_IN_FSQRT:
          builtin_optab = sqrt_optab; break;
        default:
          abort ();
        }

      /* Compute into TARGET.
         Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
                            builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
         sequence (without outputting the insns) and break, causing
         a call to the library function.  */
      if (target == 0)
        {
          end_sequence ();
          break;
        }

      /* Check the results by default.  But if flag_fast_math is turned on,
         then assume sqrt will always be called with valid arguments.  */

      if (! flag_fast_math)
        {
          /* Don't define the builtin FP instructions
             if your machine is not IEEE.  */
          if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
            abort ();

          lab1 = gen_label_rtx ();

          /* Test the result; if it is NaN, set errno=EDOM because
             the argument was not in the domain.  */
          emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
          emit_jump_insn (gen_beq (lab1));

          {
#ifdef GEN_ERRNO_RTX
            rtx errno_rtx = GEN_ERRNO_RTX;
#else
            rtx errno_rtx
              = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
#endif

            emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
          }

          /* We can't set errno=EDOM directly; let the library call do it.
             Pop the arguments right away in case the call gets deleted.  */
          expand_call (exp, target, 0);

          emit_label (lab1);
        }

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;

      /* __builtin_apply_args returns block of memory allocated on
         the stack into which is stored the arg pointer, structure
         value address, static chain, and all the registers that might
         possibly be used in performing a function call.  The code is
         moved to the start of the function so the incoming values are
         saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
         Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
        return apply_args_value;
      {
        /* When this function is called, it means that registers must be
           saved on entry to this function.  So we migrate the
           call to the first insn of this function.  */
        rtx temp;
        rtx seq;

        start_sequence ();
        temp = expand_builtin_apply_args ();
        seq = get_insns ();
        end_sequence ();

        apply_args_value = temp;

        /* Put the sequence after the NOTE that starts the function.
           If this is inside a SEQUENCE, make the outer-level insn
           chain current, so the code is placed at the start of the
           function.  */
        push_topmost_sequence ();
        emit_insns_before (seq, NEXT_INSN (get_insns ()));
        pop_topmost_sequence ();
        return temp;
      }

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
         FUNCTION with a copy of the parameters described by
         ARGUMENTS, and ARGSIZE.  It returns a block of memory
         allocated on the stack into which is stored all the registers
         that might possibly be used for returning the result of a
         function.  ARGUMENTS is the value returned by
         __builtin_apply_args.  ARGSIZE is the number of bytes of
         arguments that must be copied.  ??? How should this value be
         computed?  We'll also need a safe worst case value for varargs
         functions.  */
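      /* Typical use of this builtin family (illustrative):

             void *args = __builtin_apply_args ();
             void *result = __builtin_apply (fn, args, size);
             __builtin_return (result);

         which forwards the current function's arguments to FN and
         then returns whatever FN returned.  */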
    case BUILT_IN_APPLY:
      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        return const0_rtx;
      else
        {
          int i;
          tree t;
          rtx ops[3];

          for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
            ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

          return expand_builtin_apply (ops[0], ops[1], ops[2]);
        }

      /* __builtin_return (RESULT) causes the function to return the
         value described by RESULT.  RESULT is address of the block of
         memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
        expand_builtin_return (expand_expr (TREE_VALUE (arglist),
                                            NULL_RTX, VOIDmode, 0));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
         Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
        return saveregs_value;
      {
        /* When this function is called, it means that registers must be
           saved on entry to this function.  So we migrate the
           call to the first insn of this function.  */
        rtx temp;
        rtx seq;
        rtx valreg, saved_valreg;

        /* Now really call the function.  `expand_call' does not call
           expand_builtin, so there is no danger of infinite recursion here.  */
        start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
        /* Do whatever the machine needs done in this case.  */
        temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
        /* The register where the function returns its value
           is likely to have something else in it, such as an argument.
           So preserve that register around the call.  */
        if (value_mode != VOIDmode)
          {
            valreg = hard_libcall_value (value_mode);
            saved_valreg = gen_reg_rtx (value_mode);
            emit_move_insn (saved_valreg, valreg);
          }

        /* Generate the call, putting the value in a pseudo.  */
        temp = expand_call (exp, target, ignore);

        if (value_mode != VOIDmode)
          emit_move_insn (valreg, saved_valreg);
#endif

        seq = get_insns ();
        end_sequence ();

        saveregs_value = temp;

        /* Put the sequence after the NOTE that starts the function.
           If this is inside a SEQUENCE, make the outer-level insn
           chain current, so the code is placed at the start of the
           function.  */
        push_topmost_sequence ();
        emit_insns_before (seq, NEXT_INSN (get_insns ()));
        pop_topmost_sequence ();
        return temp;
      }

      /* __builtin_args_info (N) returns word N of the arg space info
         for the current function.  The number and meanings of words
         are controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
        int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
        int i;
        int *word_ptr = (int *) &current_function_args_info;
        tree type, elts, result;

        if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
          fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
                 __FILE__, __LINE__);

        if (arglist != 0)
          {
            tree arg = TREE_VALUE (arglist);
            if (TREE_CODE (arg) != INTEGER_CST)
              error ("argument of `__builtin_args_info' must be constant");
            else
              {
                int wordnum = TREE_INT_CST_LOW (arg);

                if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
                  error ("argument of `__builtin_args_info' out of range");
                else
                  return GEN_INT (word_ptr[wordnum]);
              }
          }
        else
          error ("missing argument in `__builtin_args_info'");

        return const0_rtx;

#if 0
        elts = NULL_TREE;
        for (i = 0; i < nwords; i++)
          elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);

        type = build_array_type (integer_type_node,
                                 build_index_type (build_int_2 (nwords, 0)));
        result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
        TREE_CONSTANT (result) = 1;
        TREE_STATIC (result) = 1;
        result = build (INDIRECT_REF, build_pointer_type (type), result);
        TREE_CONSTANT (result) = 1;
        return expand_expr (result, NULL_RTX, VOIDmode, 0);
#endif
      }

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
        tree fntype = TREE_TYPE (current_function_decl);
        if (!(TYPE_ARG_TYPES (fntype) != 0
              && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
                  != void_type_node)))
          {
            error ("`va_start' used in function with fixed args");
            return const0_rtx;
          }
      }

      return expand_binop (Pmode, add_optab,
                           current_function_internal_arg_pointer,
                           current_function_arg_offset_rtx,
                           NULL_RTX, 0, OPTAB_LIB_WIDEN);

    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
        {
          tree type = TREE_TYPE (TREE_VALUE (arglist));
          enum tree_code code = TREE_CODE (type);
          if (code == VOID_TYPE)
            return GEN_INT (void_type_class);
          if (code == INTEGER_TYPE)
            return GEN_INT (integer_type_class);
          if (code == CHAR_TYPE)
            return GEN_INT (char_type_class);
          if (code == ENUMERAL_TYPE)
            return GEN_INT (enumeral_type_class);
          if (code == BOOLEAN_TYPE)
            return GEN_INT (boolean_type_class);
          if (code == POINTER_TYPE)
            return GEN_INT (pointer_type_class);
          if (code == REFERENCE_TYPE)
            return GEN_INT (reference_type_class);
          if (code == OFFSET_TYPE)
            return GEN_INT (offset_type_class);
          if (code == REAL_TYPE)
            return GEN_INT (real_type_class);
          if (code == COMPLEX_TYPE)
            return GEN_INT (complex_type_class);
          if (code == FUNCTION_TYPE)
            return GEN_INT (function_type_class);
          if (code == METHOD_TYPE)
            return GEN_INT (method_type_class);
          if (code == RECORD_TYPE)
            return GEN_INT (record_type_class);
          if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
            return GEN_INT (union_type_class);
          if (code == ARRAY_TYPE)
            return GEN_INT (array_type_class);
          if (code == STRING_TYPE)
            return GEN_INT (string_type_class);
          if (code == SET_TYPE)
            return GEN_INT (set_type_class);
          if (code == FILE_TYPE)
            return GEN_INT (file_type_class);
          if (code == LANG_TYPE)
            return GEN_INT (lang_type_class);
        }
      return GEN_INT (no_type_class);

    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
        return const0_rtx;
      else
        return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
                ? const1_rtx : const0_rtx);

    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
         It counts the number of frames to scan up the stack.
         The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
         It counts the number of frames to scan up the stack.
         The value is the return address saved in that frame.  */
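      /* For example (illustrative), __builtin_return_address (0) is the
         address this function will return to, and
         __builtin_frame_address (1) follows one link up the chain of
         saved frame pointers.  */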
5991 /* Warning about missing arg was already issued. */
5993 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5995 error ("invalid arg to `__builtin_return_address'");
5998 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6000 error ("invalid arg to `__builtin_return_address'");
6005 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6006 rtx tem = frame_pointer_rtx;
6009 /* Some machines need special handling before we can access arbitrary
6010 frames. For example, on the sparc, we must first flush all
6011 register windows to the stack. */
6012 #ifdef SETUP_FRAME_ADDRESSES
6013 SETUP_FRAME_ADDRESSES ();
6016 /* On the sparc, the return address is not in the frame, it is
6017 in a register. There is no way to access it off of the current
6018 frame pointer, but it can be accessed off the previous frame
6019 pointer by reading the value from the register window save
6021 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6022 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6026 /* Scan back COUNT frames to the specified frame. */
6027 for (i = 0; i < count; i++)
6029 /* Assume the dynamic chain pointer is in the word that
6030 the frame address points to, unless otherwise specified. */
6031 #ifdef DYNAMIC_CHAIN_ADDRESS
6032 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6034 tem = memory_address (Pmode, tem);
6035 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6038 /* For __builtin_frame_address, return what we've got. */
6039 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6042 /* For __builtin_return_address,
6043 Get the return address from that frame. */
6044 #ifdef RETURN_ADDR_RTX
6045 return RETURN_ADDR_RTX (count, tem);
6047 tem = memory_address (Pmode,
6048 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6049 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6053 case BUILT_IN_ALLOCA:
6055 /* Arg could be non-integer if user redeclared this fcn wrong. */
6056 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6058 current_function_calls_alloca = 1;
6059 /* Compute the argument. */
6060 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6062 /* Allocate the desired space. */
6063 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6065 /* Record the new stack level for nonlocal gotos. */
6066 if (nonlocal_goto_handler_slot != 0)
6067 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6071 /* If not optimizing, call the library function. */
6076 /* Arg could be non-integer if user redeclared this fcn wrong. */
6077 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6080 /* Compute the argument. */
6081 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6082 /* Compute ffs, into TARGET if possible.
6083 Set TARGET to wherever the result comes back. */
6084 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6085 ffs_optab, op0, target, 1);
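/* For reference, a minimal C sketch of the semantics the ffs insn or
   library call must provide (matching the C library ffs: the 1-based
   index of the least significant set bit, or 0 for a zero argument):

	int ffs (int x)
	{
	  int i;
	  if (x == 0)
	    return 0;
	  for (i = 1; (x & 1) == 0; i++)
	    x = (int) ((unsigned) x >> 1);
	  return i;
	}
*/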
6090 case BUILT_IN_STRLEN:
6091 /* If not optimizing, call the library function. */
6096 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6097 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6101 tree src = TREE_VALUE (arglist);
6102 tree len = c_strlen (src);
6105 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6107 rtx result, src_rtx, char_rtx;
6108 enum machine_mode insn_mode = value_mode, char_mode;
6109 enum insn_code icode;
6111 /* If the length is known, just return it. */
6113 return expand_expr (len, target, mode, 0);
6115 /* If SRC is not a pointer type, don't do this operation inline. */
6119 /* Call a function if we can't compute strlen in the right mode. */
6121 while (insn_mode != VOIDmode)
6123 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6124 if (icode != CODE_FOR_nothing)
6127 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6129 if (insn_mode == VOIDmode)
6132 /* Make a place to write the result of the instruction. */
6135 && GET_CODE (result) == REG
6136 && GET_MODE (result) == insn_mode
6137 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6138 result = gen_reg_rtx (insn_mode);
6140 /* Make sure the operands are acceptable to the predicates. */
6142 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6143 result = gen_reg_rtx (insn_mode);
6145 src_rtx = memory_address (BLKmode,
6146 expand_expr (src, NULL_RTX, Pmode,
6148 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6149 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6151 char_rtx = const0_rtx;
6152 char_mode = insn_operand_mode[(int)icode][2];
6153 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6154 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6156 emit_insn (GEN_FCN (icode) (result,
6157 gen_rtx (MEM, BLKmode, src_rtx),
6158 char_rtx, GEN_INT (align)));
6160 /* Return the value in the proper mode for this function. */
6161 if (GET_MODE (result) == value_mode)
6163 else if (target != 0)
6165 convert_move (target, result, 0);
6169 return convert_to_mode (value_mode, result, 0);
6172 case BUILT_IN_STRCPY:
6173 /* If not optimizing, call the library function. */
6178 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6179 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6180 || TREE_CHAIN (arglist) == 0
6181 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6185 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6190 len = size_binop (PLUS_EXPR, len, integer_one_node);
6192 chainon (arglist, build_tree_list (NULL_TREE, len));
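/* At the source level the transformation just made is (illustration):

	strcpy (d, "abc");    becomes    memcpy (d, "abc", 4);

   i.e. when the source length is a compile-time constant we append
   strlen + 1 (counting the terminating null) to the argument list and
   drop through to the BUILT_IN_MEMCPY case below.  */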
6196 case BUILT_IN_MEMCPY:
6197 /* If not optimizing, call the library function. */
6202 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6203 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6204 || TREE_CHAIN (arglist) == 0
6205 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6206 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6207 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6211 tree dest = TREE_VALUE (arglist);
6212 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6213 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6216 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6218 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6219 rtx dest_rtx, dest_mem, src_mem;
6221 /* If either SRC or DEST is not a pointer type, don't do
6222 this operation inline. */
6223 if (src_align == 0 || dest_align == 0)
6225 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6226 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6230 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6231 dest_mem = gen_rtx (MEM, BLKmode,
6232 memory_address (BLKmode, dest_rtx));
6233 src_mem = gen_rtx (MEM, BLKmode,
6234 memory_address (BLKmode,
6235 expand_expr (src, NULL_RTX,
6239 /* Copy word part most expediently. */
6240 emit_block_move (dest_mem, src_mem,
6241 expand_expr (len, NULL_RTX, VOIDmode, 0),
6242 MIN (src_align, dest_align));
6246 /* These comparison functions need an instruction that returns an actual
6247 index. An ordinary compare that just sets the condition codes is not enough. */
6249 #ifdef HAVE_cmpstrsi
6250 case BUILT_IN_STRCMP:
6251 /* If not optimizing, call the library function. */
6256 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6257 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6258 || TREE_CHAIN (arglist) == 0
6259 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6261 else if (!HAVE_cmpstrsi)
6264 tree arg1 = TREE_VALUE (arglist);
6265 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6269 len = c_strlen (arg1);
6271 len = size_binop (PLUS_EXPR, integer_one_node, len);
6272 len2 = c_strlen (arg2);
6274 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6276 /* If we don't have a constant length for the first, use the length
6277 of the second, if we know it. We don't require a constant for
6278 this case; some cost analysis could be done if both are available
6279 but neither is constant. For now, assume they're equally cheap.
6281 If both strings have constant lengths, use the smaller. This
6282 could arise if optimization results in strcmp being called with
6283 two fixed strings, or if the code was machine-generated. We should
6284 add some code to the `memcmp' handler below to deal with such
6285 situations, someday. */
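/* Example (illustrative): for strcmp ("abcd", "ab") both lengths are
   constant, so the smaller, 2 + 1 = 3, is used; the third byte ('c'
   against the terminating null) then decides the comparison.  */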
6286 if (!len || TREE_CODE (len) != INTEGER_CST)
6293 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6295 if (tree_int_cst_lt (len2, len))
6299 chainon (arglist, build_tree_list (NULL_TREE, len));
6303 case BUILT_IN_MEMCMP:
6304 /* If not optimizing, call the library function. */
6309 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6310 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6311 || TREE_CHAIN (arglist) == 0
6312 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6313 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6314 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6316 else if (!HAVE_cmpstrsi)
6319 tree arg1 = TREE_VALUE (arglist);
6320 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6321 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6325 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6327 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6328 enum machine_mode insn_mode
6329 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6331 /* If we don't have POINTER_TYPE, call the function. */
6332 if (arg1_align == 0 || arg2_align == 0)
6334 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6335 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6339 /* Make a place to write the result of the instruction. */
6342 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6343 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6344 result = gen_reg_rtx (insn_mode);
6346 emit_insn (gen_cmpstrsi (result,
6347 gen_rtx (MEM, BLKmode,
6348 expand_expr (arg1, NULL_RTX, Pmode,
6350 gen_rtx (MEM, BLKmode,
6351 expand_expr (arg2, NULL_RTX, Pmode,
6353 expand_expr (len, NULL_RTX, VOIDmode, 0),
6354 GEN_INT (MIN (arg1_align, arg2_align))));
6356 /* Return the value in the proper mode for this function. */
6357 mode = TYPE_MODE (TREE_TYPE (exp));
6358 if (GET_MODE (result) == mode)
6360 else if (target != 0)
6362 convert_move (target, result, 0);
6366 return convert_to_mode (mode, result, 0);
6369 case BUILT_IN_STRCMP:
6370 case BUILT_IN_MEMCMP:
6374 default: /* Just do a library call if the builtin is unknown. */
6375 error ("built-in function `%s' not currently supported",
6376 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6379 /* The switch statement above can drop through to cause the function
6380 to be called normally. */
6382 return expand_call (exp, target, ignore);
6385 /* Built-in functions to perform an untyped call and return. */
6387 /* For each register that may be used for calling a function, this
6388 gives a mode used to copy the register's value. VOIDmode indicates
6389 the register is not used for calling a function. If the machine
6390 has register windows, this gives only the outbound registers.
6391 INCOMING_REGNO gives the corresponding inbound register. */
6392 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6394 /* For each register that may be used for returning values, this gives
6395 a mode used to copy the register's value. VOIDmode indicates the
6396 register is not used for returning values. If the machine has
6397 register windows, this gives only the outbound registers.
6398 INCOMING_REGNO gives the corresponding inbound register. */
6399 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6401 /* Return the size required for the block returned by __builtin_apply_args,
6402 and initialize apply_args_mode. */
6406 static int size = -1;
6408 enum machine_mode mode;
6410 /* The values computed by this function never change. */
6413 /* The first value is the incoming arg-pointer. */
6414 size = GET_MODE_SIZE (Pmode);
6416 /* The second value is the structure value address unless this is
6417 passed as an "invisible" first argument. */
6418 if (struct_value_rtx)
6419 size += GET_MODE_SIZE (Pmode);
6421 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6422 if (FUNCTION_ARG_REGNO_P (regno))
6424 /* Search for the proper mode for copying this register's
6425 value. I'm not sure this is right, but it works so far. */
6426 enum machine_mode best_mode = VOIDmode;
6428 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6430 mode = GET_MODE_WIDER_MODE (mode))
6431 if (HARD_REGNO_MODE_OK (regno, mode)
6432 && HARD_REGNO_NREGS (regno, mode) == 1)
6435 if (best_mode == VOIDmode)
6436 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6438 mode = GET_MODE_WIDER_MODE (mode))
6439 if (HARD_REGNO_MODE_OK (regno, mode)
6440 && (mov_optab->handlers[(int) mode].insn_code
6441 != CODE_FOR_nothing))
6445 if (mode == VOIDmode)
6448 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6449 if (size % align != 0)
6450 size = CEIL (size, align) * align;
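/* Worked example of the rounding above: with size = 6 and align = 4,
   CEIL (6, 4) * 4 = 2 * 4 = 8, so this register's slot starts at the
   next 4-byte boundary.  */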
6451 size += GET_MODE_SIZE (mode);
6452 apply_args_mode[regno] = mode;
6455 apply_args_mode[regno] = VOIDmode;
6460 /* Return the size required for the block returned by __builtin_apply,
6461 and initialize apply_result_mode. */
6463 apply_result_size ()
6465 static int size = -1;
6467 enum machine_mode mode;
6469 /* The values computed by this function never change. */
6474 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6475 if (FUNCTION_VALUE_REGNO_P (regno))
6477 /* Search for the proper mode for copying this register's
6478 value. I'm not sure this is right, but it works so far. */
6479 enum machine_mode best_mode = VOIDmode;
6481 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6483 mode = GET_MODE_WIDER_MODE (mode))
6484 if (HARD_REGNO_MODE_OK (regno, mode))
6487 if (best_mode == VOIDmode)
6488 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6490 mode = GET_MODE_WIDER_MODE (mode))
6491 if (HARD_REGNO_MODE_OK (regno, mode)
6492 && (mov_optab->handlers[(int) mode].insn_code
6493 != CODE_FOR_nothing))
6497 if (mode == VOIDmode)
6500 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6501 if (size % align != 0)
6502 size = CEIL (size, align) * align;
6503 size += GET_MODE_SIZE (mode);
6504 apply_result_mode[regno] = mode;
6507 apply_result_mode[regno] = VOIDmode;
6509 /* Allow targets that use untyped_call and untyped_return to override
6510 the size so that machine-specific information can be stored here. */
6511 #ifdef APPLY_RESULT_SIZE
6512 size = APPLY_RESULT_SIZE;
6518 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6519 /* Create a vector describing the result block RESULT. If SAVEP is true,
6520 the result block is used to save the values; otherwise it is used to
6521 restore the values. */
6523 result_vector (savep, result)
6527 int regno, size, align, nelts;
6528 enum machine_mode mode;
6530 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6533 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6534 if ((mode = apply_result_mode[regno]) != VOIDmode)
6536 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6537 if (size % align != 0)
6538 size = CEIL (size, align) * align;
6539 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6540 mem = change_address (result, mode,
6541 plus_constant (XEXP (result, 0), size));
6542 savevec[nelts++] = (savep
6543 ? gen_rtx (SET, VOIDmode, mem, reg)
6544 : gen_rtx (SET, VOIDmode, reg, mem));
6545 size += GET_MODE_SIZE (mode);
6547 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
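/* Hypothetical shape of the vector built here, for a machine whose
   only value register were reg 0 in SImode:

	(parallel [(set (mem:SI <result + 0>) (reg:SI 0))])

   one SET per live apply_result_mode entry: memory from register when
   SAVEP is nonzero (saving), register from memory when it is zero.  */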
6549 #endif /* HAVE_untyped_call or HAVE_untyped_return */
6552 /* Save the state required to perform an untyped call with the same
6553 arguments as were passed to the current function. */
6555 expand_builtin_apply_args ()
6558 int size, align, regno;
6559 enum machine_mode mode;
6561 /* Create a block where the arg-pointer, structure value address,
6562 and argument registers can be saved. */
6563 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6565 /* Walk past the arg-pointer and structure value address. */
6566 size = GET_MODE_SIZE (Pmode);
6567 if (struct_value_rtx)
6568 size += GET_MODE_SIZE (Pmode);
6570 /* Save each register used in calling a function to the block. */
6571 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6572 if ((mode = apply_args_mode[regno]) != VOIDmode)
6574 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6575 if (size % align != 0)
6576 size = CEIL (size, align) * align;
6577 emit_move_insn (change_address (registers, mode,
6578 plus_constant (XEXP (registers, 0),
6580 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6581 size += GET_MODE_SIZE (mode);
6584 /* Save the arg pointer to the block. */
6585 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6586 copy_to_reg (virtual_incoming_args_rtx));
6587 size = GET_MODE_SIZE (Pmode);
6589 /* Save the structure value address unless this is passed as an
6590 "invisible" first argument. */
6591 if (struct_value_incoming_rtx)
6593 emit_move_insn (change_address (registers, Pmode,
6594 plus_constant (XEXP (registers, 0),
6596 copy_to_reg (struct_value_incoming_rtx));
6597 size += GET_MODE_SIZE (Pmode);
6600 /* Return the address of the block. */
6601 return copy_addr_to_reg (XEXP (registers, 0));
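/* Sketch of the block laid out above (offsets illustrative; they
   depend on Pmode and on each register's mode and alignment):

	offset 0:   the incoming arg pointer
	next:       the structure value address, if struct_value_rtx
	then:       one slot per FUNCTION_ARG_REGNO_P register, each
		    rounded up to its mode's alignment

   apply_args_size computes the total size by the same walk.  */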
6604 /* Perform an untyped call and save the state required to perform an
6605 untyped return of whatever value was returned by the given function. */
6607 expand_builtin_apply (function, arguments, argsize)
6608 rtx function, arguments, argsize;
6610 int size, align, regno;
6611 enum machine_mode mode;
6612 rtx incoming_args, result, reg, dest, call_insn;
6613 rtx old_stack_level = 0;
6616 /* Create a block where the return registers can be saved. */
6617 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6619 /* ??? The argsize value should be adjusted here. */
6621 /* Fetch the arg pointer from the ARGUMENTS block. */
6622 incoming_args = gen_reg_rtx (Pmode);
6623 emit_move_insn (incoming_args,
6624 gen_rtx (MEM, Pmode, arguments));
6625 #ifndef STACK_GROWS_DOWNWARD
6626 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6627 incoming_args, 0, OPTAB_LIB_WIDEN);
6630 /* Perform postincrements before actually calling the function. */
6633 /* Push a new argument block and copy the arguments. */
6634 do_pending_stack_adjust ();
6635 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6637 /* Push a block of memory onto the stack to store the memory arguments.
6638 Save the address in a register, and copy the memory arguments. ??? I
6639 haven't figured out how the calling convention macros affect this,
6640 but it's likely that the source and/or destination addresses in
6641 the block copy will need updating in machine specific ways. */
6642 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6643 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6644 gen_rtx (MEM, BLKmode, incoming_args),
6646 PARM_BOUNDARY / BITS_PER_UNIT);
6648 /* Refer to the argument block. */
6650 arguments = gen_rtx (MEM, BLKmode, arguments);
6652 /* Walk past the arg-pointer and structure value address. */
6653 size = GET_MODE_SIZE (Pmode);
6654 if (struct_value_rtx)
6655 size += GET_MODE_SIZE (Pmode);
6657 /* Restore each of the registers previously saved. Make USE insns
6658 for each of these registers for use in making the call. */
6659 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6660 if ((mode = apply_args_mode[regno]) != VOIDmode)
6662 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6663 if (size % align != 0)
6664 size = CEIL (size, align) * align;
6665 reg = gen_rtx (REG, mode, regno);
6666 emit_move_insn (reg,
6667 change_address (arguments, mode,
6668 plus_constant (XEXP (arguments, 0),
6671 push_to_sequence (use_insns);
6672 emit_insn (gen_rtx (USE, VOIDmode, reg));
6673 use_insns = get_insns ();
6675 size += GET_MODE_SIZE (mode);
6678 /* Restore the structure value address unless this is passed as an
6679 "invisible" first argument. */
6680 size = GET_MODE_SIZE (Pmode);
6681 if (struct_value_rtx)
6683 rtx value = gen_reg_rtx (Pmode);
6684 emit_move_insn (value,
6685 change_address (arguments, Pmode,
6686 plus_constant (XEXP (arguments, 0),
6688 emit_move_insn (struct_value_rtx, value);
6689 if (GET_CODE (struct_value_rtx) == REG)
6691 push_to_sequence (use_insns);
6692 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6693 use_insns = get_insns ();
6696 size += GET_MODE_SIZE (Pmode);
6699 /* All arguments and registers used for the call are set up by now! */
6700 function = prepare_call_address (function, NULL_TREE, &use_insns);
6702 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there is no need,
6703 and we don't want to load it into a register as an optimization,
6704 because prepare_call_address already did it if it should be done. */
6705 if (GET_CODE (function) != SYMBOL_REF)
6706 function = memory_address (FUNCTION_MODE, function);
6708 /* Generate the actual call instruction and save the return value. */
6709 #ifdef HAVE_untyped_call
6710 if (HAVE_untyped_call)
6711 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6712 result, result_vector (1, result)));
6715 #ifdef HAVE_call_value
6716 if (HAVE_call_value)
6720 /* Locate the unique return register. It is not possible to
6721 express a call that sets more than one return register using
6722 call_value; use untyped_call for that. In fact, untyped_call
6723 only needs to save the return registers in the given block. */
6724 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6725 if ((mode = apply_result_mode[regno]) != VOIDmode)
6728 abort (); /* HAVE_untyped_call required. */
6729 valreg = gen_rtx (REG, mode, regno);
6732 emit_call_insn (gen_call_value (valreg,
6733 gen_rtx (MEM, FUNCTION_MODE, function),
6734 const0_rtx, NULL_RTX, const0_rtx));
6736 emit_move_insn (change_address (result, GET_MODE (valreg),
6744 /* Find the CALL insn we just emitted and write the USE insns before it. */
6745 for (call_insn = get_last_insn ();
6746 call_insn && GET_CODE (call_insn) != CALL_INSN;
6747 call_insn = PREV_INSN (call_insn))
6753 /* Put the USE insns before the CALL. */
6754 emit_insns_before (use_insns, call_insn);
6756 /* Restore the stack. */
6757 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6759 /* Return the address of the result block. */
6760 return copy_addr_to_reg (XEXP (result, 0));
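/* The three builtins compose into the classic forwarding idiom; a
   user-level sketch (TARGET_FN and the 64-byte bound are hypothetical):

	void *args = __builtin_apply_args ();
	void *res = __builtin_apply ((void (*) ()) TARGET_FN, args, 64);
	__builtin_return (res);

   where the last argument is an upper bound on the bytes of pushed
   arguments; see the ??? note above about adjusting argsize.  */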
6763 /* Perform an untyped return. */
6765 expand_builtin_return (result)
6768 int size, align, regno;
6769 enum machine_mode mode;
6773 apply_result_size ();
6774 result = gen_rtx (MEM, BLKmode, result);
6776 #ifdef HAVE_untyped_return
6777 if (HAVE_untyped_return)
6779 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6785 /* Restore the return value and note that each value is used. */
6787 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6788 if ((mode = apply_result_mode[regno]) != VOIDmode)
6790 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6791 if (size % align != 0)
6792 size = CEIL (size, align) * align;
6793 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6794 emit_move_insn (reg,
6795 change_address (result, mode,
6796 plus_constant (XEXP (result, 0),
6799 push_to_sequence (use_insns);
6800 emit_insn (gen_rtx (USE, VOIDmode, reg));
6801 use_insns = get_insns ();
6803 size += GET_MODE_SIZE (mode);
6806 /* Put the USE insns before the return. */
6807 emit_insns (use_insns);
6809 /* Return whatever values were restored by jumping directly to the end of the function. */
6811 expand_null_return ();
6814 /* Expand code for a post- or pre- increment or decrement
6815 and return the RTX for the result.
6816 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
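/* E.g. for `b = a++;' (POST is 1) the RTX returned must be the old
   value of `a', although `a + 1' is still stored back; for `b = ++a;'
   (POST is 0) the incremented value itself is returned.  */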
6819 expand_increment (exp, post)
6823 register rtx op0, op1;
6824 register rtx temp, value;
6825 register tree incremented = TREE_OPERAND (exp, 0);
6826 optab this_optab = add_optab;
6828 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6829 int op0_is_copy = 0;
6831 /* Stabilize any component ref that might need to be
6832 evaluated more than once below. */
6834 || TREE_CODE (incremented) == BIT_FIELD_REF
6835 || (TREE_CODE (incremented) == COMPONENT_REF
6836 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6837 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6838 incremented = stabilize_reference (incremented);
6840 /* Compute the operands as RTX.
6841 Note whether OP0 is the actual lvalue or a copy of it:
6842 I believe it is a copy iff it is a register or subreg
6843 and insns were generated in computing it. */
6845 temp = get_last_insn ();
6846 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6848 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6849 in place but instead must do sign- or zero-extension during assignment,
6850 so we copy it into a new register and let the code below use it as a copy.
6853 Note that we can safely modify this SUBREG since it is known not to be
6854 shared (it was made by the expand_expr call above). */
6856 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6857 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6859 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6860 && temp != get_last_insn ());
6861 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6863 /* Decide whether incrementing or decrementing. */
6864 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6865 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6866 this_optab = sub_optab;
6868 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6869 then we cannot just increment OP0. We must therefore contrive to
6870 increment the original value. Then, for postincrement, we can return
6871 OP0 since it is a copy of the old value. For preincrement, we want
6872 to always expand here, since this generates better or equivalent code. */
6873 if (!post || op0_is_copy)
6875 /* This is the easiest way to increment the value wherever it is.
6876 Problems with multiple evaluation of INCREMENTED are prevented
6877 because either (1) it is a component_ref or preincrement,
6878 in which case it was stabilized above, or (2) it is an array_ref
6879 with constant index in an array in a register, which is
6880 safe to reevaluate. */
6881 tree newexp = build ((this_optab == add_optab
6882 ? PLUS_EXPR : MINUS_EXPR),
6885 TREE_OPERAND (exp, 1));
6886 temp = expand_assignment (incremented, newexp, ! post, 0);
6887 return post ? op0 : temp;
6890 /* Convert decrement by a constant into a negative increment. */
6891 if (this_optab == sub_optab
6892 && GET_CODE (op1) == CONST_INT)
6894 op1 = GEN_INT (- INTVAL (op1));
6895 this_optab = add_optab;
6900 /* We have a true reference to the value in OP0.
6901 If there is an insn to add or subtract in this mode, queue it. */
6903 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6904 op0 = stabilize (op0);
6907 icode = (int) this_optab->handlers[(int) mode].insn_code;
6908 if (icode != (int) CODE_FOR_nothing
6909 /* Make sure that OP0 is valid for operands 0 and 1
6910 of the insn we want to queue. */
6911 && (*insn_operand_predicate[icode][0]) (op0, mode)
6912 && (*insn_operand_predicate[icode][1]) (op0, mode))
6914 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6915 op1 = force_reg (mode, op1);
6917 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6921 /* Preincrement, or we can't increment with one simple insn. */
6923 /* Save a copy of the value before inc or dec, to return it later. */
6924 temp = value = copy_to_reg (op0);
6926 /* Arrange to return the incremented value. */
6927 /* Copy the rtx because expand_binop will protect from the queue,
6928 and the results of that would be invalid for us to return
6929 if our caller does emit_queue before using our result. */
6930 temp = copy_rtx (value = op0);
6932 /* Increment however we can. */
6933 op1 = expand_binop (mode, this_optab, value, op1, op0,
6934 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6935 /* Make sure the value is stored into OP0. */
6937 emit_move_insn (op0, op1);
6942 /* Expand all function calls contained within EXP, innermost ones first.
6943 But don't look within expressions that have sequence points.
6944 For each CALL_EXPR, record the rtx for its value
6945 in the CALL_EXPR_RTL field. */
6948 preexpand_calls (exp)
6951 register int nops, i;
6952 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6954 if (! do_preexpand_calls)
6957 /* Only expressions and references can contain calls. */
6959 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6962 switch (TREE_CODE (exp))
6965 /* Do nothing if already expanded. */
6966 if (CALL_EXPR_RTL (exp) != 0)
6969 /* Do nothing to built-in functions. */
6970 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6971 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6972 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6973 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6978 case TRUTH_ANDIF_EXPR:
6979 case TRUTH_ORIF_EXPR:
6980 /* If we find one of these, then we can be sure
6981 the stack adjustment will be done for it (since it makes jumps).
6982 Do it now, so that if this is inside an argument
6983 of a function, we don't get the stack adjustment
6984 after some other args have already been pushed. */
6985 do_pending_stack_adjust ();
6990 case WITH_CLEANUP_EXPR:
6994 if (SAVE_EXPR_RTL (exp) != 0)
6998 nops = tree_code_length[(int) TREE_CODE (exp)];
6999 for (i = 0; i < nops; i++)
7000 if (TREE_OPERAND (exp, i) != 0)
7002 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
7003 if (type == 'e' || type == '<' || type == '1' || type == '2'
7005 preexpand_calls (TREE_OPERAND (exp, i));
7009 /* At the start of a function, record that we have no previously-pushed
7010 arguments waiting to be popped. */
7013 init_pending_stack_adjust ()
7015 pending_stack_adjust = 0;
7018 /* When exiting from a function, if safe, clear out any pending stack adjust
7019 so the adjustment won't get done. */
7022 clear_pending_stack_adjust ()
7024 #ifdef EXIT_IGNORE_STACK
7025 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
7026 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
7027 && ! flag_inline_functions)
7028 pending_stack_adjust = 0;
7032 /* Pop any previously-pushed arguments that have not been popped yet. */
7035 do_pending_stack_adjust ()
7037 if (inhibit_defer_pop == 0)
7039 if (pending_stack_adjust != 0)
7040 adjust_stack (GEN_INT (pending_stack_adjust));
7041 pending_stack_adjust = 0;
7045 /* Expand all cleanups up to OLD_CLEANUPS.
7046 Needed here, and also for language-dependent calls. */
7049 expand_cleanups_to (old_cleanups)
7052 while (cleanups_this_call != old_cleanups)
7054 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
7055 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
7059 /* Expand conditional expressions. */
7061 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
7062 LABEL is an rtx of code CODE_LABEL, in this function and all the
7066 jumpifnot (exp, label)
7070 do_jump (exp, label, NULL_RTX);
7073 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7080 do_jump (exp, NULL_RTX, label);
7083 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7084 the result is zero, or IF_TRUE_LABEL if the result is one.
7085 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7086 meaning fall through in that case.
7088 do_jump always does any pending stack adjust except when it does not
7089 actually perform a jump. An example where there is no jump
7090 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7092 This function is responsible for optimizing cases such as
7093 &&, || and comparison operators in EXP. */
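/* E.g. jumpifnot on `a && b' comes out as (control-flow sketch):

	jump to LABEL if a == 0;
	jump to LABEL if b == 0;

   with no boolean value ever materialized; the TRUTH_ANDIF_EXPR case
   below threads IF_FALSE_LABEL through both operands to get this.  */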
7096 do_jump (exp, if_false_label, if_true_label)
7098 rtx if_false_label, if_true_label;
7100 register enum tree_code code = TREE_CODE (exp);
7101 /* Some cases need to create a label to jump to
7102 in order to properly fall through.
7103 These cases set DROP_THROUGH_LABEL nonzero. */
7104 rtx drop_through_label = 0;
7118 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7124 /* This is not true with #pragma weak */
7126 /* The address of something can never be zero. */
7128 emit_jump (if_true_label);
7133 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7134 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7135 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7138 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
7140 if ((TYPE_PRECISION (TREE_TYPE (exp))
7141 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7143 case NON_LVALUE_EXPR:
7144 case REFERENCE_EXPR:
7149 /* These cannot change zero->non-zero or vice versa. */
7150 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7154 /* This is never less insns than evaluating the PLUS_EXPR followed by
7155 a test and can be longer if the test is eliminated. */
7157 /* Reduce to minus. */
7158 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7159 TREE_OPERAND (exp, 0),
7160 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7161 TREE_OPERAND (exp, 1))));
7162 /* Process as MINUS. */
7166 /* Non-zero iff operands of minus differ. */
7167 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7168 TREE_OPERAND (exp, 0),
7169 TREE_OPERAND (exp, 1)),
7174 /* If we are AND'ing with a small constant, do this comparison in the
7175 smallest type that fits. If the machine doesn't have comparisons
7176 that small, it will be converted back to the wider comparison.
7177 This helps if we are testing the sign bit of a narrower object.
7178 combine can't do this for us because it can't know whether a
7179 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7181 if (! SLOW_BYTE_ACCESS
7182 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7183 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7184 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7185 && (type = type_for_size (i + 1, 1)) != 0
7186 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7187 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7188 != CODE_FOR_nothing))
7190 do_jump (convert (type, exp), if_false_label, if_true_label);
7195 case TRUTH_NOT_EXPR:
7196 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7199 case TRUTH_ANDIF_EXPR:
7200 if (if_false_label == 0)
7201 if_false_label = drop_through_label = gen_label_rtx ();
7202 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7203 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7206 case TRUTH_ORIF_EXPR:
7207 if (if_true_label == 0)
7208 if_true_label = drop_through_label = gen_label_rtx ();
7209 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7210 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7214 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7217 do_pending_stack_adjust ();
7218 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7225 int bitsize, bitpos, unsignedp;
7226 enum machine_mode mode;
7231 /* Get description of this reference. We don't actually care
7232 about the underlying object here. */
7233 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7234 &mode, &unsignedp, &volatilep);
7236 type = type_for_size (bitsize, unsignedp);
7237 if (! SLOW_BYTE_ACCESS
7238 && type != 0 && bitsize >= 0
7239 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7240 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7241 != CODE_FOR_nothing))
7243 do_jump (convert (type, exp), if_false_label, if_true_label);
7250 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7251 if (integer_onep (TREE_OPERAND (exp, 1))
7252 && integer_zerop (TREE_OPERAND (exp, 2)))
7253 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7255 else if (integer_zerop (TREE_OPERAND (exp, 1))
7256 && integer_onep (TREE_OPERAND (exp, 2)))
7257 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7261 register rtx label1 = gen_label_rtx ();
7262 drop_through_label = gen_label_rtx ();
7263 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7264 /* Now the THEN-expression. */
7265 do_jump (TREE_OPERAND (exp, 1),
7266 if_false_label ? if_false_label : drop_through_label,
7267 if_true_label ? if_true_label : drop_through_label);
7268 /* In case the do_jump just above never jumps. */
7269 do_pending_stack_adjust ();
7270 emit_label (label1);
7271 /* Now the ELSE-expression. */
7272 do_jump (TREE_OPERAND (exp, 2),
7273 if_false_label ? if_false_label : drop_through_label,
7274 if_true_label ? if_true_label : drop_through_label);
7279 if (integer_zerop (TREE_OPERAND (exp, 1)))
7280 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7281 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7284 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7285 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7287 comparison = compare (exp, EQ, EQ);
7291 if (integer_zerop (TREE_OPERAND (exp, 1)))
7292 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7293 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7296 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7297 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7299 comparison = compare (exp, NE, NE);
7303 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7305 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7306 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7308 comparison = compare (exp, LT, LTU);
7312 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7314 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7315 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7317 comparison = compare (exp, LE, LEU);
7321 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7323 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7324 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7326 comparison = compare (exp, GT, GTU);
7330 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7332 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7333 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7335 comparison = compare (exp, GE, GEU);
7340 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7342 /* This is not needed any more and causes poor code since it causes
7343 comparisons and tests from non-SI objects to have different code. */
7345 /* Copy to register to avoid generating bad insns by cse
7346 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7347 if (!cse_not_expected && GET_CODE (temp) == MEM)
7348 temp = copy_to_reg (temp);
7350 do_pending_stack_adjust ();
7351 if (GET_CODE (temp) == CONST_INT)
7352 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7353 else if (GET_CODE (temp) == LABEL_REF)
7354 comparison = const_true_rtx;
7355 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7356 && !can_compare_p (GET_MODE (temp)))
7357 /* Note swapping the labels gives us not-equal. */
7358 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7359 else if (GET_MODE (temp) != VOIDmode)
7360 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7361 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7362 GET_MODE (temp), NULL_RTX, 0);
7367 /* Do any postincrements in the expression that was tested. */
7370 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7371 straight into a conditional jump instruction as the jump condition.
7372 Otherwise, all the work has been done already. */
7374 if (comparison == const_true_rtx)
7377 emit_jump (if_true_label);
7379 else if (comparison == const0_rtx)
7382 emit_jump (if_false_label);
7384 else if (comparison)
7385 do_jump_for_compare (comparison, if_false_label, if_true_label);
7389 if (drop_through_label)
7391 /* If do_jump produces code that might be jumped around,
7392 do any stack adjusts from that code, before the place
7393 where control merges in. */
7394 do_pending_stack_adjust ();
7395 emit_label (drop_through_label);
7399 /* Given a comparison expression EXP for values too wide to be compared
7400 with one insn, test the comparison and jump to the appropriate label.
7401 The code of EXP is ignored; we always test GT if SWAP is 0,
7402 and LT if SWAP is 1. */
7405 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7408 rtx if_false_label, if_true_label;
7410 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7411 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7412 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7413 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7414 rtx drop_through_label = 0;
7415 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7418 if (! if_true_label || ! if_false_label)
7419 drop_through_label = gen_label_rtx ();
7420 if (! if_true_label)
7421 if_true_label = drop_through_label;
7422 if (! if_false_label)
7423 if_false_label = drop_through_label;
7425 /* Compare a word at a time, high order first. */
7426 for (i = 0; i < nwords; i++)
7429 rtx op0_word, op1_word;
7431 if (WORDS_BIG_ENDIAN)
7433 op0_word = operand_subword_force (op0, i, mode);
7434 op1_word = operand_subword_force (op1, i, mode);
7438 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7439 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7442 /* All but the high-order word must be compared as unsigned. */
7443 comp = compare_from_rtx (op0_word, op1_word,
7444 (unsignedp || i > 0) ? GTU : GT,
7445 unsignedp, word_mode, NULL_RTX, 0);
7446 if (comp == const_true_rtx)
7447 emit_jump (if_true_label);
7448 else if (comp != const0_rtx)
7449 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7451 /* Consider lower words only if these are equal. */
7452 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7454 if (comp == const_true_rtx)
7455 emit_jump (if_false_label);
7456 else if (comp != const0_rtx)
7457 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7461 emit_jump (if_false_label);
7462 if (drop_through_label)
7463 emit_label (drop_through_label);
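/* The loop above emits, in effect, this word-at-a-time test
   (illustrative C for a two-word comparison, high-order word first):

	if (hi0 > hi1) goto if_true;
	if (hi0 != hi1) goto if_false;
	if (lo0 > lo1) goto if_true;	(unsigned compare)
	goto if_false;

   only the high-order comparison honors the sign; every lower word
   is compared unsigned.  */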
7466 /* Compare OP0 with OP1, word at a time, in mode MODE.
7467 UNSIGNEDP says to do unsigned comparison.
7468 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7471 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7472 enum machine_mode mode;
7475 rtx if_false_label, if_true_label;
7477 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7478 rtx drop_through_label = 0;
7481 if (! if_true_label || ! if_false_label)
7482 drop_through_label = gen_label_rtx ();
7483 if (! if_true_label)
7484 if_true_label = drop_through_label;
7485 if (! if_false_label)
7486 if_false_label = drop_through_label;
7488 /* Compare a word at a time, high order first. */
7489 for (i = 0; i < nwords; i++)
7492 rtx op0_word, op1_word;
7494 if (WORDS_BIG_ENDIAN)
7496 op0_word = operand_subword_force (op0, i, mode);
7497 op1_word = operand_subword_force (op1, i, mode);
7501 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7502 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7505 /* All but the high-order word must be compared as unsigned. */
7506 comp = compare_from_rtx (op0_word, op1_word,
7507 (unsignedp || i > 0) ? GTU : GT,
7508 unsignedp, word_mode, NULL_RTX, 0);
7509 if (comp == const_true_rtx)
7510 emit_jump (if_true_label);
7511 else if (comp != const0_rtx)
7512 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7514 /* Consider lower words only if these are equal. */
7515 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7517 if (comp == const_true_rtx)
7518 emit_jump (if_false_label);
7519 else if (comp != const0_rtx)
7520 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7524 emit_jump (if_false_label);
7525 if (drop_through_label)
7526 emit_label (drop_through_label);
7529 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7530 with one insn, test the comparison and jump to the appropriate label. */
7533 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7535 rtx if_false_label, if_true_label;
7537 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7538 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7539 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7540 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7542 rtx drop_through_label = 0;
7544 if (! if_false_label)
7545 drop_through_label = if_false_label = gen_label_rtx ();
7547 for (i = 0; i < nwords; i++)
7549 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7550 operand_subword_force (op1, i, mode),
7551 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7552 word_mode, NULL_RTX, 0);
7553 if (comp == const_true_rtx)
7554 emit_jump (if_false_label);
7555 else if (comp != const0_rtx)
7556 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7560 emit_jump (if_true_label);
7561 if (drop_through_label)
7562 emit_label (drop_through_label);
7565 /* Jump according to whether OP0 is 0.
7566 We assume that OP0 has an integer mode that is too wide
7567 for the available compare insns. */
7570 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7572 rtx if_false_label, if_true_label;
7574 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7576 rtx drop_through_label = 0;
7578 if (! if_false_label)
7579 drop_through_label = if_false_label = gen_label_rtx ();
7581 for (i = 0; i < nwords; i++)
7583 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7585 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7586 if (comp == const_true_rtx)
7587 emit_jump (if_false_label);
7588 else if (comp != const0_rtx)
7589 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7593 emit_jump (if_true_label);
7594 if (drop_through_label)
7595 emit_label (drop_through_label);
7598 /* Given a comparison expression in rtl form, output conditional branches to
7599 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7602 do_jump_for_compare (comparison, if_false_label, if_true_label)
7603 rtx comparison, if_false_label, if_true_label;
7607 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7608 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7613 emit_jump (if_false_label);
7615 else if (if_false_label)
7618 rtx prev = PREV_INSN (get_last_insn ());
7621 /* Output the branch with the opposite condition. Then try to invert
7622 what is generated. If more than one insn is a branch, or if the
7623 branch is not the last insn written, abort. If we can't invert
7624 the branch, make a true label, redirect this jump to it,
7625 emit a jump to the false label and define the true label. */
7627 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7628 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7632 /* Here we get the insn before what was just emitted.
7633 On some machines, emitting the branch can discard
7634 the previous compare insn and emit a replacement. */
7636 /* If there's only one preceding insn... */
7637 insn = get_insns ();
7639 insn = NEXT_INSN (prev);
7641 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7642 if (GET_CODE (insn) == JUMP_INSN)
7649 if (branch != get_last_insn ())
7652 if (! invert_jump (branch, if_false_label))
7654 if_true_label = gen_label_rtx ();
7655 redirect_jump (branch, if_true_label);
7656 emit_jump (if_false_label);
7657 emit_label (if_true_label);
7662 /* Generate code for a comparison expression EXP
7663 (including code to compute the values to be compared)
7664 and set (CC0) according to the result.
7665 SIGNED_CODE should be the rtx operation for this comparison for
7666 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7668 We force a stack adjustment unless there are currently
7669 things pushed on the stack that aren't yet used. */
7672 compare (exp, signed_code, unsigned_code)
7674 enum rtx_code signed_code, unsigned_code;
7677 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7679 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7680 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7681 register enum machine_mode mode = TYPE_MODE (type);
7682 int unsignedp = TREE_UNSIGNED (type);
7683 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7685 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7687 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7688 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7691 /* Like compare but expects the values to compare as two rtx's.
7692 The decision as to signed or unsigned comparison must be made by the caller.
7694 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.
7697 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7698 size of MODE should be used. */
7701 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7702 register rtx op0, op1;
7705 enum machine_mode mode;
7711 /* If one operand is constant, make it the second one. Only do this
7712 if the other operand is not constant as well. */
7714 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7715 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7720 code = swap_condition (code);
7725 op0 = force_not_mem (op0);
7726 op1 = force_not_mem (op1);
7729 do_pending_stack_adjust ();
7731 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7732 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7736 /* There's no need to do this now that combine.c can eliminate lots of
7737 sign extensions. This can be less efficient in certain cases on other machines. */
7740 /* If this is a signed equality comparison, we can do it as an
7741 unsigned comparison since zero-extension is cheaper than sign
7742 extension and comparisons with zero are done as unsigned. This is
7743 the case even on machines that can do fast sign extension, since
7744 zero-extension is easier to combine with other operations than
7745 sign-extension is. If we are comparing against a constant, we must
7746 convert it to what it would look like unsigned. */
7747 if ((code == EQ || code == NE) && ! unsignedp
7748 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7750 if (GET_CODE (op1) == CONST_INT
7751 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7752 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7757 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7759 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7762 /* Generate code to calculate EXP using a store-flag instruction
7763 and return an rtx for the result. EXP is either a comparison
7764 or a TRUTH_NOT_EXPR whose operand is a comparison.
7766 If TARGET is nonzero, store the result there if convenient.
7768 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
7771 Return zero if there is no suitable set-flag instruction
7772 available on this machine.
7774 Once expand_expr has been called on the arguments of the comparison,
7775 we are committed to doing the store flag, since it is not safe to
7776 re-evaluate the expression. We emit the store-flag insn by calling
7777 emit_store_flag, but only expand the arguments if we have a reason
7778 to believe that emit_store_flag will be successful. If we think that
7779 it will, but it isn't, we have to simulate the store-flag with a
7780 set/jump/set sequence. */
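/* The set/jump/set fallback mentioned above has this shape (sketch):

	target = 1;			(0 when inverting)
	compare op0, op1;
	branch on CODE to label;
	target = 0;			(1 when inverting)
     label:

   which is what the tail of this function emits when emit_store_flag
   cannot do the job directly.  */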
7783 do_store_flag (exp, target, mode, only_cheap)
7786 enum machine_mode mode;
7790 tree arg0, arg1, type;
7792 enum machine_mode operand_mode;
7796 enum insn_code icode;
7797 rtx subtarget = target;
7798 rtx result, label, pattern, jump_pat;
7800 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7801 result at the end. We can't simply invert the test since it would
7802 have already been inverted if it were valid. This case occurs for
7803 some floating-point comparisons. */
7805 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7806 invert = 1, exp = TREE_OPERAND (exp, 0);
7808 arg0 = TREE_OPERAND (exp, 0);
7809 arg1 = TREE_OPERAND (exp, 1);
7810 type = TREE_TYPE (arg0);
7811 operand_mode = TYPE_MODE (type);
7812 unsignedp = TREE_UNSIGNED (type);
7814 /* We won't bother with BLKmode store-flag operations because it would mean
7815 passing a lot of information to emit_store_flag. */
7816 if (operand_mode == BLKmode)
7822 /* Get the rtx comparison code to use. We know that EXP is a comparison
7823 operation of some type. Some comparisons against 1 and -1 can be
7824 converted to comparisons with zero. Do so here so that the tests
7825 below will be aware that we have a comparison with zero. These
7826 tests will not catch constants in the first operand, but constants
7827 are rarely passed as the first operand. */
7829 switch (TREE_CODE (exp))
7838 if (integer_onep (arg1))
7839 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7841 code = unsignedp ? LTU : LT;
7844 if (! unsignedp && integer_all_onesp (arg1))
7845 arg1 = integer_zero_node, code = LT;
7847 code = unsignedp ? LEU : LE;
7850 if (! unsignedp && integer_all_onesp (arg1))
7851 arg1 = integer_zero_node, code = GE;
7853 code = unsignedp ? GTU : GT;
7856 if (integer_onep (arg1))
7857 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7859 code = unsignedp ? GEU : GE;
7865 /* Put a constant second. */
7866 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7868 tem = arg0; arg0 = arg1; arg1 = tem;
7869 code = swap_condition (code);
7872 /* If this is an equality or inequality test of a single bit, we can
7873 do this by shifting the bit being tested to the low-order bit and
7874 masking the result with the constant 1. If the condition was EQ,
7875 we xor it with 1. This does not require an scc insn and is faster
7876 than an scc insn even if we have it. */
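/* Source-level picture of the rewrite (illustration):

	(x & 0x08) != 0    becomes    (x >> 3) & 1
	(x & 0x08) == 0    becomes    ((x >> 3) & 1) ^ 1

   and when the tested bit is the sign bit of its type, shifting
   unsigned makes the final `& 1' unnecessary.  */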
7878 if ((code == NE || code == EQ)
7879 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7880 && integer_pow2p (TREE_OPERAND (arg0, 1))
7881 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7883 tree inner = TREE_OPERAND (arg0, 0);
7884 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7885 NULL_RTX, VOIDmode, 0)));
7888 /* If INNER is a right shift of a constant and it plus BITNUM does
7889 not overflow, adjust BITNUM and INNER. */
7891 if (TREE_CODE (inner) == RSHIFT_EXPR
7892 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7893 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7894 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
7895 < TYPE_PRECISION (type)))
7897 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7898 inner = TREE_OPERAND (inner, 0);
7901 /* If we are going to be able to omit the AND below, we must do our
7902 operations as unsigned. If we must use the AND, we have a choice.
7903 Normally unsigned is faster, but for some machines signed is. */
7904 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
7905 #ifdef BYTE_LOADS_SIGN_EXTEND
7912 if (subtarget == 0 || GET_CODE (subtarget) != REG
7913 || GET_MODE (subtarget) != operand_mode
7914 || ! safe_from_p (subtarget, inner))
7917 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
7920 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7921 size_int (bitnum), target, ops_unsignedp);
7923 if (GET_MODE (op0) != mode)
7924 op0 = convert_to_mode (mode, op0, ops_unsignedp);
7926 if ((code == EQ && ! invert) || (code == NE && invert))
7927 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
7928 ops_unsignedp, OPTAB_LIB_WIDEN);
7930 /* Put the AND last so it can combine with more things. */
7931 if (bitnum != TYPE_PRECISION (type) - 1)
7932 op0 = expand_and (op0, const1_rtx, target);
7937 /* Now see if we are likely to be able to do this. Return if not. */
7938 if (! can_compare_p (operand_mode))
7940 icode = setcc_gen_code[(int) code];
7941 if (icode == CODE_FOR_nothing
7942 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7944 /* We can only do this if it is one of the special cases that
7945 can be handled without an scc insn. */
7946 if ((code == LT && integer_zerop (arg1))
7947 || (! only_cheap && code == GE && integer_zerop (arg1)))
7949 else if (BRANCH_COST >= 0
7950 && ! only_cheap && (code == NE || code == EQ)
7951 && TREE_CODE (type) != REAL_TYPE
7952 && ((abs_optab->handlers[(int) operand_mode].insn_code
7953 != CODE_FOR_nothing)
7954 || (ffs_optab->handlers[(int) operand_mode].insn_code
7955 != CODE_FOR_nothing)))
7961 preexpand_calls (exp);
7962 if (subtarget == 0 || GET_CODE (subtarget) != REG
7963 || GET_MODE (subtarget) != operand_mode
7964 || ! safe_from_p (subtarget, arg1))
7967 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7968 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7971 target = gen_reg_rtx (mode);
7973 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7974 because, if emit_store_flag does anything, it will succeed and
7975 OP0 and OP1 will not be used subsequently. */
7977 result = emit_store_flag (target, code,
7978 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7979 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7980 operand_mode, unsignedp, 1);
7985 result = expand_binop (mode, xor_optab, result, const1_rtx,
7986 result, 0, OPTAB_LIB_WIDEN);
7990 /* If this failed, we have to do this with set/compare/jump/set code. */
7991 if (target == 0 || GET_CODE (target) != REG
7992 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7993 target = gen_reg_rtx (GET_MODE (target));
7995 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7996 result = compare_from_rtx (op0, op1, code, unsignedp,
7997 operand_mode, NULL_RTX, 0);
7998 if (GET_CODE (result) == CONST_INT)
7999 return (((result == const0_rtx && ! invert)
8000 || (result != const0_rtx && invert))
8001 ? const0_rtx : const1_rtx);
8003 label = gen_label_rtx ();
8004 if (bcc_gen_fctn[(int) code] == 0)
8007 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8008 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8014 /* Generate a tablejump instruction (used for switch statements). */
8016 #ifdef HAVE_tablejump
8018 /* INDEX is the value being switched on, with the lowest value
8019 in the table already subtracted.
8020 MODE is its expected mode (needed if INDEX is constant).
8021 RANGE is the length of the jump table.
8022 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8024 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8025 index value is out of range. */
8028 do_tablejump (index, mode, range, table_label, default_label)
8029 rtx index, range, table_label, default_label;
8030 enum machine_mode mode;
8032 register rtx temp, vector;
8034 /* Do an unsigned comparison (in the proper mode) between the index
8035 expression and the value which represents the length of the range.
8036 Since we just finished subtracting the lower bound of the range
8037 from the index expression, this comparison allows us to simultaneously
8038 check that the original index expression value is both greater than
8039 or equal to the minimum value of the range and less than or equal to
8040 the maximum value of the range. */
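/* I.e. the familiar single-compare range check (sketch):

	if ((unsigned) (index - low) > (unsigned) (high - low))
	  goto default_label;

   one unsigned comparison rejects both index < low (which wrapped
   around) and index > high.  */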
8042 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
8043 emit_jump_insn (gen_bltu (default_label));
8045 /* If index is in range, it must fit in Pmode.
8046 Convert to Pmode so we can index with it. */
8048 index = convert_to_mode (Pmode, index, 1);
8050 /* If flag_force_addr were to affect this address
8051 it could interfere with the tricky assumptions made
8052 about addresses that contain label-refs,
8053 which may be valid only very near the tablejump itself. */
8054 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8055 GET_MODE_SIZE, because this indicates how large insns are. The other
8056 uses should all be Pmode, because they are addresses. This code
8057 could fail if addresses and insns are not the same size. */
8058 index = memory_address_noforce
8060 gen_rtx (PLUS, Pmode,
8061 gen_rtx (MULT, Pmode, index,
8062 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8063 gen_rtx (LABEL_REF, Pmode, table_label)));
8064 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8065 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
8066 RTX_UNCHANGING_P (vector) = 1;
8067 convert_move (temp, vector, 0);
8069 emit_jump_insn (gen_tablejump (temp, table_label));
8071 #ifndef CASE_VECTOR_PC_RELATIVE
8072 /* If we are generating PIC code or if the table is PC-relative, the
8073 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8079 #endif /* HAVE_tablejump */