1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
32 #include "typeclass.h"
34 #define CEIL(x,y) (((x) + (y) - 1) / (y))
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
50 #ifndef STACK_PUSH_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_PUSH_CODE PRE_DEC
54 #define STACK_PUSH_CODE PRE_INC
58 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
59 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
61 /* If this is nonzero, we do not bother generating VOLATILE
62 around volatile memory references, and we are willing to
63 output indirect addresses. If cse is to follow, we reject
64 indirect addresses so a useful potential cse is generated;
65 if it is used only once, instruction combination will produce
66 the same indirect address eventually. */
69 /* Nonzero to generate code for all the subroutines within an
70 expression before generating the upper levels of the expression.
71 Nowadays this is never zero. */
72 int do_preexpand_calls = 1;
74 /* Number of units that we should eventually pop off the stack.
75 These are the arguments to function calls that have already returned. */
76 int pending_stack_adjust;
78 /* Nonzero means stack pops must not be deferred, and deferred stack
79 pops must not be output. It is nonzero inside a function call,
80 inside a conditional expression, inside a statement expression,
81 and in other cases as well. */
82 int inhibit_defer_pop;
84 /* A list of all cleanups which belong to the arguments of
85 function calls being expanded by expand_call. */
86 tree cleanups_this_call;
88 /* Nonzero means __builtin_saveregs has already been done in this function.
89 The value is the pseudoreg containing the value __builtin_saveregs
91 static rtx saveregs_value;
93 /* Similarly for __builtin_apply_args. */
94 static rtx apply_args_value;
96 /* This structure is used by move_by_pieces to describe the move to
108 int explicit_inc_from;
114 static rtx enqueue_insn PROTO((rtx, rtx));
115 static int queued_subexp_p PROTO((rtx));
116 static void init_queue PROTO((void));
117 static void move_by_pieces PROTO((rtx, rtx, int, int));
118 static int move_by_pieces_ninsns PROTO((unsigned int, int));
119 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
120 struct move_by_pieces *));
121 static void group_insns PROTO((rtx));
122 static void store_constructor PROTO((tree, rtx));
123 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
124 enum machine_mode, int, int, int));
125 static tree save_noncopied_parts PROTO((tree, tree));
126 static tree init_noncopied_parts PROTO((tree, tree));
127 static int safe_from_p PROTO((rtx, tree));
128 static int fixed_type_p PROTO((tree));
129 static int get_pointer_alignment PROTO((tree, unsigned));
130 static tree string_constant PROTO((tree, tree *));
131 static tree c_strlen PROTO((tree));
132 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
133 static int apply_args_size PROTO((void));
134 static int apply_result_size PROTO((void));
135 static rtx result_vector PROTO((int, rtx));
136 static rtx expand_builtin_apply_args PROTO((void));
137 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
138 static void expand_builtin_return PROTO((rtx));
139 static rtx expand_increment PROTO((tree, int));
140 static void preexpand_calls PROTO((tree));
141 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
142 static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
143 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
144 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
145 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
146 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
147 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
149 /* Record for each mode whether we can move a register directly to or
150 from an object of that mode in memory. If we can't, we won't try
151 to use that mode directly when accessing a field of that mode. */
153 static char direct_load[NUM_MACHINE_MODES];
154 static char direct_store[NUM_MACHINE_MODES];
156 /* MOVE_RATIO is the number of move instructions that is better than
160 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
163 /* A value of around 6 would minimize code size; infinity would minimize
165 #define MOVE_RATIO 15
169 /* This array records the insn_code of insns to perform block moves. */
170 enum insn_code movstr_optab[NUM_MACHINE_MODES];
172 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
174 #ifndef SLOW_UNALIGNED_ACCESS
175 #define SLOW_UNALIGNED_ACCESS 0
178 /* Register mappings for target machines without register windows. */
179 #ifndef INCOMING_REGNO
180 #define INCOMING_REGNO(OUT) (OUT)
182 #ifndef OUTGOING_REGNO
183 #define OUTGOING_REGNO(IN) (IN)
186 /* This is run once per compilation to set up which modes can be used
187 directly in memory and to initialize the block move optab. */
193 enum machine_mode mode;
194 /* Try indexing by frame ptr and try by stack ptr.
195 It is known that on the Convex the stack ptr isn't a valid index.
196 With luck, one or the other is valid on any machine. */
197 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
198 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
201 insn = emit_insn (gen_rtx (SET, 0, 0));
202 pat = PATTERN (insn);
204 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
205 mode = (enum machine_mode) ((int) mode + 1))
211 direct_load[(int) mode] = direct_store[(int) mode] = 0;
212 PUT_MODE (mem, mode);
213 PUT_MODE (mem1, mode);
215 /* See if there is some register that can be used in this mode and
216 directly loaded or stored from memory. */
218 if (mode != VOIDmode && mode != BLKmode)
219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
220 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
223 if (! HARD_REGNO_MODE_OK (regno, mode))
226 reg = gen_rtx (REG, mode, regno);
229 SET_DEST (pat) = reg;
230 if (recog (pat, insn, &num_clobbers) >= 0)
231 direct_load[(int) mode] = 1;
233 SET_SRC (pat) = mem1;
234 SET_DEST (pat) = reg;
235 if (recog (pat, insn, &num_clobbers) >= 0)
236 direct_load[(int) mode] = 1;
239 SET_DEST (pat) = mem;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_store[(int) mode] = 1;
244 SET_DEST (pat) = mem1;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_store[(int) mode] = 1;
253 /* This is run at the start of compiling a function. */
260 pending_stack_adjust = 0;
261 inhibit_defer_pop = 0;
262 cleanups_this_call = 0;
264 apply_args_value = 0;
268 /* Save all variables describing the current status into the structure *P.
269 This is used before starting a nested function. */
275 /* Instead of saving the postincrement queue, empty it. */
278 p->pending_stack_adjust = pending_stack_adjust;
279 p->inhibit_defer_pop = inhibit_defer_pop;
280 p->cleanups_this_call = cleanups_this_call;
281 p->saveregs_value = saveregs_value;
282 p->apply_args_value = apply_args_value;
283 p->forced_labels = forced_labels;
285 pending_stack_adjust = 0;
286 inhibit_defer_pop = 0;
287 cleanups_this_call = 0;
289 apply_args_value = 0;
293 /* Restore all variables describing the current status from the structure *P.
294 This is used after a nested function. */
297 restore_expr_status (p)
300 pending_stack_adjust = p->pending_stack_adjust;
301 inhibit_defer_pop = p->inhibit_defer_pop;
302 cleanups_this_call = p->cleanups_this_call;
303 saveregs_value = p->saveregs_value;
304 apply_args_value = p->apply_args_value;
305 forced_labels = p->forced_labels;
308 /* Manage the queue of increment instructions to be output
309 for POSTINCREMENT_EXPR expressions, etc. */
311 static rtx pending_chain;
313 /* Queue up to increment (or change) VAR later. BODY says how:
314 BODY should be the same thing you would pass to emit_insn
315 to increment right away. It will go to emit_insn later on.
317 The value is a QUEUED expression to be used in place of VAR
318 where you want to guarantee the pre-incrementation value of VAR. */
321 enqueue_insn (var, body)
324 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
325 var, NULL_RTX, NULL_RTX, body, pending_chain);
326 return pending_chain;
329 /* Use protect_from_queue to convert a QUEUED expression
330 into something that you can put immediately into an instruction.
331 If the queued incrementation has not happened yet,
332 protect_from_queue returns the variable itself.
333 If the incrementation has happened, protect_from_queue returns a temp
334 that contains a copy of the old value of the variable.
336 Any time an rtx which might possibly be a QUEUED is to be put
337 into an instruction, it must be passed through protect_from_queue first.
338 QUEUED expressions are not meaningful in instructions.
340 Do not pass a value through protect_from_queue and then hold
341 on to it for a while before putting it in an instruction!
342 If the queue is flushed in between, incorrect code will result. */
345 protect_from_queue (x, modify)
349 register RTX_CODE code = GET_CODE (x);
351 #if 0 /* A QUEUED can hang around after the queue is forced out. */
352 /* Shortcut for most common case. */
353 if (pending_chain == 0)
359 /* A special hack for read access to (MEM (QUEUED ...))
360 to facilitate use of autoincrement.
361 Make a copy of the contents of the memory location
362 rather than a copy of the address, but not
363 if the value is of mode BLKmode. */
364 if (code == MEM && GET_MODE (x) != BLKmode
365 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
367 register rtx y = XEXP (x, 0);
368 XEXP (x, 0) = QUEUED_VAR (y);
371 register rtx temp = gen_reg_rtx (GET_MODE (x));
372 emit_insn_before (gen_move_insn (temp, x),
378 /* Otherwise, recursively protect the subexpressions of all
379 the kinds of rtx's that can contain a QUEUED. */
381 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
382 else if (code == PLUS || code == MULT)
384 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
385 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
389 /* If the increment has not happened, use the variable itself. */
390 if (QUEUED_INSN (x) == 0)
391 return QUEUED_VAR (x);
392 /* If the increment has happened and a pre-increment copy exists,
394 if (QUEUED_COPY (x) != 0)
395 return QUEUED_COPY (x);
396 /* The increment has happened but we haven't set up a pre-increment copy.
397 Set one up now, and use it. */
398 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
399 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
401 return QUEUED_COPY (x);
404 /* Return nonzero if X contains a QUEUED expression:
405 if it contains anything that will be altered by a queued increment.
406 We handle only combinations of MEM, PLUS, MINUS and MULT operators
407 since memory addresses generally contain only those. */
413 register enum rtx_code code = GET_CODE (x);
419 return queued_subexp_p (XEXP (x, 0));
423 return queued_subexp_p (XEXP (x, 0))
424 || queued_subexp_p (XEXP (x, 1));
429 /* Perform all the pending incrementations. */
435 while (p = pending_chain)
437 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
438 pending_chain = QUEUED_NEXT (p);
449 /* Copy data from FROM to TO, where the machine modes are not the same.
450 Both modes may be integer, or both may be floating.
451 UNSIGNEDP should be nonzero if FROM is an unsigned type.
452 This causes zero-extension instead of sign-extension. */
455 convert_move (to, from, unsignedp)
456 register rtx to, from;
459 enum machine_mode to_mode = GET_MODE (to);
460 enum machine_mode from_mode = GET_MODE (from);
461 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
462 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
466 /* rtx code for making an equivalent value. */
467 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
469 to = protect_from_queue (to, 1);
470 from = protect_from_queue (from, 0);
472 if (to_real != from_real)
475 /* If FROM is a SUBREG that indicates that we have already done at least
476 the required extension, strip it. We don't handle such SUBREGs as
479 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
480 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
481 >= GET_MODE_SIZE (to_mode))
482 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
483 from = gen_lowpart (to_mode, from), from_mode = to_mode;
485 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
488 if (to_mode == from_mode
489 || (from_mode == VOIDmode && CONSTANT_P (from)))
491 emit_move_insn (to, from);
497 #ifdef HAVE_extendqfhf2
498 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
500 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
504 #ifdef HAVE_extendqfsf2
505 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
507 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
511 #ifdef HAVE_extendqfdf2
512 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
514 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
518 #ifdef HAVE_extendqfxf2
519 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
521 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
525 #ifdef HAVE_extendqftf2
526 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
528 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
533 #ifdef HAVE_extendhfsf2
534 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
536 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
540 #ifdef HAVE_extendhfdf2
541 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
543 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
547 #ifdef HAVE_extendhfxf2
548 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
550 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
554 #ifdef HAVE_extendhftf2
555 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
557 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
562 #ifdef HAVE_extendsfdf2
563 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
565 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
569 #ifdef HAVE_extendsfxf2
570 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
572 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
576 #ifdef HAVE_extendsftf2
577 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
579 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
583 #ifdef HAVE_extenddfxf2
584 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
586 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
590 #ifdef HAVE_extenddftf2
591 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
593 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
598 #ifdef HAVE_trunchfqf2
599 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
601 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
605 #ifdef HAVE_truncsfqf2
606 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
608 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
612 #ifdef HAVE_truncdfqf2
613 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
615 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
619 #ifdef HAVE_truncxfqf2
620 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
622 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
626 #ifdef HAVE_trunctfqf2
627 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
629 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncsfhf2
634 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
636 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
640 #ifdef HAVE_truncdfhf2
641 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
643 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxfhf2
648 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
650 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctfhf2
655 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
657 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
661 #ifdef HAVE_truncdfsf2
662 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
664 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncxfsf2
669 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
671 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
675 #ifdef HAVE_trunctfsf2
676 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
678 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
682 #ifdef HAVE_truncxfdf2
683 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
685 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
689 #ifdef HAVE_trunctfdf2
690 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
692 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
704 libcall = extendsfdf2_libfunc;
708 libcall = extendsfxf2_libfunc;
712 libcall = extendsftf2_libfunc;
721 libcall = truncdfsf2_libfunc;
725 libcall = extenddfxf2_libfunc;
729 libcall = extenddftf2_libfunc;
738 libcall = truncxfsf2_libfunc;
742 libcall = truncxfdf2_libfunc;
751 libcall = trunctfsf2_libfunc;
755 libcall = trunctfdf2_libfunc;
761 if (libcall == (rtx) 0)
762 /* This conversion is not implemented yet. */
765 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
766 emit_move_insn (to, hard_libcall_value (to_mode));
770 /* Now both modes are integers. */
772 /* Handle expanding beyond a word. */
773 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
774 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
781 enum machine_mode lowpart_mode;
782 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
784 /* Try converting directly if the insn is supported. */
785 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
788 /* If FROM is a SUBREG, put it into a register. Do this
789 so that we always generate the same set of insns for
790 better cse'ing; if an intermediate assignment occurred,
791 we won't be doing the operation directly on the SUBREG. */
792 if (optimize > 0 && GET_CODE (from) == SUBREG)
793 from = force_reg (from_mode, from);
794 emit_unop_insn (code, to, from, equiv_code);
797 /* Next, try converting via full word. */
798 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
799 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
800 != CODE_FOR_nothing))
802 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
803 emit_unop_insn (code, to,
804 gen_lowpart (word_mode, to), equiv_code);
808 /* No special multiword conversion insn; do it by hand. */
811 /* Get a copy of FROM widened to a word, if necessary. */
812 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
813 lowpart_mode = word_mode;
815 lowpart_mode = from_mode;
817 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
819 lowpart = gen_lowpart (lowpart_mode, to);
820 emit_move_insn (lowpart, lowfrom);
822 /* Compute the value to put in each remaining word. */
824 fill_value = const0_rtx;
829 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
830 && STORE_FLAG_VALUE == -1)
832 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
834 fill_value = gen_reg_rtx (word_mode);
835 emit_insn (gen_slt (fill_value));
841 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
842 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
844 fill_value = convert_to_mode (word_mode, fill_value, 1);
848 /* Fill the remaining words. */
849 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
851 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
852 rtx subword = operand_subword (to, index, 1, to_mode);
857 if (fill_value != subword)
858 emit_move_insn (subword, fill_value);
861 insns = get_insns ();
864 emit_no_conflict_block (insns, to, from, NULL_RTX,
865 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
869 /* Truncating multi-word to a word or less. */
870 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
871 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
873 convert_move (to, gen_lowpart (word_mode, from), 0);
877 /* Handle pointer conversion */ /* SPEE 900220 */
878 if (to_mode == PSImode)
880 if (from_mode != SImode)
881 from = convert_to_mode (SImode, from, unsignedp);
883 #ifdef HAVE_truncsipsi
886 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
889 #endif /* HAVE_truncsipsi */
893 if (from_mode == PSImode)
895 if (to_mode != SImode)
897 from = convert_to_mode (SImode, from, unsignedp);
902 #ifdef HAVE_extendpsisi
903 if (HAVE_extendpsisi)
905 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
908 #endif /* HAVE_extendpsisi */
913 /* Now follow all the conversions between integers
914 no more than a word long. */
916 /* For truncation, usually we can just refer to FROM in a narrower mode. */
917 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
918 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
919 GET_MODE_BITSIZE (from_mode)))
921 if (!((GET_CODE (from) == MEM
922 && ! MEM_VOLATILE_P (from)
923 && direct_load[(int) to_mode]
924 && ! mode_dependent_address_p (XEXP (from, 0)))
925 || GET_CODE (from) == REG
926 || GET_CODE (from) == SUBREG))
927 from = force_reg (from_mode, from);
928 emit_move_insn (to, gen_lowpart (to_mode, from));
932 /* Handle extension. */
933 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
935 /* Convert directly if that works. */
936 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
939 /* If FROM is a SUBREG, put it into a register. Do this
940 so that we always generate the same set of insns for
941 better cse'ing; if an intermediate assignment occurred,
942 we won't be doing the operation directly on the SUBREG. */
943 if (optimize > 0 && GET_CODE (from) == SUBREG)
944 from = force_reg (from_mode, from);
945 emit_unop_insn (code, to, from, equiv_code);
950 enum machine_mode intermediate;
952 /* Search for a mode to convert via. */
953 for (intermediate = from_mode; intermediate != VOIDmode;
954 intermediate = GET_MODE_WIDER_MODE (intermediate))
955 if ((can_extend_p (to_mode, intermediate, unsignedp)
957 && (can_extend_p (intermediate, from_mode, unsignedp)
958 != CODE_FOR_nothing))
960 convert_move (to, convert_to_mode (intermediate, from,
961 unsignedp), unsignedp);
965 /* No suitable intermediate mode. */
970 /* Support special truncate insns for certain modes. */
972 if (from_mode == DImode && to_mode == SImode)
974 #ifdef HAVE_truncdisi2
977 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
981 convert_move (to, force_reg (from_mode, from), unsignedp);
985 if (from_mode == DImode && to_mode == HImode)
987 #ifdef HAVE_truncdihi2
990 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
994 convert_move (to, force_reg (from_mode, from), unsignedp);
998 if (from_mode == DImode && to_mode == QImode)
1000 #ifdef HAVE_truncdiqi2
1001 if (HAVE_truncdiqi2)
1003 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1007 convert_move (to, force_reg (from_mode, from), unsignedp);
1011 if (from_mode == SImode && to_mode == HImode)
1013 #ifdef HAVE_truncsihi2
1014 if (HAVE_truncsihi2)
1016 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1020 convert_move (to, force_reg (from_mode, from), unsignedp);
1024 if (from_mode == SImode && to_mode == QImode)
1026 #ifdef HAVE_truncsiqi2
1027 if (HAVE_truncsiqi2)
1029 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1033 convert_move (to, force_reg (from_mode, from), unsignedp);
1037 if (from_mode == HImode && to_mode == QImode)
1039 #ifdef HAVE_trunchiqi2
1040 if (HAVE_trunchiqi2)
1042 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1046 convert_move (to, force_reg (from_mode, from), unsignedp);
1050 /* Handle truncation of volatile memrefs, and so on;
1051 the things that couldn't be truncated directly,
1052 and for which there was no special instruction. */
1053 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1055 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1056 emit_move_insn (to, temp);
1060 /* Mode combination is not recognized. */
1064 /* Return an rtx for a value that would result
1065 from converting X to mode MODE.
1066 Both X and MODE may be floating, or both integer.
1067 UNSIGNEDP is nonzero if X is an unsigned value.
1068 This can be done by referring to a part of X in place
1069 or by copying to a new temporary with conversion.
1071 This function *must not* call protect_from_queue
1072 except when putting X into an insn (in which case convert_move does it). */
1075 convert_to_mode (mode, x, unsignedp)
1076 enum machine_mode mode;
1082 /* If FROM is a SUBREG that indicates that we have already done at least
1083 the required extension, strip it. */
1085 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1086 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1087 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1088 x = gen_lowpart (mode, x);
1090 if (mode == GET_MODE (x))
1093 /* There is one case that we must handle specially: If we are converting
1094 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1095 we are to interpret the constant as unsigned, gen_lowpart will do
1096 the wrong if the constant appears negative. What we want to do is
1097 make the high-order word of the constant zero, not all ones. */
1099 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1100 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1101 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1102 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1104 /* We can do this with a gen_lowpart if both desired and current modes
1105 are integer, and this is either a constant integer, a register, or a
1106 non-volatile MEM. Except for the constant case, we must be narrowing
1109 if (GET_CODE (x) == CONST_INT
1110 || (GET_MODE_CLASS (mode) == MODE_INT
1111 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1112 && (GET_CODE (x) == CONST_DOUBLE
1113 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1114 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1115 && direct_load[(int) mode]
1116 || GET_CODE (x) == REG)))))
1117 return gen_lowpart (mode, x);
1119 temp = gen_reg_rtx (mode);
1120 convert_move (temp, x, unsignedp);
1124 /* Generate several move instructions to copy LEN bytes
1125 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1126 The caller must pass FROM and TO
1127 through protect_from_queue before calling.
1128 ALIGN (in bytes) is maximum alignment we can assume. */
1131 move_by_pieces (to, from, len, align)
1135 struct move_by_pieces data;
1136 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1137 int max_size = MOVE_MAX + 1;
1140 data.to_addr = to_addr;
1141 data.from_addr = from_addr;
1145 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1146 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1148 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1149 || GET_CODE (from_addr) == POST_INC
1150 || GET_CODE (from_addr) == POST_DEC);
1152 data.explicit_inc_from = 0;
1153 data.explicit_inc_to = 0;
1155 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1156 if (data.reverse) data.offset = len;
1159 /* If copying requires more than two move insns,
1160 copy addresses to registers (to make displacements shorter)
1161 and use post-increment if available. */
1162 if (!(data.autinc_from && data.autinc_to)
1163 && move_by_pieces_ninsns (len, align) > 2)
1165 #ifdef HAVE_PRE_DECREMENT
1166 if (data.reverse && ! data.autinc_from)
1168 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1169 data.autinc_from = 1;
1170 data.explicit_inc_from = -1;
1173 #ifdef HAVE_POST_INCREMENT
1174 if (! data.autinc_from)
1176 data.from_addr = copy_addr_to_reg (from_addr);
1177 data.autinc_from = 1;
1178 data.explicit_inc_from = 1;
1181 if (!data.autinc_from && CONSTANT_P (from_addr))
1182 data.from_addr = copy_addr_to_reg (from_addr);
1183 #ifdef HAVE_PRE_DECREMENT
1184 if (data.reverse && ! data.autinc_to)
1186 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1188 data.explicit_inc_to = -1;
1191 #ifdef HAVE_POST_INCREMENT
1192 if (! data.reverse && ! data.autinc_to)
1194 data.to_addr = copy_addr_to_reg (to_addr);
1196 data.explicit_inc_to = 1;
1199 if (!data.autinc_to && CONSTANT_P (to_addr))
1200 data.to_addr = copy_addr_to_reg (to_addr);
1203 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1204 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1207 /* First move what we can in the largest integer mode, then go to
1208 successively smaller modes. */
1210 while (max_size > 1)
1212 enum machine_mode mode = VOIDmode, tmode;
1213 enum insn_code icode;
1215 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1216 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1217 if (GET_MODE_SIZE (tmode) < max_size)
1220 if (mode == VOIDmode)
1223 icode = mov_optab->handlers[(int) mode].insn_code;
1224 if (icode != CODE_FOR_nothing
1225 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1226 GET_MODE_SIZE (mode)))
1227 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1229 max_size = GET_MODE_SIZE (mode);
1232 /* The code above should have handled everything. */
1237 /* Return number of insns required to move L bytes by pieces.
1238 ALIGN (in bytes) is maximum alignment we can assume. */
1241 move_by_pieces_ninsns (l, align)
1245 register int n_insns = 0;
1246 int max_size = MOVE_MAX + 1;
1248 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1249 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1252 while (max_size > 1)
1254 enum machine_mode mode = VOIDmode, tmode;
1255 enum insn_code icode;
1257 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1258 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1259 if (GET_MODE_SIZE (tmode) < max_size)
1262 if (mode == VOIDmode)
1265 icode = mov_optab->handlers[(int) mode].insn_code;
1266 if (icode != CODE_FOR_nothing
1267 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1268 GET_MODE_SIZE (mode)))
1269 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1271 max_size = GET_MODE_SIZE (mode);
1277 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1278 with move instructions for mode MODE. GENFUN is the gen_... function
1279 to make a move insn for that mode. DATA has all the other info. */
1282 move_by_pieces_1 (genfun, mode, data)
1284 enum machine_mode mode;
1285 struct move_by_pieces *data;
1287 register int size = GET_MODE_SIZE (mode);
1288 register rtx to1, from1;
1290 while (data->len >= size)
1292 if (data->reverse) data->offset -= size;
1294 to1 = (data->autinc_to
1295 ? gen_rtx (MEM, mode, data->to_addr)
1296 : change_address (data->to, mode,
1297 plus_constant (data->to_addr, data->offset)));
1300 ? gen_rtx (MEM, mode, data->from_addr)
1301 : change_address (data->from, mode,
1302 plus_constant (data->from_addr, data->offset)));
1304 #ifdef HAVE_PRE_DECREMENT
1305 if (data->explicit_inc_to < 0)
1306 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1307 if (data->explicit_inc_from < 0)
1308 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1311 emit_insn ((*genfun) (to1, from1));
1312 #ifdef HAVE_POST_INCREMENT
1313 if (data->explicit_inc_to > 0)
1314 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1315 if (data->explicit_inc_from > 0)
1316 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1319 if (! data->reverse) data->offset += size;
1325 /* Emit code to move a block Y to a block X.
1326 This may be done with string-move instructions,
1327 with multiple scalar move instructions, or with a library call.
1329 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1331 SIZE is an rtx that says how long they are.
1332 ALIGN is the maximum alignment we can assume they have,
1333 measured in bytes. */
1336 emit_block_move (x, y, size, align)
1341 if (GET_MODE (x) != BLKmode)
1344 if (GET_MODE (y) != BLKmode)
1347 x = protect_from_queue (x, 1);
1348 y = protect_from_queue (y, 0);
1349 size = protect_from_queue (size, 0);
1351 if (GET_CODE (x) != MEM)
1353 if (GET_CODE (y) != MEM)
1358 if (GET_CODE (size) == CONST_INT
1359 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1360 move_by_pieces (x, y, INTVAL (size), align);
1363 /* Try the most limited insn first, because there's no point
1364 including more than one in the machine description unless
1365 the more limited one has some advantage. */
1367 rtx opalign = GEN_INT (align);
1368 enum machine_mode mode;
1370 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1371 mode = GET_MODE_WIDER_MODE (mode))
1373 enum insn_code code = movstr_optab[(int) mode];
1375 if (code != CODE_FOR_nothing
1376 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1377 here because if SIZE is less than the mode mask, as it is
1378 returned by the macro, it will definitely be less than the
1379 actual mode mask. */
1380 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
1381 && (insn_operand_predicate[(int) code][0] == 0
1382 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1383 && (insn_operand_predicate[(int) code][1] == 0
1384 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1385 && (insn_operand_predicate[(int) code][3] == 0
1386 || (*insn_operand_predicate[(int) code][3]) (opalign,
1390 rtx last = get_last_insn ();
1393 op2 = convert_to_mode (mode, size, 1);
1394 if (insn_operand_predicate[(int) code][2] != 0
1395 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1396 op2 = copy_to_mode_reg (mode, op2);
1398 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1405 delete_insns_since (last);
1409 #ifdef TARGET_MEM_FUNCTIONS
1410 emit_library_call (memcpy_libfunc, 0,
1411 VOIDmode, 3, XEXP (x, 0), Pmode,
1413 convert_to_mode (TYPE_MODE (sizetype), size,
1414 TREE_UNSIGNED (sizetype)),
1415 TYPE_MODE (sizetype));
1417 emit_library_call (bcopy_libfunc, 0,
1418 VOIDmode, 3, XEXP (y, 0), Pmode,
1420 convert_to_mode (TYPE_MODE (sizetype), size,
1421 TREE_UNSIGNED (sizetype)),
1422 TYPE_MODE (sizetype));
1427 /* Copy all or part of a value X into registers starting at REGNO.
1428 The number of registers to be filled is NREGS. */
1431 move_block_to_reg (regno, x, nregs, mode)
1435 enum machine_mode mode;
1440 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1441 x = validize_mem (force_const_mem (mode, x));
1443 /* See if the machine can do this with a load multiple insn. */
1444 #ifdef HAVE_load_multiple
1445 last = get_last_insn ();
1446 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1454 delete_insns_since (last);
1457 for (i = 0; i < nregs; i++)
1458 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1459 operand_subword_force (x, i, mode));
1462 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1463 The number of registers to be filled is NREGS. */
1466 move_block_from_reg (regno, x, nregs)
1474 /* See if the machine can do this with a store multiple insn. */
1475 #ifdef HAVE_store_multiple
1476 last = get_last_insn ();
1477 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1485 delete_insns_since (last);
1488 for (i = 0; i < nregs; i++)
1490 rtx tem = operand_subword (x, i, 1, BLKmode);
1495 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1499 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1502 use_regs (regno, nregs)
1508 for (i = 0; i < nregs; i++)
1509 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1512 /* Mark the instructions since PREV as a libcall block.
1513 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1522 /* Find the instructions to mark */
1524 insn_first = NEXT_INSN (prev);
1526 insn_first = get_insns ();
1528 insn_last = get_last_insn ();
1530 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1531 REG_NOTES (insn_last));
1533 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1534 REG_NOTES (insn_first));
1537 /* Write zeros through the storage of OBJECT.
1538 If OBJECT has BLKmode, SIZE is its length in bytes. */
1541 clear_storage (object, size)
1545 if (GET_MODE (object) == BLKmode)
1547 #ifdef TARGET_MEM_FUNCTIONS
1548 emit_library_call (memset_libfunc, 0,
1550 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1551 GEN_INT (size), Pmode);
1553 emit_library_call (bzero_libfunc, 0,
1555 XEXP (object, 0), Pmode,
1556 GEN_INT (size), Pmode);
1560 emit_move_insn (object, const0_rtx);
1563 /* Generate code to copy Y into X.
1564 Both Y and X must have the same mode, except that
1565 Y can be a constant with VOIDmode.
1566 This mode cannot be BLKmode; use emit_block_move for that.
1568 Return the last instruction emitted. */
1571 emit_move_insn (x, y)
1574 enum machine_mode mode = GET_MODE (x);
1575 enum machine_mode submode;
1576 enum mode_class class = GET_MODE_CLASS (mode);
1579 x = protect_from_queue (x, 1);
1580 y = protect_from_queue (y, 0);
1582 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1585 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1586 y = force_const_mem (mode, y);
1588 /* If X or Y are memory references, verify that their addresses are valid
1590 if (GET_CODE (x) == MEM
1591 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1592 && ! push_operand (x, GET_MODE (x)))
1594 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1595 x = change_address (x, VOIDmode, XEXP (x, 0));
1597 if (GET_CODE (y) == MEM
1598 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1600 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1601 y = change_address (y, VOIDmode, XEXP (y, 0));
1603 if (mode == BLKmode)
1606 return emit_move_insn_1 (x, y);
1609 /* Low level part of emit_move_insn.
1610 Called just like emit_move_insn, but assumes X and Y
1611 are basically valid. */
1614 emit_move_insn_1 (x, y)
1617 enum machine_mode mode = GET_MODE (x);
1618 enum machine_mode submode;
1619 enum mode_class class = GET_MODE_CLASS (mode);
1622 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1623 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1624 (class == MODE_COMPLEX_INT
1625 ? MODE_INT : MODE_FLOAT),
1628 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1630 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1632 /* Expand complex moves by moving real part and imag part, if possible. */
1633 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1634 && submode != BLKmode
1635 && (mov_optab->handlers[(int) submode].insn_code
1636 != CODE_FOR_nothing))
1638 /* Don't split destination if it is a stack push. */
1639 int stack = push_operand (x, GET_MODE (x));
1640 rtx prev = get_last_insn ();
1642 /* Tell flow that the whole of the destination is being set. */
1643 if (GET_CODE (x) == REG)
1644 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1646 /* If this is a stack, push the highpart first, so it
1647 will be in the argument order.
1649 In that case, change_address is used only to convert
1650 the mode, not to change the address. */
1651 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1652 ((stack ? change_address (x, submode, (rtx) 0)
1653 : gen_highpart (submode, x)),
1654 gen_highpart (submode, y)));
1655 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1656 ((stack ? change_address (x, submode, (rtx) 0)
1657 : gen_lowpart (submode, x)),
1658 gen_lowpart (submode, y)));
1662 return get_last_insn ();
1665 /* This will handle any multi-word mode that lacks a move_insn pattern.
1666 However, you will get better code if you define such patterns,
1667 even if they must turn into multiple assembler instructions. */
1668 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1671 rtx prev_insn = get_last_insn ();
1674 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1677 rtx xpart = operand_subword (x, i, 1, mode);
1678 rtx ypart = operand_subword (y, i, 1, mode);
1680 /* If we can't get a part of Y, put Y into memory if it is a
1681 constant. Otherwise, force it into a register. If we still
1682 can't get a part of Y, abort. */
1683 if (ypart == 0 && CONSTANT_P (y))
1685 y = force_const_mem (mode, y);
1686 ypart = operand_subword (y, i, 1, mode);
1688 else if (ypart == 0)
1689 ypart = operand_subword_force (y, i, mode);
1691 if (xpart == 0 || ypart == 0)
1694 last_insn = emit_move_insn (xpart, ypart);
1696 /* Mark these insns as a libcall block. */
1697 group_insns (prev_insn);
1705 /* Pushing data onto the stack. */
1707 /* Push a block of length SIZE (perhaps variable)
1708 and return an rtx to address the beginning of the block.
1709 Note that it is not possible for the value returned to be a QUEUED.
1710 The value may be virtual_outgoing_args_rtx.
1712 EXTRA is the number of bytes of padding to push in addition to SIZE.
1713 BELOW nonzero means this padding comes at low addresses;
1714 otherwise, the padding comes at high addresses. */
1717 push_block (size, extra, below)
1722 if (CONSTANT_P (size))
1723 anti_adjust_stack (plus_constant (size, extra));
1724 else if (GET_CODE (size) == REG && extra == 0)
1725 anti_adjust_stack (size);
1728 rtx temp = copy_to_mode_reg (Pmode, size);
1730 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1731 temp, 0, OPTAB_LIB_WIDEN);
1732 anti_adjust_stack (temp);
1735 #ifdef STACK_GROWS_DOWNWARD
1736 temp = virtual_outgoing_args_rtx;
1737 if (extra != 0 && below)
1738 temp = plus_constant (temp, extra);
1740 if (GET_CODE (size) == CONST_INT)
1741 temp = plus_constant (virtual_outgoing_args_rtx,
1742 - INTVAL (size) - (below ? 0 : extra));
1743 else if (extra != 0 && !below)
1744 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1745 negate_rtx (Pmode, plus_constant (size, extra)));
1747 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1748 negate_rtx (Pmode, size));
1751 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1757 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1760 /* Generate code to push X onto the stack, assuming it has mode MODE and
1762 MODE is redundant except when X is a CONST_INT (since they don't
1764 SIZE is an rtx for the size of data to be copied (in bytes),
1765 needed only if X is BLKmode.
1767 ALIGN (in bytes) is maximum alignment we can assume.
1769 If PARTIAL and REG are both nonzero, then copy that many of the first
1770 words of X into registers starting with REG, and push the rest of X.
1771 The amount of space pushed is decreased by PARTIAL words,
1772 rounded *down* to a multiple of PARM_BOUNDARY.
1773 REG must be a hard register in this case.
1774 If REG is zero but PARTIAL is not, take any all others actions for an
1775 argument partially in registers, but do not actually load any
1778 EXTRA is the amount in bytes of extra space to leave next to this arg.
1779 This is ignored if an argument block has already been allocated.
1781 On a machine that lacks real push insns, ARGS_ADDR is the address of
1782 the bottom of the argument block for this call. We use indexing off there
1783 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1784 argument block has not been preallocated.
1786 ARGS_SO_FAR is the size of args previously pushed for this call. */
1789 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1790 args_addr, args_so_far)
1792 enum machine_mode mode;
1803 enum direction stack_direction
1804 #ifdef STACK_GROWS_DOWNWARD
1810 /* Decide where to pad the argument: `downward' for below,
1811 `upward' for above, or `none' for don't pad it.
1812 Default is below for small data on big-endian machines; else above. */
1813 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1815 /* Invert direction if stack is post-update. */
1816 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1817 if (where_pad != none)
1818 where_pad = (where_pad == downward ? upward : downward);
1820 xinner = x = protect_from_queue (x, 0);
1822 if (mode == BLKmode)
1824 /* Copy a block into the stack, entirely or partially. */
1827 int used = partial * UNITS_PER_WORD;
1828 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1836 /* USED is now the # of bytes we need not copy to the stack
1837 because registers will take care of them. */
1840 xinner = change_address (xinner, BLKmode,
1841 plus_constant (XEXP (xinner, 0), used));
1843 /* If the partial register-part of the arg counts in its stack size,
1844 skip the part of stack space corresponding to the registers.
1845 Otherwise, start copying to the beginning of the stack space,
1846 by setting SKIP to 0. */
1847 #ifndef REG_PARM_STACK_SPACE
1853 #ifdef PUSH_ROUNDING
1854 /* Do it with several push insns if that doesn't take lots of insns
1855 and if there is no difficulty with push insns that skip bytes
1856 on the stack for alignment purposes. */
1858 && GET_CODE (size) == CONST_INT
1860 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1862 /* Here we avoid the case of a structure whose weak alignment
1863 forces many pushes of a small amount of data,
1864 and such small pushes do rounding that causes trouble. */
1865 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1866 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1867 || PUSH_ROUNDING (align) == align)
1868 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1870 /* Push padding now if padding above and stack grows down,
1871 or if padding below and stack grows up.
1872 But if space already allocated, this has already been done. */
1873 if (extra && args_addr == 0
1874 && where_pad != none && where_pad != stack_direction)
1875 anti_adjust_stack (GEN_INT (extra));
1877 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1878 INTVAL (size) - used, align);
1881 #endif /* PUSH_ROUNDING */
1883 /* Otherwise make space on the stack and copy the data
1884 to the address of that space. */
1886 /* Deduct words put into registers from the size we must copy. */
1889 if (GET_CODE (size) == CONST_INT)
1890 size = GEN_INT (INTVAL (size) - used);
1892 size = expand_binop (GET_MODE (size), sub_optab, size,
1893 GEN_INT (used), NULL_RTX, 0,
1897 /* Get the address of the stack space.
1898 In this case, we do not deal with EXTRA separately.
1899 A single stack adjust will do. */
1902 temp = push_block (size, extra, where_pad == downward);
1905 else if (GET_CODE (args_so_far) == CONST_INT)
1906 temp = memory_address (BLKmode,
1907 plus_constant (args_addr,
1908 skip + INTVAL (args_so_far)));
1910 temp = memory_address (BLKmode,
1911 plus_constant (gen_rtx (PLUS, Pmode,
1912 args_addr, args_so_far),
1915 /* TEMP is the address of the block. Copy the data there. */
1916 if (GET_CODE (size) == CONST_INT
1917 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1920 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1921 INTVAL (size), align);
1924 /* Try the most limited insn first, because there's no point
1925 including more than one in the machine description unless
1926 the more limited one has some advantage. */
1927 #ifdef HAVE_movstrqi
1929 && GET_CODE (size) == CONST_INT
1930 && ((unsigned) INTVAL (size)
1931 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1933 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1934 xinner, size, GEN_INT (align));
1942 #ifdef HAVE_movstrhi
1944 && GET_CODE (size) == CONST_INT
1945 && ((unsigned) INTVAL (size)
1946 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1948 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1949 xinner, size, GEN_INT (align));
1957 #ifdef HAVE_movstrsi
1960 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1961 xinner, size, GEN_INT (align));
1969 #ifdef HAVE_movstrdi
1972 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1973 xinner, size, GEN_INT (align));
1982 #ifndef ACCUMULATE_OUTGOING_ARGS
1983 /* If the source is referenced relative to the stack pointer,
1984 copy it to another register to stabilize it. We do not need
1985 to do this if we know that we won't be changing sp. */
1987 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1988 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1989 temp = copy_to_reg (temp);
1992 /* Make inhibit_defer_pop nonzero around the library call
1993 to force it to pop the bcopy-arguments right away. */
1995 #ifdef TARGET_MEM_FUNCTIONS
1996 emit_library_call (memcpy_libfunc, 0,
1997 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1998 convert_to_mode (TYPE_MODE (sizetype),
1999 size, TREE_UNSIGNED (sizetype)),
2000 TYPE_MODE (sizetype));
2002 emit_library_call (bcopy_libfunc, 0,
2003 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2004 convert_to_mode (TYPE_MODE (sizetype),
2005 size, TREE_UNSIGNED (sizetype)),
2006 TYPE_MODE (sizetype));
2011 else if (partial > 0)
2013 /* Scalar partly in registers. */
2015 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2018 /* # words of start of argument
2019 that we must make space for but need not store. */
2020 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2021 int args_offset = INTVAL (args_so_far);
2024 /* Push padding now if padding above and stack grows down,
2025 or if padding below and stack grows up.
2026 But if space already allocated, this has already been done. */
2027 if (extra && args_addr == 0
2028 && where_pad != none && where_pad != stack_direction)
2029 anti_adjust_stack (GEN_INT (extra));
2031 /* If we make space by pushing it, we might as well push
2032 the real data. Otherwise, we can leave OFFSET nonzero
2033 and leave the space uninitialized. */
2037 /* Now NOT_STACK gets the number of words that we don't need to
2038 allocate on the stack. */
2039 not_stack = partial - offset;
2041 /* If the partial register-part of the arg counts in its stack size,
2042 skip the part of stack space corresponding to the registers.
2043 Otherwise, start copying to the beginning of the stack space,
2044 by setting SKIP to 0. */
2045 #ifndef REG_PARM_STACK_SPACE
2051 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2052 x = validize_mem (force_const_mem (mode, x));
2054 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2055 SUBREGs of such registers are not allowed. */
2056 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2057 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2058 x = copy_to_reg (x);
2060 /* Loop over all the words allocated on the stack for this arg. */
2061 /* We can do it by words, because any scalar bigger than a word
2062 has a size a multiple of a word. */
2063 #ifndef PUSH_ARGS_REVERSED
2064 for (i = not_stack; i < size; i++)
2066 for (i = size - 1; i >= not_stack; i--)
2068 if (i >= not_stack + offset)
2069 emit_push_insn (operand_subword_force (x, i, mode),
2070 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2072 GEN_INT (args_offset + ((i - not_stack + skip)
2073 * UNITS_PER_WORD)));
2079 /* Push padding now if padding above and stack grows down,
2080 or if padding below and stack grows up.
2081 But if space already allocated, this has already been done. */
2082 if (extra && args_addr == 0
2083 && where_pad != none && where_pad != stack_direction)
2084 anti_adjust_stack (GEN_INT (extra));
2086 #ifdef PUSH_ROUNDING
2088 addr = gen_push_operand ();
2091 if (GET_CODE (args_so_far) == CONST_INT)
2093 = memory_address (mode,
2094 plus_constant (args_addr, INTVAL (args_so_far)));
2096 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2099 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2103 /* If part should go in registers, copy that part
2104 into the appropriate registers. Do this now, at the end,
2105 since mem-to-mem copies above may do function calls. */
2106 if (partial > 0 && reg != 0)
2107 move_block_to_reg (REGNO (reg), x, partial, mode);
2109 if (extra && args_addr == 0 && where_pad == stack_direction)
2110 anti_adjust_stack (GEN_INT (extra));
2113 /* Expand an assignment that stores the value of FROM into TO.
2114 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2115 (This may contain a QUEUED rtx.)
2116 Otherwise, the returned value is not meaningful.
2118 SUGGEST_REG is no longer actually used.
2119 It used to mean, copy the value through a register
2120 and return that register, if that is possible.
2121 But now we do this if WANT_VALUE.
2123 If the value stored is a constant, we return the constant. */
2126 expand_assignment (to, from, want_value, suggest_reg)
2131 register rtx to_rtx = 0;
2134 /* Don't crash if the lhs of the assignment was erroneous. */
2136 if (TREE_CODE (to) == ERROR_MARK)
2137 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2139 /* Assignment of a structure component needs special treatment
2140 if the structure component's rtx is not simply a MEM.
2141 Assignment of an array element at a constant index
2142 has the same problem. */
2144 if (TREE_CODE (to) == COMPONENT_REF
2145 || TREE_CODE (to) == BIT_FIELD_REF
2146 || (TREE_CODE (to) == ARRAY_REF
2147 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2148 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2150 enum machine_mode mode1;
2156 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2157 &mode1, &unsignedp, &volatilep);
2159 /* If we are going to use store_bit_field and extract_bit_field,
2160 make sure to_rtx will be safe for multiple use. */
2162 if (mode1 == VOIDmode && want_value)
2163 tem = stabilize_reference (tem);
2165 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2168 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2170 if (GET_CODE (to_rtx) != MEM)
2172 to_rtx = change_address (to_rtx, VOIDmode,
2173 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2174 force_reg (Pmode, offset_rtx)));
2178 if (GET_CODE (to_rtx) == MEM)
2179 MEM_VOLATILE_P (to_rtx) = 1;
2180 #if 0 /* This was turned off because, when a field is volatile
2181 in an object which is not volatile, the object may be in a register,
2182 and then we would abort over here. */
2188 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2190 /* Spurious cast makes HPUX compiler happy. */
2191 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2194 /* Required alignment of containing datum. */
2195 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2196 int_size_in_bytes (TREE_TYPE (tem)));
2197 preserve_temp_slots (result);
2200 /* If we aren't returning a result, just pass on what expand_expr
2201 returned; it was probably const0_rtx. Otherwise, convert RESULT
2202 to the proper mode. */
2203 return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
2204 TREE_UNSIGNED (TREE_TYPE (to)))
2208 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2209 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2212 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2214 /* Don't move directly into a return register. */
2215 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2217 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2218 emit_move_insn (to_rtx, temp);
2219 preserve_temp_slots (to_rtx);
2224 /* In case we are returning the contents of an object which overlaps
2225 the place the value is being stored, use a safe function when copying
2226 a value through a pointer into a structure value return block. */
2227 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2228 && current_function_returns_struct
2229 && !current_function_returns_pcc_struct)
2231 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2232 rtx size = expr_size (from);
2234 #ifdef TARGET_MEM_FUNCTIONS
2235 emit_library_call (memcpy_libfunc, 0,
2236 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2237 XEXP (from_rtx, 0), Pmode,
2238 convert_to_mode (TYPE_MODE (sizetype),
2239 size, TREE_UNSIGNED (sizetype)),
2240 TYPE_MODE (sizetype));
2242 emit_library_call (bcopy_libfunc, 0,
2243 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2244 XEXP (to_rtx, 0), Pmode,
2245 convert_to_mode (TYPE_MODE (sizetype),
2246 size, TREE_UNSIGNED (sizetype)),
2247 TYPE_MODE (sizetype));
2250 preserve_temp_slots (to_rtx);
2255 /* Compute FROM and store the value in the rtx we got. */
2257 result = store_expr (from, to_rtx, want_value);
2258 preserve_temp_slots (result);
2263 /* Generate code for computing expression EXP,
2264 and storing the value into TARGET.
2265 Returns TARGET or an equivalent value.
2266 TARGET may contain a QUEUED rtx.
2268 If SUGGEST_REG is nonzero, copy the value through a register
2269 and return that register, if that is possible.
2271 If the value stored is a constant, we return the constant. */
2274 store_expr (exp, target, suggest_reg)
2276 register rtx target;
2280 int dont_return_target = 0;
2282 if (TREE_CODE (exp) == COMPOUND_EXPR)
2284 /* Perform first part of compound expression, then assign from second
2286 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2288 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2290 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2292 /* For conditional expression, get safe form of the target. Then
2293 test the condition, doing the appropriate assignment on either
2294 side. This avoids the creation of unnecessary temporaries.
2295 For non-BLKmode, it is more efficient not to do this. */
2297 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2300 target = protect_from_queue (target, 1);
2303 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2304 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2306 emit_jump_insn (gen_jump (lab2));
2309 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2315 else if (suggest_reg && GET_CODE (target) == MEM
2316 && GET_MODE (target) != BLKmode)
2317 /* If target is in memory and caller wants value in a register instead,
2318 arrange that. Pass TARGET as target for expand_expr so that,
2319 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2320 We know expand_expr will not use the target in that case. */
2322 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2323 GET_MODE (target), 0);
2324 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2325 temp = copy_to_reg (temp);
2326 dont_return_target = 1;
2328 else if (queued_subexp_p (target))
2329 /* If target contains a postincrement, it is not safe
2330 to use as the returned value. It would access the wrong
2331 place by the time the queued increment gets output.
2332 So copy the value through a temporary and use that temp
2335 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2337 /* Expand EXP into a new pseudo. */
2338 temp = gen_reg_rtx (GET_MODE (target));
2339 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2342 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2343 dont_return_target = 1;
2345 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2346 /* If this is an scalar in a register that is stored in a wider mode
2347 than the declared mode, compute the result into its declared mode
2348 and then convert to the wider mode. Our value is the computed
2351 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2352 convert_move (SUBREG_REG (target), temp,
2353 SUBREG_PROMOTED_UNSIGNED_P (target));
2358 temp = expand_expr (exp, target, GET_MODE (target), 0);
2359 /* DO return TARGET if it's a specified hardware register.
2360 expand_return relies on this. */
2361 if (!(target && GET_CODE (target) == REG
2362 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2363 && CONSTANT_P (temp))
2364 dont_return_target = 1;
2367 /* If value was not generated in the target, store it there.
2368 Convert the value to TARGET's type first if nec. */
2370 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2372 target = protect_from_queue (target, 1);
2373 if (GET_MODE (temp) != GET_MODE (target)
2374 && GET_MODE (temp) != VOIDmode)
2376 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2377 if (dont_return_target)
2379 /* In this case, we will return TEMP,
2380 so make sure it has the proper mode.
2381 But don't forget to store the value into TARGET. */
2382 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2383 emit_move_insn (target, temp);
2386 convert_move (target, temp, unsignedp);
2389 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2391 /* Handle copying a string constant into an array.
2392 The string constant may be shorter than the array.
2393 So copy just the string's actual length, and clear the rest. */
2396 /* Get the size of the data type of the string,
2397 which is actually the size of the target. */
2398 size = expr_size (exp);
2399 if (GET_CODE (size) == CONST_INT
2400 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2401 emit_block_move (target, temp, size,
2402 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2405 /* Compute the size of the data to copy from the string. */
2407 = size_binop (MIN_EXPR,
2408 size_binop (CEIL_DIV_EXPR,
2409 TYPE_SIZE (TREE_TYPE (exp)),
2410 size_int (BITS_PER_UNIT)),
2412 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2413 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2417 /* Copy that much. */
2418 emit_block_move (target, temp, copy_size_rtx,
2419 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2421 /* Figure out how much is left in TARGET
2422 that we have to clear. */
2423 if (GET_CODE (copy_size_rtx) == CONST_INT)
2425 temp = plus_constant (XEXP (target, 0),
2426 TREE_STRING_LENGTH (exp));
2427 size = plus_constant (size,
2428 - TREE_STRING_LENGTH (exp));
2432 enum machine_mode size_mode = Pmode;
2434 temp = force_reg (Pmode, XEXP (target, 0));
2435 temp = expand_binop (size_mode, add_optab, temp,
2436 copy_size_rtx, NULL_RTX, 0,
2439 size = expand_binop (size_mode, sub_optab, size,
2440 copy_size_rtx, NULL_RTX, 0,
2443 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2444 GET_MODE (size), 0, 0);
2445 label = gen_label_rtx ();
2446 emit_jump_insn (gen_blt (label));
2449 if (size != const0_rtx)
2451 #ifdef TARGET_MEM_FUNCTIONS
2452 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2453 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2455 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2456 temp, Pmode, size, Pmode);
2463 else if (GET_MODE (temp) == BLKmode)
2464 emit_block_move (target, temp, expr_size (exp),
2465 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2467 emit_move_insn (target, temp);
2469 if (dont_return_target)
2474 /* Store the value of constructor EXP into the rtx TARGET.
2475 TARGET is either a REG or a MEM. */
2478 store_constructor (exp, target)
2482 tree type = TREE_TYPE (exp);
2484 /* We know our target cannot conflict, since safe_from_p has been called. */
2486 /* Don't try copying piece by piece into a hard register
2487 since that is vulnerable to being clobbered by EXP.
2488 Instead, construct in a pseudo register and then copy it all. */
2489 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2491 rtx temp = gen_reg_rtx (GET_MODE (target));
2492 store_constructor (exp, temp);
2493 emit_move_insn (target, temp);
2498 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2502 /* Inform later passes that the whole union value is dead. */
2503 if (TREE_CODE (type) == UNION_TYPE)
2504 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2506 /* If we are building a static constructor into a register,
2507 set the initial value as zero so we can fold the value into
2509 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2510 emit_move_insn (target, const0_rtx);
2512 /* If the constructor has fewer fields than the structure,
2513 clear the whole structure first. */
2514 else if (list_length (CONSTRUCTOR_ELTS (exp))
2515 != list_length (TYPE_FIELDS (type)))
2516 clear_storage (target, int_size_in_bytes (type));
2518 /* Inform later passes that the old value is dead. */
2519 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2521 /* Store each element of the constructor into
2522 the corresponding field of TARGET. */
2524 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2526 register tree field = TREE_PURPOSE (elt);
2527 register enum machine_mode mode;
2532 /* Just ignore missing fields.
2533 We cleared the whole structure, above,
2534 if any fields are missing. */
2538 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2539 unsignedp = TREE_UNSIGNED (field);
2540 mode = DECL_MODE (field);
2541 if (DECL_BIT_FIELD (field))
2544 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2545 /* ??? This case remains to be written. */
2548 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2550 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2551 /* The alignment of TARGET is
2552 at least what its type requires. */
2554 TYPE_ALIGN (type) / BITS_PER_UNIT,
2555 int_size_in_bytes (type));
2558 else if (TREE_CODE (type) == ARRAY_TYPE)
2562 tree domain = TYPE_DOMAIN (type);
2563 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2564 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2565 tree elttype = TREE_TYPE (type);
2567 /* If the constructor has fewer fields than the structure,
2568 clear the whole structure first. Similarly if this this is
2569 static constructor of a non-BLKmode object. */
2571 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2572 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2573 clear_storage (target, int_size_in_bytes (type));
2575 /* Inform later passes that the old value is dead. */
2576 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2578 /* Store each element of the constructor into
2579 the corresponding element of TARGET, determined
2580 by counting the elements. */
2581 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2583 elt = TREE_CHAIN (elt), i++)
2585 register enum machine_mode mode;
2590 mode = TYPE_MODE (elttype);
2591 bitsize = GET_MODE_BITSIZE (mode);
2592 unsignedp = TREE_UNSIGNED (elttype);
2594 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2596 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2597 /* The alignment of TARGET is
2598 at least what its type requires. */
2600 TYPE_ALIGN (type) / BITS_PER_UNIT,
2601 int_size_in_bytes (type));
2609 /* Store the value of EXP (an expression tree)
2610 into a subfield of TARGET which has mode MODE and occupies
2611 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2612 If MODE is VOIDmode, it means that we are storing into a bit-field.
2614 If VALUE_MODE is VOIDmode, return nothing in particular.
2615 UNSIGNEDP is not used in this case.
2617 Otherwise, return an rtx for the value stored. This rtx
2618 has mode VALUE_MODE if that is convenient to do.
2619 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2621 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2622 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2625 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2626 unsignedp, align, total_size)
2628 int bitsize, bitpos;
2629 enum machine_mode mode;
2631 enum machine_mode value_mode;
2636 HOST_WIDE_INT width_mask = 0;
2638 if (bitsize < HOST_BITS_PER_WIDE_INT)
2639 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2641 /* If we are storing into an unaligned field of an aligned union that is
2642 in a register, we may have the mode of TARGET being an integer mode but
2643 MODE == BLKmode. In that case, get an aligned object whose size and
2644 alignment are the same as TARGET and store TARGET into it (we can avoid
2645 the store if the field being stored is the entire width of TARGET). Then
2646 call ourselves recursively to store the field into a BLKmode version of
2647 that object. Finally, load from the object into TARGET. This is not
2648 very efficient in general, but should only be slightly more expensive
2649 than the otherwise-required unaligned accesses. Perhaps this can be
2650 cleaned up later. */
2653 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2655 rtx object = assign_stack_temp (GET_MODE (target),
2656 GET_MODE_SIZE (GET_MODE (target)), 0);
2657 rtx blk_object = copy_rtx (object);
2659 PUT_MODE (blk_object, BLKmode);
2661 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2662 emit_move_insn (object, target);
2664 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2667 emit_move_insn (target, object);
2672 /* If the structure is in a register or if the component
2673 is a bit field, we cannot use addressing to access it.
2674 Use bit-field techniques or SUBREG to store in it. */
2676 if (mode == VOIDmode
2677 || (mode != BLKmode && ! direct_store[(int) mode])
2678 || GET_CODE (target) == REG
2679 || GET_CODE (target) == SUBREG)
2681 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2682 /* Store the value in the bitfield. */
2683 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2684 if (value_mode != VOIDmode)
2686 /* The caller wants an rtx for the value. */
2687 /* If possible, avoid refetching from the bitfield itself. */
2689 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2692 enum machine_mode tmode;
2695 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2696 tmode = GET_MODE (temp);
2697 if (tmode == VOIDmode)
2699 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2700 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2701 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2703 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2704 NULL_RTX, value_mode, 0, align,
2711 rtx addr = XEXP (target, 0);
2714 /* If a value is wanted, it must be the lhs;
2715 so make the address stable for multiple use. */
2717 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2718 && ! CONSTANT_ADDRESS_P (addr)
2719 /* A frame-pointer reference is already stable. */
2720 && ! (GET_CODE (addr) == PLUS
2721 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2722 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2723 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2724 addr = copy_to_reg (addr);
2726 /* Now build a reference to just the desired component. */
2728 to_rtx = change_address (target, mode,
2729 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2730 MEM_IN_STRUCT_P (to_rtx) = 1;
2732 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2736 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2737 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2738 ARRAY_REFs and find the ultimate containing object, which we return.
2740 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2741 bit position, and *PUNSIGNEDP to the signedness of the field.
2742 If the position of the field is variable, we store a tree
2743 giving the variable offset (in units) in *POFFSET.
2744 This offset is in addition to the bit position.
2745 If the position is not variable, we store 0 in *POFFSET.
2747 If any of the extraction expressions is volatile,
2748 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2750 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2751 is a mode that can be used to access the field. In that case, *PBITSIZE
2754 If the field describes a variable-sized object, *PMODE is set to
2755 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2756 this case, but the address of the object can be found. */
2759 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2760 punsignedp, pvolatilep)
2765 enum machine_mode *pmode;
2770 enum machine_mode mode = VOIDmode;
2771 tree offset = integer_zero_node;
2773 if (TREE_CODE (exp) == COMPONENT_REF)
2775 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2776 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2777 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2778 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2780 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2782 size_tree = TREE_OPERAND (exp, 1);
2783 *punsignedp = TREE_UNSIGNED (exp);
2787 mode = TYPE_MODE (TREE_TYPE (exp));
2788 *pbitsize = GET_MODE_BITSIZE (mode);
2789 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2794 if (TREE_CODE (size_tree) != INTEGER_CST)
2795 mode = BLKmode, *pbitsize = -1;
2797 *pbitsize = TREE_INT_CST_LOW (size_tree);
2800 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2801 and find the ultimate containing object. */
2807 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2809 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2810 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2811 : TREE_OPERAND (exp, 2));
2813 /* If this field hasn't been filled in yet, don't go
2814 past it. This should only happen when folding expressions
2815 made during type construction. */
2819 if (TREE_CODE (pos) == PLUS_EXPR)
2822 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2824 constant = TREE_OPERAND (pos, 0);
2825 var = TREE_OPERAND (pos, 1);
2827 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2829 constant = TREE_OPERAND (pos, 1);
2830 var = TREE_OPERAND (pos, 0);
2835 *pbitpos += TREE_INT_CST_LOW (constant);
2836 offset = size_binop (PLUS_EXPR, offset,
2837 size_binop (FLOOR_DIV_EXPR, var,
2838 size_int (BITS_PER_UNIT)));
2840 else if (TREE_CODE (pos) == INTEGER_CST)
2841 *pbitpos += TREE_INT_CST_LOW (pos);
2844 /* Assume here that the offset is a multiple of a unit.
2845 If not, there should be an explicitly added constant. */
2846 offset = size_binop (PLUS_EXPR, offset,
2847 size_binop (FLOOR_DIV_EXPR, pos,
2848 size_int (BITS_PER_UNIT)));
2852 else if (TREE_CODE (exp) == ARRAY_REF)
2854 /* This code is based on the code in case ARRAY_REF in expand_expr
2855 below. We assume here that the size of an array element is
2856 always an integral multiple of BITS_PER_UNIT. */
2858 tree index = TREE_OPERAND (exp, 1);
2859 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2861 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2862 tree index_type = TREE_TYPE (index);
2864 if (! integer_zerop (low_bound))
2865 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2867 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2869 index = convert (type_for_size (POINTER_SIZE, 0), index);
2870 index_type = TREE_TYPE (index);
2873 index = fold (build (MULT_EXPR, index_type, index,
2874 TYPE_SIZE (TREE_TYPE (exp))));
2876 if (TREE_CODE (index) == INTEGER_CST
2877 && TREE_INT_CST_HIGH (index) == 0)
2878 *pbitpos += TREE_INT_CST_LOW (index);
2880 offset = size_binop (PLUS_EXPR, offset,
2881 size_binop (FLOOR_DIV_EXPR, index,
2882 size_int (BITS_PER_UNIT)));
2884 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2885 && ! ((TREE_CODE (exp) == NOP_EXPR
2886 || TREE_CODE (exp) == CONVERT_EXPR)
2887 && (TYPE_MODE (TREE_TYPE (exp))
2888 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2891 /* If any reference in the chain is volatile, the effect is volatile. */
2892 if (TREE_THIS_VOLATILE (exp))
2894 exp = TREE_OPERAND (exp, 0);
2897 /* If this was a bit-field, see if there is a mode that allows direct
2898 access in case EXP is in memory. */
2899 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
2901 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2902 if (mode == BLKmode)
2906 if (integer_zerop (offset))
2912 /* We aren't finished fixing the callers to really handle nonzero offset. */
2920 /* Given an rtx VALUE that may contain additions and multiplications,
2921 return an equivalent value that just refers to a register or memory.
2922 This is done by generating instructions to perform the arithmetic
2923 and returning a pseudo-register containing the value.
2925 The returned value may be a REG, SUBREG, MEM or constant. */
2928 force_operand (value, target)
2931 register optab binoptab = 0;
2932 /* Use a temporary to force order of execution of calls to
2936 /* Use subtarget as the target for operand 0 of a binary operation. */
2937 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2939 if (GET_CODE (value) == PLUS)
2940 binoptab = add_optab;
2941 else if (GET_CODE (value) == MINUS)
2942 binoptab = sub_optab;
2943 else if (GET_CODE (value) == MULT)
2945 op2 = XEXP (value, 1);
2946 if (!CONSTANT_P (op2)
2947 && !(GET_CODE (op2) == REG && op2 != subtarget))
2949 tmp = force_operand (XEXP (value, 0), subtarget);
2950 return expand_mult (GET_MODE (value), tmp,
2951 force_operand (op2, NULL_RTX),
2957 op2 = XEXP (value, 1);
2958 if (!CONSTANT_P (op2)
2959 && !(GET_CODE (op2) == REG && op2 != subtarget))
2961 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2963 binoptab = add_optab;
2964 op2 = negate_rtx (GET_MODE (value), op2);
2967 /* Check for an addition with OP2 a constant integer and our first
2968 operand a PLUS of a virtual register and something else. In that
2969 case, we want to emit the sum of the virtual register and the
2970 constant first and then add the other value. This allows virtual
2971 register instantiation to simply modify the constant rather than
2972 creating another one around this addition. */
2973 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2974 && GET_CODE (XEXP (value, 0)) == PLUS
2975 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2976 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2977 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2979 rtx temp = expand_binop (GET_MODE (value), binoptab,
2980 XEXP (XEXP (value, 0), 0), op2,
2981 subtarget, 0, OPTAB_LIB_WIDEN);
2982 return expand_binop (GET_MODE (value), binoptab, temp,
2983 force_operand (XEXP (XEXP (value, 0), 1), 0),
2984 target, 0, OPTAB_LIB_WIDEN);
2987 tmp = force_operand (XEXP (value, 0), subtarget);
2988 return expand_binop (GET_MODE (value), binoptab, tmp,
2989 force_operand (op2, NULL_RTX),
2990 target, 0, OPTAB_LIB_WIDEN);
2991 /* We give UNSIGNEDP = 0 to expand_binop
2992 because the only operations we are expanding here are signed ones. */
2997 /* Subroutine of expand_expr:
2998 save the non-copied parts (LIST) of an expr (LHS), and return a list
2999 which can restore these values to their previous values,
3000 should something modify their storage. */
3003 save_noncopied_parts (lhs, list)
3010 for (tail = list; tail; tail = TREE_CHAIN (tail))
3011 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3012 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3015 tree part = TREE_VALUE (tail);
3016 tree part_type = TREE_TYPE (part);
3017 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3018 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3019 int_size_in_bytes (part_type), 0);
3020 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3021 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3022 parts = tree_cons (to_be_saved,
3023 build (RTL_EXPR, part_type, NULL_TREE,
3026 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3031 /* Subroutine of expand_expr:
3032 record the non-copied parts (LIST) of an expr (LHS), and return a list
3033 which specifies the initial values of these parts. */
3036 init_noncopied_parts (lhs, list)
3043 for (tail = list; tail; tail = TREE_CHAIN (tail))
3044 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3045 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3048 tree part = TREE_VALUE (tail);
3049 tree part_type = TREE_TYPE (part);
3050 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3051 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3056 /* Subroutine of expand_expr: return nonzero iff there is no way that
3057 EXP can reference X, which is being modified. */
3060 safe_from_p (x, exp)
3070 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3071 find the underlying pseudo. */
3072 if (GET_CODE (x) == SUBREG)
3075 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3079 /* If X is a location in the outgoing argument area, it is always safe. */
3080 if (GET_CODE (x) == MEM
3081 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3082 || (GET_CODE (XEXP (x, 0)) == PLUS
3083 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3086 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3089 exp_rtl = DECL_RTL (exp);
3096 if (TREE_CODE (exp) == TREE_LIST)
3097 return ((TREE_VALUE (exp) == 0
3098 || safe_from_p (x, TREE_VALUE (exp)))
3099 && (TREE_CHAIN (exp) == 0
3100 || safe_from_p (x, TREE_CHAIN (exp))));
3105 return safe_from_p (x, TREE_OPERAND (exp, 0));
3109 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3110 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3114 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3115 the expression. If it is set, we conflict iff we are that rtx or
3116 both are in memory. Otherwise, we check all operands of the
3117 expression recursively. */
3119 switch (TREE_CODE (exp))
3122 return staticp (TREE_OPERAND (exp, 0));
3125 if (GET_CODE (x) == MEM)
3130 exp_rtl = CALL_EXPR_RTL (exp);
3133 /* Assume that the call will clobber all hard registers and
3135 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3136 || GET_CODE (x) == MEM)
3143 exp_rtl = RTL_EXPR_RTL (exp);
3145 /* We don't know what this can modify. */
3150 case WITH_CLEANUP_EXPR:
3151 exp_rtl = RTL_EXPR_RTL (exp);
3155 exp_rtl = SAVE_EXPR_RTL (exp);
3159 /* The only operand we look at is operand 1. The rest aren't
3160 part of the expression. */
3161 return safe_from_p (x, TREE_OPERAND (exp, 1));
3163 case METHOD_CALL_EXPR:
3164 /* This takes a rtx argument, but shouldn't appear here. */
3168 /* If we have an rtx, we do not need to scan our operands. */
3172 nops = tree_code_length[(int) TREE_CODE (exp)];
3173 for (i = 0; i < nops; i++)
3174 if (TREE_OPERAND (exp, i) != 0
3175 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3179 /* If we have an rtl, find any enclosed object. Then see if we conflict
3183 if (GET_CODE (exp_rtl) == SUBREG)
3185 exp_rtl = SUBREG_REG (exp_rtl);
3186 if (GET_CODE (exp_rtl) == REG
3187 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3191 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3192 are memory and EXP is not readonly. */
3193 return ! (rtx_equal_p (x, exp_rtl)
3194 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3195 && ! TREE_READONLY (exp)));
3198 /* If we reach here, it is safe. */
3202 /* Subroutine of expand_expr: return nonzero iff EXP is an
3203 expression whose type is statically determinable. */
3209 if (TREE_CODE (exp) == PARM_DECL
3210 || TREE_CODE (exp) == VAR_DECL
3211 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3212 || TREE_CODE (exp) == COMPONENT_REF
3213 || TREE_CODE (exp) == ARRAY_REF)
3218 /* expand_expr: generate code for computing expression EXP.
3219 An rtx for the computed value is returned. The value is never null.
3220 In the case of a void EXP, const0_rtx is returned.
3222 The value may be stored in TARGET if TARGET is nonzero.
3223 TARGET is just a suggestion; callers must assume that
3224 the rtx returned may not be the same as TARGET.
3226 If TARGET is CONST0_RTX, it means that the value will be ignored.
3228 If TMODE is not VOIDmode, it suggests generating the
3229 result in mode TMODE. But this is done only when convenient.
3230 Otherwise, TMODE is ignored and the value generated in its natural mode.
3231 TMODE is just a suggestion; callers must assume that
3232 the rtx returned may not have mode TMODE.
3234 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3235 with a constant address even if that address is not normally legitimate.
3236 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3238 If MODIFIER is EXPAND_SUM then when EXP is an addition
3239 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3240 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3241 products as above, or REG or MEM, or constant.
3242 Ordinarily in such cases we would output mul or add instructions
3243 and then return a pseudo reg containing the sum.
3245 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3246 it also marks a label as absolutely required (it can't be dead).
3247 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3248 This is used for outputting expressions used in initializers. */
3251 expand_expr (exp, target, tmode, modifier)
3254 enum machine_mode tmode;
3255 enum expand_modifier modifier;
3257 register rtx op0, op1, temp;
3258 tree type = TREE_TYPE (exp);
3259 int unsignedp = TREE_UNSIGNED (type);
3260 register enum machine_mode mode = TYPE_MODE (type);
3261 register enum tree_code code = TREE_CODE (exp);
3263 /* Use subtarget as the target for operand 0 of a binary operation. */
3264 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3265 rtx original_target = target;
3266 int ignore = target == const0_rtx;
3269 /* Don't use hard regs as subtargets, because the combiner
3270 can only handle pseudo regs. */
3271 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3273 /* Avoid subtargets inside loops,
3274 since they hide some invariant expressions. */
3275 if (preserve_subexpressions_p ())
3278 if (ignore) target = 0, original_target = 0;
3280 /* If will do cse, generate all results into pseudo registers
3281 since 1) that allows cse to find more things
3282 and 2) otherwise cse could produce an insn the machine
3285 if (! cse_not_expected && mode != BLKmode && target
3286 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3289 /* Ensure we reference a volatile object even if value is ignored. */
3290 if (ignore && TREE_THIS_VOLATILE (exp)
3291 && TREE_CODE (exp) != FUNCTION_DECL
3292 && mode != VOIDmode && mode != BLKmode)
3294 target = gen_reg_rtx (mode);
3295 temp = expand_expr (exp, target, VOIDmode, modifier);
3297 emit_move_insn (target, temp);
3305 tree function = decl_function_context (exp);
3306 /* Handle using a label in a containing function. */
3307 if (function != current_function_decl && function != 0)
3309 struct function *p = find_function_data (function);
3310 /* Allocate in the memory associated with the function
3311 that the label is in. */
3312 push_obstacks (p->function_obstack,
3313 p->function_maybepermanent_obstack);
3315 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3316 label_rtx (exp), p->forced_labels);
3319 else if (modifier == EXPAND_INITIALIZER)
3320 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3321 label_rtx (exp), forced_labels);
3322 temp = gen_rtx (MEM, FUNCTION_MODE,
3323 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3324 if (function != current_function_decl && function != 0)
3325 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3330 if (DECL_RTL (exp) == 0)
3332 error_with_decl (exp, "prior parameter's size depends on `%s'");
3333 return CONST0_RTX (mode);
3339 if (DECL_RTL (exp) == 0)
3341 /* Ensure variable marked as used
3342 even if it doesn't go through a parser. */
3343 TREE_USED (exp) = 1;
3344 /* Handle variables inherited from containing functions. */
3345 context = decl_function_context (exp);
3347 /* We treat inline_function_decl as an alias for the current function
3348 because that is the inline function whose vars, types, etc.
3349 are being merged into the current function.
3350 See expand_inline_function. */
3351 if (context != 0 && context != current_function_decl
3352 && context != inline_function_decl
3353 /* If var is static, we don't need a static chain to access it. */
3354 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3355 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3359 /* Mark as non-local and addressable. */
3360 DECL_NONLOCAL (exp) = 1;
3361 mark_addressable (exp);
3362 if (GET_CODE (DECL_RTL (exp)) != MEM)
3364 addr = XEXP (DECL_RTL (exp), 0);
3365 if (GET_CODE (addr) == MEM)
3366 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3368 addr = fix_lexical_addr (addr, exp);
3369 return change_address (DECL_RTL (exp), mode, addr);
3372 /* This is the case of an array whose size is to be determined
3373 from its initializer, while the initializer is still being parsed.
3375 if (GET_CODE (DECL_RTL (exp)) == MEM
3376 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3377 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3378 XEXP (DECL_RTL (exp), 0));
3379 if (GET_CODE (DECL_RTL (exp)) == MEM
3380 && modifier != EXPAND_CONST_ADDRESS
3381 && modifier != EXPAND_SUM
3382 && modifier != EXPAND_INITIALIZER)
3384 /* DECL_RTL probably contains a constant address.
3385 On RISC machines where a constant address isn't valid,
3386 make some insns to get that address into a register. */
3387 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3389 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3390 return change_address (DECL_RTL (exp), VOIDmode,
3391 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3394 /* If the mode of DECL_RTL does not match that of the decl, it
3395 must be a promoted value. We return a SUBREG of the wanted mode,
3396 but mark it so that we know that it was already extended. */
3398 if (GET_CODE (DECL_RTL (exp)) == REG
3399 && GET_MODE (DECL_RTL (exp)) != mode)
3401 enum machine_mode decl_mode = DECL_MODE (exp);
3403 /* Get the signedness used for this variable. Ensure we get the
3404 same mode we got when the variable was declared. */
3406 PROMOTE_MODE (decl_mode, unsignedp, type);
3408 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3411 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3412 SUBREG_PROMOTED_VAR_P (temp) = 1;
3413 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3417 return DECL_RTL (exp);
3420 return immed_double_const (TREE_INT_CST_LOW (exp),
3421 TREE_INT_CST_HIGH (exp),
3425 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3428 /* If optimized, generate immediate CONST_DOUBLE
3429 which will be turned into memory by reload if necessary.
3431 We used to force a register so that loop.c could see it. But
3432 this does not allow gen_* patterns to perform optimizations with
3433 the constants. It also produces two insns in cases like "x = 1.0;".
3434 On most machines, floating-point constants are not permitted in
3435 many insns, so we'd end up copying it to a register in any case.
3437 Now, we do the copying in expand_binop, if appropriate. */
3438 return immed_real_const (exp);
3442 if (! TREE_CST_RTL (exp))
3443 output_constant_def (exp);
3445 /* TREE_CST_RTL probably contains a constant address.
3446 On RISC machines where a constant address isn't valid,
3447 make some insns to get that address into a register. */
3448 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3449 && modifier != EXPAND_CONST_ADDRESS
3450 && modifier != EXPAND_INITIALIZER
3451 && modifier != EXPAND_SUM
3452 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3453 return change_address (TREE_CST_RTL (exp), VOIDmode,
3454 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3455 return TREE_CST_RTL (exp);
3458 context = decl_function_context (exp);
3459 /* We treat inline_function_decl as an alias for the current function
3460 because that is the inline function whose vars, types, etc.
3461 are being merged into the current function.
3462 See expand_inline_function. */
3463 if (context == current_function_decl || context == inline_function_decl)
3466 /* If this is non-local, handle it. */
3469 temp = SAVE_EXPR_RTL (exp);
3470 if (temp && GET_CODE (temp) == REG)
3472 put_var_into_stack (exp);
3473 temp = SAVE_EXPR_RTL (exp);
3475 if (temp == 0 || GET_CODE (temp) != MEM)
3477 return change_address (temp, mode,
3478 fix_lexical_addr (XEXP (temp, 0), exp));
3480 if (SAVE_EXPR_RTL (exp) == 0)
3482 if (mode == BLKmode)
3484 = assign_stack_temp (mode,
3485 int_size_in_bytes (TREE_TYPE (exp)), 0);
3488 enum machine_mode var_mode = mode;
3490 if (TREE_CODE (type) == INTEGER_TYPE
3491 || TREE_CODE (type) == ENUMERAL_TYPE
3492 || TREE_CODE (type) == BOOLEAN_TYPE
3493 || TREE_CODE (type) == CHAR_TYPE
3494 || TREE_CODE (type) == REAL_TYPE
3495 || TREE_CODE (type) == POINTER_TYPE
3496 || TREE_CODE (type) == OFFSET_TYPE)
3498 PROMOTE_MODE (var_mode, unsignedp, type);
3501 temp = gen_reg_rtx (var_mode);
3504 SAVE_EXPR_RTL (exp) = temp;
3505 if (!optimize && GET_CODE (temp) == REG)
3506 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3509 /* If the mode of TEMP does not match that of the expression, it
3510 must be a promoted value. We pass store_expr a SUBREG of the
3511 wanted mode but mark it so that we know that it was already
3512 extended. Note that `unsignedp' was modified above in
3515 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3517 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3518 SUBREG_PROMOTED_VAR_P (temp) = 1;
3519 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3522 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3525 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3526 must be a promoted value. We return a SUBREG of the wanted mode,
3527 but mark it so that we know that it was already extended. Note
3528 that `unsignedp' was modified above in this case. */
3530 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3531 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3533 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3534 SUBREG_PROMOTED_VAR_P (temp) = 1;
3535 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3539 return SAVE_EXPR_RTL (exp);
3542 /* Exit the current loop if the body-expression is true. */
3544 rtx label = gen_label_rtx ();
3545 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3546 expand_exit_loop (NULL_PTR);
3552 expand_start_loop (1);
3553 expand_expr_stmt (TREE_OPERAND (exp, 0));
3560 tree vars = TREE_OPERAND (exp, 0);
3561 int vars_need_expansion = 0;
3563 /* Need to open a binding contour here because
3564 if there are any cleanups they most be contained here. */
3565 expand_start_bindings (0);
3567 /* Mark the corresponding BLOCK for output in its proper place. */
3568 if (TREE_OPERAND (exp, 2) != 0
3569 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3570 insert_block (TREE_OPERAND (exp, 2));
3572 /* If VARS have not yet been expanded, expand them now. */
3575 if (DECL_RTL (vars) == 0)
3577 vars_need_expansion = 1;
3580 expand_decl_init (vars);
3581 vars = TREE_CHAIN (vars);
3584 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3586 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3592 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3594 emit_insns (RTL_EXPR_SEQUENCE (exp));
3595 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3596 return RTL_EXPR_RTL (exp);
3599 /* All elts simple constants => refer to a constant in memory. But
3600 if this is a non-BLKmode mode, let it store a field at a time
3601 since that should make a CONST_INT or CONST_DOUBLE when we
3603 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3605 rtx constructor = output_constant_def (exp);
3606 if (modifier != EXPAND_CONST_ADDRESS
3607 && modifier != EXPAND_INITIALIZER
3608 && modifier != EXPAND_SUM
3609 && !memory_address_p (GET_MODE (constructor),
3610 XEXP (constructor, 0)))
3611 constructor = change_address (constructor, VOIDmode,
3612 XEXP (constructor, 0));
3619 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3620 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3625 if (target == 0 || ! safe_from_p (target, exp))
3627 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3628 target = gen_reg_rtx (mode);
3631 enum tree_code c = TREE_CODE (type);
3633 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3634 if (c == RECORD_TYPE || c == UNION_TYPE
3635 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3636 MEM_IN_STRUCT_P (target) = 1;
3639 store_constructor (exp, target);
3645 tree exp1 = TREE_OPERAND (exp, 0);
3648 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3649 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3650 This code has the same general effect as simply doing
3651 expand_expr on the save expr, except that the expression PTR
3652 is computed for use as a memory address. This means different
3653 code, suitable for indexing, may be generated. */
3654 if (TREE_CODE (exp1) == SAVE_EXPR
3655 && SAVE_EXPR_RTL (exp1) == 0
3656 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3657 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3658 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3660 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3661 VOIDmode, EXPAND_SUM);
3662 op0 = memory_address (mode, temp);
3663 op0 = copy_all_regs (op0);
3664 SAVE_EXPR_RTL (exp1) = op0;
3668 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3669 op0 = memory_address (mode, op0);
3672 temp = gen_rtx (MEM, mode, op0);
3673 /* If address was computed by addition,
3674 mark this as an element of an aggregate. */
3675 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3676 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3677 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3678 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3679 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3680 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3681 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3682 || (TREE_CODE (exp1) == ADDR_EXPR
3683 && (exp2 = TREE_OPERAND (exp1, 0))
3684 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3685 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3686 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3687 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
3688 MEM_IN_STRUCT_P (temp) = 1;
3689 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3690 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3691 a location is accessed through a pointer to const does not mean
3692 that the value there can never change. */
3693 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3699 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3703 tree array = TREE_OPERAND (exp, 0);
3704 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3705 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3706 tree index = TREE_OPERAND (exp, 1);
3707 tree index_type = TREE_TYPE (index);
3710 /* Optimize the special-case of a zero lower bound. */
3711 if (! integer_zerop (low_bound))
3712 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3714 if (TREE_CODE (index) != INTEGER_CST
3715 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3717 /* Nonconstant array index or nonconstant element size.
3718 Generate the tree for *(&array+index) and expand that,
3719 except do it in a language-independent way
3720 and don't complain about non-lvalue arrays.
3721 `mark_addressable' should already have been called
3722 for any array for which this case will be reached. */
3724 /* Don't forget the const or volatile flag from the array
3726 tree variant_type = build_type_variant (type,
3727 TREE_READONLY (exp),
3728 TREE_THIS_VOLATILE (exp));
3729 tree array_adr = build1 (ADDR_EXPR,
3730 build_pointer_type (variant_type), array);
3733 /* Convert the integer argument to a type the same size as a
3734 pointer so the multiply won't overflow spuriously. */
3735 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3736 index = convert (type_for_size (POINTER_SIZE, 0), index);
3738 /* Don't think the address has side effects
3739 just because the array does.
3740 (In some cases the address might have side effects,
3741 and we fail to record that fact here. However, it should not
3742 matter, since expand_expr should not care.) */
3743 TREE_SIDE_EFFECTS (array_adr) = 0;
3745 elt = build1 (INDIRECT_REF, type,
3746 fold (build (PLUS_EXPR,
3747 TYPE_POINTER_TO (variant_type),
3749 fold (build (MULT_EXPR,
3750 TYPE_POINTER_TO (variant_type),
3752 size_in_bytes (type))))));
3754 /* Volatility, etc., of new expression is same as old
3756 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3757 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3758 TREE_READONLY (elt) = TREE_READONLY (exp);
3760 return expand_expr (elt, target, tmode, modifier);
3763 /* Fold an expression like: "foo"[2].
3764 This is not done in fold so it won't happen inside &. */
3766 if (TREE_CODE (array) == STRING_CST
3767 && TREE_CODE (index) == INTEGER_CST
3768 && !TREE_INT_CST_HIGH (index)
3769 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3771 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3773 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3774 TREE_TYPE (exp) = integer_type_node;
3775 return expand_expr (exp, target, tmode, modifier);
3777 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3779 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3780 TREE_TYPE (exp) = integer_type_node;
3781 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3783 target, tmode, modifier);
3787 /* If this is a constant index into a constant array,
3788 just get the value from the array. Handle both the cases when
3789 we have an explicit constructor and when our operand is a variable
3790 that was declared const. */
3792 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3794 if (TREE_CODE (index) == INTEGER_CST
3795 && TREE_INT_CST_HIGH (index) == 0)
3797 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3799 i = TREE_INT_CST_LOW (index);
3801 elem = TREE_CHAIN (elem);
3803 return expand_expr (fold (TREE_VALUE (elem)), target,
3808 else if (optimize >= 1
3809 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3810 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3811 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3813 if (TREE_CODE (index) == INTEGER_CST
3814 && TREE_INT_CST_HIGH (index) == 0)
3816 tree init = DECL_INITIAL (array);
3818 i = TREE_INT_CST_LOW (index);
3819 if (TREE_CODE (init) == CONSTRUCTOR)
3821 tree elem = CONSTRUCTOR_ELTS (init);
3824 elem = TREE_CHAIN (elem);
3826 return expand_expr (fold (TREE_VALUE (elem)), target,
3829 else if (TREE_CODE (init) == STRING_CST
3830 && i < TREE_STRING_LENGTH (init))
3832 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3833 return convert_to_mode (mode, temp, 0);
3839 /* Treat array-ref with constant index as a component-ref. */
3843 /* If the operand is a CONSTRUCTOR, we can just extract the
3844 appropriate field if it is present. */
3845 if (code != ARRAY_REF
3846 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3850 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3851 elt = TREE_CHAIN (elt))
3852 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3853 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3857 enum machine_mode mode1;
3862 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3863 &mode1, &unsignedp, &volatilep);
3865 /* If we got back the original object, something is wrong. Perhaps
3866 we are evaluating an expression too early. In any event, don't
3867 infinitely recurse. */
3871 /* In some cases, we will be offsetting OP0's address by a constant.
3872 So get it as a sum, if possible. If we will be using it
3873 directly in an insn, we validate it. */
3874 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3876 /* If this is a constant, put it into a register if it is a
3877 legitimate constant and memory if it isn't. */
3878 if (CONSTANT_P (op0))
3880 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3881 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3882 op0 = force_reg (mode, op0);
3884 op0 = validize_mem (force_const_mem (mode, op0));
3889 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3891 if (GET_CODE (op0) != MEM)
3893 op0 = change_address (op0, VOIDmode,
3894 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3895 force_reg (Pmode, offset_rtx)));
3898 /* Don't forget about volatility even if this is a bitfield. */
3899 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3901 op0 = copy_rtx (op0);
3902 MEM_VOLATILE_P (op0) = 1;
3905 if (mode1 == VOIDmode
3906 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3907 && modifier != EXPAND_CONST_ADDRESS
3908 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3909 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3911 /* In cases where an aligned union has an unaligned object
3912 as a field, we might be extracting a BLKmode value from
3913 an integer-mode (e.g., SImode) object. Handle this case
3914 by doing the extract into an object as wide as the field
3915 (which we know to be the width of a basic mode), then
3916 storing into memory, and changing the mode to BLKmode. */
3917 enum machine_mode ext_mode = mode;
3919 if (ext_mode == BLKmode)
3920 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3922 if (ext_mode == BLKmode)
3925 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3926 unsignedp, target, ext_mode, ext_mode,
3927 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3928 int_size_in_bytes (TREE_TYPE (tem)));
3929 if (mode == BLKmode)
3931 rtx new = assign_stack_temp (ext_mode,
3932 bitsize / BITS_PER_UNIT, 0);
3934 emit_move_insn (new, op0);
3935 op0 = copy_rtx (new);
3936 PUT_MODE (op0, BLKmode);
3942 /* Get a reference to just this component. */
3943 if (modifier == EXPAND_CONST_ADDRESS
3944 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3945 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3946 (bitpos / BITS_PER_UNIT)));
3948 op0 = change_address (op0, mode1,
3949 plus_constant (XEXP (op0, 0),
3950 (bitpos / BITS_PER_UNIT)));
3951 MEM_IN_STRUCT_P (op0) = 1;
3952 MEM_VOLATILE_P (op0) |= volatilep;
3953 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3956 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3957 convert_move (target, op0, unsignedp);
3963 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
3964 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3965 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3966 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3967 MEM_IN_STRUCT_P (temp) = 1;
3968 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3969 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3970 a location is accessed through a pointer to const does not mean
3971 that the value there can never change. */
3972 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3977 /* Intended for a reference to a buffer of a file-object in Pascal.
3978 But it's not certain that a special tree code will really be
3979 necessary for these. INDIRECT_REF might work for them. */
3983 /* IN_EXPR: Inlined pascal set IN expression.
3986 rlo = set_low - (set_low%bits_per_word);
3987 the_word = set [ (index - rlo)/bits_per_word ];
3988 bit_index = index % bits_per_word;
3989 bitmask = 1 << bit_index;
3990 return !!(the_word & bitmask); */
3992 preexpand_calls (exp);
3994 tree set = TREE_OPERAND (exp, 0);
3995 tree index = TREE_OPERAND (exp, 1);
3996 tree set_type = TREE_TYPE (set);
3998 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3999 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4005 rtx diff, quo, rem, addr, bit, result;
4006 rtx setval, setaddr;
4007 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4010 target = gen_reg_rtx (mode);
4012 /* If domain is empty, answer is no. */
4013 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4016 index_val = expand_expr (index, 0, VOIDmode, 0);
4017 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4018 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4019 setval = expand_expr (set, 0, VOIDmode, 0);
4020 setaddr = XEXP (setval, 0);
4022 /* Compare index against bounds, if they are constant. */
4023 if (GET_CODE (index_val) == CONST_INT
4024 && GET_CODE (lo_r) == CONST_INT
4025 && INTVAL (index_val) < INTVAL (lo_r))
4028 if (GET_CODE (index_val) == CONST_INT
4029 && GET_CODE (hi_r) == CONST_INT
4030 && INTVAL (hi_r) < INTVAL (index_val))
4033 /* If we get here, we have to generate the code for both cases
4034 (in range and out of range). */
4036 op0 = gen_label_rtx ();
4037 op1 = gen_label_rtx ();
4039 if (! (GET_CODE (index_val) == CONST_INT
4040 && GET_CODE (lo_r) == CONST_INT))
4042 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4043 GET_MODE (index_val), 0, 0);
4044 emit_jump_insn (gen_blt (op1));
4047 if (! (GET_CODE (index_val) == CONST_INT
4048 && GET_CODE (hi_r) == CONST_INT))
4050 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4051 GET_MODE (index_val), 0, 0);
4052 emit_jump_insn (gen_bgt (op1));
4055 /* Calculate the element number of bit zero in the first word
4057 if (GET_CODE (lo_r) == CONST_INT)
4058 rlow = GEN_INT (INTVAL (lo_r)
4059 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4061 rlow = expand_binop (index_mode, and_optab, lo_r,
4062 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4063 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4065 diff = expand_binop (index_mode, sub_optab,
4066 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4068 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4069 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4070 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4071 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4072 addr = memory_address (byte_mode,
4073 expand_binop (index_mode, add_optab,
4074 diff, setaddr, NULL_RTX, 0,
4076 /* Extract the bit we want to examine */
4077 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4078 gen_rtx (MEM, byte_mode, addr),
4079 make_tree (TREE_TYPE (index), rem),
4081 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4082 GET_MODE (target) == byte_mode ? target : 0,
4083 1, OPTAB_LIB_WIDEN);
4085 if (result != target)
4086 convert_move (target, result, 1);
4088 /* Output the code to handle the out-of-range case. */
4091 emit_move_insn (target, const0_rtx);
4096 case WITH_CLEANUP_EXPR:
4097 if (RTL_EXPR_RTL (exp) == 0)
4100 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4102 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4103 /* That's it for this cleanup. */
4104 TREE_OPERAND (exp, 2) = 0;
4106 return RTL_EXPR_RTL (exp);
4109 /* Check for a built-in function. */
4110 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4111 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4112 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4113 return expand_builtin (exp, target, subtarget, tmode, ignore);
4114 /* If this call was expanded already by preexpand_calls,
4115 just return the result we got. */
4116 if (CALL_EXPR_RTL (exp) != 0)
4117 return CALL_EXPR_RTL (exp);
4118 return expand_call (exp, target, ignore);
4120 case NON_LVALUE_EXPR:
4123 case REFERENCE_EXPR:
4124 if (TREE_CODE (type) == VOID_TYPE || ignore)
4126 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4129 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4130 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4131 if (TREE_CODE (type) == UNION_TYPE)
4133 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4136 if (mode == BLKmode)
4138 if (TYPE_SIZE (type) == 0
4139 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4141 target = assign_stack_temp (BLKmode,
4142 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4143 + BITS_PER_UNIT - 1)
4144 / BITS_PER_UNIT, 0);
4147 target = gen_reg_rtx (mode);
4149 if (GET_CODE (target) == MEM)
4150 /* Store data into beginning of memory target. */
4151 store_expr (TREE_OPERAND (exp, 0),
4152 change_address (target, TYPE_MODE (valtype), 0), 0);
4154 else if (GET_CODE (target) == REG)
4155 /* Store this field into a union of the proper type. */
4156 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4157 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4159 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4163 /* Return the entire union. */
4166 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4167 if (GET_MODE (op0) == mode)
4169 /* If arg is a constant integer being extended from a narrower mode,
4170 we must really truncate to get the extended bits right. Otherwise
4171 (unsigned long) (unsigned char) ("\377"[0])
4172 would come out as ffffffff. */
4173 if (GET_MODE (op0) == VOIDmode
4174 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4175 < GET_MODE_BITSIZE (mode)))
4177 /* MODE must be narrower than HOST_BITS_PER_INT. */
4178 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4180 if (width < HOST_BITS_PER_WIDE_INT)
4182 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4183 : CONST_DOUBLE_LOW (op0));
4184 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4185 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4186 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4188 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4190 op0 = GEN_INT (val);
4194 op0 = (simplify_unary_operation
4195 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4196 ? ZERO_EXTEND : SIGN_EXTEND),
4198 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4203 if (GET_MODE (op0) == VOIDmode)
4205 if (modifier == EXPAND_INITIALIZER)
4206 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4207 if (flag_force_mem && GET_CODE (op0) == MEM)
4208 op0 = copy_to_reg (op0);
4211 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4213 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4217 /* We come here from MINUS_EXPR when the second operand is a constant. */
4219 this_optab = add_optab;
4221 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4222 something else, make sure we add the register to the constant and
4223 then to the other thing. This case can occur during strength
4224 reduction and doing it this way will produce better code if the
4225 frame pointer or argument pointer is eliminated.
4227 fold-const.c will ensure that the constant is always in the inner
4228 PLUS_EXPR, so the only case we need to do anything about is if
4229 sp, ap, or fp is our second argument, in which case we must swap
4230 the innermost first argument and our second argument. */
4232 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4233 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4234 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4235 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4236 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4237 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4239 tree t = TREE_OPERAND (exp, 1);
4241 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4242 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4245 /* If the result is to be Pmode and we are adding an integer to
4246 something, we might be forming a constant. So try to use
4247 plus_constant. If it produces a sum and we can't accept it,
4248 use force_operand. This allows P = &ARR[const] to generate
4249 efficient code on machines where a SYMBOL_REF is not a valid
4252 If this is an EXPAND_SUM call, always return the sum. */
4253 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4254 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4255 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4258 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4260 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4261 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4262 op1 = force_operand (op1, target);
4266 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4267 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4268 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4271 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4273 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4274 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4275 op0 = force_operand (op0, target);
4279 /* No sense saving up arithmetic to be done
4280 if it's all in the wrong mode to form part of an address.
4281 And force_operand won't know whether to sign-extend or
4283 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4284 || mode != Pmode) goto binop;
4286 preexpand_calls (exp);
4287 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4290 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4291 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4293 /* Make sure any term that's a sum with a constant comes last. */
4294 if (GET_CODE (op0) == PLUS
4295 && CONSTANT_P (XEXP (op0, 1)))
4301 /* If adding to a sum including a constant,
4302 associate it to put the constant outside. */
4303 if (GET_CODE (op1) == PLUS
4304 && CONSTANT_P (XEXP (op1, 1)))
4306 rtx constant_term = const0_rtx;
4308 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4311 /* Ensure that MULT comes first if there is one. */
4312 else if (GET_CODE (op0) == MULT)
4313 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4315 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4317 /* Let's also eliminate constants from op0 if possible. */
4318 op0 = eliminate_constant_term (op0, &constant_term);
4320 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4321 their sum should be a constant. Form it into OP1, since the
4322 result we want will then be OP0 + OP1. */
4324 temp = simplify_binary_operation (PLUS, mode, constant_term,
4329 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4332 /* Put a constant term last and put a multiplication first. */
4333 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4334 temp = op1, op1 = op0, op0 = temp;
4336 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4337 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4340 /* Handle difference of two symbolic constants,
4341 for the sake of an initializer. */
4342 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4343 && really_constant_p (TREE_OPERAND (exp, 0))
4344 && really_constant_p (TREE_OPERAND (exp, 1)))
4346 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4347 VOIDmode, modifier);
4348 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4349 VOIDmode, modifier);
4350 return gen_rtx (MINUS, mode, op0, op1);
4352 /* Convert A - const to A + (-const). */
4353 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4355 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4356 fold (build1 (NEGATE_EXPR, type,
4357 TREE_OPERAND (exp, 1))));
4360 this_optab = sub_optab;
4364 preexpand_calls (exp);
4365 /* If first operand is constant, swap them.
4366 Thus the following special case checks need only
4367 check the second operand. */
4368 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4370 register tree t1 = TREE_OPERAND (exp, 0);
4371 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4372 TREE_OPERAND (exp, 1) = t1;
4375 /* Attempt to return something suitable for generating an
4376 indexed address, for machines that support that. */
4378 if (modifier == EXPAND_SUM && mode == Pmode
4379 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4380 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4382 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4384 /* Apply distributive law if OP0 is x+c. */
4385 if (GET_CODE (op0) == PLUS
4386 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4387 return gen_rtx (PLUS, mode,
4388 gen_rtx (MULT, mode, XEXP (op0, 0),
4389 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4390 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4391 * INTVAL (XEXP (op0, 1))));
4393 if (GET_CODE (op0) != REG)
4394 op0 = force_operand (op0, NULL_RTX);
4395 if (GET_CODE (op0) != REG)
4396 op0 = copy_to_mode_reg (mode, op0);
4398 return gen_rtx (MULT, mode, op0,
4399 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4402 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4405 /* Check for multiplying things that have been extended
4406 from a narrower type. If this machine supports multiplying
4407 in that narrower type with a result in the desired type,
4408 do it that way, and avoid the explicit type-conversion. */
4409 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4410 && TREE_CODE (type) == INTEGER_TYPE
4411 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4412 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4413 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4414 && int_fits_type_p (TREE_OPERAND (exp, 1),
4415 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4416 /* Don't use a widening multiply if a shift will do. */
4417 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4418 > HOST_BITS_PER_WIDE_INT)
4419 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4421 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4422 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4424 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4425 /* If both operands are extended, they must either both
4426 be zero-extended or both be sign-extended. */
4427 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4429 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4431 enum machine_mode innermode
4432 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4433 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4434 ? umul_widen_optab : smul_widen_optab);
4435 if (mode == GET_MODE_WIDER_MODE (innermode)
4436 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4438 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4439 NULL_RTX, VOIDmode, 0);
4440 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4441 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4444 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4445 NULL_RTX, VOIDmode, 0);
4449 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4450 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4451 return expand_mult (mode, op0, op1, target, unsignedp);
4453 case TRUNC_DIV_EXPR:
4454 case FLOOR_DIV_EXPR:
4456 case ROUND_DIV_EXPR:
4457 case EXACT_DIV_EXPR:
4458 preexpand_calls (exp);
4459 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4461 /* Possible optimization: compute the dividend with EXPAND_SUM
4462 then if the divisor is constant can optimize the case
4463 where some terms of the dividend have coeffs divisible by it. */
4464 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4465 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4466 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4469 this_optab = flodiv_optab;
4472 case TRUNC_MOD_EXPR:
4473 case FLOOR_MOD_EXPR:
4475 case ROUND_MOD_EXPR:
4476 preexpand_calls (exp);
4477 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4479 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4480 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4481 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4483 case FIX_ROUND_EXPR:
4484 case FIX_FLOOR_EXPR:
4486 abort (); /* Not used for C. */
4488 case FIX_TRUNC_EXPR:
4489 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4491 target = gen_reg_rtx (mode);
4492 expand_fix (target, op0, unsignedp);
4496 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4498 target = gen_reg_rtx (mode);
4499 /* expand_float can't figure out what to do if FROM has VOIDmode.
4500 So give it the correct mode. With -O, cse will optimize this. */
4501 if (GET_MODE (op0) == VOIDmode)
4502 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4504 expand_float (target, op0,
4505 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4509 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4510 temp = expand_unop (mode, neg_optab, op0, target, 0);
4516 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4518 /* Handle complex values specially. */
4520 enum machine_mode opmode
4521 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4523 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4524 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4525 return expand_complex_abs (opmode, op0, target, unsignedp);
4528 /* Unsigned abs is simply the operand. Testing here means we don't
4529 risk generating incorrect code below. */
4530 if (TREE_UNSIGNED (type))
4533 /* First try to do it with a special abs instruction. */
4534 temp = expand_unop (mode, abs_optab, op0, target, 0);
4538 /* If this machine has expensive jumps, we can do integer absolute
4539 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4540 where W is the width of MODE. */
4542 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4544 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4545 size_int (GET_MODE_BITSIZE (mode) - 1),
4548 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4551 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4558 /* If that does not win, use conditional jump and negate. */
4559 target = original_target;
4560 temp = gen_label_rtx ();
4561 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4562 || (GET_CODE (target) == REG
4563 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4564 target = gen_reg_rtx (mode);
4565 emit_move_insn (target, op0);
4566 emit_cmp_insn (target,
4567 expand_expr (convert (type, integer_zero_node),
4568 NULL_RTX, VOIDmode, 0),
4569 GE, NULL_RTX, mode, 0, 0);
4571 emit_jump_insn (gen_bge (temp));
4572 op0 = expand_unop (mode, neg_optab, target, target, 0);
4574 emit_move_insn (target, op0);
4581 target = original_target;
4582 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4583 || (GET_CODE (target) == REG
4584 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4585 target = gen_reg_rtx (mode);
4586 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4587 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4589 /* First try to do it with a special MIN or MAX instruction.
4590 If that does not win, use a conditional jump to select the proper
4592 this_optab = (TREE_UNSIGNED (type)
4593 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4594 : (code == MIN_EXPR ? smin_optab : smax_optab));
4596 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4602 emit_move_insn (target, op0);
4603 op0 = gen_label_rtx ();
4604 /* If this mode is an integer too wide to compare properly,
4605 compare word by word. Rely on cse to optimize constant cases. */
4606 if (GET_MODE_CLASS (mode) == MODE_INT
4607 && !can_compare_p (mode))
4609 if (code == MAX_EXPR)
4610 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
4612 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4613 emit_move_insn (target, op1);
4617 if (code == MAX_EXPR)
4618 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4619 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4620 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4622 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4623 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4624 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4625 if (temp == const0_rtx)
4626 emit_move_insn (target, op1);
4627 else if (temp != const_true_rtx)
4629 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4630 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4633 emit_move_insn (target, op1);
4639 /* ??? Can optimize when the operand of this is a bitwise operation,
4640 by using a different bitwise operation. */
4642 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4643 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4649 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4650 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4655 /* ??? Can optimize bitwise operations with one arg constant.
4656 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4657 and (a bitwise1 b) bitwise2 b (etc)
4658 but that is probably not worth while. */
4660 /* BIT_AND_EXPR is for bitwise anding.
4661 TRUTH_AND_EXPR is for anding two boolean values
4662 when we want in all cases to compute both of them.
4663 In general it is fastest to do TRUTH_AND_EXPR by
4664 computing both operands as actual zero-or-1 values
4665 and then bitwise anding. In cases where there cannot
4666 be any side effects, better code would be made by
4667 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4668 but the question is how to recognize those cases. */
4670 case TRUTH_AND_EXPR:
4672 this_optab = and_optab;
4675 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4678 this_optab = ior_optab;
4681 case TRUTH_XOR_EXPR:
4683 this_optab = xor_optab;
4690 preexpand_calls (exp);
4691 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4693 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4694 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4697 /* Could determine the answer when only additive constants differ.
4698 Also, the addition of one can be handled by changing the condition. */
4705 preexpand_calls (exp);
4706 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4709 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4710 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4712 && GET_CODE (original_target) == REG
4713 && (GET_MODE (original_target)
4714 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4716 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4717 if (temp != original_target)
4718 temp = copy_to_reg (temp);
4719 op1 = gen_label_rtx ();
4720 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4721 GET_MODE (temp), unsignedp, 0);
4722 emit_jump_insn (gen_beq (op1));
4723 emit_move_insn (temp, const1_rtx);
4727 /* If no set-flag instruction, must generate a conditional
4728 store into a temporary variable. Drop through
4729 and handle this like && and ||. */
4731 case TRUTH_ANDIF_EXPR:
4732 case TRUTH_ORIF_EXPR:
4733 if (target == 0 || ! safe_from_p (target, exp)
4734 /* Make sure we don't have a hard reg (such as function's return
4735 value) live across basic blocks, if not optimizing. */
4736 || (!optimize && GET_CODE (target) == REG
4737 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4738 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4739 emit_clr_insn (target);
4740 op1 = gen_label_rtx ();
4741 jumpifnot (exp, op1);
4742 emit_0_to_1_insn (target);
4746 case TRUTH_NOT_EXPR:
4747 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4748 /* The parser is careful to generate TRUTH_NOT_EXPR
4749 only with operands that are always zero or one. */
4750 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4751 target, 1, OPTAB_LIB_WIDEN);
4757 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4759 return expand_expr (TREE_OPERAND (exp, 1),
4760 (ignore ? const0_rtx : target),
4765 /* Note that COND_EXPRs whose type is a structure or union
4766 are required to be constructed to contain assignments of
4767 a temporary variable, so that we can evaluate them here
4768 for side effect only. If type is void, we must do likewise. */
4770 /* If an arm of the branch requires a cleanup,
4771 only that cleanup is performed. */
4774 tree binary_op = 0, unary_op = 0;
4775 tree old_cleanups = cleanups_this_call;
4776 cleanups_this_call = 0;
4778 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4779 convert it to our mode, if necessary. */
4780 if (integer_onep (TREE_OPERAND (exp, 1))
4781 && integer_zerop (TREE_OPERAND (exp, 2))
4782 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4784 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4785 if (GET_MODE (op0) == mode)
4788 target = gen_reg_rtx (mode);
4789 convert_move (target, op0, unsignedp);
4793 /* If we are not to produce a result, we have no target. Otherwise,
4794 if a target was specified use it; it will not be used as an
4795 intermediate target unless it is safe. If no target, use a
4798 if (mode == VOIDmode || ignore)
4800 else if (original_target
4801 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4802 temp = original_target;
4803 else if (mode == BLKmode)
4805 if (TYPE_SIZE (type) == 0
4806 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4808 temp = assign_stack_temp (BLKmode,
4809 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4810 + BITS_PER_UNIT - 1)
4811 / BITS_PER_UNIT, 0);
4814 temp = gen_reg_rtx (mode);
4816 /* Check for X ? A + B : A. If we have this, we can copy
4817 A to the output and conditionally add B. Similarly for unary
4818 operations. Don't do this if X has side-effects because
4819 those side effects might affect A or B and the "?" operation is
4820 a sequence point in ANSI. (We test for side effects later.) */
4822 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4823 && operand_equal_p (TREE_OPERAND (exp, 2),
4824 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4825 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4826 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4827 && operand_equal_p (TREE_OPERAND (exp, 1),
4828 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4829 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4830 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4831 && operand_equal_p (TREE_OPERAND (exp, 2),
4832 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4833 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4834 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4835 && operand_equal_p (TREE_OPERAND (exp, 1),
4836 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4837 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4839 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4840 operation, do this as A + (X != 0). Similarly for other simple
4841 binary operators. */
4842 if (singleton && binary_op
4843 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4844 && (TREE_CODE (binary_op) == PLUS_EXPR
4845 || TREE_CODE (binary_op) == MINUS_EXPR
4846 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4847 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4848 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4849 && integer_onep (TREE_OPERAND (binary_op, 1))
4850 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4853 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4854 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4855 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4856 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4859 /* If we had X ? A : A + 1, do this as A + (X == 0).
4861 We have to invert the truth value here and then put it
4862 back later if do_store_flag fails. We cannot simply copy
4863 TREE_OPERAND (exp, 0) to another variable and modify that
4864 because invert_truthvalue can modify the tree pointed to
4866 if (singleton == TREE_OPERAND (exp, 1))
4867 TREE_OPERAND (exp, 0)
4868 = invert_truthvalue (TREE_OPERAND (exp, 0));
4870 result = do_store_flag (TREE_OPERAND (exp, 0),
4871 (safe_from_p (temp, singleton)
4873 mode, BRANCH_COST <= 1);
4877 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4878 return expand_binop (mode, boptab, op1, result, temp,
4879 unsignedp, OPTAB_LIB_WIDEN);
4881 else if (singleton == TREE_OPERAND (exp, 1))
4882 TREE_OPERAND (exp, 0)
4883 = invert_truthvalue (TREE_OPERAND (exp, 0));
4887 op0 = gen_label_rtx ();
4889 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4893 /* If the target conflicts with the other operand of the
4894 binary op, we can't use it. Also, we can't use the target
4895 if it is a hard register, because evaluating the condition
4896 might clobber it. */
4898 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4899 || (GET_CODE (temp) == REG
4900 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4901 temp = gen_reg_rtx (mode);
4902 store_expr (singleton, temp, 0);
4905 expand_expr (singleton,
4906 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4907 if (cleanups_this_call)
4909 sorry ("aggregate value in COND_EXPR");
4910 cleanups_this_call = 0;
4912 if (singleton == TREE_OPERAND (exp, 1))
4913 jumpif (TREE_OPERAND (exp, 0), op0);
4915 jumpifnot (TREE_OPERAND (exp, 0), op0);
4917 if (binary_op && temp == 0)
4918 /* Just touch the other operand. */
4919 expand_expr (TREE_OPERAND (binary_op, 1),
4920 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4922 store_expr (build (TREE_CODE (binary_op), type,
4923 make_tree (type, temp),
4924 TREE_OPERAND (binary_op, 1)),
4927 store_expr (build1 (TREE_CODE (unary_op), type,
4928 make_tree (type, temp)),
4933 /* This is now done in jump.c and is better done there because it
4934 produces shorter register lifetimes. */
4936 /* Check for both possibilities either constants or variables
4937 in registers (but not the same as the target!). If so, can
4938 save branches by assigning one, branching, and assigning the
4940 else if (temp && GET_MODE (temp) != BLKmode
4941 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4942 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4943 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4944 && DECL_RTL (TREE_OPERAND (exp, 1))
4945 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4946 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4947 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4948 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4949 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4950 && DECL_RTL (TREE_OPERAND (exp, 2))
4951 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4952 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4954 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4955 temp = gen_reg_rtx (mode);
4956 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4957 jumpifnot (TREE_OPERAND (exp, 0), op0);
4958 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4962 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4963 comparison operator. If we have one of these cases, set the
4964 output to A, branch on A (cse will merge these two references),
4965 then set the output to FOO. */
4967 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4968 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4969 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4970 TREE_OPERAND (exp, 1), 0)
4971 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4972 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4974 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4975 temp = gen_reg_rtx (mode);
4976 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4977 jumpif (TREE_OPERAND (exp, 0), op0);
4978 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4982 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4983 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4984 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4985 TREE_OPERAND (exp, 2), 0)
4986 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4987 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4989 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4990 temp = gen_reg_rtx (mode);
4991 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4992 jumpifnot (TREE_OPERAND (exp, 0), op0);
4993 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4998 op1 = gen_label_rtx ();
4999 jumpifnot (TREE_OPERAND (exp, 0), op0);
5001 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5003 expand_expr (TREE_OPERAND (exp, 1),
5004 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5005 if (cleanups_this_call)
5007 sorry ("aggregate value in COND_EXPR");
5008 cleanups_this_call = 0;
5012 emit_jump_insn (gen_jump (op1));
5016 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5018 expand_expr (TREE_OPERAND (exp, 2),
5019 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5022 if (cleanups_this_call)
5024 sorry ("aggregate value in COND_EXPR");
5025 cleanups_this_call = 0;
5031 cleanups_this_call = old_cleanups;
5037 /* Something needs to be initialized, but we didn't know
5038 where that thing was when building the tree. For example,
5039 it could be the return value of a function, or a parameter
5040 to a function which lays down in the stack, or a temporary
5041 variable which must be passed by reference.
5043 We guarantee that the expression will either be constructed
5044 or copied into our original target. */
5046 tree slot = TREE_OPERAND (exp, 0);
5049 if (TREE_CODE (slot) != VAR_DECL)
5054 if (DECL_RTL (slot) != 0)
5056 target = DECL_RTL (slot);
5057 /* If we have already expanded the slot, so don't do
5059 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5064 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5065 /* All temp slots at this level must not conflict. */
5066 preserve_temp_slots (target);
5067 DECL_RTL (slot) = target;
5071 /* I bet this needs to be done, and I bet that it needs to
5072 be above, inside the else clause. The reason is
5073 simple, how else is it going to get cleaned up? (mrs)
5075 The reason is probably did not work before, and was
5076 commented out is because this was re-expanding already
5077 expanded target_exprs (target == 0 and DECL_RTL (slot)
5078 != 0) also cleaning them up many times as well. :-( */
5080 /* Since SLOT is not known to the called function
5081 to belong to its stack frame, we must build an explicit
5082 cleanup. This case occurs when we must build up a reference
5083 to pass the reference as an argument. In this case,
5084 it is very likely that such a reference need not be
5087 if (TREE_OPERAND (exp, 2) == 0)
5088 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5089 if (TREE_OPERAND (exp, 2))
5090 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5091 cleanups_this_call);
5096 /* This case does occur, when expanding a parameter which
5097 needs to be constructed on the stack. The target
5098 is the actual stack address that we want to initialize.
5099 The function we call will perform the cleanup in this case. */
5101 DECL_RTL (slot) = target;
5104 exp1 = TREE_OPERAND (exp, 1);
5105 /* Mark it as expanded. */
5106 TREE_OPERAND (exp, 1) = NULL_TREE;
5108 return expand_expr (exp1, target, tmode, modifier);
5113 tree lhs = TREE_OPERAND (exp, 0);
5114 tree rhs = TREE_OPERAND (exp, 1);
5115 tree noncopied_parts = 0;
5116 tree lhs_type = TREE_TYPE (lhs);
5118 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5119 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5120 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5121 TYPE_NONCOPIED_PARTS (lhs_type));
5122 while (noncopied_parts != 0)
5124 expand_assignment (TREE_VALUE (noncopied_parts),
5125 TREE_PURPOSE (noncopied_parts), 0, 0);
5126 noncopied_parts = TREE_CHAIN (noncopied_parts);
5133 /* If lhs is complex, expand calls in rhs before computing it.
5134 That's so we don't compute a pointer and save it over a call.
5135 If lhs is simple, compute it first so we can give it as a
5136 target if the rhs is just a call. This avoids an extra temp and copy
5137 and that prevents a partial-subsumption which makes bad code.
5138 Actually we could treat component_ref's of vars like vars. */
5140 tree lhs = TREE_OPERAND (exp, 0);
5141 tree rhs = TREE_OPERAND (exp, 1);
5142 tree noncopied_parts = 0;
5143 tree lhs_type = TREE_TYPE (lhs);
5147 if (TREE_CODE (lhs) != VAR_DECL
5148 && TREE_CODE (lhs) != RESULT_DECL
5149 && TREE_CODE (lhs) != PARM_DECL)
5150 preexpand_calls (exp);
5152 /* Check for |= or &= of a bitfield of size one into another bitfield
5153 of size 1. In this case, (unless we need the result of the
5154 assignment) we can do this more efficiently with a
5155 test followed by an assignment, if necessary.
5157 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5158 things change so we do, this code should be enhanced to
5161 && TREE_CODE (lhs) == COMPONENT_REF
5162 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5163 || TREE_CODE (rhs) == BIT_AND_EXPR)
5164 && TREE_OPERAND (rhs, 0) == lhs
5165 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5166 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5167 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5169 rtx label = gen_label_rtx ();
5171 do_jump (TREE_OPERAND (rhs, 1),
5172 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5173 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5174 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5175 (TREE_CODE (rhs) == BIT_IOR_EXPR
5177 : integer_zero_node)),
5179 do_pending_stack_adjust ();
5184 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5185 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5186 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5187 TYPE_NONCOPIED_PARTS (lhs_type));
5189 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5190 while (noncopied_parts != 0)
5192 expand_assignment (TREE_PURPOSE (noncopied_parts),
5193 TREE_VALUE (noncopied_parts), 0, 0);
5194 noncopied_parts = TREE_CHAIN (noncopied_parts);
5199 case PREINCREMENT_EXPR:
5200 case PREDECREMENT_EXPR:
5201 return expand_increment (exp, 0);
5203 case POSTINCREMENT_EXPR:
5204 case POSTDECREMENT_EXPR:
5205 /* Faster to treat as pre-increment if result is not used. */
5206 return expand_increment (exp, ! ignore);
5209 /* Are we taking the address of a nested function? */
5210 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5211 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5213 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5214 op0 = force_operand (op0, target);
5218 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5219 (modifier == EXPAND_INITIALIZER
5220 ? modifier : EXPAND_CONST_ADDRESS));
5222 /* We would like the object in memory. If it is a constant,
5223 we can have it be statically allocated into memory. For
5224 a non-constant (REG or SUBREG), we need to allocate some
5225 memory and store the value into it. */
5227 if (CONSTANT_P (op0))
5228 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5231 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
5233 /* If this object is in a register, it must be not
5235 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5236 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5238 = assign_stack_temp (inner_mode,
5239 int_size_in_bytes (inner_type), 1);
5241 emit_move_insn (memloc, op0);
5245 if (GET_CODE (op0) != MEM)
5248 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5249 return XEXP (op0, 0);
5250 op0 = force_operand (XEXP (op0, 0), target);
5252 if (flag_force_addr && GET_CODE (op0) != REG)
5253 return force_reg (Pmode, op0);
5256 case ENTRY_VALUE_EXPR:
5259 /* COMPLEX type for Extended Pascal & Fortran */
5262 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5266 /* Get the rtx code of the operands. */
5267 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5268 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5271 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5273 prev = get_last_insn ();
5275 /* Tell flow that the whole of the destination is being set. */
5276 if (GET_CODE (target) == REG)
5277 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5279 /* Move the real (op0) and imaginary (op1) parts to their location. */
5280 emit_move_insn (gen_realpart (mode, target), op0);
5281 emit_move_insn (gen_imagpart (mode, target), op1);
5283 /* Complex construction should appear as a single unit. */
5290 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5291 return gen_realpart (mode, op0);
5294 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5295 return gen_imagpart (mode, op0);
5299 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5303 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5306 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5308 prev = get_last_insn ();
5310 /* Tell flow that the whole of the destination is being set. */
5311 if (GET_CODE (target) == REG)
5312 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5314 /* Store the realpart and the negated imagpart to target. */
5315 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5317 imag_t = gen_imagpart (mode, target);
5318 temp = expand_unop (mode, neg_optab,
5319 gen_imagpart (mode, op0), imag_t, 0);
5321 emit_move_insn (imag_t, temp);
5323 /* Conjugate should appear as a single unit */
5330 op0 = CONST0_RTX (tmode);
5336 return (*lang_expand_expr) (exp, target, tmode, modifier);
5339 /* Here to do an ordinary binary operator, generating an instruction
5340 from the optab already placed in `this_optab'. */
5342 preexpand_calls (exp);
5343 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5345 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5346 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5348 temp = expand_binop (mode, this_optab, op0, op1, target,
5349 unsignedp, OPTAB_LIB_WIDEN);
5355 /* Return the alignment in bits of EXP, a pointer valued expression.
5356 But don't return more than MAX_ALIGN no matter what.
5357 The alignment returned is, by default, the alignment of the thing that
5358 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5360 Otherwise, look at the expression to see if we can do better, i.e., if the
5361 expression is actually pointing at an object whose alignment is tighter. */
5364 get_pointer_alignment (exp, max_align)
5368 unsigned align, inner;
5370 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5373 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5374 align = MIN (align, max_align);
5378 switch (TREE_CODE (exp))
5382 case NON_LVALUE_EXPR:
5383 exp = TREE_OPERAND (exp, 0);
5384 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5386 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5387 inner = MIN (inner, max_align);
5388 align = MAX (align, inner);
5392 /* If sum of pointer + int, restrict our maximum alignment to that
5393 imposed by the integer. If not, we can't do any better than
5395 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5398 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5403 exp = TREE_OPERAND (exp, 0);
5407 /* See what we are pointing at and look at its alignment. */
5408 exp = TREE_OPERAND (exp, 0);
5409 if (TREE_CODE (exp) == FUNCTION_DECL)
5410 align = MAX (align, FUNCTION_BOUNDARY);
5411 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5412 align = MAX (align, DECL_ALIGN (exp));
5413 #ifdef CONSTANT_ALIGNMENT
5414 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5415 align = CONSTANT_ALIGNMENT (exp, align);
5417 return MIN (align, max_align);
5425 /* Return the tree node and offset if a given argument corresponds to
5426 a string constant. */
5429 string_constant (arg, ptr_offset)
5435 if (TREE_CODE (arg) == ADDR_EXPR
5436 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5438 *ptr_offset = integer_zero_node;
5439 return TREE_OPERAND (arg, 0);
5441 else if (TREE_CODE (arg) == PLUS_EXPR)
5443 tree arg0 = TREE_OPERAND (arg, 0);
5444 tree arg1 = TREE_OPERAND (arg, 1);
5449 if (TREE_CODE (arg0) == ADDR_EXPR
5450 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5453 return TREE_OPERAND (arg0, 0);
5455 else if (TREE_CODE (arg1) == ADDR_EXPR
5456 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5459 return TREE_OPERAND (arg1, 0);
5466 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5467 way, because it could contain a zero byte in the middle.
5468 TREE_STRING_LENGTH is the size of the character array, not the string.
5470 Unfortunately, string_constant can't access the values of const char
5471 arrays with initializers, so neither can we do so here. */
5481 src = string_constant (src, &offset_node);
5484 max = TREE_STRING_LENGTH (src);
5485 ptr = TREE_STRING_POINTER (src);
5486 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5488 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5489 compute the offset to the following null if we don't know where to
5490 start searching for it. */
5492 for (i = 0; i < max; i++)
5495 /* We don't know the starting offset, but we do know that the string
5496 has no internal zero bytes. We can assume that the offset falls
5497 within the bounds of the string; otherwise, the programmer deserves
5498 what he gets. Subtract the offset from the length of the string,
5500 /* This would perhaps not be valid if we were dealing with named
5501 arrays in addition to literal string constants. */
5502 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5505 /* We have a known offset into the string. Start searching there for
5506 a null character. */
5507 if (offset_node == 0)
5511 /* Did we get a long long offset? If so, punt. */
5512 if (TREE_INT_CST_HIGH (offset_node) != 0)
5514 offset = TREE_INT_CST_LOW (offset_node);
5516 /* If the offset is known to be out of bounds, warn, and call strlen at
5518 if (offset < 0 || offset > max)
5520 warning ("offset outside bounds of constant string");
5523 /* Use strlen to search for the first zero byte. Since any strings
5524 constructed with build_string will have nulls appended, we win even
5525 if we get handed something like (char[4])"abcd".
5527 Since OFFSET is our starting index into the string, no further
5528 calculation is needed. */
5529 return size_int (strlen (ptr + offset));
5532 /* Expand an expression EXP that calls a built-in function,
5533 with result going to TARGET if that's convenient
5534 (and in mode MODE if that's convenient).
5535 SUBTARGET may be used as the target for computing one of EXP's operands.
5536 IGNORE is nonzero if the value is to be ignored. */
5539 expand_builtin (exp, target, subtarget, mode, ignore)
5543 enum machine_mode mode;
5546 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5547 tree arglist = TREE_OPERAND (exp, 1);
5550 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5551 optab builtin_optab;
5553 switch (DECL_FUNCTION_CODE (fndecl))
5558 /* build_function_call changes these into ABS_EXPR. */
5563 case BUILT_IN_FSQRT:
5564 /* If not optimizing, call the library function. */
5569 /* Arg could be wrong type if user redeclared this fcn wrong. */
5570 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5571 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5573 /* Stabilize and compute the argument. */
5574 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5575 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5577 exp = copy_node (exp);
5578 arglist = copy_node (arglist);
5579 TREE_OPERAND (exp, 1) = arglist;
5580 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5582 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5584 /* Make a suitable register to place result in. */
5585 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5590 switch (DECL_FUNCTION_CODE (fndecl))
5593 builtin_optab = sin_optab; break;
5595 builtin_optab = cos_optab; break;
5596 case BUILT_IN_FSQRT:
5597 builtin_optab = sqrt_optab; break;
5602 /* Compute into TARGET.
5603 Set TARGET to wherever the result comes back. */
5604 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5605 builtin_optab, op0, target, 0);
5607 /* If we were unable to expand via the builtin, stop the
5608 sequence (without outputting the insns) and break, causing
5609 a call the the library function. */
5616 /* Check the results by default. But if flag_fast_math is turned on,
5617 then assume sqrt will always be called with valid arguments. */
5619 if (! flag_fast_math)
5621 /* Don't define the builtin FP instructions
5622 if your machine is not IEEE. */
5623 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5626 lab1 = gen_label_rtx ();
5628 /* Test the result; if it is NaN, set errno=EDOM because
5629 the argument was not in the domain. */
5630 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5631 emit_jump_insn (gen_beq (lab1));
5635 #ifdef GEN_ERRNO_RTX
5636 rtx errno_rtx = GEN_ERRNO_RTX;
5639 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5642 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5645 /* We can't set errno=EDOM directly; let the library call do it.
5646 Pop the arguments right away in case the call gets deleted. */
5648 expand_call (exp, target, 0);
5655 /* Output the entire sequence. */
5656 insns = get_insns ();
5662 /* __builtin_apply_args returns block of memory allocated on
5663 the stack into which is stored the arg pointer, structure
5664 value address, static chain, and all the registers that might
5665 possibly be used in performing a function call. The code is
5666 moved to the start of the function so the incoming values are
5668 case BUILT_IN_APPLY_ARGS:
5669 /* Don't do __builtin_apply_args more than once in a function.
5670 Save the result of the first call and reuse it. */
5671 if (apply_args_value != 0)
5672 return apply_args_value;
5674 /* When this function is called, it means that registers must be
5675 saved on entry to this function. So we migrate the
5676 call to the first insn of this function. */
5681 temp = expand_builtin_apply_args ();
5685 apply_args_value = temp;
5687 /* Put the sequence after the NOTE that starts the function.
5688 If this is inside a SEQUENCE, make the outer-level insn
5689 chain current, so the code is placed at the start of the
5691 push_topmost_sequence ();
5692 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5693 pop_topmost_sequence ();
5697 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5698 FUNCTION with a copy of the parameters described by
5699 ARGUMENTS, and ARGSIZE. It returns a block of memory
5700 allocated on the stack into which is stored all the registers
5701 that might possibly be used for returning the result of a
5702 function. ARGUMENTS is the value returned by
5703 __builtin_apply_args. ARGSIZE is the number of bytes of
5704 arguments that must be copied. ??? How should this value be
5705 computed? We'll also need a safe worst case value for varargs
5707 case BUILT_IN_APPLY:
5709 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5710 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5711 || TREE_CHAIN (arglist) == 0
5712 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5713 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5714 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5722 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5723 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5725 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5728 /* __builtin_return (RESULT) causes the function to return the
5729 value described by RESULT. RESULT is address of the block of
5730 memory returned by __builtin_apply. */
5731 case BUILT_IN_RETURN:
5733 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5734 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5735 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5736 NULL_RTX, VOIDmode, 0));
5739 case BUILT_IN_SAVEREGS:
5740 /* Don't do __builtin_saveregs more than once in a function.
5741 Save the result of the first call and reuse it. */
5742 if (saveregs_value != 0)
5743 return saveregs_value;
5745 /* When this function is called, it means that registers must be
5746 saved on entry to this function. So we migrate the
5747 call to the first insn of this function. */
5750 rtx valreg, saved_valreg;
5752 /* Now really call the function. `expand_call' does not call
5753 expand_builtin, so there is no danger of infinite recursion here. */
5756 #ifdef EXPAND_BUILTIN_SAVEREGS
5757 /* Do whatever the machine needs done in this case. */
5758 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5760 /* The register where the function returns its value
5761 is likely to have something else in it, such as an argument.
5762 So preserve that register around the call. */
5763 if (value_mode != VOIDmode)
5765 valreg = hard_libcall_value (value_mode);
5766 saved_valreg = gen_reg_rtx (value_mode);
5767 emit_move_insn (saved_valreg, valreg);
5770 /* Generate the call, putting the value in a pseudo. */
5771 temp = expand_call (exp, target, ignore);
5773 if (value_mode != VOIDmode)
5774 emit_move_insn (valreg, saved_valreg);
5780 saveregs_value = temp;
5782 /* Put the sequence after the NOTE that starts the function.
5783 If this is inside a SEQUENCE, make the outer-level insn
5784 chain current, so the code is placed at the start of the
5786 push_topmost_sequence ();
5787 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5788 pop_topmost_sequence ();
5792 /* __builtin_args_info (N) returns word N of the arg space info
5793 for the current function. The number and meanings of words
5794 is controlled by the definition of CUMULATIVE_ARGS. */
5795 case BUILT_IN_ARGS_INFO:
5797 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5799 int *word_ptr = (int *) ¤t_function_args_info;
5800 tree type, elts, result;
5802 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5803 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5804 __FILE__, __LINE__);
5808 tree arg = TREE_VALUE (arglist);
5809 if (TREE_CODE (arg) != INTEGER_CST)
5810 error ("argument of `__builtin_args_info' must be constant");
5813 int wordnum = TREE_INT_CST_LOW (arg);
5815 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5816 error ("argument of `__builtin_args_info' out of range");
5818 return GEN_INT (word_ptr[wordnum]);
5822 error ("missing argument in `__builtin_args_info'");
5827 for (i = 0; i < nwords; i++)
5828 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5830 type = build_array_type (integer_type_node,
5831 build_index_type (build_int_2 (nwords, 0)));
5832 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5833 TREE_CONSTANT (result) = 1;
5834 TREE_STATIC (result) = 1;
5835 result = build (INDIRECT_REF, build_pointer_type (type), result);
5836 TREE_CONSTANT (result) = 1;
5837 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5841 /* Return the address of the first anonymous stack arg. */
5842 case BUILT_IN_NEXT_ARG:
5844 tree fntype = TREE_TYPE (current_function_decl);
5845 if (!(TYPE_ARG_TYPES (fntype) != 0
5846 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5847 != void_type_node)))
5849 error ("`va_start' used in function with fixed args");
5854 return expand_binop (Pmode, add_optab,
5855 current_function_internal_arg_pointer,
5856 current_function_arg_offset_rtx,
5857 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5859 case BUILT_IN_CLASSIFY_TYPE:
5862 tree type = TREE_TYPE (TREE_VALUE (arglist));
5863 enum tree_code code = TREE_CODE (type);
5864 if (code == VOID_TYPE)
5865 return GEN_INT (void_type_class);
5866 if (code == INTEGER_TYPE)
5867 return GEN_INT (integer_type_class);
5868 if (code == CHAR_TYPE)
5869 return GEN_INT (char_type_class);
5870 if (code == ENUMERAL_TYPE)
5871 return GEN_INT (enumeral_type_class);
5872 if (code == BOOLEAN_TYPE)
5873 return GEN_INT (boolean_type_class);
5874 if (code == POINTER_TYPE)
5875 return GEN_INT (pointer_type_class);
5876 if (code == REFERENCE_TYPE)
5877 return GEN_INT (reference_type_class);
5878 if (code == OFFSET_TYPE)
5879 return GEN_INT (offset_type_class);
5880 if (code == REAL_TYPE)
5881 return GEN_INT (real_type_class);
5882 if (code == COMPLEX_TYPE)
5883 return GEN_INT (complex_type_class);
5884 if (code == FUNCTION_TYPE)
5885 return GEN_INT (function_type_class);
5886 if (code == METHOD_TYPE)
5887 return GEN_INT (method_type_class);
5888 if (code == RECORD_TYPE)
5889 return GEN_INT (record_type_class);
5890 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
5891 return GEN_INT (union_type_class);
5892 if (code == ARRAY_TYPE)
5893 return GEN_INT (array_type_class);
5894 if (code == STRING_TYPE)
5895 return GEN_INT (string_type_class);
5896 if (code == SET_TYPE)
5897 return GEN_INT (set_type_class);
5898 if (code == FILE_TYPE)
5899 return GEN_INT (file_type_class);
5900 if (code == LANG_TYPE)
5901 return GEN_INT (lang_type_class);
5903 return GEN_INT (no_type_class);
5905 case BUILT_IN_CONSTANT_P:
5909 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5910 ? const1_rtx : const0_rtx);
5912 case BUILT_IN_FRAME_ADDRESS:
5913 /* The argument must be a nonnegative integer constant.
5914 It counts the number of frames to scan up the stack.
5915 The value is the address of that frame. */
5916 case BUILT_IN_RETURN_ADDRESS:
5917 /* The argument must be a nonnegative integer constant.
5918 It counts the number of frames to scan up the stack.
5919 The value is the return address saved in that frame. */
5921 /* Warning about missing arg was already issued. */
5923 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5925 error ("invalid arg to `__builtin_return_address'");
5928 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5930 error ("invalid arg to `__builtin_return_address'");
5935 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5936 rtx tem = frame_pointer_rtx;
5939 /* Some machines need special handling before we can access arbitrary
5940 frames. For example, on the sparc, we must first flush all
5941 register windows to the stack. */
5942 #ifdef SETUP_FRAME_ADDRESSES
5943 SETUP_FRAME_ADDRESSES ();
5946 /* On the sparc, the return address is not in the frame, it is
5947 in a register. There is no way to access it off of the current
5948 frame pointer, but it can be accessed off the previous frame
5949 pointer by reading the value from the register window save
5951 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
5952 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
5956 /* Scan back COUNT frames to the specified frame. */
5957 for (i = 0; i < count; i++)
5959 /* Assume the dynamic chain pointer is in the word that
5960 the frame address points to, unless otherwise specified. */
5961 #ifdef DYNAMIC_CHAIN_ADDRESS
5962 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5964 tem = memory_address (Pmode, tem);
5965 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5968 /* For __builtin_frame_address, return what we've got. */
5969 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5972 /* For __builtin_return_address,
5973 Get the return address from that frame. */
5974 #ifdef RETURN_ADDR_RTX
5975 return RETURN_ADDR_RTX (count, tem);
5977 tem = memory_address (Pmode,
5978 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5979 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5983 case BUILT_IN_ALLOCA:
5985 /* Arg could be non-integer if user redeclared this fcn wrong. */
5986 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5988 current_function_calls_alloca = 1;
5989 /* Compute the argument. */
5990 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5992 /* Allocate the desired space. */
5993 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5995 /* Record the new stack level for nonlocal gotos. */
5996 if (nonlocal_goto_handler_slot != 0)
5997 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6001 /* If not optimizing, call the library function. */
6006 /* Arg could be non-integer if user redeclared this fcn wrong. */
6007 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6010 /* Compute the argument. */
6011 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6012 /* Compute ffs, into TARGET if possible.
6013 Set TARGET to wherever the result comes back. */
6014 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6015 ffs_optab, op0, target, 1);
6020 case BUILT_IN_STRLEN:
6021 /* If not optimizing, call the library function. */
6026 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6027 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6031 tree src = TREE_VALUE (arglist);
6032 tree len = c_strlen (src);
6035 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6037 rtx result, src_rtx, char_rtx;
6038 enum machine_mode insn_mode = value_mode, char_mode;
6039 enum insn_code icode;
6041 /* If the length is known, just return it. */
6043 return expand_expr (len, target, mode, 0);
6045 /* If SRC is not a pointer type, don't do this operation inline. */
6049 /* Call a function if we can't compute strlen in the right mode. */
6051 while (insn_mode != VOIDmode)
6053 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6054 if (icode != CODE_FOR_nothing)
6057 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6059 if (insn_mode == VOIDmode)
6062 /* Make a place to write the result of the instruction. */
6065 && GET_CODE (result) == REG
6066 && GET_MODE (result) == insn_mode
6067 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6068 result = gen_reg_rtx (insn_mode);
6070 /* Make sure the operands are acceptable to the predicates. */
6072 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6073 result = gen_reg_rtx (insn_mode);
6075 src_rtx = memory_address (BLKmode,
6076 expand_expr (src, NULL_RTX, Pmode,
6078 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6079 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6081 char_rtx = const0_rtx;
6082 char_mode = insn_operand_mode[(int)icode][2];
6083 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6084 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6086 emit_insn (GEN_FCN (icode) (result,
6087 gen_rtx (MEM, BLKmode, src_rtx),
6088 char_rtx, GEN_INT (align)));
6090 /* Return the value in the proper mode for this function. */
6091 if (GET_MODE (result) == value_mode)
6093 else if (target != 0)
6095 convert_move (target, result, 0);
6099 return convert_to_mode (value_mode, result, 0);
6102 case BUILT_IN_STRCPY:
6103 /* If not optimizing, call the library function. */
6108 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6109 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6110 || TREE_CHAIN (arglist) == 0
6111 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6115 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6120 len = size_binop (PLUS_EXPR, len, integer_one_node);
6122 chainon (arglist, build_tree_list (NULL_TREE, len));
6126 case BUILT_IN_MEMCPY:
6127 /* If not optimizing, call the library function. */
6132 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6133 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6134 || TREE_CHAIN (arglist) == 0
6135 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6136 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6137 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6141 tree dest = TREE_VALUE (arglist);
6142 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6143 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6146 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6148 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6149 rtx dest_rtx, dest_mem, src_mem;
6151 /* If either SRC or DEST is not a pointer type, don't do
6152 this operation in-line. */
6153 if (src_align == 0 || dest_align == 0)
6155 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6156 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6160 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6161 dest_mem = gen_rtx (MEM, BLKmode,
6162 memory_address (BLKmode, dest_rtx));
6163 src_mem = gen_rtx (MEM, BLKmode,
6164 memory_address (BLKmode,
6165 expand_expr (src, NULL_RTX,
6169 /* Copy word part most expediently. */
6170 emit_block_move (dest_mem, src_mem,
6171 expand_expr (len, NULL_RTX, VOIDmode, 0),
6172 MIN (src_align, dest_align));
6176 /* These comparison functions need an instruction that returns an actual
6177 index. An ordinary compare that just sets the condition codes
6179 #ifdef HAVE_cmpstrsi
6180 case BUILT_IN_STRCMP:
6181 /* If not optimizing, call the library function. */
6186 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6187 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6188 || TREE_CHAIN (arglist) == 0
6189 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6191 else if (!HAVE_cmpstrsi)
6194 tree arg1 = TREE_VALUE (arglist);
6195 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6199 len = c_strlen (arg1);
6201 len = size_binop (PLUS_EXPR, integer_one_node, len);
6202 len2 = c_strlen (arg2);
6204 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6206 /* If we don't have a constant length for the first, use the length
6207 of the second, if we know it. We don't require a constant for
6208 this case; some cost analysis could be done if both are available
6209 but neither is constant. For now, assume they're equally cheap.
6211 If both strings have constant lengths, use the smaller. This
6212 could arise if optimization results in strcpy being called with
6213 two fixed strings, or if the code was machine-generated. We should
6214 add some code to the `memcmp' handler below to deal with such
6215 situations, someday. */
6216 if (!len || TREE_CODE (len) != INTEGER_CST)
6223 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6225 if (tree_int_cst_lt (len2, len))
6229 chainon (arglist, build_tree_list (NULL_TREE, len));
6233 case BUILT_IN_MEMCMP:
6234 /* If not optimizing, call the library function. */
6239 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6240 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6241 || TREE_CHAIN (arglist) == 0
6242 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6243 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6244 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6246 else if (!HAVE_cmpstrsi)
6249 tree arg1 = TREE_VALUE (arglist);
6250 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6251 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6255 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6257 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6258 enum machine_mode insn_mode
6259 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6261 /* If we don't have POINTER_TYPE, call the function. */
6262 if (arg1_align == 0 || arg2_align == 0)
6264 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6265 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6269 /* Make a place to write the result of the instruction. */
6272 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6273 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6274 result = gen_reg_rtx (insn_mode);
6276 emit_insn (gen_cmpstrsi (result,
6277 gen_rtx (MEM, BLKmode,
6278 expand_expr (arg1, NULL_RTX, Pmode,
6280 gen_rtx (MEM, BLKmode,
6281 expand_expr (arg2, NULL_RTX, Pmode,
6283 expand_expr (len, NULL_RTX, VOIDmode, 0),
6284 GEN_INT (MIN (arg1_align, arg2_align))));
6286 /* Return the value in the proper mode for this function. */
6287 mode = TYPE_MODE (TREE_TYPE (exp));
6288 if (GET_MODE (result) == mode)
6290 else if (target != 0)
6292 convert_move (target, result, 0);
6296 return convert_to_mode (mode, result, 0);
6299 case BUILT_IN_STRCMP:
6300 case BUILT_IN_MEMCMP:
6304 default: /* just do library call, if unknown builtin */
6305 error ("built-in function `%s' not currently supported",
6306 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6309 /* The switch statement above can drop through to cause the function
6310 to be called normally. */
6312 return expand_call (exp, target, ignore);
6315 /* Built-in functions to perform an untyped call and return. */
6317 /* For each register that may be used for calling a function, this
6318 gives a mode used to copy the register's value. VOIDmode indicates
6319 the register is not used for calling a function. If the machine
6320 has register windows, this gives only the outbound registers.
6321 INCOMING_REGNO gives the corresponding inbound register. */
6322 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6324 /* For each register that may be used for returning values, this gives
6325 a mode used to copy the register's value. VOIDmode indicates the
6326 register is not used for returning values. If the machine has
6327 register windows, this gives only the outbound registers.
6328 INCOMING_REGNO gives the corresponding inbound register. */
6329 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6331 /* Return the size required for the block returned by __builtin_apply_args,
6332 and initialize apply_args_mode. */
6336 static int size = -1;
6338 enum machine_mode mode;
6340 /* The values computed by this function never change. */
6343 /* The first value is the incoming arg-pointer. */
6344 size = GET_MODE_SIZE (Pmode);
6346 /* The second value is the structure value address unless this is
6347 passed as an "invisible" first argument. */
6348 if (struct_value_rtx)
6349 size += GET_MODE_SIZE (Pmode);
6351 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6352 if (FUNCTION_ARG_REGNO_P (regno))
6354 /* Search for the proper mode for copying this register's
6355 value. I'm not sure this is right, but it works so far. */
6356 enum machine_mode best_mode = VOIDmode;
6358 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6360 mode = GET_MODE_WIDER_MODE (mode))
6361 if (HARD_REGNO_MODE_OK (regno, mode)
6362 && HARD_REGNO_NREGS (regno, mode) == 1)
6365 if (best_mode == VOIDmode)
6366 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6368 mode = GET_MODE_WIDER_MODE (mode))
6369 if (HARD_REGNO_MODE_OK (regno, mode)
6370 && (mov_optab->handlers[(int) mode].insn_code
6371 != CODE_FOR_nothing))
6375 if (mode == VOIDmode)
6378 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6379 if (size % align != 0)
6380 size = CEIL (size, align) * align;
6381 size += GET_MODE_SIZE (mode);
6382 apply_args_mode[regno] = mode;
6385 apply_args_mode[regno] = VOIDmode;
6390 /* Return the size required for the block returned by __builtin_apply,
6391 and initialize apply_result_mode. */
6393 apply_result_size ()
6395 static int size = -1;
6397 enum machine_mode mode;
6399 /* The values computed by this function never change. */
6404 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6405 if (FUNCTION_VALUE_REGNO_P (regno))
6407 /* Search for the proper mode for copying this register's
6408 value. I'm not sure this is right, but it works so far. */
6409 enum machine_mode best_mode = VOIDmode;
6411 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6413 mode = GET_MODE_WIDER_MODE (mode))
6414 if (HARD_REGNO_MODE_OK (regno, mode))
6417 if (best_mode == VOIDmode)
6418 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6420 mode = GET_MODE_WIDER_MODE (mode))
6421 if (HARD_REGNO_MODE_OK (regno, mode)
6422 && (mov_optab->handlers[(int) mode].insn_code
6423 != CODE_FOR_nothing))
6427 if (mode == VOIDmode)
6430 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6431 if (size % align != 0)
6432 size = CEIL (size, align) * align;
6433 size += GET_MODE_SIZE (mode);
6434 apply_result_mode[regno] = mode;
6437 apply_result_mode[regno] = VOIDmode;
6439 /* Allow targets that use untyped_call and untyped_return to override
6440 the size so that machine-specific information can be stored here. */
6441 #ifdef APPLY_RESULT_SIZE
6442 size = APPLY_RESULT_SIZE;
6448 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6449 /* Create a vector describing the result block RESULT. If SAVEP is true,
6450 the result block is used to save the values; otherwise it is used to
6451 restore the values. */
6453 result_vector (savep, result)
6457 int regno, size, align, nelts;
6458 enum machine_mode mode;
6460 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6463 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6464 if ((mode = apply_result_mode[regno]) != VOIDmode)
6466 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6467 if (size % align != 0)
6468 size = CEIL (size, align) * align;
6469 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6470 mem = change_address (result, mode,
6471 plus_constant (XEXP (result, 0), size));
6472 savevec[nelts++] = (savep
6473 ? gen_rtx (SET, VOIDmode, mem, reg)
6474 : gen_rtx (SET, VOIDmode, reg, mem));
6475 size += GET_MODE_SIZE (mode);
6477 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6479 #endif /* HAVE_untyped_call or HAVE_untyped_return */
6482 /* Save the state required to perform an untyped call with the same
6483 arguments as were passed to the current function. */
6485 expand_builtin_apply_args ()
6488 int size, align, regno;
6489 enum machine_mode mode;
6491 /* Create a block where the arg-pointer, structure value address,
6492 and argument registers can be saved. */
6493 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6495 /* Walk past the arg-pointer and structure value address. */
6496 size = GET_MODE_SIZE (Pmode);
6497 if (struct_value_rtx)
6498 size += GET_MODE_SIZE (Pmode);
6500 /* Save each register used in calling a function to the block. */
6501 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6502 if ((mode = apply_args_mode[regno]) != VOIDmode)
6504 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6505 if (size % align != 0)
6506 size = CEIL (size, align) * align;
6507 emit_move_insn (change_address (registers, mode,
6508 plus_constant (XEXP (registers, 0),
6510 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6511 size += GET_MODE_SIZE (mode);
6514 /* Save the arg pointer to the block. */
6515 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6516 copy_to_reg (virtual_incoming_args_rtx));
6517 size = GET_MODE_SIZE (Pmode);
6519 /* Save the structure value address unless this is passed as an
6520 "invisible" first argument. */
6521 if (struct_value_incoming_rtx)
6523 emit_move_insn (change_address (registers, Pmode,
6524 plus_constant (XEXP (registers, 0),
6526 copy_to_reg (struct_value_incoming_rtx));
6527 size += GET_MODE_SIZE (Pmode);
6530 /* Return the address of the block. */
6531 return copy_addr_to_reg (XEXP (registers, 0));
6534 /* Perform an untyped call and save the state required to perform an
6535 untyped return of whatever value was returned by the given function. */
6537 expand_builtin_apply (function, arguments, argsize)
6538 rtx function, arguments, argsize;
6540 int size, align, regno;
6541 enum machine_mode mode;
6542 rtx incoming_args, result, reg, dest, call_insn;
6543 rtx old_stack_level = 0;
6546 /* Create a block where the return registers can be saved. */
6547 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6549 /* ??? The argsize value should be adjusted here. */
6551 /* Fetch the arg pointer from the ARGUMENTS block. */
6552 incoming_args = gen_reg_rtx (Pmode);
6553 emit_move_insn (incoming_args,
6554 gen_rtx (MEM, Pmode, arguments));
6555 #ifndef STACK_GROWS_DOWNWARD
6556 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6557 incoming_args, 0, OPTAB_LIB_WIDEN);
6560 /* Perform postincrements before actually calling the function. */
6563 /* Push a new argument block and copy the arguments. */
6564 do_pending_stack_adjust ();
6565 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6567 /* Push a block of memory onto the stack to store the memory arguments.
6568 Save the address in a register, and copy the memory arguments. ??? I
6569 haven't figured out how the calling convention macros effect this,
6570 but it's likely that the source and/or destination addresses in
6571 the block copy will need updating in machine specific ways. */
6572 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6573 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6574 gen_rtx (MEM, BLKmode, incoming_args),
6576 PARM_BOUNDARY / BITS_PER_UNIT);
6578 /* Refer to the argument block. */
6580 arguments = gen_rtx (MEM, BLKmode, arguments);
6582 /* Walk past the arg-pointer and structure value address. */
6583 size = GET_MODE_SIZE (Pmode);
6584 if (struct_value_rtx)
6585 size += GET_MODE_SIZE (Pmode);
6587 /* Restore each of the registers previously saved. Make USE insns
6588 for each of these registers for use in making the call. */
6589 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6590 if ((mode = apply_args_mode[regno]) != VOIDmode)
6592 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6593 if (size % align != 0)
6594 size = CEIL (size, align) * align;
6595 reg = gen_rtx (REG, mode, regno);
6596 emit_move_insn (reg,
6597 change_address (arguments, mode,
6598 plus_constant (XEXP (arguments, 0),
6601 push_to_sequence (use_insns);
6602 emit_insn (gen_rtx (USE, VOIDmode, reg));
6603 use_insns = get_insns ();
6605 size += GET_MODE_SIZE (mode);
6608 /* Restore the structure value address unless this is passed as an
6609 "invisible" first argument. */
6610 size = GET_MODE_SIZE (Pmode);
6611 if (struct_value_rtx)
6613 rtx value = gen_reg_rtx (Pmode);
6614 emit_move_insn (value,
6615 change_address (arguments, Pmode,
6616 plus_constant (XEXP (arguments, 0),
6618 emit_move_insn (struct_value_rtx, value);
6619 if (GET_CODE (struct_value_rtx) == REG)
6621 push_to_sequence (use_insns);
6622 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6623 use_insns = get_insns ();
6626 size += GET_MODE_SIZE (Pmode);
6629 /* All arguments and registers used for the call are set up by now! */
6630 function = prepare_call_address (function, NULL_TREE, &use_insns);
6632 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
6633 and we don't want to load it into a register as an optimization,
6634 because prepare_call_address already did it if it should be done. */
6635 if (GET_CODE (function) != SYMBOL_REF)
6636 function = memory_address (FUNCTION_MODE, function);
6638 /* Generate the actual call instruction and save the return value. */
6639 #ifdef HAVE_untyped_call
6640 if (HAVE_untyped_call)
6641 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6642 result, result_vector (1, result)));
6645 #ifdef HAVE_call_value
6646 if (HAVE_call_value)
6650 /* Locate the unique return register. It is not possible to
6651 express a call that sets more than one return register using
6652 call_value; use untyped_call for that. In fact, untyped_call
6653 only needs to save the return registers in the given block. */
6654 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6655 if ((mode = apply_result_mode[regno]) != VOIDmode)
6658 abort (); /* HAVE_untyped_call required. */
6659 valreg = gen_rtx (REG, mode, regno);
6662 emit_call_insn (gen_call_value (valreg,
6663 gen_rtx (MEM, FUNCTION_MODE, function),
6664 const0_rtx, NULL_RTX, const0_rtx));
6666 emit_move_insn (change_address (result, GET_MODE (valreg),
6674 /* Find the CALL insn we just emitted and write the USE insns before it. */
6675 for (call_insn = get_last_insn ();
6676 call_insn && GET_CODE (call_insn) != CALL_INSN;
6677 call_insn = PREV_INSN (call_insn))
6683 /* Put the USE insns before the CALL. */
6684 emit_insns_before (use_insns, call_insn);
6686 /* Restore the stack. */
6687 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6689 /* Return the address of the result block. */
6690 return copy_addr_to_reg (XEXP (result, 0));
6693 /* Perform an untyped return. */
6695 expand_builtin_return (result)
6698 int size, align, regno;
6699 enum machine_mode mode;
6703 apply_result_size ();
6704 result = gen_rtx (MEM, BLKmode, result);
6706 #ifdef HAVE_untyped_return
6707 if (HAVE_untyped_return)
6709 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6715 /* Restore the return value and note that each value is used. */
6717 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6718 if ((mode = apply_result_mode[regno]) != VOIDmode)
6720 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6721 if (size % align != 0)
6722 size = CEIL (size, align) * align;
6723 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6724 emit_move_insn (reg,
6725 change_address (result, mode,
6726 plus_constant (XEXP (result, 0),
6729 push_to_sequence (use_insns);
6730 emit_insn (gen_rtx (USE, VOIDmode, reg));
6731 use_insns = get_insns ();
6733 size += GET_MODE_SIZE (mode);
6736 /* Put the USE insns before the return. */
6737 emit_insns (use_insns);
6739 /* Return whatever values was restored by jumping directly to the end
6741 expand_null_return ();
6744 /* Expand code for a post- or pre- increment or decrement
6745 and return the RTX for the result.
6746 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6749 expand_increment (exp, post)
6753 register rtx op0, op1;
6754 register rtx temp, value;
6755 register tree incremented = TREE_OPERAND (exp, 0);
6756 optab this_optab = add_optab;
6758 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6759 int op0_is_copy = 0;
6761 /* Stabilize any component ref that might need to be
6762 evaluated more than once below. */
6764 || TREE_CODE (incremented) == BIT_FIELD_REF
6765 || (TREE_CODE (incremented) == COMPONENT_REF
6766 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6767 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6768 incremented = stabilize_reference (incremented);
6770 /* Compute the operands as RTX.
6771 Note whether OP0 is the actual lvalue or a copy of it:
6772 I believe it is a copy iff it is a register or subreg
6773 and insns were generated in computing it. */
6775 temp = get_last_insn ();
6776 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6778 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6779 in place but intead must do sign- or zero-extension during assignment,
6780 so we copy it into a new register and let the code below use it as
6783 Note that we can safely modify this SUBREG since it is know not to be
6784 shared (it was made by the expand_expr call above). */
6786 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6787 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6789 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6790 && temp != get_last_insn ());
6791 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6793 /* Decide whether incrementing or decrementing. */
6794 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6795 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6796 this_optab = sub_optab;
6798 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6799 then we cannot just increment OP0. We must therefore contrive to
6800 increment the original value. Then, for postincrement, we can return
6801 OP0 since it is a copy of the old value. For preincrement, we want
6802 to always expand here, since this generates better or equivalent code. */
6803 if (!post || op0_is_copy)
6805 /* This is the easiest way to increment the value wherever it is.
6806 Problems with multiple evaluation of INCREMENTED are prevented
6807 because either (1) it is a component_ref or preincrement,
6808 in which case it was stabilized above, or (2) it is an array_ref
6809 with constant index in an array in a register, which is
6810 safe to reevaluate. */
6811 tree newexp = build ((this_optab == add_optab
6812 ? PLUS_EXPR : MINUS_EXPR),
6815 TREE_OPERAND (exp, 1));
6816 temp = expand_assignment (incremented, newexp, ! post, 0);
6817 return post ? op0 : temp;
6820 /* Convert decrement by a constant into a negative increment. */
6821 if (this_optab == sub_optab
6822 && GET_CODE (op1) == CONST_INT)
6824 op1 = GEN_INT (- INTVAL (op1));
6825 this_optab = add_optab;
6830 /* We have a true reference to the value in OP0.
6831 If there is an insn to add or subtract in this mode, queue it. */
6833 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6834 op0 = stabilize (op0);
6837 icode = (int) this_optab->handlers[(int) mode].insn_code;
6838 if (icode != (int) CODE_FOR_nothing
6839 /* Make sure that OP0 is valid for operands 0 and 1
6840 of the insn we want to queue. */
6841 && (*insn_operand_predicate[icode][0]) (op0, mode)
6842 && (*insn_operand_predicate[icode][1]) (op0, mode))
6844 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6845 op1 = force_reg (mode, op1);
6847 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6851 /* Preincrement, or we can't increment with one simple insn. */
6853 /* Save a copy of the value before inc or dec, to return it later. */
6854 temp = value = copy_to_reg (op0);
6856 /* Arrange to return the incremented value. */
6857 /* Copy the rtx because expand_binop will protect from the queue,
6858 and the results of that would be invalid for us to return
6859 if our caller does emit_queue before using our result. */
6860 temp = copy_rtx (value = op0);
6862 /* Increment however we can. */
6863 op1 = expand_binop (mode, this_optab, value, op1, op0,
6864 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6865 /* Make sure the value is stored into OP0. */
6867 emit_move_insn (op0, op1);
6872 /* Expand all function calls contained within EXP, innermost ones first.
6873 But don't look within expressions that have sequence points.
6874 For each CALL_EXPR, record the rtx for its value
6875 in the CALL_EXPR_RTL field. */
6878 preexpand_calls (exp)
6881 register int nops, i;
6882 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6884 if (! do_preexpand_calls)
6887 /* Only expressions and references can contain calls. */
6889 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6892 switch (TREE_CODE (exp))
6895 /* Do nothing if already expanded. */
6896 if (CALL_EXPR_RTL (exp) != 0)
6899 /* Do nothing to built-in functions. */
6900 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6901 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6902 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6903 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6908 case TRUTH_ANDIF_EXPR:
6909 case TRUTH_ORIF_EXPR:
6910 /* If we find one of these, then we can be sure
6911 the adjust will be done for it (since it makes jumps).
6912 Do it now, so that if this is inside an argument
6913 of a function, we don't get the stack adjustment
6914 after some other args have already been pushed. */
6915 do_pending_stack_adjust ();
6920 case WITH_CLEANUP_EXPR:
6924 if (SAVE_EXPR_RTL (exp) != 0)
6928 nops = tree_code_length[(int) TREE_CODE (exp)];
6929 for (i = 0; i < nops; i++)
6930 if (TREE_OPERAND (exp, i) != 0)
6932 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6933 if (type == 'e' || type == '<' || type == '1' || type == '2'
6935 preexpand_calls (TREE_OPERAND (exp, i));
6939 /* At the start of a function, record that we have no previously-pushed
6940 arguments waiting to be popped. */
6943 init_pending_stack_adjust ()
6945 pending_stack_adjust = 0;
6948 /* When exiting from function, if safe, clear out any pending stack adjust
6949 so the adjustment won't get done. */
6952 clear_pending_stack_adjust ()
6954 #ifdef EXIT_IGNORE_STACK
6955 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6956 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6957 && ! flag_inline_functions)
6958 pending_stack_adjust = 0;
6962 /* Pop any previously-pushed arguments that have not been popped yet. */
6965 do_pending_stack_adjust ()
6967 if (inhibit_defer_pop == 0)
6969 if (pending_stack_adjust != 0)
6970 adjust_stack (GEN_INT (pending_stack_adjust));
6971 pending_stack_adjust = 0;
6975 /* Expand all cleanups up to OLD_CLEANUPS.
6976 Needed here, and also for language-dependent calls. */
6979 expand_cleanups_to (old_cleanups)
6982 while (cleanups_this_call != old_cleanups)
6984 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6985 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6989 /* Expand conditional expressions. */
6991 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6992 LABEL is an rtx of code CODE_LABEL, in this function and all the
6996 jumpifnot (exp, label)
7000 do_jump (exp, label, NULL_RTX);
7003 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7010 do_jump (exp, NULL_RTX, label);
7013 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7014 the result is zero, or IF_TRUE_LABEL if the result is one.
7015 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7016 meaning fall through in that case.
7018 do_jump always does any pending stack adjust except when it does not
7019 actually perform a jump. An example where there is no jump
7020 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7022 This function is responsible for optimizing cases such as
7023 &&, || and comparison operators in EXP. */
7026 do_jump (exp, if_false_label, if_true_label)
7028 rtx if_false_label, if_true_label;
7030 register enum tree_code code = TREE_CODE (exp);
7031 /* Some cases need to create a label to jump to
7032 in order to properly fall through.
7033 These cases set DROP_THROUGH_LABEL nonzero. */
7034 rtx drop_through_label = 0;
7048 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7054 /* This is not true with #pragma weak */
7056 /* The address of something can never be zero. */
7058 emit_jump (if_true_label);
7063 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7064 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7065 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7068 /* If we are narrowing the operand, we have to do the compare in the
7070 if ((TYPE_PRECISION (TREE_TYPE (exp))
7071 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7073 case NON_LVALUE_EXPR:
7074 case REFERENCE_EXPR:
7079 /* These cannot change zero->non-zero or vice versa. */
7080 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7084 /* This is never less insns than evaluating the PLUS_EXPR followed by
7085 a test and can be longer if the test is eliminated. */
7087 /* Reduce to minus. */
7088 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7089 TREE_OPERAND (exp, 0),
7090 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7091 TREE_OPERAND (exp, 1))));
7092 /* Process as MINUS. */
7096 /* Non-zero iff operands of minus differ. */
7097 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7098 TREE_OPERAND (exp, 0),
7099 TREE_OPERAND (exp, 1)),
7104 /* If we are AND'ing with a small constant, do this comparison in the
7105 smallest type that fits. If the machine doesn't have comparisons
7106 that small, it will be converted back to the wider comparison.
7107 This helps if we are testing the sign bit of a narrower object.
7108 combine can't do this for us because it can't know whether a
7109 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7111 if (! SLOW_BYTE_ACCESS
7112 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7113 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7114 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7115 && (type = type_for_size (i + 1, 1)) != 0
7116 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7117 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7118 != CODE_FOR_nothing))
7120 do_jump (convert (type, exp), if_false_label, if_true_label);
7125 case TRUTH_NOT_EXPR:
7126 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7129 case TRUTH_ANDIF_EXPR:
7130 if (if_false_label == 0)
7131 if_false_label = drop_through_label = gen_label_rtx ();
7132 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7133 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7136 case TRUTH_ORIF_EXPR:
7137 if (if_true_label == 0)
7138 if_true_label = drop_through_label = gen_label_rtx ();
7139 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7140 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7144 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7147 do_pending_stack_adjust ();
7148 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7155 int bitsize, bitpos, unsignedp;
7156 enum machine_mode mode;
7161 /* Get description of this reference. We don't actually care
7162 about the underlying object here. */
7163 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7164 &mode, &unsignedp, &volatilep);
7166 type = type_for_size (bitsize, unsignedp);
7167 if (! SLOW_BYTE_ACCESS
7168 && type != 0 && bitsize >= 0
7169 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7170 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7171 != CODE_FOR_nothing))
7173 do_jump (convert (type, exp), if_false_label, if_true_label);
7180 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7181 if (integer_onep (TREE_OPERAND (exp, 1))
7182 && integer_zerop (TREE_OPERAND (exp, 2)))
7183 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7185 else if (integer_zerop (TREE_OPERAND (exp, 1))
7186 && integer_onep (TREE_OPERAND (exp, 2)))
7187 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7191 register rtx label1 = gen_label_rtx ();
7192 drop_through_label = gen_label_rtx ();
7193 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7194 /* Now the THEN-expression. */
7195 do_jump (TREE_OPERAND (exp, 1),
7196 if_false_label ? if_false_label : drop_through_label,
7197 if_true_label ? if_true_label : drop_through_label);
7198 /* In case the do_jump just above never jumps. */
7199 do_pending_stack_adjust ();
7200 emit_label (label1);
7201 /* Now the ELSE-expression. */
7202 do_jump (TREE_OPERAND (exp, 2),
7203 if_false_label ? if_false_label : drop_through_label,
7204 if_true_label ? if_true_label : drop_through_label);
7209 if (integer_zerop (TREE_OPERAND (exp, 1)))
7210 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7211 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7214 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7215 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7217 comparison = compare (exp, EQ, EQ);
7221 if (integer_zerop (TREE_OPERAND (exp, 1)))
7222 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7223 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7226 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7227 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7229 comparison = compare (exp, NE, NE);
7233 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7235 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7236 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7238 comparison = compare (exp, LT, LTU);
7242 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7244 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7245 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7247 comparison = compare (exp, LE, LEU);
7251 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7253 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7254 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7256 comparison = compare (exp, GT, GTU);
7260 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7262 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7263 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7265 comparison = compare (exp, GE, GEU);
7270 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7272 /* This is not needed any more and causes poor code since it causes
7273 comparisons and tests from non-SI objects to have different code
7275 /* Copy to register to avoid generating bad insns by cse
7276 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7277 if (!cse_not_expected && GET_CODE (temp) == MEM)
7278 temp = copy_to_reg (temp);
7280 do_pending_stack_adjust ();
7281 if (GET_CODE (temp) == CONST_INT)
7282 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7283 else if (GET_CODE (temp) == LABEL_REF)
7284 comparison = const_true_rtx;
7285 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7286 && !can_compare_p (GET_MODE (temp)))
7287 /* Note swapping the labels gives us not-equal. */
7288 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7289 else if (GET_MODE (temp) != VOIDmode)
7290 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7291 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7292 GET_MODE (temp), NULL_RTX, 0);
7297 /* Do any postincrements in the expression that was tested. */
7300 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7301 straight into a conditional jump instruction as the jump condition.
7302 Otherwise, all the work has been done already. */
7304 if (comparison == const_true_rtx)
7307 emit_jump (if_true_label);
7309 else if (comparison == const0_rtx)
7312 emit_jump (if_false_label);
7314 else if (comparison)
7315 do_jump_for_compare (comparison, if_false_label, if_true_label);
7319 if (drop_through_label)
7321 /* If do_jump produces code that might be jumped around,
7322 do any stack adjusts from that code, before the place
7323 where control merges in. */
7324 do_pending_stack_adjust ();
7325 emit_label (drop_through_label);
7329 /* Given a comparison expression EXP for values too wide to be compared
7330 with one insn, test the comparison and jump to the appropriate label.
7331 The code of EXP is ignored; we always test GT if SWAP is 0,
7332 and LT if SWAP is 1. */
7335 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7338 rtx if_false_label, if_true_label;
7340 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7341 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7342 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7343 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7344 rtx drop_through_label = 0;
7345 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7348 if (! if_true_label || ! if_false_label)
7349 drop_through_label = gen_label_rtx ();
7350 if (! if_true_label)
7351 if_true_label = drop_through_label;
7352 if (! if_false_label)
7353 if_false_label = drop_through_label;
7355 /* Compare a word at a time, high order first. */
7356 for (i = 0; i < nwords; i++)
7359 rtx op0_word, op1_word;
7361 if (WORDS_BIG_ENDIAN)
7363 op0_word = operand_subword_force (op0, i, mode);
7364 op1_word = operand_subword_force (op1, i, mode);
7368 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7369 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7372 /* All but high-order word must be compared as unsigned. */
7373 comp = compare_from_rtx (op0_word, op1_word,
7374 (unsignedp || i > 0) ? GTU : GT,
7375 unsignedp, word_mode, NULL_RTX, 0);
7376 if (comp == const_true_rtx)
7377 emit_jump (if_true_label);
7378 else if (comp != const0_rtx)
7379 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7381 /* Consider lower words only if these are equal. */
7382 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7384 if (comp == const_true_rtx)
7385 emit_jump (if_false_label);
7386 else if (comp != const0_rtx)
7387 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7391 emit_jump (if_false_label);
7392 if (drop_through_label)
7393 emit_label (drop_through_label);
7396 /* Compare OP0 with OP1, word at a time, in mode MODE.
7397 UNSIGNEDP says to do unsigned comparison.
7398 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7401 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7402 enum machine_mode mode;
7405 rtx if_false_label, if_true_label;
7407 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7408 rtx drop_through_label = 0;
7411 if (! if_true_label || ! if_false_label)
7412 drop_through_label = gen_label_rtx ();
7413 if (! if_true_label)
7414 if_true_label = drop_through_label;
7415 if (! if_false_label)
7416 if_false_label = drop_through_label;
7418 /* Compare a word at a time, high order first. */
7419 for (i = 0; i < nwords; i++)
7422 rtx op0_word, op1_word;
7424 if (WORDS_BIG_ENDIAN)
7426 op0_word = operand_subword_force (op0, i, mode);
7427 op1_word = operand_subword_force (op1, i, mode);
7431 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7432 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7435 /* All but high-order word must be compared as unsigned. */
7436 comp = compare_from_rtx (op0_word, op1_word,
7437 (unsignedp || i > 0) ? GTU : GT,
7438 unsignedp, word_mode, NULL_RTX, 0);
7439 if (comp == const_true_rtx)
7440 emit_jump (if_true_label);
7441 else if (comp != const0_rtx)
7442 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7444 /* Consider lower words only if these are equal. */
7445 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7447 if (comp == const_true_rtx)
7448 emit_jump (if_false_label);
7449 else if (comp != const0_rtx)
7450 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7454 emit_jump (if_false_label);
7455 if (drop_through_label)
7456 emit_label (drop_through_label);
7459 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7460 with one insn, test the comparison and jump to the appropriate label. */
7463 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7465 rtx if_false_label, if_true_label;
7467 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7468 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7469 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7470 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7472 rtx drop_through_label = 0;
7474 if (! if_false_label)
7475 drop_through_label = if_false_label = gen_label_rtx ();
7477 for (i = 0; i < nwords; i++)
7479 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7480 operand_subword_force (op1, i, mode),
7481 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7482 word_mode, NULL_RTX, 0);
7483 if (comp == const_true_rtx)
7484 emit_jump (if_false_label);
7485 else if (comp != const0_rtx)
7486 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7490 emit_jump (if_true_label);
7491 if (drop_through_label)
7492 emit_label (drop_through_label);
7495 /* Jump according to whether OP0 is 0.
7496 We assume that OP0 has an integer mode that is too wide
7497 for the available compare insns. */
7500 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7502 rtx if_false_label, if_true_label;
7504 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7506 rtx drop_through_label = 0;
7508 if (! if_false_label)
7509 drop_through_label = if_false_label = gen_label_rtx ();
7511 for (i = 0; i < nwords; i++)
7513 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7515 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7516 if (comp == const_true_rtx)
7517 emit_jump (if_false_label);
7518 else if (comp != const0_rtx)
7519 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7523 emit_jump (if_true_label);
7524 if (drop_through_label)
7525 emit_label (drop_through_label);
7528 /* Given a comparison expression in rtl form, output conditional branches to
7529 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7532 do_jump_for_compare (comparison, if_false_label, if_true_label)
7533 rtx comparison, if_false_label, if_true_label;
7537 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7538 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7543 emit_jump (if_false_label);
7545 else if (if_false_label)
7548 rtx prev = PREV_INSN (get_last_insn ());
7551 /* Output the branch with the opposite condition. Then try to invert
7552 what is generated. If more than one insn is a branch, or if the
7553 branch is not the last insn written, abort. If we can't invert
7554 the branch, emit make a true label, redirect this jump to that,
7555 emit a jump to the false label and define the true label. */
7557 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7558 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7562 /* Here we get the insn before what was just emitted.
7563 On some machines, emitting the branch can discard
7564 the previous compare insn and emit a replacement. */
7566 /* If there's only one preceding insn... */
7567 insn = get_insns ();
7569 insn = NEXT_INSN (prev);
7571 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7572 if (GET_CODE (insn) == JUMP_INSN)
7579 if (branch != get_last_insn ())
7582 if (! invert_jump (branch, if_false_label))
7584 if_true_label = gen_label_rtx ();
7585 redirect_jump (branch, if_true_label);
7586 emit_jump (if_false_label);
7587 emit_label (if_true_label);
7592 /* Generate code for a comparison expression EXP
7593 (including code to compute the values to be compared)
7594 and set (CC0) according to the result.
7595 SIGNED_CODE should be the rtx operation for this comparison for
7596 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7598 We force a stack adjustment unless there are currently
7599 things pushed on the stack that aren't yet used. */
7602 compare (exp, signed_code, unsigned_code)
7604 enum rtx_code signed_code, unsigned_code;
7607 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7609 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7610 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7611 register enum machine_mode mode = TYPE_MODE (type);
7612 int unsignedp = TREE_UNSIGNED (type);
7613 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7615 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7617 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7618 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7621 /* Like compare but expects the values to compare as two rtx's.
7622 The decision as to signed or unsigned comparison must be made by the caller.
7624 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7627 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7628 size of MODE should be used. */
7631 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7632 register rtx op0, op1;
7635 enum machine_mode mode;
7641 /* If one operand is constant, make it the second one. Only do this
7642 if the other operand is not constant as well. */
7644 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7645 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7650 code = swap_condition (code);
7655 op0 = force_not_mem (op0);
7656 op1 = force_not_mem (op1);
7659 do_pending_stack_adjust ();
7661 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7662 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7666 /* There's no need to do this now that combine.c can eliminate lots of
7667 sign extensions. This can be less efficient in certain cases on other
7670 /* If this is a signed equality comparison, we can do it as an
7671 unsigned comparison since zero-extension is cheaper than sign
7672 extension and comparisons with zero are done as unsigned. This is
7673 the case even on machines that can do fast sign extension, since
7674 zero-extension is easier to combine with other operations than
7675 sign-extension is. If we are comparing against a constant, we must
7676 convert it to what it would look like unsigned. */
7677 if ((code == EQ || code == NE) && ! unsignedp
7678 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7680 if (GET_CODE (op1) == CONST_INT
7681 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7682 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7687 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7689 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7692 /* Generate code to calculate EXP using a store-flag instruction
7693 and return an rtx for the result. EXP is either a comparison
7694 or a TRUTH_NOT_EXPR whose operand is a comparison.
7696 If TARGET is nonzero, store the result there if convenient.
7698 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7701 Return zero if there is no suitable set-flag instruction
7702 available on this machine.
7704 Once expand_expr has been called on the arguments of the comparison,
7705 we are committed to doing the store flag, since it is not safe to
7706 re-evaluate the expression. We emit the store-flag insn by calling
7707 emit_store_flag, but only expand the arguments if we have a reason
7708 to believe that emit_store_flag will be successful. If we think that
7709 it will, but it isn't, we have to simulate the store-flag with a
7710 set/jump/set sequence. */
7713 do_store_flag (exp, target, mode, only_cheap)
7716 enum machine_mode mode;
7720 tree arg0, arg1, type;
7722 enum machine_mode operand_mode;
7726 enum insn_code icode;
7727 rtx subtarget = target;
7728 rtx result, label, pattern, jump_pat;
7730 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7731 result at the end. We can't simply invert the test since it would
7732 have already been inverted if it were valid. This case occurs for
7733 some floating-point comparisons. */
7735 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7736 invert = 1, exp = TREE_OPERAND (exp, 0);
7738 arg0 = TREE_OPERAND (exp, 0);
7739 arg1 = TREE_OPERAND (exp, 1);
7740 type = TREE_TYPE (arg0);
7741 operand_mode = TYPE_MODE (type);
7742 unsignedp = TREE_UNSIGNED (type);
7744 /* We won't bother with BLKmode store-flag operations because it would mean
7745 passing a lot of information to emit_store_flag. */
7746 if (operand_mode == BLKmode)
7752 /* Get the rtx comparison code to use. We know that EXP is a comparison
7753 operation of some type. Some comparisons against 1 and -1 can be
7754 converted to comparisons with zero. Do so here so that the tests
7755 below will be aware that we have a comparison with zero. These
7756 tests will not catch constants in the first operand, but constants
7757 are rarely passed as the first operand. */
7759 switch (TREE_CODE (exp))
7768 if (integer_onep (arg1))
7769 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7771 code = unsignedp ? LTU : LT;
7774 if (! unsignedp && integer_all_onesp (arg1))
7775 arg1 = integer_zero_node, code = LT;
7777 code = unsignedp ? LEU : LE;
7780 if (! unsignedp && integer_all_onesp (arg1))
7781 arg1 = integer_zero_node, code = GE;
7783 code = unsignedp ? GTU : GT;
7786 if (integer_onep (arg1))
7787 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7789 code = unsignedp ? GEU : GE;
7795 /* Put a constant second. */
7796 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7798 tem = arg0; arg0 = arg1; arg1 = tem;
7799 code = swap_condition (code);
7802 /* If this is an equality or inequality test of a single bit, we can
7803 do this by shifting the bit being tested to the low-order bit and
7804 masking the result with the constant 1. If the condition was EQ,
7805 we xor it with 1. This does not require an scc insn and is faster
7806 than an scc insn even if we have it. */
7808 if ((code == NE || code == EQ)
7809 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7810 && integer_pow2p (TREE_OPERAND (arg0, 1))
7811 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7813 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7814 NULL_RTX, VOIDmode, 0)));
7816 if (subtarget == 0 || GET_CODE (subtarget) != REG
7817 || GET_MODE (subtarget) != operand_mode
7818 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7821 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7824 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7825 size_int (bitnum), target, 1);
7827 if (GET_MODE (op0) != mode)
7828 op0 = convert_to_mode (mode, op0, 1);
7830 if (bitnum != TYPE_PRECISION (type) - 1)
7831 op0 = expand_and (op0, const1_rtx, target);
7833 if ((code == EQ && ! invert) || (code == NE && invert))
7834 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7840 /* Now see if we are likely to be able to do this. Return if not. */
7841 if (! can_compare_p (operand_mode))
7843 icode = setcc_gen_code[(int) code];
7844 if (icode == CODE_FOR_nothing
7845 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7847 /* We can only do this if it is one of the special cases that
7848 can be handled without an scc insn. */
7849 if ((code == LT && integer_zerop (arg1))
7850 || (! only_cheap && code == GE && integer_zerop (arg1)))
7852 else if (BRANCH_COST >= 0
7853 && ! only_cheap && (code == NE || code == EQ)
7854 && TREE_CODE (type) != REAL_TYPE
7855 && ((abs_optab->handlers[(int) operand_mode].insn_code
7856 != CODE_FOR_nothing)
7857 || (ffs_optab->handlers[(int) operand_mode].insn_code
7858 != CODE_FOR_nothing)))
7864 preexpand_calls (exp);
7865 if (subtarget == 0 || GET_CODE (subtarget) != REG
7866 || GET_MODE (subtarget) != operand_mode
7867 || ! safe_from_p (subtarget, arg1))
7870 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7871 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7874 target = gen_reg_rtx (mode);
7876 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7877 because, if the emit_store_flag does anything it will succeed and
7878 OP0 and OP1 will not be used subsequently. */
7880 result = emit_store_flag (target, code,
7881 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7882 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7883 operand_mode, unsignedp, 1);
7888 result = expand_binop (mode, xor_optab, result, const1_rtx,
7889 result, 0, OPTAB_LIB_WIDEN);
7893 /* If this failed, we have to do this with set/compare/jump/set code. */
7894 if (target == 0 || GET_CODE (target) != REG
7895 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7896 target = gen_reg_rtx (GET_MODE (target));
7898 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7899 result = compare_from_rtx (op0, op1, code, unsignedp,
7900 operand_mode, NULL_RTX, 0);
7901 if (GET_CODE (result) == CONST_INT)
7902 return (((result == const0_rtx && ! invert)
7903 || (result != const0_rtx && invert))
7904 ? const0_rtx : const1_rtx);
7906 label = gen_label_rtx ();
7907 if (bcc_gen_fctn[(int) code] == 0)
7910 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7911 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7917 /* Generate a tablejump instruction (used for switch statements). */
7919 #ifdef HAVE_tablejump
7921 /* INDEX is the value being switched on, with the lowest value
7922 in the table already subtracted.
7923 MODE is its expected mode (needed if INDEX is constant).
7924 RANGE is the length of the jump table.
7925 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7927 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7928 index value is out of range. */
7931 do_tablejump (index, mode, range, table_label, default_label)
7932 rtx index, range, table_label, default_label;
7933 enum machine_mode mode;
7935 register rtx temp, vector;
7937 /* Do an unsigned comparison (in the proper mode) between the index
7938 expression and the value which represents the length of the range.
7939 Since we just finished subtracting the lower bound of the range
7940 from the index expression, this comparison allows us to simultaneously
7941 check that the original index expression value is both greater than
7942 or equal to the minimum value of the range and less than or equal to
7943 the maximum value of the range. */
7945 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
7946 emit_jump_insn (gen_bltu (default_label));
7948 /* If index is in range, it must fit in Pmode.
7949 Convert to Pmode so we can index with it. */
7951 index = convert_to_mode (Pmode, index, 1);
7953 /* If flag_force_addr were to affect this address
7954 it could interfere with the tricky assumptions made
7955 about addresses that contain label-refs,
7956 which may be valid only very near the tablejump itself. */
7957 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7958 GET_MODE_SIZE, because this indicates how large insns are. The other
7959 uses should all be Pmode, because they are addresses. This code
7960 could fail if addresses and insns are not the same size. */
7961 index = memory_address_noforce
7963 gen_rtx (PLUS, Pmode,
7964 gen_rtx (MULT, Pmode, index,
7965 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7966 gen_rtx (LABEL_REF, Pmode, table_label)));
7967 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7968 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7969 RTX_UNCHANGING_P (vector) = 1;
7970 convert_move (temp, vector, 0);
7972 emit_jump_insn (gen_tablejump (temp, table_label));
7974 #ifndef CASE_VECTOR_PC_RELATIVE
7975 /* If we are generating PIC code or if the table is PC-relative, the
7976 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7982 #endif /* HAVE_tablejump */