1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
32 #include "typeclass.h"
34 #define CEIL(x,y) (((x) + (y) - 1) / (y))
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should be processed from last to first if the stack and args grow
40 in opposite directions, but only if we have push insns. */
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
50 #ifndef STACK_PUSH_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_PUSH_CODE PRE_DEC
54 #define STACK_PUSH_CODE PRE_INC
58 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
59 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
61 /* If this is nonzero, we do not bother generating VOLATILE
62 around volatile memory references, and we are willing to
63 output indirect addresses. If cse is to follow, we reject
64 indirect addresses so a useful potential cse is generated;
65 if it is used only once, instruction combination will produce
66 the same indirect address eventually. */
69 /* Nonzero to generate code for all the subroutines within an
70 expression before generating the upper levels of the expression.
71 Nowadays this is never zero. */
72 int do_preexpand_calls = 1;
74 /* Number of units that we should eventually pop off the stack.
75 These are the arguments to function calls that have already returned. */
76 int pending_stack_adjust;
78 /* Nonzero means stack pops must not be deferred, and deferred stack
79 pops must not be output. It is nonzero inside a function call,
80 inside a conditional expression, inside a statement expression,
81 and in other cases as well. */
82 int inhibit_defer_pop;
84 /* A list of all cleanups which belong to the arguments of
85 function calls being expanded by expand_call. */
86 tree cleanups_this_call;
88 /* Nonzero means __builtin_saveregs has already been done in this function.
89 The value is the pseudoreg containing the value __builtin_saveregs
91 static rtx saveregs_value;
93 /* Similarly for __builtin_apply_args. */
94 static rtx apply_args_value;
96 /* This structure is used by move_by_pieces to describe the move to
108 int explicit_inc_from;
114 static rtx enqueue_insn PROTO((rtx, rtx));
115 static int queued_subexp_p PROTO((rtx));
116 static void init_queue PROTO((void));
117 static void move_by_pieces PROTO((rtx, rtx, int, int));
118 static int move_by_pieces_ninsns PROTO((unsigned int, int));
119 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
120 struct move_by_pieces *));
121 static void group_insns PROTO((rtx));
122 static void store_constructor PROTO((tree, rtx));
123 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
124 enum machine_mode, int, int, int));
125 static tree save_noncopied_parts PROTO((tree, tree));
126 static tree init_noncopied_parts PROTO((tree, tree));
127 static int safe_from_p PROTO((rtx, tree));
128 static int fixed_type_p PROTO((tree));
129 static int get_pointer_alignment PROTO((tree, unsigned));
130 static tree string_constant PROTO((tree, tree *));
131 static tree c_strlen PROTO((tree));
132 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
133 static int apply_args_size PROTO((void));
134 static int apply_result_size PROTO((void));
135 static rtx result_vector PROTO((int, rtx));
136 static rtx expand_builtin_apply_args PROTO((void));
137 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
138 static void expand_builtin_return PROTO((rtx));
139 static rtx expand_increment PROTO((tree, int));
140 static void preexpand_calls PROTO((tree));
141 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
142 static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
143 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
144 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
145 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
146 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
147 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
149 /* Record for each mode whether we can move a register directly to or
150 from an object of that mode in memory. If we can't, we won't try
151 to use that mode directly when accessing a field of that mode. */
153 static char direct_load[NUM_MACHINE_MODES];
154 static char direct_store[NUM_MACHINE_MODES];
156 /* MOVE_RATIO is the number of move instructions that is better than
160 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
163 /* A value of around 6 would minimize code size; infinity would minimize
165 #define MOVE_RATIO 15
169 /* This array records the insn_code of insns to perform block moves. */
170 enum insn_code movstr_optab[NUM_MACHINE_MODES];
172 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
174 #ifndef SLOW_UNALIGNED_ACCESS
175 #define SLOW_UNALIGNED_ACCESS 0
178 /* Register mappings for target machines without register windows. */
179 #ifndef INCOMING_REGNO
180 #define INCOMING_REGNO(OUT) (OUT)
182 #ifndef OUTGOING_REGNO
183 #define OUTGOING_REGNO(IN) (IN)
186 /* This is run once per compilation to set up which modes can be used
187 directly in memory and to initialize the block move optab. */
193 enum machine_mode mode;
194 /* Try indexing by frame ptr and try by stack ptr.
195 It is known that on the Convex the stack ptr isn't a valid index.
196 With luck, one or the other is valid on any machine. */
197 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
198 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
201 insn = emit_insn (gen_rtx (SET, 0, 0));
202 pat = PATTERN (insn);
204 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
205 mode = (enum machine_mode) ((int) mode + 1))
211 direct_load[(int) mode] = direct_store[(int) mode] = 0;
212 PUT_MODE (mem, mode);
213 PUT_MODE (mem1, mode);
215 /* See if there is some register that can be used in this mode and
216 directly loaded or stored from memory. */
218 if (mode != VOIDmode && mode != BLKmode)
219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
220 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
223 if (! HARD_REGNO_MODE_OK (regno, mode))
226 reg = gen_rtx (REG, mode, regno);
229 SET_DEST (pat) = reg;
230 if (recog (pat, insn, &num_clobbers) >= 0)
231 direct_load[(int) mode] = 1;
233 SET_SRC (pat) = mem1;
234 SET_DEST (pat) = reg;
235 if (recog (pat, insn, &num_clobbers) >= 0)
236 direct_load[(int) mode] = 1;
239 SET_DEST (pat) = mem;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_store[(int) mode] = 1;
244 SET_DEST (pat) = mem1;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_store[(int) mode] = 1;
253 /* This is run at the start of compiling a function. */
260 pending_stack_adjust = 0;
261 inhibit_defer_pop = 0;
262 cleanups_this_call = 0;
264 apply_args_value = 0;
268 /* Save all variables describing the current status into the structure *P.
269 This is used before starting a nested function. */
275 /* Instead of saving the postincrement queue, empty it. */
278 p->pending_stack_adjust = pending_stack_adjust;
279 p->inhibit_defer_pop = inhibit_defer_pop;
280 p->cleanups_this_call = cleanups_this_call;
281 p->saveregs_value = saveregs_value;
282 p->apply_args_value = apply_args_value;
283 p->forced_labels = forced_labels;
285 pending_stack_adjust = 0;
286 inhibit_defer_pop = 0;
287 cleanups_this_call = 0;
289 apply_args_value = 0;
293 /* Restore all variables describing the current status from the structure *P.
294 This is used after a nested function. */
297 restore_expr_status (p)
300 pending_stack_adjust = p->pending_stack_adjust;
301 inhibit_defer_pop = p->inhibit_defer_pop;
302 cleanups_this_call = p->cleanups_this_call;
303 saveregs_value = p->saveregs_value;
304 apply_args_value = p->apply_args_value;
305 forced_labels = p->forced_labels;
308 /* Manage the queue of increment instructions to be output
309 for POSTINCREMENT_EXPR expressions, etc. */
311 static rtx pending_chain;
313 /* Queue up to increment (or change) VAR later. BODY says how:
314 BODY should be the same thing you would pass to emit_insn
315 to increment right away. It will go to emit_insn later on.
317 The value is a QUEUED expression to be used in place of VAR
318 where you want to guarantee the pre-incrementation value of VAR. */
321 enqueue_insn (var, body)
324 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
325 var, NULL_RTX, NULL_RTX, body, pending_chain);
326 return pending_chain;
329 /* Use protect_from_queue to convert a QUEUED expression
330 into something that you can put immediately into an instruction.
331 If the queued incrementation has not happened yet,
332 protect_from_queue returns the variable itself.
333 If the incrementation has happened, protect_from_queue returns a temp
334 that contains a copy of the old value of the variable.
336 Any time an rtx which might possibly be a QUEUED is to be put
337 into an instruction, it must be passed through protect_from_queue first.
338 QUEUED expressions are not meaningful in instructions.
340 Do not pass a value through protect_from_queue and then hold
341 on to it for a while before putting it in an instruction!
342 If the queue is flushed in between, incorrect code will result. */
345 protect_from_queue (x, modify)
349 register RTX_CODE code = GET_CODE (x);
351 #if 0 /* A QUEUED can hang around after the queue is forced out. */
352 /* Shortcut for most common case. */
353 if (pending_chain == 0)
359 /* A special hack for read access to (MEM (QUEUED ...))
360 to facilitate use of autoincrement.
361 Make a copy of the contents of the memory location
362 rather than a copy of the address, but not
363 if the value is of mode BLKmode. */
364 if (code == MEM && GET_MODE (x) != BLKmode
365 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
367 register rtx y = XEXP (x, 0);
368 XEXP (x, 0) = QUEUED_VAR (y);
371 register rtx temp = gen_reg_rtx (GET_MODE (x));
372 emit_insn_before (gen_move_insn (temp, x),
378 /* Otherwise, recursively protect the subexpressions of all
379 the kinds of rtx's that can contain a QUEUED. */
381 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
382 else if (code == PLUS || code == MULT)
384 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
385 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
389 /* If the increment has not happened, use the variable itself. */
390 if (QUEUED_INSN (x) == 0)
391 return QUEUED_VAR (x);
392 /* If the increment has happened and a pre-increment copy exists,
394 if (QUEUED_COPY (x) != 0)
395 return QUEUED_COPY (x);
396 /* The increment has happened but we haven't set up a pre-increment copy.
397 Set one up now, and use it. */
398 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
399 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
401 return QUEUED_COPY (x);
404 /* Return nonzero if X contains a QUEUED expression:
405 if it contains anything that will be altered by a queued increment.
406 We handle only combinations of MEM, PLUS, MINUS and MULT operators
407 since memory addresses generally contain only those. */
413 register enum rtx_code code = GET_CODE (x);
419 return queued_subexp_p (XEXP (x, 0));
423 return queued_subexp_p (XEXP (x, 0))
424 || queued_subexp_p (XEXP (x, 1));
429 /* Perform all the pending incrementations. */
435 while (p = pending_chain)
437 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
438 pending_chain = QUEUED_NEXT (p);
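/* Editor's sketch (not part of the original file): the typical life cycle of
   the increment queue above, roughly as a caller like expand_increment uses
   it.  REG and VAL are hypothetical rtx's for the variable and the amount to
   add; the real callers build the queued body in their own way.

       rtx queued = enqueue_insn (reg,
                                  gen_rtx (SET, VOIDmode, reg,
                                           gen_rtx (PLUS, GET_MODE (reg),
                                                    reg, val)));

   Any later use of the pre-increment value must pass the QUEUED rtx through
   protect_from_queue before it goes into an insn:

       rtx safe = protect_from_queue (queued, 0);

   and at the next sequence point the queued additions are flushed with

       emit_queue ();  */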
449 /* Copy data from FROM to TO, where the machine modes are not the same.
450 Both modes may be integer, or both may be floating.
451 UNSIGNEDP should be nonzero if FROM is an unsigned type.
452 This causes zero-extension instead of sign-extension. */
455 convert_move (to, from, unsignedp)
456 register rtx to, from;
459 enum machine_mode to_mode = GET_MODE (to);
460 enum machine_mode from_mode = GET_MODE (from);
461 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
462 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
466 /* rtx code for making an equivalent value. */
467 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
469 to = protect_from_queue (to, 1);
470 from = protect_from_queue (from, 0);
472 if (to_real != from_real)
475 /* If FROM is a SUBREG that indicates that we have already done at least
476 the required extension, strip it. We don't handle such SUBREGs as
479 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
480 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
481 >= GET_MODE_SIZE (to_mode))
482 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
483 from = gen_lowpart (to_mode, from), from_mode = to_mode;
485 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
488 if (to_mode == from_mode
489 || (from_mode == VOIDmode && CONSTANT_P (from)))
491 emit_move_insn (to, from);
497 #ifdef HAVE_extendqfhf2
498 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
500 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
504 #ifdef HAVE_extendqfsf2
505 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
507 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
511 #ifdef HAVE_extendqfdf2
512 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
514 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
518 #ifdef HAVE_extendqfxf2
519 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
521 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
525 #ifdef HAVE_extendqftf2
526 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
528 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
533 #ifdef HAVE_extendhfsf2
534 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
536 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
540 #ifdef HAVE_extendhfdf2
541 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
543 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
547 #ifdef HAVE_extendhfxf2
548 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
550 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
554 #ifdef HAVE_extendhftf2
555 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
557 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
562 #ifdef HAVE_extendsfdf2
563 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
565 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
569 #ifdef HAVE_extendsfxf2
570 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
572 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
576 #ifdef HAVE_extendsftf2
577 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
579 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
583 #ifdef HAVE_extenddfxf2
584 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
586 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
590 #ifdef HAVE_extenddftf2
591 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
593 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
598 #ifdef HAVE_trunchfqf2
599 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
601 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
605 #ifdef HAVE_truncsfqf2
606 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
608 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
612 #ifdef HAVE_truncdfqf2
613 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
615 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
619 #ifdef HAVE_truncxfqf2
620 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
622 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
626 #ifdef HAVE_trunctfqf2
627 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
629 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncsfhf2
634 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
636 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
640 #ifdef HAVE_truncdfhf2
641 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
643 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxfhf2
648 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
650 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctfhf2
655 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
657 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
661 #ifdef HAVE_truncdfsf2
662 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
664 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncxfsf2
669 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
671 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
675 #ifdef HAVE_trunctfsf2
676 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
678 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
682 #ifdef HAVE_truncxfdf2
683 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
685 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
689 #ifdef HAVE_trunctfdf2
690 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
692 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
704 libcall = extendsfdf2_libfunc;
708 libcall = extendsfxf2_libfunc;
712 libcall = extendsftf2_libfunc;
721 libcall = truncdfsf2_libfunc;
725 libcall = extenddfxf2_libfunc;
729 libcall = extenddftf2_libfunc;
738 libcall = truncxfsf2_libfunc;
742 libcall = truncxfdf2_libfunc;
751 libcall = trunctfsf2_libfunc;
755 libcall = trunctfdf2_libfunc;
761 if (libcall == (rtx) 0)
762 /* This conversion is not implemented yet. */
765 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
766 emit_move_insn (to, hard_libcall_value (to_mode));
770 /* Now both modes are integers. */
772 /* Handle expanding beyond a word. */
773 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
774 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
781 enum machine_mode lowpart_mode;
782 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
784 /* Try converting directly if the insn is supported. */
785 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
788 /* If FROM is a SUBREG, put it into a register. Do this
789 so that we always generate the same set of insns for
790 better cse'ing; if an intermediate assignment occurred,
791 we won't be doing the operation directly on the SUBREG. */
792 if (optimize > 0 && GET_CODE (from) == SUBREG)
793 from = force_reg (from_mode, from);
794 emit_unop_insn (code, to, from, equiv_code);
797 /* Next, try converting via full word. */
798 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
799 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
800 != CODE_FOR_nothing))
802 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
803 emit_unop_insn (code, to,
804 gen_lowpart (word_mode, to), equiv_code);
808 /* No special multiword conversion insn; do it by hand. */
811 /* Get a copy of FROM widened to a word, if necessary. */
812 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
813 lowpart_mode = word_mode;
815 lowpart_mode = from_mode;
817 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
819 lowpart = gen_lowpart (lowpart_mode, to);
820 emit_move_insn (lowpart, lowfrom);
822 /* Compute the value to put in each remaining word. */
824 fill_value = const0_rtx;
829 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
830 && STORE_FLAG_VALUE == -1)
832 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
834 fill_value = gen_reg_rtx (word_mode);
835 emit_insn (gen_slt (fill_value));
841 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
842 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
844 fill_value = convert_to_mode (word_mode, fill_value, 1);
848 /* Fill the remaining words. */
849 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
851 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
852 rtx subword = operand_subword (to, index, 1, to_mode);
857 if (fill_value != subword)
858 emit_move_insn (subword, fill_value);
861 insns = get_insns ();
864 emit_no_conflict_block (insns, to, from, NULL_RTX,
865 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
869 /* Truncating multi-word to a word or less. */
870 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
871 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
873 convert_move (to, gen_lowpart (word_mode, from), 0);
877 /* Handle pointer conversion */ /* SPEE 900220 */
878 if (to_mode == PSImode)
880 if (from_mode != SImode)
881 from = convert_to_mode (SImode, from, unsignedp);
883 #ifdef HAVE_truncsipsi
886 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
889 #endif /* HAVE_truncsipsi */
893 if (from_mode == PSImode)
895 if (to_mode != SImode)
897 from = convert_to_mode (SImode, from, unsignedp);
902 #ifdef HAVE_extendpsisi
903 if (HAVE_extendpsisi)
905 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
908 #endif /* HAVE_extendpsisi */
913 /* Now follow all the conversions between integers
914 no more than a word long. */
916 /* For truncation, usually we can just refer to FROM in a narrower mode. */
917 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
918 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
919 GET_MODE_BITSIZE (from_mode)))
921 if (!((GET_CODE (from) == MEM
922 && ! MEM_VOLATILE_P (from)
923 && direct_load[(int) to_mode]
924 && ! mode_dependent_address_p (XEXP (from, 0)))
925 || GET_CODE (from) == REG
926 || GET_CODE (from) == SUBREG))
927 from = force_reg (from_mode, from);
928 emit_move_insn (to, gen_lowpart (to_mode, from));
932 /* Handle extension. */
933 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
935 /* Convert directly if that works. */
936 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
939 /* If FROM is a SUBREG, put it into a register. Do this
940 so that we always generate the same set of insns for
941 better cse'ing; if an intermediate assignment occurred,
942 we won't be doing the operation directly on the SUBREG. */
943 if (optimize > 0 && GET_CODE (from) == SUBREG)
944 from = force_reg (from_mode, from);
945 emit_unop_insn (code, to, from, equiv_code);
950 enum machine_mode intermediate;
952 /* Search for a mode to convert via. */
953 for (intermediate = from_mode; intermediate != VOIDmode;
954 intermediate = GET_MODE_WIDER_MODE (intermediate))
955 if ((can_extend_p (to_mode, intermediate, unsignedp)
957 && (can_extend_p (intermediate, from_mode, unsignedp)
958 != CODE_FOR_nothing))
960 convert_move (to, convert_to_mode (intermediate, from,
961 unsignedp), unsignedp);
965 /* No suitable intermediate mode. */
970 /* Support special truncate insns for certain modes. */
972 if (from_mode == DImode && to_mode == SImode)
974 #ifdef HAVE_truncdisi2
977 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
981 convert_move (to, force_reg (from_mode, from), unsignedp);
985 if (from_mode == DImode && to_mode == HImode)
987 #ifdef HAVE_truncdihi2
990 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
994 convert_move (to, force_reg (from_mode, from), unsignedp);
998 if (from_mode == DImode && to_mode == QImode)
1000 #ifdef HAVE_truncdiqi2
1001 if (HAVE_truncdiqi2)
1003 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1007 convert_move (to, force_reg (from_mode, from), unsignedp);
1011 if (from_mode == SImode && to_mode == HImode)
1013 #ifdef HAVE_truncsihi2
1014 if (HAVE_truncsihi2)
1016 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1020 convert_move (to, force_reg (from_mode, from), unsignedp);
1024 if (from_mode == SImode && to_mode == QImode)
1026 #ifdef HAVE_truncsiqi2
1027 if (HAVE_truncsiqi2)
1029 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1033 convert_move (to, force_reg (from_mode, from), unsignedp);
1037 if (from_mode == HImode && to_mode == QImode)
1039 #ifdef HAVE_trunchiqi2
1040 if (HAVE_trunchiqi2)
1042 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1046 convert_move (to, force_reg (from_mode, from), unsignedp);
1050 /* Handle truncation of volatile memrefs, and so on;
1051 the things that couldn't be truncated directly,
1052 and for which there was no special instruction. */
1053 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1055 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1056 emit_move_insn (to, temp);
1060 /* Mode combination is not recognized. */
1064 /* Return an rtx for a value that would result
1065 from converting X to mode MODE.
1066 Both X and MODE may be floating, or both integer.
1067 UNSIGNEDP is nonzero if X is an unsigned value.
1068 This can be done by referring to a part of X in place
1069 or by copying to a new temporary with conversion.
1071 This function *must not* call protect_from_queue
1072 except when putting X into an insn (in which case convert_move does it). */
1075 convert_to_mode (mode, x, unsignedp)
1076 enum machine_mode mode;
1082 /* If FROM is a SUBREG that indicates that we have already done at least
1083 the required extension, strip it. */
1085 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1086 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1087 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1088 x = gen_lowpart (mode, x);
1090 if (mode == GET_MODE (x))
1093 /* There is one case that we must handle specially: If we are converting
1094 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1095 we are to interpret the constant as unsigned, gen_lowpart will do
1096 the wrong thing if the constant appears negative. What we want to do is
1097 make the high-order word of the constant zero, not all ones. */
1099 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1100 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1101 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1102 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1104 /* We can do this with a gen_lowpart if both desired and current modes
1105 are integer, and this is either a constant integer, a register, or a
1106 non-volatile MEM. Except for the constant case, we must be narrowing
1109 if (GET_CODE (x) == CONST_INT
1110 || (GET_MODE_CLASS (mode) == MODE_INT
1111 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1112 && (GET_CODE (x) == CONST_DOUBLE
1113 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1114 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1115 && direct_load[(int) mode]
1116 || GET_CODE (x) == REG)))))
1117 return gen_lowpart (mode, x);
1119 temp = gen_reg_rtx (mode);
1120 convert_move (temp, x, unsignedp);
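/* Editor's sketch (not part of the original file): how the two entry points
   above are typically called.  VAL is a hypothetical SImode pseudo.

       rtx val = gen_reg_rtx (SImode);

   Widen VAL with sign extension into a fresh DImode value:

       rtx wide = convert_to_mode (DImode, val, 0);

   Narrow VAL into an existing QImode pseudo, treating it as unsigned:

       rtx narrow = gen_reg_rtx (QImode);
       convert_move (narrow, val, 1);

   convert_to_mode may simply return a low part of VAL when the truncation is
   a no-op; convert_move always stores into its first operand.  */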
1124 /* Generate several move instructions to copy LEN bytes
1125 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1126 The caller must pass FROM and TO
1127 through protect_from_queue before calling.
1128 ALIGN (in bytes) is maximum alignment we can assume. */
1131 move_by_pieces (to, from, len, align)
1135 struct move_by_pieces data;
1136 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1137 int max_size = MOVE_MAX + 1;
1140 data.to_addr = to_addr;
1141 data.from_addr = from_addr;
1145 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1146 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1148 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1149 || GET_CODE (from_addr) == POST_INC
1150 || GET_CODE (from_addr) == POST_DEC);
1152 data.explicit_inc_from = 0;
1153 data.explicit_inc_to = 0;
1155 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1156 if (data.reverse) data.offset = len;
1159 /* If copying requires more than two move insns,
1160 copy addresses to registers (to make displacements shorter)
1161 and use post-increment if available. */
1162 if (!(data.autinc_from && data.autinc_to)
1163 && move_by_pieces_ninsns (len, align) > 2)
1165 #ifdef HAVE_PRE_DECREMENT
1166 if (data.reverse && ! data.autinc_from)
1168 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1169 data.autinc_from = 1;
1170 data.explicit_inc_from = -1;
1173 #ifdef HAVE_POST_INCREMENT
1174 if (! data.autinc_from)
1176 data.from_addr = copy_addr_to_reg (from_addr);
1177 data.autinc_from = 1;
1178 data.explicit_inc_from = 1;
1181 if (!data.autinc_from && CONSTANT_P (from_addr))
1182 data.from_addr = copy_addr_to_reg (from_addr);
1183 #ifdef HAVE_PRE_DECREMENT
1184 if (data.reverse && ! data.autinc_to)
1186 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1188 data.explicit_inc_to = -1;
1191 #ifdef HAVE_POST_INCREMENT
1192 if (! data.reverse && ! data.autinc_to)
1194 data.to_addr = copy_addr_to_reg (to_addr);
1196 data.explicit_inc_to = 1;
1199 if (!data.autinc_to && CONSTANT_P (to_addr))
1200 data.to_addr = copy_addr_to_reg (to_addr);
1203 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1204 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1207 /* First move what we can in the largest integer mode, then go to
1208 successively smaller modes. */
1210 while (max_size > 1)
1212 enum machine_mode mode = VOIDmode, tmode;
1213 enum insn_code icode;
1215 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1216 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1217 if (GET_MODE_SIZE (tmode) < max_size)
1220 if (mode == VOIDmode)
1223 icode = mov_optab->handlers[(int) mode].insn_code;
1224 if (icode != CODE_FOR_nothing
1225 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1226 GET_MODE_SIZE (mode)))
1227 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1229 max_size = GET_MODE_SIZE (mode);
1232 /* The code above should have handled everything. */
1237 /* Return number of insns required to move L bytes by pieces.
1238 ALIGN (in bytes) is maximum alignment we can assume. */
1241 move_by_pieces_ninsns (l, align)
1245 register int n_insns = 0;
1246 int max_size = MOVE_MAX + 1;
1248 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1249 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1252 while (max_size > 1)
1254 enum machine_mode mode = VOIDmode, tmode;
1255 enum insn_code icode;
1257 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1258 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1259 if (GET_MODE_SIZE (tmode) < max_size)
1262 if (mode == VOIDmode)
1265 icode = mov_optab->handlers[(int) mode].insn_code;
1266 if (icode != CODE_FOR_nothing
1267 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1268 GET_MODE_SIZE (mode)))
1269 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1271 max_size = GET_MODE_SIZE (mode);
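/* Worked example (editor's addition): on a hypothetical 32-bit target where
   MOVE_MAX is 4 and the alignment tests above pass for every integer mode,
   move_by_pieces_ninsns (11, 4) counts

       11 / 4 = 2  SImode moves, leaving 3 bytes,
        3 / 2 = 1  HImode move,  leaving 1 byte,
        1 / 1 = 1  QImode move,

   for a total of 4 insns.  emit_block_move below compares this count against
   MOVE_RATIO before deciding to expand a block copy inline.  */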
1277 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1278 with move instructions for mode MODE. GENFUN is the gen_... function
1279 to make a move insn for that mode. DATA has all the other info. */
1282 move_by_pieces_1 (genfun, mode, data)
1284 enum machine_mode mode;
1285 struct move_by_pieces *data;
1287 register int size = GET_MODE_SIZE (mode);
1288 register rtx to1, from1;
1290 while (data->len >= size)
1292 if (data->reverse) data->offset -= size;
1294 to1 = (data->autinc_to
1295 ? gen_rtx (MEM, mode, data->to_addr)
1296 : change_address (data->to, mode,
1297 plus_constant (data->to_addr, data->offset)));
1300 ? gen_rtx (MEM, mode, data->from_addr)
1301 : change_address (data->from, mode,
1302 plus_constant (data->from_addr, data->offset)));
1304 #ifdef HAVE_PRE_DECREMENT
1305 if (data->explicit_inc_to < 0)
1306 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1307 if (data->explicit_inc_from < 0)
1308 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1311 emit_insn ((*genfun) (to1, from1));
1312 #ifdef HAVE_POST_INCREMENT
1313 if (data->explicit_inc_to > 0)
1314 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1315 if (data->explicit_inc_from > 0)
1316 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1319 if (! data->reverse) data->offset += size;
1325 /* Emit code to move a block Y to a block X.
1326 This may be done with string-move instructions,
1327 with multiple scalar move instructions, or with a library call.
1329 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1331 SIZE is an rtx that says how long they are.
1332 ALIGN is the maximum alignment we can assume they have,
1333 measured in bytes. */
1336 emit_block_move (x, y, size, align)
1341 if (GET_MODE (x) != BLKmode)
1344 if (GET_MODE (y) != BLKmode)
1347 x = protect_from_queue (x, 1);
1348 y = protect_from_queue (y, 0);
1349 size = protect_from_queue (size, 0);
1351 if (GET_CODE (x) != MEM)
1353 if (GET_CODE (y) != MEM)
1358 if (GET_CODE (size) == CONST_INT
1359 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1360 move_by_pieces (x, y, INTVAL (size), align);
1363 /* Try the most limited insn first, because there's no point
1364 including more than one in the machine description unless
1365 the more limited one has some advantage. */
1367 rtx opalign = GEN_INT (align);
1368 enum machine_mode mode;
1370 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1371 mode = GET_MODE_WIDER_MODE (mode))
1373 enum insn_code code = movstr_optab[(int) mode];
1375 if (code != CODE_FOR_nothing
1376 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1377 here because if SIZE is less than the mode mask, as it is
1378 returned by the macro, it will definitely be less than the
1379 actual mode mask. */
1380 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
1381 && (insn_operand_predicate[(int) code][0] == 0
1382 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1383 && (insn_operand_predicate[(int) code][1] == 0
1384 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1385 && (insn_operand_predicate[(int) code][3] == 0
1386 || (*insn_operand_predicate[(int) code][3]) (opalign,
1390 rtx last = get_last_insn ();
1393 op2 = convert_to_mode (mode, size, 1);
1394 if (insn_operand_predicate[(int) code][2] != 0
1395 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1396 op2 = copy_to_mode_reg (mode, op2);
1398 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1405 delete_insns_since (last);
1409 #ifdef TARGET_MEM_FUNCTIONS
1410 emit_library_call (memcpy_libfunc, 0,
1411 VOIDmode, 3, XEXP (x, 0), Pmode,
1413 convert_to_mode (TYPE_MODE (sizetype), size,
1414 TREE_UNSIGNED (sizetype)),
1415 TYPE_MODE (sizetype));
1417 emit_library_call (bcopy_libfunc, 0,
1418 VOIDmode, 3, XEXP (y, 0), Pmode,
1420 convert_to_mode (TYPE_MODE (sizetype), size,
1421 TREE_UNSIGNED (sizetype)),
1422 TYPE_MODE (sizetype));
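/* Editor's sketch (not part of the original file): copying one BLKmode
   object into another with emit_block_move.  DEST_RTX and SRC_RTX are
   hypothetical BLKmode MEMs returned by expand_expr, and EXP is the source
   tree supplying size and alignment.

       emit_block_move (dest_rtx, src_rtx, expr_size (exp),
                        TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

   Depending on the size and the target, this becomes a move_by_pieces
   expansion, a movstr pattern, or a library call to memcpy or bcopy.  */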
1427 /* Copy all or part of a value X into registers starting at REGNO.
1428 The number of registers to be filled is NREGS. */
1431 move_block_to_reg (regno, x, nregs, mode)
1435 enum machine_mode mode;
1440 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1441 x = validize_mem (force_const_mem (mode, x));
1443 /* See if the machine can do this with a load multiple insn. */
1444 #ifdef HAVE_load_multiple
1445 last = get_last_insn ();
1446 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1454 delete_insns_since (last);
1457 for (i = 0; i < nregs; i++)
1458 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1459 operand_subword_force (x, i, mode));
1462 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1463 The number of registers to be filled is NREGS. */
1466 move_block_from_reg (regno, x, nregs)
1474 /* See if the machine can do this with a store multiple insn. */
1475 #ifdef HAVE_store_multiple
1476 last = get_last_insn ();
1477 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1485 delete_insns_since (last);
1488 for (i = 0; i < nregs; i++)
1490 rtx tem = operand_subword (x, i, 1, BLKmode);
1495 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1499 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1502 use_regs (regno, nregs)
1508 for (i = 0; i < nregs; i++)
1509 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1512 /* Mark the instructions since PREV as a libcall block.
1513 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1522 /* Find the instructions to mark */
1524 insn_first = NEXT_INSN (prev);
1526 insn_first = get_insns ();
1528 insn_last = get_last_insn ();
1530 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1531 REG_NOTES (insn_last));
1533 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1534 REG_NOTES (insn_first));
1537 /* Write zeros through the storage of OBJECT.
1538 If OBJECT has BLKmode, SIZE is its length in bytes. */
1541 clear_storage (object, size)
1545 if (GET_MODE (object) == BLKmode)
1547 #ifdef TARGET_MEM_FUNCTIONS
1548 emit_library_call (memset_libfunc, 0,
1550 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1551 GEN_INT (size), Pmode);
1553 emit_library_call (bzero_libfunc, 0,
1555 XEXP (object, 0), Pmode,
1556 GEN_INT (size), Pmode);
1560 emit_move_insn (object, const0_rtx);
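/* Editor's sketch (not part of the original file): zero-initializing an
   aggregate whose rtx OBJ is a hypothetical BLKmode MEM of type TYPE.

       clear_storage (obj, int_size_in_bytes (type));

   For a non-BLKmode OBJ the size argument is not used; a single move of
   const0_rtx is emitted instead.  */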
1563 /* Generate code to copy Y into X.
1564 Both Y and X must have the same mode, except that
1565 Y can be a constant with VOIDmode.
1566 This mode cannot be BLKmode; use emit_block_move for that.
1568 Return the last instruction emitted. */
1571 emit_move_insn (x, y)
1574 enum machine_mode mode = GET_MODE (x);
1575 enum machine_mode submode;
1576 enum mode_class class = GET_MODE_CLASS (mode);
1579 x = protect_from_queue (x, 1);
1580 y = protect_from_queue (y, 0);
1582 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1585 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1586 y = force_const_mem (mode, y);
1588 /* If X or Y are memory references, verify that their addresses are valid
1590 if (GET_CODE (x) == MEM
1591 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1592 && ! push_operand (x, GET_MODE (x)))
1594 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1595 x = change_address (x, VOIDmode, XEXP (x, 0));
1597 if (GET_CODE (y) == MEM
1598 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1600 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1601 y = change_address (y, VOIDmode, XEXP (y, 0));
1603 if (mode == BLKmode)
1606 return emit_move_insn_1 (x, y);
1609 /* Low level part of emit_move_insn.
1610 Called just like emit_move_insn, but assumes X and Y
1611 are basically valid. */
1614 emit_move_insn_1 (x, y)
1617 enum machine_mode mode = GET_MODE (x);
1618 enum machine_mode submode;
1619 enum mode_class class = GET_MODE_CLASS (mode);
1622 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1623 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1624 (class == MODE_COMPLEX_INT
1625 ? MODE_INT : MODE_FLOAT),
1628 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1630 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1632 /* Expand complex moves by moving real part and imag part, if possible. */
1633 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1634 && submode != BLKmode
1635 && (mov_optab->handlers[(int) submode].insn_code
1636 != CODE_FOR_nothing))
1638 /* Don't split destination if it is a stack push. */
1639 int stack = push_operand (x, GET_MODE (x));
1640 rtx prev = get_last_insn ();
1642 /* Tell flow that the whole of the destination is being set. */
1643 if (GET_CODE (x) == REG)
1644 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1646 /* If this is a stack, push the highpart first, so it
1647 will be in the argument order.
1649 In that case, change_address is used only to convert
1650 the mode, not to change the address. */
1651 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1652 ((stack ? change_address (x, submode, (rtx) 0)
1653 : gen_highpart (submode, x)),
1654 gen_highpart (submode, y)));
1655 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1656 ((stack ? change_address (x, submode, (rtx) 0)
1657 : gen_lowpart (submode, x)),
1658 gen_lowpart (submode, y)));
1662 return get_last_insn ();
1665 /* This will handle any multi-word mode that lacks a move_insn pattern.
1666 However, you will get better code if you define such patterns,
1667 even if they must turn into multiple assembler instructions. */
1668 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1671 rtx prev_insn = get_last_insn ();
1674 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1677 rtx xpart = operand_subword (x, i, 1, mode);
1678 rtx ypart = operand_subword (y, i, 1, mode);
1680 /* If we can't get a part of Y, put Y into memory if it is a
1681 constant. Otherwise, force it into a register. If we still
1682 can't get a part of Y, abort. */
1683 if (ypart == 0 && CONSTANT_P (y))
1685 y = force_const_mem (mode, y);
1686 ypart = operand_subword (y, i, 1, mode);
1688 else if (ypart == 0)
1689 ypart = operand_subword_force (y, i, mode);
1691 if (xpart == 0 || ypart == 0)
1694 last_insn = emit_move_insn (xpart, ypart);
1696 /* Mark these insns as a libcall block. */
1697 group_insns (prev_insn);
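/* Editor's sketch (not part of the original file): the common ways the rest
   of the compiler calls emit_move_insn.  MEM_RTX is a hypothetical SImode
   MEM; constants are accepted with VOIDmode.

       rtx temp = gen_reg_rtx (SImode);
       emit_move_insn (temp, GEN_INT (42));
       emit_move_insn (mem_rtx, temp);

   BLKmode values must go through emit_block_move instead; emit_move_insn
   itself passes both operands through protect_from_queue.  */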
1705 /* Pushing data onto the stack. */
1707 /* Push a block of length SIZE (perhaps variable)
1708 and return an rtx to address the beginning of the block.
1709 Note that it is not possible for the value returned to be a QUEUED.
1710 The value may be virtual_outgoing_args_rtx.
1712 EXTRA is the number of bytes of padding to push in addition to SIZE.
1713 BELOW nonzero means this padding comes at low addresses;
1714 otherwise, the padding comes at high addresses. */
1717 push_block (size, extra, below)
1722 if (CONSTANT_P (size))
1723 anti_adjust_stack (plus_constant (size, extra));
1724 else if (GET_CODE (size) == REG && extra == 0)
1725 anti_adjust_stack (size);
1728 rtx temp = copy_to_mode_reg (Pmode, size);
1730 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1731 temp, 0, OPTAB_LIB_WIDEN);
1732 anti_adjust_stack (temp);
1735 #ifdef STACK_GROWS_DOWNWARD
1736 temp = virtual_outgoing_args_rtx;
1737 if (extra != 0 && below)
1738 temp = plus_constant (temp, extra);
1740 if (GET_CODE (size) == CONST_INT)
1741 temp = plus_constant (virtual_outgoing_args_rtx,
1742 - INTVAL (size) - (below ? 0 : extra));
1743 else if (extra != 0 && !below)
1744 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1745 negate_rtx (Pmode, plus_constant (size, extra)));
1747 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1748 negate_rtx (Pmode, size));
1751 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1757 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
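/* Editor's sketch (not part of the original file): gen_push_operand only
   builds the pre/post increment or decrement address; wrapping it in a MEM
   and storing through it is what actually pushes.  X is a hypothetical
   SImode value.

       emit_move_insn (gen_rtx (MEM, SImode, gen_push_operand ()), x);

   This is essentially how emit_push_insn below pushes a scalar when
   PUSH_ROUNDING is defined and no argument block has been preallocated.  */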
1760 /* Generate code to push X onto the stack, assuming it has mode MODE and
1762 MODE is redundant except when X is a CONST_INT (since they don't
1764 SIZE is an rtx for the size of data to be copied (in bytes),
1765 needed only if X is BLKmode.
1767 ALIGN (in bytes) is maximum alignment we can assume.
1769 If PARTIAL and REG are both nonzero, then copy that many of the first
1770 words of X into registers starting with REG, and push the rest of X.
1771 The amount of space pushed is decreased by PARTIAL words,
1772 rounded *down* to a multiple of PARM_BOUNDARY.
1773 REG must be a hard register in this case.
1774 If REG is zero but PARTIAL is not, take all other actions for an
1775 argument partially in registers, but do not actually load any
1778 EXTRA is the amount in bytes of extra space to leave next to this arg.
1779 This is ignored if an argument block has already been allocated.
1781 On a machine that lacks real push insns, ARGS_ADDR is the address of
1782 the bottom of the argument block for this call. We use indexing off there
1783 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1784 argument block has not been preallocated.
1786 ARGS_SO_FAR is the size of args previously pushed for this call. */
1789 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1790 args_addr, args_so_far)
1792 enum machine_mode mode;
1803 enum direction stack_direction
1804 #ifdef STACK_GROWS_DOWNWARD
1810 /* Decide where to pad the argument: `downward' for below,
1811 `upward' for above, or `none' for don't pad it.
1812 Default is below for small data on big-endian machines; else above. */
1813 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1815 /* Invert direction if stack is post-update. */
1816 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1817 if (where_pad != none)
1818 where_pad = (where_pad == downward ? upward : downward);
1820 xinner = x = protect_from_queue (x, 0);
1822 if (mode == BLKmode)
1824 /* Copy a block into the stack, entirely or partially. */
1827 int used = partial * UNITS_PER_WORD;
1828 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1836 /* USED is now the # of bytes we need not copy to the stack
1837 because registers will take care of them. */
1840 xinner = change_address (xinner, BLKmode,
1841 plus_constant (XEXP (xinner, 0), used));
1843 /* If the partial register-part of the arg counts in its stack size,
1844 skip the part of stack space corresponding to the registers.
1845 Otherwise, start copying to the beginning of the stack space,
1846 by setting SKIP to 0. */
1847 #ifndef REG_PARM_STACK_SPACE
1853 #ifdef PUSH_ROUNDING
1854 /* Do it with several push insns if that doesn't take lots of insns
1855 and if there is no difficulty with push insns that skip bytes
1856 on the stack for alignment purposes. */
1858 && GET_CODE (size) == CONST_INT
1860 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1862 /* Here we avoid the case of a structure whose weak alignment
1863 forces many pushes of a small amount of data,
1864 and such small pushes do rounding that causes trouble. */
1865 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1866 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1867 || PUSH_ROUNDING (align) == align)
1868 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1870 /* Push padding now if padding above and stack grows down,
1871 or if padding below and stack grows up.
1872 But if space already allocated, this has already been done. */
1873 if (extra && args_addr == 0
1874 && where_pad != none && where_pad != stack_direction)
1875 anti_adjust_stack (GEN_INT (extra));
1877 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1878 INTVAL (size) - used, align);
1881 #endif /* PUSH_ROUNDING */
1883 /* Otherwise make space on the stack and copy the data
1884 to the address of that space. */
1886 /* Deduct words put into registers from the size we must copy. */
1889 if (GET_CODE (size) == CONST_INT)
1890 size = GEN_INT (INTVAL (size) - used);
1892 size = expand_binop (GET_MODE (size), sub_optab, size,
1893 GEN_INT (used), NULL_RTX, 0,
1897 /* Get the address of the stack space.
1898 In this case, we do not deal with EXTRA separately.
1899 A single stack adjust will do. */
1902 temp = push_block (size, extra, where_pad == downward);
1905 else if (GET_CODE (args_so_far) == CONST_INT)
1906 temp = memory_address (BLKmode,
1907 plus_constant (args_addr,
1908 skip + INTVAL (args_so_far)));
1910 temp = memory_address (BLKmode,
1911 plus_constant (gen_rtx (PLUS, Pmode,
1912 args_addr, args_so_far),
1915 /* TEMP is the address of the block. Copy the data there. */
1916 if (GET_CODE (size) == CONST_INT
1917 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1920 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1921 INTVAL (size), align);
1924 /* Try the most limited insn first, because there's no point
1925 including more than one in the machine description unless
1926 the more limited one has some advantage. */
1927 #ifdef HAVE_movstrqi
1929 && GET_CODE (size) == CONST_INT
1930 && ((unsigned) INTVAL (size)
1931 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1933 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1934 xinner, size, GEN_INT (align));
1942 #ifdef HAVE_movstrhi
1944 && GET_CODE (size) == CONST_INT
1945 && ((unsigned) INTVAL (size)
1946 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1948 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1949 xinner, size, GEN_INT (align));
1957 #ifdef HAVE_movstrsi
1960 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1961 xinner, size, GEN_INT (align));
1969 #ifdef HAVE_movstrdi
1972 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1973 xinner, size, GEN_INT (align));
1982 #ifndef ACCUMULATE_OUTGOING_ARGS
1983 /* If the source is referenced relative to the stack pointer,
1984 copy it to another register to stabilize it. We do not need
1985 to do this if we know that we won't be changing sp. */
1987 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1988 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1989 temp = copy_to_reg (temp);
1992 /* Make inhibit_defer_pop nonzero around the library call
1993 to force it to pop the bcopy-arguments right away. */
1995 #ifdef TARGET_MEM_FUNCTIONS
1996 emit_library_call (memcpy_libfunc, 0,
1997 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1998 convert_to_mode (TYPE_MODE (sizetype),
1999 size, TREE_UNSIGNED (sizetype)),
2000 TYPE_MODE (sizetype));
2002 emit_library_call (bcopy_libfunc, 0,
2003 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2004 convert_to_mode (TYPE_MODE (sizetype),
2005 size, TREE_UNSIGNED (sizetype)),
2006 TYPE_MODE (sizetype));
2011 else if (partial > 0)
2013 /* Scalar partly in registers. */
2015 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2018 /* # words of start of argument
2019 that we must make space for but need not store. */
2020 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2021 int args_offset = INTVAL (args_so_far);
2024 /* Push padding now if padding above and stack grows down,
2025 or if padding below and stack grows up.
2026 But if space already allocated, this has already been done. */
2027 if (extra && args_addr == 0
2028 && where_pad != none && where_pad != stack_direction)
2029 anti_adjust_stack (GEN_INT (extra));
2031 /* If we make space by pushing it, we might as well push
2032 the real data. Otherwise, we can leave OFFSET nonzero
2033 and leave the space uninitialized. */
2037 /* Now NOT_STACK gets the number of words that we don't need to
2038 allocate on the stack. */
2039 not_stack = partial - offset;
2041 /* If the partial register-part of the arg counts in its stack size,
2042 skip the part of stack space corresponding to the registers.
2043 Otherwise, start copying to the beginning of the stack space,
2044 by setting SKIP to 0. */
2045 #ifndef REG_PARM_STACK_SPACE
2051 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2052 x = validize_mem (force_const_mem (mode, x));
2054 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2055 SUBREGs of such registers are not allowed. */
2056 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2057 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2058 x = copy_to_reg (x);
2060 /* Loop over all the words allocated on the stack for this arg. */
2061 /* We can do it by words, because any scalar bigger than a word
2062 has a size a multiple of a word. */
2063 #ifndef PUSH_ARGS_REVERSED
2064 for (i = not_stack; i < size; i++)
2066 for (i = size - 1; i >= not_stack; i--)
2068 if (i >= not_stack + offset)
2069 emit_push_insn (operand_subword_force (x, i, mode),
2070 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2072 GEN_INT (args_offset + ((i - not_stack + skip)
2073 * UNITS_PER_WORD)));
2079 /* Push padding now if padding above and stack grows down,
2080 or if padding below and stack grows up.
2081 But if space already allocated, this has already been done. */
2082 if (extra && args_addr == 0
2083 && where_pad != none && where_pad != stack_direction)
2084 anti_adjust_stack (GEN_INT (extra));
2086 #ifdef PUSH_ROUNDING
2088 addr = gen_push_operand ();
2091 if (GET_CODE (args_so_far) == CONST_INT)
2093 = memory_address (mode,
2094 plus_constant (args_addr, INTVAL (args_so_far)));
2096 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2099 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2103 /* If part should go in registers, copy that part
2104 into the appropriate registers. Do this now, at the end,
2105 since mem-to-mem copies above may do function calls. */
2106 if (partial > 0 && reg != 0)
2107 move_block_to_reg (REGNO (reg), x, partial, mode);
2109 if (extra && args_addr == 0 && where_pad == stack_direction)
2110 anti_adjust_stack (GEN_INT (extra));
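/* Editor's sketch (not part of the original file): pushing a hypothetical
   SImode argument VAL entirely on the stack, with no partial-register part,
   no extra padding, and no preallocated argument block (ARGS_ADDR of 0), as
   a caller on a machine with push insns might do.  The last argument is the
   CONST_INT offset of args already pushed for this call.

       emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                       PARM_BOUNDARY / BITS_PER_UNIT, 0, NULL_RTX, 0,
                       NULL_RTX, GEN_INT (0));

   SIZE is only needed for BLKmode arguments, so NULL_RTX suffices here.  */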
2113 /* Expand an assignment that stores the value of FROM into TO.
2114 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2115 (This may contain a QUEUED rtx.)
2116 Otherwise, the returned value is not meaningful.
2118 SUGGEST_REG is no longer actually used.
2119 It used to mean, copy the value through a register
2120 and return that register, if that is possible.
2121 But now we do this if WANT_VALUE.
2123 If the value stored is a constant, we return the constant. */
2126 expand_assignment (to, from, want_value, suggest_reg)
2131 register rtx to_rtx = 0;
2134 /* Don't crash if the lhs of the assignment was erroneous. */
2136 if (TREE_CODE (to) == ERROR_MARK)
2137 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2139 /* Assignment of a structure component needs special treatment
2140 if the structure component's rtx is not simply a MEM.
2141 Assignment of an array element at a constant index
2142 has the same problem. */
2144 if (TREE_CODE (to) == COMPONENT_REF
2145 || TREE_CODE (to) == BIT_FIELD_REF
2146 || (TREE_CODE (to) == ARRAY_REF
2147 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2148 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2150 enum machine_mode mode1;
2156 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2157 &mode1, &unsignedp, &volatilep);
2159 /* If we are going to use store_bit_field and extract_bit_field,
2160 make sure to_rtx will be safe for multiple use. */
2162 if (mode1 == VOIDmode && want_value)
2163 tem = stabilize_reference (tem);
2165 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2168 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2170 if (GET_CODE (to_rtx) != MEM)
2172 to_rtx = change_address (to_rtx, VOIDmode,
2173 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2174 force_reg (Pmode, offset_rtx)));
2178 if (GET_CODE (to_rtx) == MEM)
2179 MEM_VOLATILE_P (to_rtx) = 1;
2180 #if 0 /* This was turned off because, when a field is volatile
2181 in an object which is not volatile, the object may be in a register,
2182 and then we would abort over here. */
2188 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2190 /* Spurious cast makes HPUX compiler happy. */
2191 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2194 /* Required alignment of containing datum. */
2195 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2196 int_size_in_bytes (TREE_TYPE (tem)));
2197 preserve_temp_slots (result);
2200 /* If we aren't returning a result, just pass on what expand_expr
2201 returned; it was probably const0_rtx. Otherwise, convert RESULT
2202 to the proper mode. */
2203 return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
2204 TREE_UNSIGNED (TREE_TYPE (to)))
2208 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2209 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2212 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2214 /* Don't move directly into a return register. */
2215 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2217 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2218 emit_move_insn (to_rtx, temp);
2219 preserve_temp_slots (to_rtx);
2224 /* In case we are returning the contents of an object which overlaps
2225 the place the value is being stored, use a safe function when copying
2226 a value through a pointer into a structure value return block. */
2227 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2228 && current_function_returns_struct
2229 && !current_function_returns_pcc_struct)
2231 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2232 rtx size = expr_size (from);
2234 #ifdef TARGET_MEM_FUNCTIONS
2235 emit_library_call (memcpy_libfunc, 0,
2236 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2237 XEXP (from_rtx, 0), Pmode,
2238 convert_to_mode (TYPE_MODE (sizetype),
2239 size, TREE_UNSIGNED (sizetype)),
2240 TYPE_MODE (sizetype));
2242 emit_library_call (bcopy_libfunc, 0,
2243 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2244 XEXP (to_rtx, 0), Pmode,
2245 convert_to_mode (TYPE_MODE (sizetype),
2246 size, TREE_UNSIGNED (sizetype)),
2247 TYPE_MODE (sizetype));
2250 preserve_temp_slots (to_rtx);
2255 /* Compute FROM and store the value in the rtx we got. */
2257 result = store_expr (from, to_rtx, want_value);
2258 preserve_temp_slots (result);
2263 /* Generate code for computing expression EXP,
2264 and storing the value into TARGET.
2265 Returns TARGET or an equivalent value.
2266 TARGET may contain a QUEUED rtx.
2268 If SUGGEST_REG is nonzero, copy the value through a register
2269 and return that register, if that is possible.
2271 If the value stored is a constant, we return the constant. */
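/* A minimal sketch of a call (TO_RTX stands for an rtx already computed
   for the destination, much as expand_assignment obtains one above
   before calling us):

	temp = store_expr (from, to_rtx, 1);

   computes FROM, stores it into TO_RTX, and returns an rtx the caller
   may use as the value of the enclosing assignment expression.  */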
2274 store_expr (exp, target, suggest_reg)
2276 register rtx target;
2280 int dont_return_target = 0;
2282 if (TREE_CODE (exp) == COMPOUND_EXPR)
2284 /* Perform first part of compound expression, then assign from second part.  */
2286 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2288 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2290 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2292 /* For conditional expression, get safe form of the target. Then
2293 test the condition, doing the appropriate assignment on either
2294 side. This avoids the creation of unnecessary temporaries.
2295 For non-BLKmode, it is more efficient not to do this. */
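      /* The code emitted below has roughly this shape (an illustrative
	 C rendering only; COND, ARM1 and ARM2 stand for the three
	 operands of the COND_EXPR):

		if (! cond) goto lab1;
		*target = arm1;
		goto lab2;
	      lab1:
		*target = arm2;
	      lab2: ;  */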
2297 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2300 target = protect_from_queue (target, 1);
2303 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2304 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2306 emit_jump_insn (gen_jump (lab2));
2309 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2315 else if (suggest_reg && GET_CODE (target) == MEM
2316 && GET_MODE (target) != BLKmode)
2317 /* If target is in memory and caller wants value in a register instead,
2318 arrange that. Pass TARGET as target for expand_expr so that,
2319 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2320 We know expand_expr will not use the target in that case. */
2322 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2323 GET_MODE (target), 0);
2324 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2325 temp = copy_to_reg (temp);
2326 dont_return_target = 1;
2328 else if (queued_subexp_p (target))
2329 /* If target contains a postincrement, it is not safe
2330 to use as the returned value. It would access the wrong
2331 place by the time the queued increment gets output.
2332 So copy the value through a temporary and use that temp as the result.  */
2335 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2337 /* Expand EXP into a new pseudo. */
2338 temp = gen_reg_rtx (GET_MODE (target));
2339 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2342 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2343 dont_return_target = 1;
2345 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2346 /* If this is a scalar in a register that is stored in a wider mode
2347 than the declared mode, compute the result into its declared mode
2348 and then convert to the wider mode.  Our value is the computed expression.  */
2351 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2352 convert_move (SUBREG_REG (target), temp,
2353 SUBREG_PROMOTED_UNSIGNED_P (target));
2358 temp = expand_expr (exp, target, GET_MODE (target), 0);
2359 /* DO return TARGET if it's a specified hardware register.
2360 expand_return relies on this. */
2361 if (!(target && GET_CODE (target) == REG
2362 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2363 && CONSTANT_P (temp))
2364 dont_return_target = 1;
2367 /* If value was not generated in the target, store it there.
2368 Convert the value to TARGET's type first if necessary.  */
2370 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2372 target = protect_from_queue (target, 1);
2373 if (GET_MODE (temp) != GET_MODE (target)
2374 && GET_MODE (temp) != VOIDmode)
2376 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2377 if (dont_return_target)
2379 /* In this case, we will return TEMP,
2380 so make sure it has the proper mode.
2381 But don't forget to store the value into TARGET. */
2382 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2383 emit_move_insn (target, temp);
2386 convert_move (target, temp, unsignedp);
2389 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2391 /* Handle copying a string constant into an array.
2392 The string constant may be shorter than the array.
2393 So copy just the string's actual length, and clear the rest. */
2396 /* Get the size of the data type of the string,
2397 which is actually the size of the target. */
2398 size = expr_size (exp);
2399 if (GET_CODE (size) == CONST_INT
2400 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2401 emit_block_move (target, temp, size,
2402 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2405 /* Compute the size of the data to copy from the string. */
2407 = size_binop (MIN_EXPR,
2408 size_binop (CEIL_DIV_EXPR,
2409 TYPE_SIZE (TREE_TYPE (exp)),
2410 size_int (BITS_PER_UNIT)),
2412 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2413 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2417 /* Copy that much. */
2418 emit_block_move (target, temp, copy_size_rtx,
2419 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2421 /* Figure out how much is left in TARGET
2422 that we have to clear. */
2423 if (GET_CODE (copy_size_rtx) == CONST_INT)
2425 temp = plus_constant (XEXP (target, 0),
2426 TREE_STRING_LENGTH (exp));
2427 size = plus_constant (size,
2428 - TREE_STRING_LENGTH (exp));
2432 enum machine_mode size_mode = Pmode;
2434 temp = force_reg (Pmode, XEXP (target, 0));
2435 temp = expand_binop (size_mode, add_optab, temp,
2436 copy_size_rtx, NULL_RTX, 0,
2439 size = expand_binop (size_mode, sub_optab, size,
2440 copy_size_rtx, NULL_RTX, 0,
2443 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2444 GET_MODE (size), 0, 0);
2445 label = gen_label_rtx ();
2446 emit_jump_insn (gen_blt (label));
2449 if (size != const0_rtx)
2451 #ifdef TARGET_MEM_FUNCTIONS
2452 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2453 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2455 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2456 temp, Pmode, size, Pmode);
2463 else if (GET_MODE (temp) == BLKmode)
2464 emit_block_move (target, temp, expr_size (exp),
2465 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2467 emit_move_insn (target, temp);
2469 if (dont_return_target)
2474 /* Store the value of constructor EXP into the rtx TARGET.
2475 TARGET is either a REG or a MEM. */
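/* A source-level illustration (hypothetical example): for

	struct point { int x, y; };
	struct point p = { 3, 4 };

   the initializer `{ 3, 4 }' is a CONSTRUCTOR tree, and this function
   stores its elements into the rtx for `p' one field at a time by
   calling store_field below.  */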
2478 store_constructor (exp, target)
2482 tree type = TREE_TYPE (exp);
2484 /* We know our target cannot conflict, since safe_from_p has been called. */
2486 /* Don't try copying piece by piece into a hard register
2487 since that is vulnerable to being clobbered by EXP.
2488 Instead, construct in a pseudo register and then copy it all. */
2489 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2491 rtx temp = gen_reg_rtx (GET_MODE (target));
2492 store_constructor (exp, temp);
2493 emit_move_insn (target, temp);
2498 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2502 /* Inform later passes that the whole union value is dead. */
2503 if (TREE_CODE (type) == UNION_TYPE)
2504 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2506 /* If we are building a static constructor into a register,
2507 set the initial value as zero so we can fold the value into a constant.  */
2509 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2510 emit_move_insn (target, const0_rtx);
2512 /* If the constructor has fewer fields than the structure,
2513 clear the whole structure first. */
2514 else if (list_length (CONSTRUCTOR_ELTS (exp))
2515 != list_length (TYPE_FIELDS (type)))
2516 clear_storage (target, int_size_in_bytes (type));
2518 /* Inform later passes that the old value is dead. */
2519 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2521 /* Store each element of the constructor into
2522 the corresponding field of TARGET. */
2524 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2526 register tree field = TREE_PURPOSE (elt);
2527 register enum machine_mode mode;
2532 /* Just ignore missing fields.
2533 We cleared the whole structure, above,
2534 if any fields are missing. */
2538 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2539 unsignedp = TREE_UNSIGNED (field);
2540 mode = DECL_MODE (field);
2541 if (DECL_BIT_FIELD (field))
2544 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2545 /* ??? This case remains to be written. */
2548 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2550 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2551 /* The alignment of TARGET is
2552 at least what its type requires. */
2554 TYPE_ALIGN (type) / BITS_PER_UNIT,
2555 int_size_in_bytes (type));
2558 else if (TREE_CODE (type) == ARRAY_TYPE)
2562 tree domain = TYPE_DOMAIN (type);
2563 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2564 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2565 tree elttype = TREE_TYPE (type);
2567 /* If the constructor has fewer fields than the structure,
2568 clear the whole structure first.  Similarly if this is a
2569 static constructor of a non-BLKmode object. */
2571 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2572 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2573 clear_storage (target, int_size_in_bytes (type));
2575 /* Inform later passes that the old value is dead. */
2576 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2578 /* Store each element of the constructor into
2579 the corresponding element of TARGET, determined
2580 by counting the elements. */
2581 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2583 elt = TREE_CHAIN (elt), i++)
2585 register enum machine_mode mode;
2590 mode = TYPE_MODE (elttype);
2591 bitsize = GET_MODE_BITSIZE (mode);
2592 unsignedp = TREE_UNSIGNED (elttype);
2594 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2596 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2597 /* The alignment of TARGET is
2598 at least what its type requires. */
2600 TYPE_ALIGN (type) / BITS_PER_UNIT,
2601 int_size_in_bytes (type));
2609 /* Store the value of EXP (an expression tree)
2610 into a subfield of TARGET which has mode MODE and occupies
2611 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2612 If MODE is VOIDmode, it means that we are storing into a bit-field.
2614 If VALUE_MODE is VOIDmode, return nothing in particular.
2615 UNSIGNEDP is not used in this case.
2617 Otherwise, return an rtx for the value stored. This rtx
2618 has mode VALUE_MODE if that is convenient to do.
2619 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2621 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2622 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
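/* For instance (a hypothetical source-level example; the exact bit
   numbers depend on the target), given

	struct s { unsigned int a : 3, b : 5; } x;
	x.b = 7;

   the store into `x.b' typically reaches this function with MODE equal
   to VOIDmode, BITSIZE 5 and BITPOS 3, and is carried out by
   store_bit_field.  */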
2625 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2626 unsignedp, align, total_size)
2628 int bitsize, bitpos;
2629 enum machine_mode mode;
2631 enum machine_mode value_mode;
2636 HOST_WIDE_INT width_mask = 0;
2638 if (bitsize < HOST_BITS_PER_WIDE_INT)
2639 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2641 /* If we are storing into an unaligned field of an aligned union that is
2642 in a register, we may have the mode of TARGET being an integer mode but
2643 MODE == BLKmode. In that case, get an aligned object whose size and
2644 alignment are the same as TARGET and store TARGET into it (we can avoid
2645 the store if the field being stored is the entire width of TARGET). Then
2646 call ourselves recursively to store the field into a BLKmode version of
2647 that object. Finally, load from the object into TARGET. This is not
2648 very efficient in general, but should only be slightly more expensive
2649 than the otherwise-required unaligned accesses. Perhaps this can be
2650 cleaned up later. */
2653 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2655 rtx object = assign_stack_temp (GET_MODE (target),
2656 GET_MODE_SIZE (GET_MODE (target)), 0);
2657 rtx blk_object = copy_rtx (object);
2659 PUT_MODE (blk_object, BLKmode);
2661 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2662 emit_move_insn (object, target);
2664 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2667 emit_move_insn (target, object);
2672 /* If the structure is in a register or if the component
2673 is a bit field, we cannot use addressing to access it.
2674 Use bit-field techniques or SUBREG to store in it. */
2676 if (mode == VOIDmode
2677 || (mode != BLKmode && ! direct_store[(int) mode])
2678 || GET_CODE (target) == REG
2679 || GET_CODE (target) == SUBREG)
2681 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2682 /* Store the value in the bitfield. */
2683 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2684 if (value_mode != VOIDmode)
2686 /* The caller wants an rtx for the value. */
2687 /* If possible, avoid refetching from the bitfield itself. */
2689 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2692 enum machine_mode tmode;
2695 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2696 tmode = GET_MODE (temp);
2697 if (tmode == VOIDmode)
2699 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2700 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2701 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2703 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2704 NULL_RTX, value_mode, 0, align,
2711 rtx addr = XEXP (target, 0);
2714 /* If a value is wanted, it must be the lhs;
2715 so make the address stable for multiple use. */
2717 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2718 && ! CONSTANT_ADDRESS_P (addr)
2719 /* A frame-pointer reference is already stable. */
2720 && ! (GET_CODE (addr) == PLUS
2721 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2722 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2723 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2724 addr = copy_to_reg (addr);
2726 /* Now build a reference to just the desired component. */
2728 to_rtx = change_address (target, mode,
2729 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2730 MEM_IN_STRUCT_P (to_rtx) = 1;
2732 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2736 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2737 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2738 ARRAY_REFs and find the ultimate containing object, which we return.
2740 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2741 bit position, and *PUNSIGNEDP to the signedness of the field.
2742 If the position of the field is variable, we store a tree
2743 giving the variable offset (in units) in *POFFSET.
2744 This offset is in addition to the bit position.
2745 If the position is not variable, we store 0 in *POFFSET.
2747 If any of the extraction expressions is volatile,
2748 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2750 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2751 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
2754 If the field describes a variable-sized object, *PMODE is set to
2755 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2756 this case, but the address of the object can be found. */
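/* As an illustration (hypothetical source; the numbers assume a target
   with 32-bit int and 8-bit char), for the reference `x.b' in

	struct s { int a; char b; } x;

   EXP is the COMPONENT_REF; the tree returned is the one for `x', with
   *PBITSIZE set to 8, *PBITPOS to 32, *POFFSET to 0 and *PMODE to QImode,
   since `b' is not a bit-field.  */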
2759 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2760 punsignedp, pvolatilep)
2765 enum machine_mode *pmode;
2770 enum machine_mode mode = VOIDmode;
2771 tree offset = integer_zero_node;
2773 if (TREE_CODE (exp) == COMPONENT_REF)
2775 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2776 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2777 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2778 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2780 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2782 size_tree = TREE_OPERAND (exp, 1);
2783 *punsignedp = TREE_UNSIGNED (exp);
2787 mode = TYPE_MODE (TREE_TYPE (exp));
2788 *pbitsize = GET_MODE_BITSIZE (mode);
2789 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2794 if (TREE_CODE (size_tree) != INTEGER_CST)
2795 mode = BLKmode, *pbitsize = -1;
2797 *pbitsize = TREE_INT_CST_LOW (size_tree);
2800 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2801 and find the ultimate containing object. */
2807 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2809 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2810 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2811 : TREE_OPERAND (exp, 2));
2813 /* If this field hasn't been filled in yet, don't go
2814 past it. This should only happen when folding expressions
2815 made during type construction. */
2819 if (TREE_CODE (pos) == PLUS_EXPR)
2822 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2824 constant = TREE_OPERAND (pos, 0);
2825 var = TREE_OPERAND (pos, 1);
2827 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2829 constant = TREE_OPERAND (pos, 1);
2830 var = TREE_OPERAND (pos, 0);
2835 *pbitpos += TREE_INT_CST_LOW (constant);
2836 offset = size_binop (PLUS_EXPR, offset,
2837 size_binop (FLOOR_DIV_EXPR, var,
2838 size_int (BITS_PER_UNIT)));
2840 else if (TREE_CODE (pos) == INTEGER_CST)
2841 *pbitpos += TREE_INT_CST_LOW (pos);
2844 /* Assume here that the offset is a multiple of a unit.
2845 If not, there should be an explicitly added constant. */
2846 offset = size_binop (PLUS_EXPR, offset,
2847 size_binop (FLOOR_DIV_EXPR, pos,
2848 size_int (BITS_PER_UNIT)));
2852 else if (TREE_CODE (exp) == ARRAY_REF)
2854 /* This code is based on the code in case ARRAY_REF in expand_expr
2855 below. We assume here that the size of an array element is
2856 always an integral multiple of BITS_PER_UNIT. */
2858 tree index = TREE_OPERAND (exp, 1);
2859 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2861 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2862 tree index_type = TREE_TYPE (index);
2864 if (! integer_zerop (low_bound))
2865 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2867 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2869 index = convert (type_for_size (POINTER_SIZE, 0), index);
2870 index_type = TREE_TYPE (index);
2873 index = fold (build (MULT_EXPR, index_type, index,
2874 TYPE_SIZE (TREE_TYPE (exp))));
2876 if (TREE_CODE (index) == INTEGER_CST
2877 && TREE_INT_CST_HIGH (index) == 0)
2878 *pbitpos += TREE_INT_CST_LOW (index);
2880 offset = size_binop (PLUS_EXPR, offset,
2881 size_binop (FLOOR_DIV_EXPR, index,
2882 size_int (BITS_PER_UNIT)));
2884 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2885 && ! ((TREE_CODE (exp) == NOP_EXPR
2886 || TREE_CODE (exp) == CONVERT_EXPR)
2887 && (TYPE_MODE (TREE_TYPE (exp))
2888 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2891 /* If any reference in the chain is volatile, the effect is volatile. */
2892 if (TREE_THIS_VOLATILE (exp))
2894 exp = TREE_OPERAND (exp, 0);
2897 /* If this was a bit-field, see if there is a mode that allows direct
2898 access in case EXP is in memory. */
2899 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
2901 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2902 if (mode == BLKmode)
2906 if (integer_zerop (offset))
2912 /* We aren't finished fixing the callers to really handle nonzero offset. */
2920 /* Given an rtx VALUE that may contain additions and multiplications,
2921 return an equivalent value that just refers to a register or memory.
2922 This is done by generating instructions to perform the arithmetic
2923 and returning a pseudo-register containing the value.
2925 The returned value may be a REG, SUBREG, MEM or constant. */
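/* A typical use (an illustrative sketch; BASE and INDEX stand for
   pseudo registers already in hand):

	rtx sum = gen_rtx (PLUS, Pmode, base, index);
	sum = force_operand (sum, NULL_RTX);

   This emits an add insn and leaves SUM a pseudo register holding the
   result, usable wherever only a REG, MEM or constant is acceptable.  */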
2928 force_operand (value, target)
2931 register optab binoptab = 0;
2932 /* Use a temporary to force order of execution of calls to `force_operand'.  */
2936 /* Use subtarget as the target for operand 0 of a binary operation. */
2937 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2939 if (GET_CODE (value) == PLUS)
2940 binoptab = add_optab;
2941 else if (GET_CODE (value) == MINUS)
2942 binoptab = sub_optab;
2943 else if (GET_CODE (value) == MULT)
2945 op2 = XEXP (value, 1);
2946 if (!CONSTANT_P (op2)
2947 && !(GET_CODE (op2) == REG && op2 != subtarget))
2949 tmp = force_operand (XEXP (value, 0), subtarget);
2950 return expand_mult (GET_MODE (value), tmp,
2951 force_operand (op2, NULL_RTX),
2957 op2 = XEXP (value, 1);
2958 if (!CONSTANT_P (op2)
2959 && !(GET_CODE (op2) == REG && op2 != subtarget))
2961 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2963 binoptab = add_optab;
2964 op2 = negate_rtx (GET_MODE (value), op2);
2967 /* Check for an addition with OP2 a constant integer and our first
2968 operand a PLUS of a virtual register and something else. In that
2969 case, we want to emit the sum of the virtual register and the
2970 constant first and then add the other value. This allows virtual
2971 register instantiation to simply modify the constant rather than
2972 creating another one around this addition. */
2973 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2974 && GET_CODE (XEXP (value, 0)) == PLUS
2975 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2976 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2977 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2979 rtx temp = expand_binop (GET_MODE (value), binoptab,
2980 XEXP (XEXP (value, 0), 0), op2,
2981 subtarget, 0, OPTAB_LIB_WIDEN);
2982 return expand_binop (GET_MODE (value), binoptab, temp,
2983 force_operand (XEXP (XEXP (value, 0), 1), 0),
2984 target, 0, OPTAB_LIB_WIDEN);
2987 tmp = force_operand (XEXP (value, 0), subtarget);
2988 return expand_binop (GET_MODE (value), binoptab, tmp,
2989 force_operand (op2, NULL_RTX),
2990 target, 0, OPTAB_LIB_WIDEN);
2991 /* We give UNSIGNEDP = 0 to expand_binop
2992 because the only operations we are expanding here are signed ones. */
2997 /* Subroutine of expand_expr:
2998 save the non-copied parts (LIST) of an expr (LHS), and return a list
2999 which can restore these values to their previous values,
3000 should something modify their storage. */
3003 save_noncopied_parts (lhs, list)
3010 for (tail = list; tail; tail = TREE_CHAIN (tail))
3011 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3012 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3015 tree part = TREE_VALUE (tail);
3016 tree part_type = TREE_TYPE (part);
3017 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3018 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3019 int_size_in_bytes (part_type), 0);
3020 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3021 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3022 parts = tree_cons (to_be_saved,
3023 build (RTL_EXPR, part_type, NULL_TREE,
3026 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3031 /* Subroutine of expand_expr:
3032 record the non-copied parts (LIST) of an expr (LHS), and return a list
3033 which specifies the initial values of these parts. */
3036 init_noncopied_parts (lhs, list)
3043 for (tail = list; tail; tail = TREE_CHAIN (tail))
3044 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3045 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3048 tree part = TREE_VALUE (tail);
3049 tree part_type = TREE_TYPE (part);
3050 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3051 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3056 /* Subroutine of expand_expr: return nonzero iff there is no way that
3057 EXP can reference X, which is being modified. */
3060 safe_from_p (x, exp)
3070 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
3071 find the underlying pseudo. */
3072 if (GET_CODE (x) == SUBREG)
3075 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3079 /* If X is a location in the outgoing argument area, it is always safe. */
3080 if (GET_CODE (x) == MEM
3081 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3082 || (GET_CODE (XEXP (x, 0)) == PLUS
3083 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3086 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3089 exp_rtl = DECL_RTL (exp);
3096 if (TREE_CODE (exp) == TREE_LIST)
3097 return ((TREE_VALUE (exp) == 0
3098 || safe_from_p (x, TREE_VALUE (exp)))
3099 && (TREE_CHAIN (exp) == 0
3100 || safe_from_p (x, TREE_CHAIN (exp))));
3105 return safe_from_p (x, TREE_OPERAND (exp, 0));
3109 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3110 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3114 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3115 the expression. If it is set, we conflict iff we are that rtx or
3116 both are in memory. Otherwise, we check all operands of the
3117 expression recursively. */
3119 switch (TREE_CODE (exp))
3122 return staticp (TREE_OPERAND (exp, 0));
3125 if (GET_CODE (x) == MEM)
3130 exp_rtl = CALL_EXPR_RTL (exp);
3133 /* Assume that the call will clobber all hard registers and all of memory.  */
3135 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3136 || GET_CODE (x) == MEM)
3143 exp_rtl = RTL_EXPR_RTL (exp);
3145 /* We don't know what this can modify. */
3150 case WITH_CLEANUP_EXPR:
3151 exp_rtl = RTL_EXPR_RTL (exp);
3155 exp_rtl = SAVE_EXPR_RTL (exp);
3159 /* The only operand we look at is operand 1. The rest aren't
3160 part of the expression. */
3161 return safe_from_p (x, TREE_OPERAND (exp, 1));
3163 case METHOD_CALL_EXPR:
3164 /* This takes a rtx argument, but shouldn't appear here. */
3168 /* If we have an rtx, we do not need to scan our operands. */
3172 nops = tree_code_length[(int) TREE_CODE (exp)];
3173 for (i = 0; i < nops; i++)
3174 if (TREE_OPERAND (exp, i) != 0
3175 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3179 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
3183 if (GET_CODE (exp_rtl) == SUBREG)
3185 exp_rtl = SUBREG_REG (exp_rtl);
3186 if (GET_CODE (exp_rtl) == REG
3187 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3191 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3192 are memory and EXP is not readonly. */
3193 return ! (rtx_equal_p (x, exp_rtl)
3194 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3195 && ! TREE_READONLY (exp)));
3198 /* If we reach here, it is safe. */
3202 /* Subroutine of expand_expr: return nonzero iff EXP is an
3203 expression whose type is statically determinable. */
3209 if (TREE_CODE (exp) == PARM_DECL
3210 || TREE_CODE (exp) == VAR_DECL
3211 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3212 || TREE_CODE (exp) == COMPONENT_REF
3213 || TREE_CODE (exp) == ARRAY_REF)
3218 /* expand_expr: generate code for computing expression EXP.
3219 An rtx for the computed value is returned. The value is never null.
3220 In the case of a void EXP, const0_rtx is returned.
3222 The value may be stored in TARGET if TARGET is nonzero.
3223 TARGET is just a suggestion; callers must assume that
3224 the rtx returned may not be the same as TARGET.
3226 If TARGET is CONST0_RTX, it means that the value will be ignored.
3228 If TMODE is not VOIDmode, it suggests generating the
3229 result in mode TMODE. But this is done only when convenient.
3230 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3231 TMODE is just a suggestion; callers must assume that
3232 the rtx returned may not have mode TMODE.
3234 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3235 with a constant address even if that address is not normally legitimate.
3236 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3238 If MODIFIER is EXPAND_SUM then when EXP is an addition
3239 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3240 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3241 products as above, or REG or MEM, or constant.
3242 Ordinarily in such cases we would output mul or add instructions
3243 and then return a pseudo reg containing the sum.
3245 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3246 it also marks a label as absolutely required (it can't be dead).
3247 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3248 This is used for outputting expressions used in initializers. */
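/* Typical calls (shown only as an illustration):

	temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   computes EXP into whatever rtx is convenient, while

	op0 = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

   requests an address-like sum and then validates it, as several
   callers later in this file do.  */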
3251 expand_expr (exp, target, tmode, modifier)
3254 enum machine_mode tmode;
3255 enum expand_modifier modifier;
3257 register rtx op0, op1, temp;
3258 tree type = TREE_TYPE (exp);
3259 int unsignedp = TREE_UNSIGNED (type);
3260 register enum machine_mode mode = TYPE_MODE (type);
3261 register enum tree_code code = TREE_CODE (exp);
3263 /* Use subtarget as the target for operand 0 of a binary operation. */
3264 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3265 rtx original_target = target;
3266 int ignore = (target == const0_rtx
3267 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3268 || code == CONVERT_EXPR || code == REFERENCE_EXPR)
3269 && TREE_CODE (type) == VOID_TYPE));
3272 /* Don't use hard regs as subtargets, because the combiner
3273 can only handle pseudo regs. */
3274 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3276 /* Avoid subtargets inside loops,
3277 since they hide some invariant expressions. */
3278 if (preserve_subexpressions_p ())
3281 /* If we are going to ignore this result, we need only do something
3282 if there is a side-effect somewhere in the expression. If there
3283 is, short-circuit the most common cases here. */
3287 if (! TREE_SIDE_EFFECTS (exp))
3290 /* Ensure we reference a volatile object even if value is ignored. */
3291 if (TREE_THIS_VOLATILE (exp)
3292 && TREE_CODE (exp) != FUNCTION_DECL
3293 && mode != VOIDmode && mode != BLKmode)
3295 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3296 if (GET_CODE (temp) == MEM)
3297 temp = copy_to_reg (temp);
3301 if (TREE_CODE_CLASS (code) == '1')
3302 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3303 VOIDmode, modifier);
3304 else if (TREE_CODE_CLASS (code) == '2'
3305 || TREE_CODE_CLASS (code) == '<')
3307 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3308 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3311 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3312 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3313 /* If the second operand has no side effects, just evaluate the first.  */
3315 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3316 VOIDmode, modifier);
3317 /* If we will do cse, generate all results into pseudo registers,
3318 since 1) that allows cse to find more things
3319 and 2) otherwise cse could produce an insn the machine cannot support.  */
3322 target = 0, original_target = 0;
3325 if (! cse_not_expected && mode != BLKmode && target
3326 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3333 tree function = decl_function_context (exp);
3334 /* Handle using a label in a containing function. */
3335 if (function != current_function_decl && function != 0)
3337 struct function *p = find_function_data (function);
3338 /* Allocate in the memory associated with the function
3339 that the label is in. */
3340 push_obstacks (p->function_obstack,
3341 p->function_maybepermanent_obstack);
3343 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3344 label_rtx (exp), p->forced_labels);
3347 else if (modifier == EXPAND_INITIALIZER)
3348 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3349 label_rtx (exp), forced_labels);
3350 temp = gen_rtx (MEM, FUNCTION_MODE,
3351 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3352 if (function != current_function_decl && function != 0)
3353 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3358 if (DECL_RTL (exp) == 0)
3360 error_with_decl (exp, "prior parameter's size depends on `%s'");
3361 return CONST0_RTX (mode);
3367 if (DECL_RTL (exp) == 0)
3369 /* Ensure variable marked as used
3370 even if it doesn't go through a parser. */
3371 TREE_USED (exp) = 1;
3372 /* Handle variables inherited from containing functions. */
3373 context = decl_function_context (exp);
3375 /* We treat inline_function_decl as an alias for the current function
3376 because that is the inline function whose vars, types, etc.
3377 are being merged into the current function.
3378 See expand_inline_function. */
3379 if (context != 0 && context != current_function_decl
3380 && context != inline_function_decl
3381 /* If var is static, we don't need a static chain to access it. */
3382 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3383 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3387 /* Mark as non-local and addressable. */
3388 DECL_NONLOCAL (exp) = 1;
3389 mark_addressable (exp);
3390 if (GET_CODE (DECL_RTL (exp)) != MEM)
3392 addr = XEXP (DECL_RTL (exp), 0);
3393 if (GET_CODE (addr) == MEM)
3394 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3396 addr = fix_lexical_addr (addr, exp);
3397 return change_address (DECL_RTL (exp), mode, addr);
3400 /* This is the case of an array whose size is to be determined
3401 from its initializer, while the initializer is still being parsed.
3403 if (GET_CODE (DECL_RTL (exp)) == MEM
3404 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3405 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3406 XEXP (DECL_RTL (exp), 0));
3407 if (GET_CODE (DECL_RTL (exp)) == MEM
3408 && modifier != EXPAND_CONST_ADDRESS
3409 && modifier != EXPAND_SUM
3410 && modifier != EXPAND_INITIALIZER)
3412 /* DECL_RTL probably contains a constant address.
3413 On RISC machines where a constant address isn't valid,
3414 make some insns to get that address into a register. */
3415 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3417 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3418 return change_address (DECL_RTL (exp), VOIDmode,
3419 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3422 /* If the mode of DECL_RTL does not match that of the decl, it
3423 must be a promoted value. We return a SUBREG of the wanted mode,
3424 but mark it so that we know that it was already extended. */
3426 if (GET_CODE (DECL_RTL (exp)) == REG
3427 && GET_MODE (DECL_RTL (exp)) != mode)
3429 enum machine_mode decl_mode = DECL_MODE (exp);
3431 /* Get the signedness used for this variable. Ensure we get the
3432 same mode we got when the variable was declared. */
3434 PROMOTE_MODE (decl_mode, unsignedp, type);
3436 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3439 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3440 SUBREG_PROMOTED_VAR_P (temp) = 1;
3441 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3445 return DECL_RTL (exp);
3448 return immed_double_const (TREE_INT_CST_LOW (exp),
3449 TREE_INT_CST_HIGH (exp),
3453 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3456 /* If optimized, generate immediate CONST_DOUBLE
3457 which will be turned into memory by reload if necessary.
3459 We used to force a register so that loop.c could see it. But
3460 this does not allow gen_* patterns to perform optimizations with
3461 the constants. It also produces two insns in cases like "x = 1.0;".
3462 On most machines, floating-point constants are not permitted in
3463 many insns, so we'd end up copying it to a register in any case.
3465 Now, we do the copying in expand_binop, if appropriate. */
3466 return immed_real_const (exp);
3470 if (! TREE_CST_RTL (exp))
3471 output_constant_def (exp);
3473 /* TREE_CST_RTL probably contains a constant address.
3474 On RISC machines where a constant address isn't valid,
3475 make some insns to get that address into a register. */
3476 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3477 && modifier != EXPAND_CONST_ADDRESS
3478 && modifier != EXPAND_INITIALIZER
3479 && modifier != EXPAND_SUM
3480 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3481 return change_address (TREE_CST_RTL (exp), VOIDmode,
3482 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3483 return TREE_CST_RTL (exp);
3486 context = decl_function_context (exp);
3487 /* We treat inline_function_decl as an alias for the current function
3488 because that is the inline function whose vars, types, etc.
3489 are being merged into the current function.
3490 See expand_inline_function. */
3491 if (context == current_function_decl || context == inline_function_decl)
3494 /* If this is non-local, handle it. */
3497 temp = SAVE_EXPR_RTL (exp);
3498 if (temp && GET_CODE (temp) == REG)
3500 put_var_into_stack (exp);
3501 temp = SAVE_EXPR_RTL (exp);
3503 if (temp == 0 || GET_CODE (temp) != MEM)
3505 return change_address (temp, mode,
3506 fix_lexical_addr (XEXP (temp, 0), exp));
3508 if (SAVE_EXPR_RTL (exp) == 0)
3510 if (mode == BLKmode)
3512 = assign_stack_temp (mode,
3513 int_size_in_bytes (TREE_TYPE (exp)), 0);
3516 enum machine_mode var_mode = mode;
3518 if (TREE_CODE (type) == INTEGER_TYPE
3519 || TREE_CODE (type) == ENUMERAL_TYPE
3520 || TREE_CODE (type) == BOOLEAN_TYPE
3521 || TREE_CODE (type) == CHAR_TYPE
3522 || TREE_CODE (type) == REAL_TYPE
3523 || TREE_CODE (type) == POINTER_TYPE
3524 || TREE_CODE (type) == OFFSET_TYPE)
3526 PROMOTE_MODE (var_mode, unsignedp, type);
3529 temp = gen_reg_rtx (var_mode);
3532 SAVE_EXPR_RTL (exp) = temp;
3533 if (!optimize && GET_CODE (temp) == REG)
3534 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3537 /* If the mode of TEMP does not match that of the expression, it
3538 must be a promoted value. We pass store_expr a SUBREG of the
3539 wanted mode but mark it so that we know that it was already
3540 extended. Note that `unsignedp' was modified above in this case.  */
3543 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3545 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3546 SUBREG_PROMOTED_VAR_P (temp) = 1;
3547 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3550 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3553 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3554 must be a promoted value. We return a SUBREG of the wanted mode,
3555 but mark it so that we know that it was already extended. Note
3556 that `unsignedp' was modified above in this case. */
3558 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3559 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3561 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3562 SUBREG_PROMOTED_VAR_P (temp) = 1;
3563 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3567 return SAVE_EXPR_RTL (exp);
3570 /* Exit the current loop if the body-expression is true. */
3572 rtx label = gen_label_rtx ();
3573 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3574 expand_exit_loop (NULL_PTR);
3580 expand_start_loop (1);
3581 expand_expr_stmt (TREE_OPERAND (exp, 0));
3588 tree vars = TREE_OPERAND (exp, 0);
3589 int vars_need_expansion = 0;
3591 /* Need to open a binding contour here because
3592 if there are any cleanups they must be contained here. */
3593 expand_start_bindings (0);
3595 /* Mark the corresponding BLOCK for output in its proper place. */
3596 if (TREE_OPERAND (exp, 2) != 0
3597 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3598 insert_block (TREE_OPERAND (exp, 2));
3600 /* If VARS have not yet been expanded, expand them now. */
3603 if (DECL_RTL (vars) == 0)
3605 vars_need_expansion = 1;
3608 expand_decl_init (vars);
3609 vars = TREE_CHAIN (vars);
3612 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3614 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3620 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3622 emit_insns (RTL_EXPR_SEQUENCE (exp));
3623 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3624 return RTL_EXPR_RTL (exp);
3627 /* If we don't need the result, just ensure we evaluate any subexpressions.  */
3632 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3633 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3636 /* All elts simple constants => refer to a constant in memory. But
3637 if this is a non-BLKmode mode, let it store a field at a time
3638 since that should make a CONST_INT or CONST_DOUBLE when we
3639 fold. If we are making an initializer and all operands are
3640 constant, put it in memory as well. */
3641 else if ((TREE_STATIC (exp)
3642 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3643 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
3645 rtx constructor = output_constant_def (exp);
3646 if (modifier != EXPAND_CONST_ADDRESS
3647 && modifier != EXPAND_INITIALIZER
3648 && modifier != EXPAND_SUM
3649 && !memory_address_p (GET_MODE (constructor),
3650 XEXP (constructor, 0)))
3651 constructor = change_address (constructor, VOIDmode,
3652 XEXP (constructor, 0));
3658 if (target == 0 || ! safe_from_p (target, exp))
3660 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3661 target = gen_reg_rtx (mode);
3664 enum tree_code c = TREE_CODE (type);
3666 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3667 if (c == RECORD_TYPE || c == UNION_TYPE
3668 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3669 MEM_IN_STRUCT_P (target) = 1;
3672 store_constructor (exp, target);
3678 tree exp1 = TREE_OPERAND (exp, 0);
3681 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3682 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3683 This code has the same general effect as simply doing
3684 expand_expr on the save expr, except that the expression PTR
3685 is computed for use as a memory address. This means different
3686 code, suitable for indexing, may be generated. */
3687 if (TREE_CODE (exp1) == SAVE_EXPR
3688 && SAVE_EXPR_RTL (exp1) == 0
3689 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3690 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3691 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3693 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3694 VOIDmode, EXPAND_SUM);
3695 op0 = memory_address (mode, temp);
3696 op0 = copy_all_regs (op0);
3697 SAVE_EXPR_RTL (exp1) = op0;
3701 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3702 op0 = memory_address (mode, op0);
3705 temp = gen_rtx (MEM, mode, op0);
3706 /* If address was computed by addition,
3707 mark this as an element of an aggregate. */
3708 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3709 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3710 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3711 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3712 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3713 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3714 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3715 || (TREE_CODE (exp1) == ADDR_EXPR
3716 && (exp2 = TREE_OPERAND (exp1, 0))
3717 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3718 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3719 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3720 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
3721 MEM_IN_STRUCT_P (temp) = 1;
3722 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3723 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3724 a location is accessed through a pointer to const does not mean
3725 that the value there can never change. */
3726 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3732 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3736 tree array = TREE_OPERAND (exp, 0);
3737 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3738 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3739 tree index = TREE_OPERAND (exp, 1);
3740 tree index_type = TREE_TYPE (index);
3743 /* Optimize the special-case of a zero lower bound. */
3744 if (! integer_zerop (low_bound))
3745 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3747 if (TREE_CODE (index) != INTEGER_CST
3748 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3750 /* Nonconstant array index or nonconstant element size.
3751 Generate the tree for *(&array+index) and expand that,
3752 except do it in a language-independent way
3753 and don't complain about non-lvalue arrays.
3754 `mark_addressable' should already have been called
3755 for any array for which this case will be reached. */
3757 /* Don't forget the const or volatile flag from the array element.  */
3759 tree variant_type = build_type_variant (type,
3760 TREE_READONLY (exp),
3761 TREE_THIS_VOLATILE (exp));
3762 tree array_adr = build1 (ADDR_EXPR,
3763 build_pointer_type (variant_type), array);
3766 /* Convert the integer argument to a type the same size as a
3767 pointer so the multiply won't overflow spuriously. */
3768 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3769 index = convert (type_for_size (POINTER_SIZE, 0), index);
3771 /* Don't think the address has side effects
3772 just because the array does.
3773 (In some cases the address might have side effects,
3774 and we fail to record that fact here. However, it should not
3775 matter, since expand_expr should not care.) */
3776 TREE_SIDE_EFFECTS (array_adr) = 0;
3778 elt = build1 (INDIRECT_REF, type,
3779 fold (build (PLUS_EXPR,
3780 TYPE_POINTER_TO (variant_type),
3782 fold (build (MULT_EXPR,
3783 TYPE_POINTER_TO (variant_type),
3785 size_in_bytes (type))))));
3787 /* Volatility, etc., of new expression is same as old expression.  */
3789 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3790 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3791 TREE_READONLY (elt) = TREE_READONLY (exp);
3793 return expand_expr (elt, target, tmode, modifier);
3796 /* Fold an expression like: "foo"[2].
3797 This is not done in fold so it won't happen inside &. */
3799 if (TREE_CODE (array) == STRING_CST
3800 && TREE_CODE (index) == INTEGER_CST
3801 && !TREE_INT_CST_HIGH (index)
3802 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3804 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3806 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3807 TREE_TYPE (exp) = integer_type_node;
3808 return expand_expr (exp, target, tmode, modifier);
3810 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3812 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3813 TREE_TYPE (exp) = integer_type_node;
3814 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3816 target, tmode, modifier);
3820 /* If this is a constant index into a constant array,
3821 just get the value from the array. Handle both the cases when
3822 we have an explicit constructor and when our operand is a variable
3823 that was declared const. */
3825 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3827 if (TREE_CODE (index) == INTEGER_CST
3828 && TREE_INT_CST_HIGH (index) == 0)
3830 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3832 i = TREE_INT_CST_LOW (index);
3834 elem = TREE_CHAIN (elem);
3836 return expand_expr (fold (TREE_VALUE (elem)), target,
3841 else if (optimize >= 1
3842 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3843 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3844 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3846 if (TREE_CODE (index) == INTEGER_CST
3847 && TREE_INT_CST_HIGH (index) == 0)
3849 tree init = DECL_INITIAL (array);
3851 i = TREE_INT_CST_LOW (index);
3852 if (TREE_CODE (init) == CONSTRUCTOR)
3854 tree elem = CONSTRUCTOR_ELTS (init);
3857 elem = TREE_CHAIN (elem);
3859 return expand_expr (fold (TREE_VALUE (elem)), target,
3862 else if (TREE_CODE (init) == STRING_CST
3863 && i < TREE_STRING_LENGTH (init))
3865 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3866 return convert_to_mode (mode, temp, 0);
3872 /* Treat array-ref with constant index as a component-ref. */
3876 /* If the operand is a CONSTRUCTOR, we can just extract the
3877 appropriate field if it is present. */
3878 if (code != ARRAY_REF
3879 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3883 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3884 elt = TREE_CHAIN (elt))
3885 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3886 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3890 enum machine_mode mode1;
3895 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3896 &mode1, &unsignedp, &volatilep);
3898 /* If we got back the original object, something is wrong. Perhaps
3899 we are evaluating an expression too early. In any event, don't
3900 infinitely recurse. */
3904 /* In some cases, we will be offsetting OP0's address by a constant.
3905 So get it as a sum, if possible. If we will be using it
3906 directly in an insn, we validate it. */
3907 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3909 /* If this is a constant, put it into a register if it is a
3910 legitimate constant and memory if it isn't. */
3911 if (CONSTANT_P (op0))
3913 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3914 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3915 op0 = force_reg (mode, op0);
3917 op0 = validize_mem (force_const_mem (mode, op0));
3922 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3924 if (GET_CODE (op0) != MEM)
3926 op0 = change_address (op0, VOIDmode,
3927 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3928 force_reg (Pmode, offset_rtx)));
3931 /* Don't forget about volatility even if this is a bitfield. */
3932 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3934 op0 = copy_rtx (op0);
3935 MEM_VOLATILE_P (op0) = 1;
3938 if (mode1 == VOIDmode
3939 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3940 && modifier != EXPAND_CONST_ADDRESS
3941 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3942 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3944 /* In cases where an aligned union has an unaligned object
3945 as a field, we might be extracting a BLKmode value from
3946 an integer-mode (e.g., SImode) object. Handle this case
3947 by doing the extract into an object as wide as the field
3948 (which we know to be the width of a basic mode), then
3949 storing into memory, and changing the mode to BLKmode. */
3950 enum machine_mode ext_mode = mode;
3952 if (ext_mode == BLKmode)
3953 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3955 if (ext_mode == BLKmode)
3958 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3959 unsignedp, target, ext_mode, ext_mode,
3960 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3961 int_size_in_bytes (TREE_TYPE (tem)));
3962 if (mode == BLKmode)
3964 rtx new = assign_stack_temp (ext_mode,
3965 bitsize / BITS_PER_UNIT, 0);
3967 emit_move_insn (new, op0);
3968 op0 = copy_rtx (new);
3969 PUT_MODE (op0, BLKmode);
3975 /* Get a reference to just this component. */
3976 if (modifier == EXPAND_CONST_ADDRESS
3977 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3978 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3979 (bitpos / BITS_PER_UNIT)));
3981 op0 = change_address (op0, mode1,
3982 plus_constant (XEXP (op0, 0),
3983 (bitpos / BITS_PER_UNIT)));
3984 MEM_IN_STRUCT_P (op0) = 1;
3985 MEM_VOLATILE_P (op0) |= volatilep;
3986 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3989 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3990 convert_move (target, op0, unsignedp);
3996 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
3997 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3998 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3999 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4000 MEM_IN_STRUCT_P (temp) = 1;
4001 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4002 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4003 a location is accessed through a pointer to const does not mean
4004 that the value there can never change. */
4005 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4010 /* Intended for a reference to a buffer of a file-object in Pascal.
4011 But it's not certain that a special tree code will really be
4012 necessary for these. INDIRECT_REF might work for them. */
4016 /* IN_EXPR: Inlined pascal set IN expression.
4019 rlo = set_low - (set_low%bits_per_word);
4020 the_word = set [ (index - rlo)/bits_per_word ];
4021 bit_index = index % bits_per_word;
4022 bitmask = 1 << bit_index;
4023 return !!(the_word & bitmask); */
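      /* The same computation written as ordinary C (an illustrative
	 sketch only; the expansion below works in units of BITS_PER_UNIT,
	 and SET_BYTES stands for the set's storage):

		int rlo = set_low - (set_low % BITS_PER_UNIT);
		unsigned char the_byte = set_bytes[(index - rlo) / BITS_PER_UNIT];
		return (the_byte >> (index % BITS_PER_UNIT)) & 1;  */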
4025 preexpand_calls (exp);
4027 tree set = TREE_OPERAND (exp, 0);
4028 tree index = TREE_OPERAND (exp, 1);
4029 tree set_type = TREE_TYPE (set);
4031 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4032 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4038 rtx diff, quo, rem, addr, bit, result;
4039 rtx setval, setaddr;
4040 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4043 target = gen_reg_rtx (mode);
4045 /* If domain is empty, answer is no. */
4046 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4049 index_val = expand_expr (index, 0, VOIDmode, 0);
4050 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4051 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4052 setval = expand_expr (set, 0, VOIDmode, 0);
4053 setaddr = XEXP (setval, 0);
4055 /* Compare index against bounds, if they are constant. */
4056 if (GET_CODE (index_val) == CONST_INT
4057 && GET_CODE (lo_r) == CONST_INT
4058 && INTVAL (index_val) < INTVAL (lo_r))
4061 if (GET_CODE (index_val) == CONST_INT
4062 && GET_CODE (hi_r) == CONST_INT
4063 && INTVAL (hi_r) < INTVAL (index_val))
4066 /* If we get here, we have to generate the code for both cases
4067 (in range and out of range). */
4069 op0 = gen_label_rtx ();
4070 op1 = gen_label_rtx ();
4072 if (! (GET_CODE (index_val) == CONST_INT
4073 && GET_CODE (lo_r) == CONST_INT))
4075 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4076 GET_MODE (index_val), 0, 0);
4077 emit_jump_insn (gen_blt (op1));
4080 if (! (GET_CODE (index_val) == CONST_INT
4081 && GET_CODE (hi_r) == CONST_INT))
4083 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4084 GET_MODE (index_val), 0, 0);
4085 emit_jump_insn (gen_bgt (op1));
4088 /* Calculate the element number of bit zero in the first word of the set.  */
4090 if (GET_CODE (lo_r) == CONST_INT)
4091 rlow = GEN_INT (INTVAL (lo_r)
4092 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4094 rlow = expand_binop (index_mode, and_optab, lo_r,
4095 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4096 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4098 diff = expand_binop (index_mode, sub_optab,
4099 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4101 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4102 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4103 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4104 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4105 addr = memory_address (byte_mode,
4106 expand_binop (index_mode, add_optab,
4107 diff, setaddr, NULL_RTX, 0,
4109 /* Extract the bit we want to examine */
4110 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4111 gen_rtx (MEM, byte_mode, addr),
4112 make_tree (TREE_TYPE (index), rem),
4114 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4115 GET_MODE (target) == byte_mode ? target : 0,
4116 1, OPTAB_LIB_WIDEN);
4118 if (result != target)
4119 convert_move (target, result, 1);
4121 /* Output the code to handle the out-of-range case. */
4124 emit_move_insn (target, const0_rtx);
4129 case WITH_CLEANUP_EXPR:
4130 if (RTL_EXPR_RTL (exp) == 0)
4133 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4135 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4136 /* That's it for this cleanup. */
4137 TREE_OPERAND (exp, 2) = 0;
4139 return RTL_EXPR_RTL (exp);
4142 /* Check for a built-in function. */
4143 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4144 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4145 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4146 return expand_builtin (exp, target, subtarget, tmode, ignore);
4147 /* If this call was expanded already by preexpand_calls,
4148 just return the result we got. */
4149 if (CALL_EXPR_RTL (exp) != 0)
4150 return CALL_EXPR_RTL (exp);
4151 return expand_call (exp, target, ignore);
4153 case NON_LVALUE_EXPR:
4156 case REFERENCE_EXPR:
4157 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4158 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4159 if (TREE_CODE (type) == UNION_TYPE)
4161 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4164 if (mode == BLKmode)
4166 if (TYPE_SIZE (type) == 0
4167 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4169 target = assign_stack_temp (BLKmode,
4170 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4171 + BITS_PER_UNIT - 1)
4172 / BITS_PER_UNIT, 0);
4175 target = gen_reg_rtx (mode);
4177 if (GET_CODE (target) == MEM)
4178 /* Store data into beginning of memory target. */
4179 store_expr (TREE_OPERAND (exp, 0),
4180 change_address (target, TYPE_MODE (valtype), 0), 0);
4182 else if (GET_CODE (target) == REG)
4183 /* Store this field into a union of the proper type. */
4184 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4185 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4187 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4191 /* Return the entire union. */
4194 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4195 if (GET_MODE (op0) == mode)
4197 /* If arg is a constant integer being extended from a narrower mode,
4198 we must really truncate to get the extended bits right. Otherwise
4199 (unsigned long) (unsigned char) ("\377"[0])
4200 would come out as ffffffff. */
4201 if (GET_MODE (op0) == VOIDmode
4202 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4203 < GET_MODE_BITSIZE (mode)))
4205 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4206 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4208 if (width < HOST_BITS_PER_WIDE_INT)
4210 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4211 : CONST_DOUBLE_LOW (op0));
4212 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4213 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4214 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4216 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4218 op0 = GEN_INT (val);
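/* A minimal illustration (hypothetical helper, not part of this file) of
   the masking just above, assuming a two's-complement HOST_WIDE_INT: keep
   the low WIDTH bits, and fill the high bits only when the source type is
   signed and its sign bit within WIDTH is set.  */
#if 0
static HOST_WIDE_INT
extend_narrow_constant (HOST_WIDE_INT val, int width, int unsignedp)
{
  HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << width) - 1;

  if (unsignedp || ! (val & ((HOST_WIDE_INT) 1 << (width - 1))))
    val &= mask;			/* zero-extend */
  else
    val |= ~mask;			/* sign-extend */
  return val;
}
#endif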
4222 op0 = (simplify_unary_operation
4223 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4224 ? ZERO_EXTEND : SIGN_EXTEND),
4226 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4231 if (GET_MODE (op0) == VOIDmode)
4233 if (modifier == EXPAND_INITIALIZER)
4234 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4235 if (flag_force_mem && GET_CODE (op0) == MEM)
4236 op0 = copy_to_reg (op0);
4239 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4241 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4245 /* We come here from MINUS_EXPR when the second operand is a constant. */
4247 this_optab = add_optab;
4249 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4250 something else, make sure we add the register to the constant and
4251 then to the other thing. This case can occur during strength
4252 reduction and doing it this way will produce better code if the
4253 frame pointer or argument pointer is eliminated.
4255 fold-const.c will ensure that the constant is always in the inner
4256 PLUS_EXPR, so the only case we need to do anything about is if
4257 sp, ap, or fp is our second argument, in which case we must swap
4258 the innermost first argument and our second argument. */
4260 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4261 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4262 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4263 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4264 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4265 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4267 tree t = TREE_OPERAND (exp, 1);
4269 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4270 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4273 /* If the result is to be Pmode and we are adding an integer to
4274 something, we might be forming a constant. So try to use
4275 plus_constant. If it produces a sum and we can't accept it,
4276 use force_operand. This allows P = &ARR[const] to generate
4277 efficient code on machines where a SYMBOL_REF is not a valid address.
4280 If this is an EXPAND_SUM call, always return the sum. */
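/* For example (hypothetical declarations, assuming a 4-byte int), the
   initializer below can be emitted as a SYMBOL_REF plus a constant offset
   rather than a run-time addition:  */
#if 0
extern int arr[100];
static int *p = &arr[10];	/* (plus (symbol_ref "arr") (const_int 40)) */
#endif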
4281 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4282 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4283 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4286 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4288 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4289 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4290 op1 = force_operand (op1, target);
4294 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4295 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4296 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4299 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4301 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4302 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4303 op0 = force_operand (op0, target);
4307 /* No sense saving up arithmetic to be done
4308 if it's all in the wrong mode to form part of an address.
4309 And force_operand won't know whether to sign-extend or zero-extend. */
4311 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4312 || mode != Pmode) goto binop;
4314 preexpand_calls (exp);
4315 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4318 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4319 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4321 /* Make sure any term that's a sum with a constant comes last. */
4322 if (GET_CODE (op0) == PLUS
4323 && CONSTANT_P (XEXP (op0, 1)))
4329 /* If adding to a sum including a constant,
4330 associate it to put the constant outside. */
4331 if (GET_CODE (op1) == PLUS
4332 && CONSTANT_P (XEXP (op1, 1)))
4334 rtx constant_term = const0_rtx;
4336 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4339 /* Ensure that MULT comes first if there is one. */
4340 else if (GET_CODE (op0) == MULT)
4341 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4343 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4345 /* Let's also eliminate constants from op0 if possible. */
4346 op0 = eliminate_constant_term (op0, &constant_term);
4348 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4349 their sum should be a constant. Form it into OP1, since the
4350 result we want will then be OP0 + OP1. */
4352 temp = simplify_binary_operation (PLUS, mode, constant_term,
4357 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4360 /* Put a constant term last and put a multiplication first. */
4361 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4362 temp = op1, op1 = op0, op0 = temp;
4364 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4365 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4368 /* Handle difference of two symbolic constants,
4369 for the sake of an initializer. */
4370 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4371 && really_constant_p (TREE_OPERAND (exp, 0))
4372 && really_constant_p (TREE_OPERAND (exp, 1)))
4374 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4375 VOIDmode, modifier);
4376 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4377 VOIDmode, modifier);
4378 return gen_rtx (MINUS, mode, op0, op1);
4380 /* Convert A - const to A + (-const). */
4381 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4383 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4384 fold (build1 (NEGATE_EXPR, type,
4385 TREE_OPERAND (exp, 1))));
4388 this_optab = sub_optab;
4392 preexpand_calls (exp);
4393 /* If first operand is constant, swap them.
4394 Thus the following special case checks need only
4395 check the second operand. */
4396 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4398 register tree t1 = TREE_OPERAND (exp, 0);
4399 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4400 TREE_OPERAND (exp, 1) = t1;
4403 /* Attempt to return something suitable for generating an
4404 indexed address, for machines that support that. */
4406 if (modifier == EXPAND_SUM && mode == Pmode
4407 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4408 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4410 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4412 /* Apply distributive law if OP0 is x+c. */
4413 if (GET_CODE (op0) == PLUS
4414 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4415 return gen_rtx (PLUS, mode,
4416 gen_rtx (MULT, mode, XEXP (op0, 0),
4417 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4418 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4419 * INTVAL (XEXP (op0, 1))));
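/* A small source-level illustration (hypothetical function) of the
   distributive rewrite above: multiplying a sum that ends in a constant
   becomes one multiply plus a constant term folded at compile time.  */
#if 0
static long
scaled_index (long x)
{
  return (x + 3) * 5;		/* handled as x * 5 + 15 */
}
#endif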
4421 if (GET_CODE (op0) != REG)
4422 op0 = force_operand (op0, NULL_RTX);
4423 if (GET_CODE (op0) != REG)
4424 op0 = copy_to_mode_reg (mode, op0);
4426 return gen_rtx (MULT, mode, op0,
4427 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4430 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4433 /* Check for multiplying things that have been extended
4434 from a narrower type. If this machine supports multiplying
4435 in that narrower type with a result in the desired type,
4436 do it that way, and avoid the explicit type-conversion. */
4437 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4438 && TREE_CODE (type) == INTEGER_TYPE
4439 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4440 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4441 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4442 && int_fits_type_p (TREE_OPERAND (exp, 1),
4443 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4444 /* Don't use a widening multiply if a shift will do. */
4445 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4446 > HOST_BITS_PER_WIDE_INT)
4447 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4449 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4450 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4452 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4453 /* If both operands are extended, they must either both
4454 be zero-extended or both be sign-extended. */
4455 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4457 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4459 enum machine_mode innermode
4460 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4461 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4462 ? umul_widen_optab : smul_widen_optab);
4463 if (mode == GET_MODE_WIDER_MODE (innermode)
4464 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4466 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4467 NULL_RTX, VOIDmode, 0);
4468 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4469 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4472 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4473 NULL_RTX, VOIDmode, 0);
4477 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4478 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4479 return expand_mult (mode, op0, op1, target, unsignedp);
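/* A minimal sketch (hypothetical function, assuming 16-bit short and
   32-bit int) of the pattern the widening-multiply check above looks for:
   both operands are extensions from the same narrower type.  */
#if 0
static int
widening_mult (short a, short b)
{
  return (int) a * (int) b;	/* a candidate for smul_widen_optab */
}
#endif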
4481 case TRUNC_DIV_EXPR:
4482 case FLOOR_DIV_EXPR:
4484 case ROUND_DIV_EXPR:
4485 case EXACT_DIV_EXPR:
4486 preexpand_calls (exp);
4487 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4489 /* Possible optimization: compute the dividend with EXPAND_SUM
4490 then, if the divisor is constant, optimize the case
4491 where some terms of the dividend have coefficients divisible by it. */
4492 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4493 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4494 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4497 this_optab = flodiv_optab;
4500 case TRUNC_MOD_EXPR:
4501 case FLOOR_MOD_EXPR:
4503 case ROUND_MOD_EXPR:
4504 preexpand_calls (exp);
4505 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4507 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4508 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4509 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4511 case FIX_ROUND_EXPR:
4512 case FIX_FLOOR_EXPR:
4514 abort (); /* Not used for C. */
4516 case FIX_TRUNC_EXPR:
4517 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4519 target = gen_reg_rtx (mode);
4520 expand_fix (target, op0, unsignedp);
4524 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4526 target = gen_reg_rtx (mode);
4527 /* expand_float can't figure out what to do if FROM has VOIDmode.
4528 So give it the correct mode. With -O, cse will optimize this. */
4529 if (GET_MODE (op0) == VOIDmode)
4530 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4532 expand_float (target, op0,
4533 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4537 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4538 temp = expand_unop (mode, neg_optab, op0, target, 0);
4544 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4546 /* Handle complex values specially. */
4548 enum machine_mode opmode
4549 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4551 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4552 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4553 return expand_complex_abs (opmode, op0, target, unsignedp);
4556 /* Unsigned abs is simply the operand. Testing here means we don't
4557 risk generating incorrect code below. */
4558 if (TREE_UNSIGNED (type))
4561 /* First try to do it with a special abs instruction. */
4562 temp = expand_unop (mode, abs_optab, op0, target, 0);
4566 /* If this machine has expensive jumps, we can do integer absolute
4567 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4568 where W is the width of MODE. */
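/* A minimal sketch (plain C, assuming a 32-bit two's-complement int) of
   that identity:  */
#if 0
static int
abs_no_branch (int x)
{
  int sign = x >> 31;		/* 0 if x >= 0, all ones if x < 0 */
  return (x ^ sign) - sign;	/* flips the bits and adds 1 when negative */
}
#endif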
4570 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4572 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4573 size_int (GET_MODE_BITSIZE (mode) - 1),
4576 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4579 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4586 /* If that does not win, use conditional jump and negate. */
4587 target = original_target;
4588 temp = gen_label_rtx ();
4589 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4590 || (GET_CODE (target) == REG
4591 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4592 target = gen_reg_rtx (mode);
4593 emit_move_insn (target, op0);
4594 emit_cmp_insn (target,
4595 expand_expr (convert (type, integer_zero_node),
4596 NULL_RTX, VOIDmode, 0),
4597 GE, NULL_RTX, mode, 0, 0);
4599 emit_jump_insn (gen_bge (temp));
4600 op0 = expand_unop (mode, neg_optab, target, target, 0);
4602 emit_move_insn (target, op0);
4609 target = original_target;
4610 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4611 || (GET_CODE (target) == REG
4612 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4613 target = gen_reg_rtx (mode);
4614 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4615 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4617 /* First try to do it with a special MIN or MAX instruction.
4618 If that does not win, use a conditional jump to select the proper value. */
4620 this_optab = (TREE_UNSIGNED (type)
4621 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4622 : (code == MIN_EXPR ? smin_optab : smax_optab));
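/* A sketch (plain C, hypothetical function) of the fallback emitted below
   when no such instruction is available: copy one operand, compare, and
   branch around the store of the other.  */
#if 0
static int
max_fallback (int a, int b)
{
  int r = a;			/* emit_move_insn (target, op0) */
  if (! (r >= b))		/* compare_from_rtx + conditional jump */
    r = b;			/* emit_move_insn (target, op1) */
  return r;
}
#endif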
4624 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4630 emit_move_insn (target, op0);
4631 op0 = gen_label_rtx ();
4632 /* If this mode is an integer too wide to compare properly,
4633 compare word by word. Rely on cse to optimize constant cases. */
4634 if (GET_MODE_CLASS (mode) == MODE_INT
4635 && !can_compare_p (mode))
4637 if (code == MAX_EXPR)
4638 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
4640 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4641 emit_move_insn (target, op1);
4645 if (code == MAX_EXPR)
4646 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4647 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4648 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4650 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4651 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4652 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4653 if (temp == const0_rtx)
4654 emit_move_insn (target, op1);
4655 else if (temp != const_true_rtx)
4657 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4658 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4661 emit_move_insn (target, op1);
4667 /* ??? Can optimize when the operand of this is a bitwise operation,
4668 by using a different bitwise operation. */
4670 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4671 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4677 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4678 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4683 /* ??? Can optimize bitwise operations with one arg constant.
4684 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4685 and (a bitwise1 b) bitwise2 b (etc)
4686 but that is probably not worth while. */
4688 /* BIT_AND_EXPR is for bitwise anding.
4689 TRUTH_AND_EXPR is for anding two boolean values
4690 when we want in all cases to compute both of them.
4691 In general it is fastest to do TRUTH_AND_EXPR by
4692 computing both operands as actual zero-or-1 values
4693 and then bitwise anding. In cases where there cannot
4694 be any side effects, better code would be made by
4695 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4696 but the question is how to recognize those cases. */
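/* A source-level illustration (hypothetical functions) of the distinction:
   the bitwise form always evaluates both operands, while the ANDIF form
   short-circuits like &&.  */
#if 0
static int
truth_and (int a, int b)
{
  return (a != 0) & (b != 0);	/* TRUTH_AND_EXPR: both sides evaluated */
}

static int
truth_andif (int a, int b)
{
  return (a != 0) && (b != 0);	/* TRUTH_ANDIF_EXPR: b skipped when a == 0 */
}
#endif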
4698 case TRUTH_AND_EXPR:
4700 this_optab = and_optab;
4703 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4706 this_optab = ior_optab;
4709 case TRUTH_XOR_EXPR:
4711 this_optab = xor_optab;
4718 preexpand_calls (exp);
4719 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4721 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4722 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4725 /* Could determine the answer when only additive constants differ.
4726 Also, the addition of one can be handled by changing the condition. */
4733 preexpand_calls (exp);
4734 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4737 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4738 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4740 && GET_CODE (original_target) == REG
4741 && (GET_MODE (original_target)
4742 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4744 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4745 if (temp != original_target)
4746 temp = copy_to_reg (temp);
4747 op1 = gen_label_rtx ();
4748 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4749 GET_MODE (temp), unsignedp, 0);
4750 emit_jump_insn (gen_beq (op1));
4751 emit_move_insn (temp, const1_rtx);
4755 /* If no set-flag instruction, must generate a conditional
4756 store into a temporary variable. Drop through
4757 and handle this like && and ||. */
4759 case TRUTH_ANDIF_EXPR:
4760 case TRUTH_ORIF_EXPR:
4761 if (target == 0 || ! safe_from_p (target, exp)
4762 /* Make sure we don't have a hard reg (such as function's return
4763 value) live across basic blocks, if not optimizing. */
4764 || (!optimize && GET_CODE (target) == REG
4765 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4766 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4767 emit_clr_insn (target);
4768 op1 = gen_label_rtx ();
4769 jumpifnot (exp, op1);
4770 emit_0_to_1_insn (target);
4774 case TRUTH_NOT_EXPR:
4775 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4776 /* The parser is careful to generate TRUTH_NOT_EXPR
4777 only with operands that are always zero or one. */
4778 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4779 target, 1, OPTAB_LIB_WIDEN);
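/* A minimal sketch (plain C, hypothetical function): for an operand known
   to be 0 or 1, logical negation is the exclusive-or with 1 emitted above.  */
#if 0
static int
truth_not (int x)		/* x is assumed to be 0 or 1 */
{
  return x ^ 1;
}
#endif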
4785 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4787 return expand_expr (TREE_OPERAND (exp, 1),
4788 (ignore ? const0_rtx : target),
4793 /* Note that COND_EXPRs whose type is a structure or union
4794 are required to be constructed to contain assignments of
4795 a temporary variable, so that we can evaluate them here
4796 for side effect only. If type is void, we must do likewise. */
4798 /* If an arm of the branch requires a cleanup,
4799 only that cleanup is performed. */
4802 tree binary_op = 0, unary_op = 0;
4803 tree old_cleanups = cleanups_this_call;
4804 cleanups_this_call = 0;
4806 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4807 convert it to our mode, if necessary. */
4808 if (integer_onep (TREE_OPERAND (exp, 1))
4809 && integer_zerop (TREE_OPERAND (exp, 2))
4810 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4814 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4819 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4820 if (GET_MODE (op0) == mode)
4823 target = gen_reg_rtx (mode);
4824 convert_move (target, op0, unsignedp);
4828 /* If we are not to produce a result, we have no target. Otherwise,
4829 if a target was specified use it; it will not be used as an
4830 intermediate target unless it is safe. If no target, use a
4835 else if (original_target
4836 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4837 temp = original_target;
4838 else if (mode == BLKmode)
4840 if (TYPE_SIZE (type) == 0
4841 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4843 temp = assign_stack_temp (BLKmode,
4844 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4845 + BITS_PER_UNIT - 1)
4846 / BITS_PER_UNIT, 0);
4849 temp = gen_reg_rtx (mode);
4851 /* Check for X ? A + B : A. If we have this, we can copy
4852 A to the output and conditionally add B. Similarly for unary
4853 operations. Don't do this if X has side-effects because
4854 those side effects might affect A or B and the "?" operation is
4855 a sequence point in ANSI. (We test for side effects later.) */
4857 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4858 && operand_equal_p (TREE_OPERAND (exp, 2),
4859 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4860 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4861 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4862 && operand_equal_p (TREE_OPERAND (exp, 1),
4863 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4864 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4865 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4866 && operand_equal_p (TREE_OPERAND (exp, 2),
4867 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4868 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4869 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4870 && operand_equal_p (TREE_OPERAND (exp, 1),
4871 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4872 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4874 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4875 operation, do this as A + (X != 0). Similarly for other simple
4876 binary operators. */
4877 if (temp && singleton && binary_op
4878 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4879 && (TREE_CODE (binary_op) == PLUS_EXPR
4880 || TREE_CODE (binary_op) == MINUS_EXPR
4881 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4882 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4883 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4884 && integer_onep (TREE_OPERAND (binary_op, 1))
4885 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4888 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4889 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4890 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4891 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4894 /* If we had X ? A : A + 1, do this as A + (X == 0).
4896 We have to invert the truth value here and then put it
4897 back later if do_store_flag fails. We cannot simply copy
4898 TREE_OPERAND (exp, 0) to another variable and modify that
4899 because invert_truthvalue can modify the tree pointed to by its argument. */
4901 if (singleton == TREE_OPERAND (exp, 1))
4902 TREE_OPERAND (exp, 0)
4903 = invert_truthvalue (TREE_OPERAND (exp, 0));
4905 result = do_store_flag (TREE_OPERAND (exp, 0),
4906 (safe_from_p (temp, singleton)
4908 mode, BRANCH_COST <= 1);
4912 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4913 return expand_binop (mode, boptab, op1, result, temp,
4914 unsignedp, OPTAB_LIB_WIDEN);
4916 else if (singleton == TREE_OPERAND (exp, 1))
4917 TREE_OPERAND (exp, 0)
4918 = invert_truthvalue (TREE_OPERAND (exp, 0));
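/* A source-level illustration (hypothetical function) of that rewrite: the
   conditional increment becomes an unconditional add of the 0/1 comparison
   result, so no branch is needed.  */
#if 0
static int
cond_increment (int a, int x)
{
  return a + (x != 0);		/* same value as x ? a + 1 : a */
}
#endif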
4922 op0 = gen_label_rtx ();
4924 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4928 /* If the target conflicts with the other operand of the
4929 binary op, we can't use it. Also, we can't use the target
4930 if it is a hard register, because evaluating the condition
4931 might clobber it. */
4933 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4934 || (GET_CODE (temp) == REG
4935 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4936 temp = gen_reg_rtx (mode);
4937 store_expr (singleton, temp, 0);
4940 expand_expr (singleton,
4941 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4942 if (cleanups_this_call)
4944 sorry ("aggregate value in COND_EXPR");
4945 cleanups_this_call = 0;
4947 if (singleton == TREE_OPERAND (exp, 1))
4948 jumpif (TREE_OPERAND (exp, 0), op0);
4950 jumpifnot (TREE_OPERAND (exp, 0), op0);
4952 if (binary_op && temp == 0)
4953 /* Just touch the other operand. */
4954 expand_expr (TREE_OPERAND (binary_op, 1),
4955 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4957 store_expr (build (TREE_CODE (binary_op), type,
4958 make_tree (type, temp),
4959 TREE_OPERAND (binary_op, 1)),
4962 store_expr (build1 (TREE_CODE (unary_op), type,
4963 make_tree (type, temp)),
4968 /* This is now done in jump.c and is better done there because it
4969 produces shorter register lifetimes. */
4971 /* Check whether both arms are either constants or variables
4972 in registers (but not the same as the target!). If so, we can
4973 save branches by assigning one, branching, and assigning the other. */
4975 else if (temp && GET_MODE (temp) != BLKmode
4976 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4977 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4978 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4979 && DECL_RTL (TREE_OPERAND (exp, 1))
4980 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4981 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4982 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4983 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4984 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4985 && DECL_RTL (TREE_OPERAND (exp, 2))
4986 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4987 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4989 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4990 temp = gen_reg_rtx (mode);
4991 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4992 jumpifnot (TREE_OPERAND (exp, 0), op0);
4993 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4997 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4998 comparison operator. If we have one of these cases, set the
4999 output to A, branch on A (cse will merge these two references),
5000 then set the output to FOO. */
5002 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5003 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5004 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5005 TREE_OPERAND (exp, 1), 0)
5006 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5007 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5009 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5010 temp = gen_reg_rtx (mode);
5011 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5012 jumpif (TREE_OPERAND (exp, 0), op0);
5013 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5017 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5018 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5019 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5020 TREE_OPERAND (exp, 2), 0)
5021 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5022 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5024 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5025 temp = gen_reg_rtx (mode);
5026 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5027 jumpifnot (TREE_OPERAND (exp, 0), op0);
5028 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5033 op1 = gen_label_rtx ();
5034 jumpifnot (TREE_OPERAND (exp, 0), op0);
5036 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5038 expand_expr (TREE_OPERAND (exp, 1),
5039 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5040 if (cleanups_this_call)
5042 sorry ("aggregate value in COND_EXPR");
5043 cleanups_this_call = 0;
5047 emit_jump_insn (gen_jump (op1));
5051 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5053 expand_expr (TREE_OPERAND (exp, 2),
5054 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5057 if (cleanups_this_call)
5059 sorry ("aggregate value in COND_EXPR");
5060 cleanups_this_call = 0;
5066 cleanups_this_call = old_cleanups;
5072 /* Something needs to be initialized, but we didn't know
5073 where that thing was when building the tree. For example,
5074 it could be the return value of a function, or a parameter
5075 to a function which lays down in the stack, or a temporary
5076 variable which must be passed by reference.
5078 We guarantee that the expression will either be constructed
5079 or copied into our original target. */
5081 tree slot = TREE_OPERAND (exp, 0);
5084 if (TREE_CODE (slot) != VAR_DECL)
5089 if (DECL_RTL (slot) != 0)
5091 target = DECL_RTL (slot);
5092 /* If we have already expanded the slot, don't do it again. */
5094 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5099 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5100 /* All temp slots at this level must not conflict. */
5101 preserve_temp_slots (target);
5102 DECL_RTL (slot) = target;
5106 /* I bet this needs to be done, and I bet that it needs to
5107 be above, inside the else clause. The reason is
5108 simple: how else is it going to get cleaned up? (mrs)
5110 The reason it probably did not work before, and was
5111 commented out, is that this was re-expanding already
5112 expanded target_exprs (target == 0 and DECL_RTL (slot)
5113 != 0), also cleaning them up many times as well. :-( */
5115 /* Since SLOT is not known to the called function
5116 to belong to its stack frame, we must build an explicit
5117 cleanup. This case occurs when we must build up a reference
5118 to pass the reference as an argument. In this case,
5119 it is very likely that such a reference need not be built here. */
5122 if (TREE_OPERAND (exp, 2) == 0)
5123 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5124 if (TREE_OPERAND (exp, 2))
5125 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5126 cleanups_this_call);
5131 /* This case does occur, when expanding a parameter which
5132 needs to be constructed on the stack. The target
5133 is the actual stack address that we want to initialize.
5134 The function we call will perform the cleanup in this case. */
5136 /* If we have already assigned it space, use that space,
5137 not target that we were passed in, as our target
5138 parameter is only a hint. */
5139 if (DECL_RTL (slot) != 0)
5141 target = DECL_RTL (slot);
5142 /* If we have already expanded the slot, don't do it again. */
5144 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5148 DECL_RTL (slot) = target;
5151 exp1 = TREE_OPERAND (exp, 1);
5152 /* Mark it as expanded. */
5153 TREE_OPERAND (exp, 1) = NULL_TREE;
5155 return expand_expr (exp1, target, tmode, modifier);
5160 tree lhs = TREE_OPERAND (exp, 0);
5161 tree rhs = TREE_OPERAND (exp, 1);
5162 tree noncopied_parts = 0;
5163 tree lhs_type = TREE_TYPE (lhs);
5165 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5166 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5167 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5168 TYPE_NONCOPIED_PARTS (lhs_type));
5169 while (noncopied_parts != 0)
5171 expand_assignment (TREE_VALUE (noncopied_parts),
5172 TREE_PURPOSE (noncopied_parts), 0, 0);
5173 noncopied_parts = TREE_CHAIN (noncopied_parts);
5180 /* If lhs is complex, expand calls in rhs before computing it.
5181 That's so we don't compute a pointer and save it over a call.
5182 If lhs is simple, compute it first so we can give it as a
5183 target if the rhs is just a call. This avoids an extra temp and copy
5184 and that prevents a partial-subsumption which makes bad code.
5185 Actually we could treat component_ref's of vars like vars. */
5187 tree lhs = TREE_OPERAND (exp, 0);
5188 tree rhs = TREE_OPERAND (exp, 1);
5189 tree noncopied_parts = 0;
5190 tree lhs_type = TREE_TYPE (lhs);
5194 if (TREE_CODE (lhs) != VAR_DECL
5195 && TREE_CODE (lhs) != RESULT_DECL
5196 && TREE_CODE (lhs) != PARM_DECL)
5197 preexpand_calls (exp);
5199 /* Check for |= or &= of a bitfield of size one into another bitfield
5200 of size 1. In this case, (unless we need the result of the
5201 assignment) we can do this more efficiently with a
5202 test followed by an assignment, if necessary.
5204 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5205 things change so we do, this code should be enhanced to
5208 && TREE_CODE (lhs) == COMPONENT_REF
5209 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5210 || TREE_CODE (rhs) == BIT_AND_EXPR)
5211 && TREE_OPERAND (rhs, 0) == lhs
5212 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5213 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5214 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5216 rtx label = gen_label_rtx ();
5218 do_jump (TREE_OPERAND (rhs, 1),
5219 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5220 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5221 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5222 (TREE_CODE (rhs) == BIT_IOR_EXPR
5224 : integer_zero_node)),
5226 do_pending_stack_adjust ();
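/* A source-level illustration (hypothetical struct) of the pattern
   recognized above: or-ing one single-bit field into another can be done
   as a test and a conditional store.  */
#if 0
struct twobits { unsigned a : 1, b : 1; };

static void
or_bit (struct twobits *p)
{
  p->a |= p->b;			/* becomes: if (p->b) p->a = 1;  */
}
#endif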
5231 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5232 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5233 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5234 TYPE_NONCOPIED_PARTS (lhs_type));
5236 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5237 while (noncopied_parts != 0)
5239 expand_assignment (TREE_PURPOSE (noncopied_parts),
5240 TREE_VALUE (noncopied_parts), 0, 0);
5241 noncopied_parts = TREE_CHAIN (noncopied_parts);
5246 case PREINCREMENT_EXPR:
5247 case PREDECREMENT_EXPR:
5248 return expand_increment (exp, 0);
5250 case POSTINCREMENT_EXPR:
5251 case POSTDECREMENT_EXPR:
5252 /* Faster to treat as pre-increment if result is not used. */
5253 return expand_increment (exp, ! ignore);
5256 /* Are we taking the address of a nested function? */
5257 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5258 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5260 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5261 op0 = force_operand (op0, target);
5265 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5266 (modifier == EXPAND_INITIALIZER
5267 ? modifier : EXPAND_CONST_ADDRESS));
5269 /* We would like the object in memory. If it is a constant,
5270 we can have it be statically allocated into memory. For
5271 a non-constant (REG or SUBREG), we need to allocate some
5272 memory and store the value into it. */
5274 if (CONSTANT_P (op0))
5275 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5278 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
5280 /* If this object is in a register, it must be not
5282 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5283 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5285 = assign_stack_temp (inner_mode,
5286 int_size_in_bytes (inner_type), 1);
5288 emit_move_insn (memloc, op0);
5292 if (GET_CODE (op0) != MEM)
5295 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5296 return XEXP (op0, 0);
5297 op0 = force_operand (XEXP (op0, 0), target);
5299 if (flag_force_addr && GET_CODE (op0) != REG)
5300 return force_reg (Pmode, op0);
5303 case ENTRY_VALUE_EXPR:
5306 /* COMPLEX type for Extended Pascal & Fortran */
5309 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5313 /* Get the rtx code of the operands. */
5314 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5315 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5318 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5320 prev = get_last_insn ();
5322 /* Tell flow that the whole of the destination is being set. */
5323 if (GET_CODE (target) == REG)
5324 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5326 /* Move the real (op0) and imaginary (op1) parts to their location. */
5327 emit_move_insn (gen_realpart (mode, target), op0);
5328 emit_move_insn (gen_imagpart (mode, target), op1);
5330 /* Complex construction should appear as a single unit. */
5337 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5338 return gen_realpart (mode, op0);
5341 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5342 return gen_imagpart (mode, op0);
5346 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5350 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5353 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5355 prev = get_last_insn ();
5357 /* Tell flow that the whole of the destination is being set. */
5358 if (GET_CODE (target) == REG)
5359 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5361 /* Store the realpart and the negated imagpart to target. */
5362 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5364 imag_t = gen_imagpart (mode, target);
5365 temp = expand_unop (mode, neg_optab,
5366 gen_imagpart (mode, op0), imag_t, 0);
5368 emit_move_insn (imag_t, temp);
5370 /* Conjugate should appear as a single unit */
5377 op0 = CONST0_RTX (tmode);
5383 return (*lang_expand_expr) (exp, target, tmode, modifier);
5386 /* Here to do an ordinary binary operator, generating an instruction
5387 from the optab already placed in `this_optab'. */
5389 preexpand_calls (exp);
5390 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5392 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5393 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5395 temp = expand_binop (mode, this_optab, op0, op1, target,
5396 unsignedp, OPTAB_LIB_WIDEN);
5402 /* Return the alignment in bits of EXP, a pointer valued expression.
5403 But don't return more than MAX_ALIGN no matter what.
5404 The alignment returned is, by default, the alignment of the thing that
5405 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5407 Otherwise, look at the expression to see if we can do better, i.e., if the
5408 expression is actually pointing at an object whose alignment is tighter. */
5411 get_pointer_alignment (exp, max_align)
5415 unsigned align, inner;
5417 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5420 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5421 align = MIN (align, max_align);
5425 switch (TREE_CODE (exp))
5429 case NON_LVALUE_EXPR:
5430 exp = TREE_OPERAND (exp, 0);
5431 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5433 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5434 inner = MIN (inner, max_align);
5435 align = MAX (align, inner);
5439 /* If sum of pointer + int, restrict our maximum alignment to that
5440 imposed by the integer. If not, we can't do any better than ALIGN. */
5442 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5445 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5450 exp = TREE_OPERAND (exp, 0);
5454 /* See what we are pointing at and look at its alignment. */
5455 exp = TREE_OPERAND (exp, 0);
5456 if (TREE_CODE (exp) == FUNCTION_DECL)
5457 align = MAX (align, FUNCTION_BOUNDARY);
5458 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5459 align = MAX (align, DECL_ALIGN (exp));
5460 #ifdef CONSTANT_ALIGNMENT
5461 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5462 align = CONSTANT_ALIGNMENT (exp, align);
5464 return MIN (align, max_align);
5472 /* Return the tree node and offset if a given argument corresponds to
5473 a string constant. */
5476 string_constant (arg, ptr_offset)
5482 if (TREE_CODE (arg) == ADDR_EXPR
5483 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5485 *ptr_offset = integer_zero_node;
5486 return TREE_OPERAND (arg, 0);
5488 else if (TREE_CODE (arg) == PLUS_EXPR)
5490 tree arg0 = TREE_OPERAND (arg, 0);
5491 tree arg1 = TREE_OPERAND (arg, 1);
5496 if (TREE_CODE (arg0) == ADDR_EXPR
5497 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5500 return TREE_OPERAND (arg0, 0);
5502 else if (TREE_CODE (arg1) == ADDR_EXPR
5503 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5506 return TREE_OPERAND (arg1, 0);
5513 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5514 way, because it could contain a zero byte in the middle.
5515 TREE_STRING_LENGTH is the size of the character array, not the string.
5517 Unfortunately, string_constant can't access the values of const char
5518 arrays with initializers, so neither can we do so here. */
5528 src = string_constant (src, &offset_node);
5531 max = TREE_STRING_LENGTH (src);
5532 ptr = TREE_STRING_POINTER (src);
5533 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5535 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5536 compute the offset to the following null if we don't know where to
5537 start searching for it. */
5539 for (i = 0; i < max; i++)
5542 /* We don't know the starting offset, but we do know that the string
5543 has no internal zero bytes. We can assume that the offset falls
5544 within the bounds of the string; otherwise, the programmer deserves
5545 what he gets. Subtract the offset from the length of the string, and return that. */
5547 /* This would perhaps not be valid if we were dealing with named
5548 arrays in addition to literal string constants. */
5549 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5552 /* We have a known offset into the string. Start searching there for
5553 a null character. */
5554 if (offset_node == 0)
5558 /* Did we get a long long offset? If so, punt. */
5559 if (TREE_INT_CST_HIGH (offset_node) != 0)
5561 offset = TREE_INT_CST_LOW (offset_node);
5563 /* If the offset is known to be out of bounds, warn, and call strlen at
5565 if (offset < 0 || offset > max)
5567 warning ("offset outside bounds of constant string");
5570 /* Use strlen to search for the first zero byte. Since any strings
5571 constructed with build_string will have nulls appended, we win even
5572 if we get handed something like (char[4])"abcd".
5574 Since OFFSET is our starting index into the string, no further
5575 calculation is needed. */
5576 return size_int (strlen (ptr + offset));
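/* A small illustration (plain C, hypothetical values) of what the code
   above folds: a literal with a known starting offset yields a
   compile-time length.  */
#if 0
#include <string.h>

static size_t
demo (void)
{
  /* build_string appends a zero byte, so searching the literal data is
     safe; with offset 1 the result here is 2.  */
  return strlen ("foo" + 1);
}
#endif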
5579 /* Expand an expression EXP that calls a built-in function,
5580 with result going to TARGET if that's convenient
5581 (and in mode MODE if that's convenient).
5582 SUBTARGET may be used as the target for computing one of EXP's operands.
5583 IGNORE is nonzero if the value is to be ignored. */
5586 expand_builtin (exp, target, subtarget, mode, ignore)
5590 enum machine_mode mode;
5593 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5594 tree arglist = TREE_OPERAND (exp, 1);
5597 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5598 optab builtin_optab;
5600 switch (DECL_FUNCTION_CODE (fndecl))
5605 /* build_function_call changes these into ABS_EXPR. */
5610 case BUILT_IN_FSQRT:
5611 /* If not optimizing, call the library function. */
5616 /* Arg could be wrong type if user redeclared this fcn wrong. */
5617 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5618 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5620 /* Stabilize and compute the argument. */
5621 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5622 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5624 exp = copy_node (exp);
5625 arglist = copy_node (arglist);
5626 TREE_OPERAND (exp, 1) = arglist;
5627 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5629 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5631 /* Make a suitable register to place result in. */
5632 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5637 switch (DECL_FUNCTION_CODE (fndecl))
5640 builtin_optab = sin_optab; break;
5642 builtin_optab = cos_optab; break;
5643 case BUILT_IN_FSQRT:
5644 builtin_optab = sqrt_optab; break;
5649 /* Compute into TARGET.
5650 Set TARGET to wherever the result comes back. */
5651 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5652 builtin_optab, op0, target, 0);
5654 /* If we were unable to expand via the builtin, stop the
5655 sequence (without outputting the insns) and break, causing
5656 a call to the library function. */
5663 /* Check the results by default. But if flag_fast_math is turned on,
5664 then assume sqrt will always be called with valid arguments. */
5666 if (! flag_fast_math)
5668 /* Don't define the builtin FP instructions
5669 if your machine is not IEEE. */
5670 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5673 lab1 = gen_label_rtx ();
5675 /* Test the result; if it is NaN, set errno=EDOM because
5676 the argument was not in the domain. */
5677 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5678 emit_jump_insn (gen_beq (lab1));
5682 #ifdef GEN_ERRNO_RTX
5683 rtx errno_rtx = GEN_ERRNO_RTX;
5686 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5689 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5692 /* We can't set errno=EDOM directly; let the library call do it.
5693 Pop the arguments right away in case the call gets deleted. */
5695 expand_call (exp, target, 0);
5702 /* Output the entire sequence. */
5703 insns = get_insns ();
5709 /* __builtin_apply_args returns block of memory allocated on
5710 the stack into which is stored the arg pointer, structure
5711 value address, static chain, and all the registers that might
5712 possibly be used in performing a function call. The code is
5713 moved to the start of the function so the incoming values are saved. */
5715 case BUILT_IN_APPLY_ARGS:
5716 /* Don't do __builtin_apply_args more than once in a function.
5717 Save the result of the first call and reuse it. */
5718 if (apply_args_value != 0)
5719 return apply_args_value;
5721 /* When this function is called, it means that registers must be
5722 saved on entry to this function. So we migrate the
5723 call to the first insn of this function. */
5728 temp = expand_builtin_apply_args ();
5732 apply_args_value = temp;
5734 /* Put the sequence after the NOTE that starts the function.
5735 If this is inside a SEQUENCE, make the outer-level insn
5736 chain current, so the code is placed at the start of the function. */
5738 push_topmost_sequence ();
5739 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5740 pop_topmost_sequence ();
5744 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5745 FUNCTION with a copy of the parameters described by
5746 ARGUMENTS, and ARGSIZE. It returns a block of memory
5747 allocated on the stack into which is stored all the registers
5748 that might possibly be used for returning the result of a
5749 function. ARGUMENTS is the value returned by
5750 __builtin_apply_args. ARGSIZE is the number of bytes of
5751 arguments that must be copied. ??? How should this value be
5752 computed? We'll also need a safe worst case value for varargs functions. */
5754 case BUILT_IN_APPLY:
5756 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5757 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5758 || TREE_CHAIN (arglist) == 0
5759 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5760 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5761 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5769 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5770 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5772 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5775 /* __builtin_return (RESULT) causes the function to return the
5776 value described by RESULT. RESULT is address of the block of
5777 memory returned by __builtin_apply. */
5778 case BUILT_IN_RETURN:
5780 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5781 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5782 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5783 NULL_RTX, VOIDmode, 0));
5786 case BUILT_IN_SAVEREGS:
5787 /* Don't do __builtin_saveregs more than once in a function.
5788 Save the result of the first call and reuse it. */
5789 if (saveregs_value != 0)
5790 return saveregs_value;
5792 /* When this function is called, it means that registers must be
5793 saved on entry to this function. So we migrate the
5794 call to the first insn of this function. */
5797 rtx valreg, saved_valreg;
5799 /* Now really call the function. `expand_call' does not call
5800 expand_builtin, so there is no danger of infinite recursion here. */
5803 #ifdef EXPAND_BUILTIN_SAVEREGS
5804 /* Do whatever the machine needs done in this case. */
5805 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5807 /* The register where the function returns its value
5808 is likely to have something else in it, such as an argument.
5809 So preserve that register around the call. */
5810 if (value_mode != VOIDmode)
5812 valreg = hard_libcall_value (value_mode);
5813 saved_valreg = gen_reg_rtx (value_mode);
5814 emit_move_insn (saved_valreg, valreg);
5817 /* Generate the call, putting the value in a pseudo. */
5818 temp = expand_call (exp, target, ignore);
5820 if (value_mode != VOIDmode)
5821 emit_move_insn (valreg, saved_valreg);
5827 saveregs_value = temp;
5829 /* Put the sequence after the NOTE that starts the function.
5830 If this is inside a SEQUENCE, make the outer-level insn
5831 chain current, so the code is placed at the start of the function. */
5833 push_topmost_sequence ();
5834 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5835 pop_topmost_sequence ();
5839 /* __builtin_args_info (N) returns word N of the arg space info
5840 for the current function. The number and meanings of words
5841 are controlled by the definition of CUMULATIVE_ARGS. */
5842 case BUILT_IN_ARGS_INFO:
5844 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5846 int *word_ptr = (int *) &current_function_args_info;
5847 tree type, elts, result;
5849 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5850 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5851 __FILE__, __LINE__);
5855 tree arg = TREE_VALUE (arglist);
5856 if (TREE_CODE (arg) != INTEGER_CST)
5857 error ("argument of `__builtin_args_info' must be constant");
5860 int wordnum = TREE_INT_CST_LOW (arg);
5862 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5863 error ("argument of `__builtin_args_info' out of range");
5865 return GEN_INT (word_ptr[wordnum]);
5869 error ("missing argument in `__builtin_args_info'");
5874 for (i = 0; i < nwords; i++)
5875 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
5877 type = build_array_type (integer_type_node,
5878 build_index_type (build_int_2 (nwords, 0)));
5879 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5880 TREE_CONSTANT (result) = 1;
5881 TREE_STATIC (result) = 1;
5882 result = build (INDIRECT_REF, build_pointer_type (type), result);
5883 TREE_CONSTANT (result) = 1;
5884 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5888 /* Return the address of the first anonymous stack arg. */
5889 case BUILT_IN_NEXT_ARG:
5891 tree fntype = TREE_TYPE (current_function_decl);
5892 if (!(TYPE_ARG_TYPES (fntype) != 0
5893 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5894 != void_type_node)))
5896 error ("`va_start' used in function with fixed args");
5901 return expand_binop (Pmode, add_optab,
5902 current_function_internal_arg_pointer,
5903 current_function_arg_offset_rtx,
5904 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5906 case BUILT_IN_CLASSIFY_TYPE:
5909 tree type = TREE_TYPE (TREE_VALUE (arglist));
5910 enum tree_code code = TREE_CODE (type);
5911 if (code == VOID_TYPE)
5912 return GEN_INT (void_type_class);
5913 if (code == INTEGER_TYPE)
5914 return GEN_INT (integer_type_class);
5915 if (code == CHAR_TYPE)
5916 return GEN_INT (char_type_class);
5917 if (code == ENUMERAL_TYPE)
5918 return GEN_INT (enumeral_type_class);
5919 if (code == BOOLEAN_TYPE)
5920 return GEN_INT (boolean_type_class);
5921 if (code == POINTER_TYPE)
5922 return GEN_INT (pointer_type_class);
5923 if (code == REFERENCE_TYPE)
5924 return GEN_INT (reference_type_class);
5925 if (code == OFFSET_TYPE)
5926 return GEN_INT (offset_type_class);
5927 if (code == REAL_TYPE)
5928 return GEN_INT (real_type_class);
5929 if (code == COMPLEX_TYPE)
5930 return GEN_INT (complex_type_class);
5931 if (code == FUNCTION_TYPE)
5932 return GEN_INT (function_type_class);
5933 if (code == METHOD_TYPE)
5934 return GEN_INT (method_type_class);
5935 if (code == RECORD_TYPE)
5936 return GEN_INT (record_type_class);
5937 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
5938 return GEN_INT (union_type_class);
5939 if (code == ARRAY_TYPE)
5940 return GEN_INT (array_type_class);
5941 if (code == STRING_TYPE)
5942 return GEN_INT (string_type_class);
5943 if (code == SET_TYPE)
5944 return GEN_INT (set_type_class);
5945 if (code == FILE_TYPE)
5946 return GEN_INT (file_type_class);
5947 if (code == LANG_TYPE)
5948 return GEN_INT (lang_type_class);
5950 return GEN_INT (no_type_class);
5952 case BUILT_IN_CONSTANT_P:
5956 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5957 ? const1_rtx : const0_rtx);
5959 case BUILT_IN_FRAME_ADDRESS:
5960 /* The argument must be a nonnegative integer constant.
5961 It counts the number of frames to scan up the stack.
5962 The value is the address of that frame. */
5963 case BUILT_IN_RETURN_ADDRESS:
5964 /* The argument must be a nonnegative integer constant.
5965 It counts the number of frames to scan up the stack.
5966 The value is the return address saved in that frame. */
5968 /* Warning about missing arg was already issued. */
5970 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5972 error ("invalid arg to `__builtin_return_address'");
5975 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5977 error ("invalid arg to `__builtin_return_address'");
5982 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5983 rtx tem = frame_pointer_rtx;
5986 /* Some machines need special handling before we can access arbitrary
5987 frames. For example, on the sparc, we must first flush all
5988 register windows to the stack. */
5989 #ifdef SETUP_FRAME_ADDRESSES
5990 SETUP_FRAME_ADDRESSES ();
5993 /* On the sparc, the return address is not in the frame, it is
5994 in a register. There is no way to access it off of the current
5995 frame pointer, but it can be accessed off the previous frame
5996 pointer by reading the value from the register window save area. */
5998 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
5999 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6003 /* Scan back COUNT frames to the specified frame. */
6004 for (i = 0; i < count; i++)
6006 /* Assume the dynamic chain pointer is in the word that
6007 the frame address points to, unless otherwise specified. */
6008 #ifdef DYNAMIC_CHAIN_ADDRESS
6009 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6011 tem = memory_address (Pmode, tem);
6012 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6015 /* For __builtin_frame_address, return what we've got. */
6016 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6019 /* For __builtin_return_address,
6020 Get the return address from that frame. */
6021 #ifdef RETURN_ADDR_RTX
6022 return RETURN_ADDR_RTX (count, tem);
6024 tem = memory_address (Pmode,
6025 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6026 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
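/* A minimal sketch (plain C, hypothetical frame layout) of the walk coded
   above: each frame's saved chain pointer is assumed to sit at the word
   the frame address points to.  */
#if 0
static void *
nth_frame (void *fp, int count)
{
  int i;

  for (i = 0; i < count; i++)
    fp = *(void **) fp;		/* follow the dynamic chain */
  return fp;
}
#endif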
6030 case BUILT_IN_ALLOCA:
6032 /* Arg could be non-integer if user redeclared this fcn wrong. */
6033 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6035 current_function_calls_alloca = 1;
6036 /* Compute the argument. */
6037 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6039 /* Allocate the desired space. */
6040 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6042 /* Record the new stack level for nonlocal gotos. */
6043 if (nonlocal_goto_handler_slot != 0)
6044 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6048 /* If not optimizing, call the library function. */
6053 /* Arg could be non-integer if user redeclared this fcn wrong. */
6054 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6057 /* Compute the argument. */
6058 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6059 /* Compute ffs, into TARGET if possible.
6060 Set TARGET to wherever the result comes back. */
6061 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6062 ffs_optab, op0, target, 1);
6067 case BUILT_IN_STRLEN:
6068 /* If not optimizing, call the library function. */
6073 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6074 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6078 tree src = TREE_VALUE (arglist);
6079 tree len = c_strlen (src);
6082 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6084 rtx result, src_rtx, char_rtx;
6085 enum machine_mode insn_mode = value_mode, char_mode;
6086 enum insn_code icode;
6088 /* If the length is known, just return it. */
6090 return expand_expr (len, target, mode, 0);
6092 /* If SRC is not a pointer type, don't do this operation inline. */
6096 /* Call a function if we can't compute strlen in the right mode. */
6098 while (insn_mode != VOIDmode)
6100 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6101 if (icode != CODE_FOR_nothing)
6104 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6106 if (insn_mode == VOIDmode)
6109 /* Make a place to write the result of the instruction. */
6112 && GET_CODE (result) == REG
6113 && GET_MODE (result) == insn_mode
6114 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6115 result = gen_reg_rtx (insn_mode);
6117 /* Make sure the operands are acceptable to the predicates. */
6119 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6120 result = gen_reg_rtx (insn_mode);
6122 src_rtx = memory_address (BLKmode,
6123 expand_expr (src, NULL_RTX, Pmode,
6125 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6126 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6128 char_rtx = const0_rtx;
6129 char_mode = insn_operand_mode[(int)icode][2];
6130 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6131 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6133 emit_insn (GEN_FCN (icode) (result,
6134 gen_rtx (MEM, BLKmode, src_rtx),
6135 char_rtx, GEN_INT (align)));
6137 /* Return the value in the proper mode for this function. */
6138 if (GET_MODE (result) == value_mode)
6140 else if (target != 0)
6142 convert_move (target, result, 0);
6146 return convert_to_mode (value_mode, result, 0);
6149 case BUILT_IN_STRCPY:
6150 /* If not optimizing, call the library function. */
6155 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6156 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6157 || TREE_CHAIN (arglist) == 0
6158 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6162 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6167 len = size_binop (PLUS_EXPR, len, integer_one_node);
6169 chainon (arglist, build_tree_list (NULL_TREE, len));
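/* Illustrative example: strcpy (buf, "hi") has a known source length of 2,
   so LEN becomes 3 (counting the terminating null) and we drop through to
   the BUILT_IN_MEMCPY code below, expanding the call as if it had been
   written memcpy (buf, "hi", 3).  */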
6173 case BUILT_IN_MEMCPY:
6174 /* If not optimizing, call the library function. */
6179 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6180 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6181 || TREE_CHAIN (arglist) == 0
6182 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6183 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6184 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6188 tree dest = TREE_VALUE (arglist);
6189 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6190 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6193 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6195 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6196 rtx dest_rtx, dest_mem, src_mem;
6198 /* If either SRC or DEST is not a pointer type, don't do
6199 this operation in-line. */
6200 if (src_align == 0 || dest_align == 0)
6202 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6203 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6207 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6208 dest_mem = gen_rtx (MEM, BLKmode,
6209 memory_address (BLKmode, dest_rtx));
6210 src_mem = gen_rtx (MEM, BLKmode,
6211 memory_address (BLKmode,
6212 expand_expr (src, NULL_RTX,
6216 /* Copy word part most expediently. */
6217 emit_block_move (dest_mem, src_mem,
6218 expand_expr (len, NULL_RTX, VOIDmode, 0),
6219 MIN (src_align, dest_align));
6223 /* These comparison functions need an instruction that returns an actual
6224 index.  An ordinary compare that just sets the condition codes is not enough.  */
6226 #ifdef HAVE_cmpstrsi
6227 case BUILT_IN_STRCMP:
6228 /* If not optimizing, call the library function. */
6233 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6234 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6235 || TREE_CHAIN (arglist) == 0
6236 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6238 else if (!HAVE_cmpstrsi)
6241 tree arg1 = TREE_VALUE (arglist);
6242 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6246 len = c_strlen (arg1);
6248 len = size_binop (PLUS_EXPR, integer_one_node, len);
6249 len2 = c_strlen (arg2);
6251 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6253 /* If we don't have a constant length for the first, use the length
6254 of the second, if we know it. We don't require a constant for
6255 this case; some cost analysis could be done if both are available
6256 but neither is constant. For now, assume they're equally cheap.
6258 If both strings have constant lengths, use the smaller. This
6259 could arise if optimization results in strcpy being called with
6260 two fixed strings, or if the code was machine-generated. We should
6261 add some code to the `memcmp' handler below to deal with such
6262 situations, someday. */
6263 if (!len || TREE_CODE (len) != INTEGER_CST)
6270 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6272 if (tree_int_cst_lt (len2, len))
6276 chainon (arglist, build_tree_list (NULL_TREE, len));
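/* Illustrative example: for strcmp (s, "abc"), the length of S is unknown
   but LEN2 is 4 (three characters plus the terminating null), so LEN
   becomes 4 and the comparison expanded by the BUILT_IN_MEMCMP code below
   examines at most 4 bytes.  */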
6280 case BUILT_IN_MEMCMP:
6281 /* If not optimizing, call the library function. */
6286 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6287 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6288 || TREE_CHAIN (arglist) == 0
6289 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6290 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6291 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6293 else if (!HAVE_cmpstrsi)
6296 tree arg1 = TREE_VALUE (arglist);
6297 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6298 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6302 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6304 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6305 enum machine_mode insn_mode
6306 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6308 /* If we don't have POINTER_TYPE, call the function. */
6309 if (arg1_align == 0 || arg2_align == 0)
6311 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6312 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6316 /* Make a place to write the result of the instruction. */
6319 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6320 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6321 result = gen_reg_rtx (insn_mode);
6323 emit_insn (gen_cmpstrsi (result,
6324 gen_rtx (MEM, BLKmode,
6325 expand_expr (arg1, NULL_RTX, Pmode,
6327 gen_rtx (MEM, BLKmode,
6328 expand_expr (arg2, NULL_RTX, Pmode,
6330 expand_expr (len, NULL_RTX, VOIDmode, 0),
6331 GEN_INT (MIN (arg1_align, arg2_align))));
6333 /* Return the value in the proper mode for this function. */
6334 mode = TYPE_MODE (TREE_TYPE (exp));
6335 if (GET_MODE (result) == mode)
6337 else if (target != 0)
6339 convert_move (target, result, 0);
6343 return convert_to_mode (mode, result, 0);
6346 case BUILT_IN_STRCMP:
6347 case BUILT_IN_MEMCMP:
6351 default: /* just do library call, if unknown builtin */
6352 error ("built-in function `%s' not currently supported",
6353 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6356 /* The switch statement above can drop through to cause the function
6357 to be called normally. */
6359 return expand_call (exp, target, ignore);
6362 /* Built-in functions to perform an untyped call and return. */
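/* A typical use of these builtins, in user code (illustrative only; the
   block size 64 is an arbitrary upper bound supplied by the caller):

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) other_function, args, 64);
       __builtin_return (result);

   This forwards the current function's arguments to OTHER_FUNCTION, a
   hypothetical function of unknown signature, and then returns whatever
   value it returned.  */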
6364 /* For each register that may be used for calling a function, this
6365 gives a mode used to copy the register's value. VOIDmode indicates
6366 the register is not used for calling a function. If the machine
6367 has register windows, this gives only the outbound registers.
6368 INCOMING_REGNO gives the corresponding inbound register. */
6369 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6371 /* For each register that may be used for returning values, this gives
6372 a mode used to copy the register's value. VOIDmode indicates the
6373 register is not used for returning values. If the machine has
6374 register windows, this gives only the outbound registers.
6375 INCOMING_REGNO gives the corresponding inbound register. */
6376 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6378 /* Return the size required for the block returned by __builtin_apply_args,
6379 and initialize apply_args_mode. */
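/* Schematically (illustrative), the block whose size is computed here is
   laid out as

       [incoming arg pointer]
       [structure value address, if struct_value_rtx is set]
       [each argument register, in regno order, padded to its mode's alignment]

   and apply_args_mode records the mode chosen for each saved register.  */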
6383 static int size = -1;
6385 enum machine_mode mode;
6387 /* The values computed by this function never change. */
6390 /* The first value is the incoming arg-pointer. */
6391 size = GET_MODE_SIZE (Pmode);
6393 /* The second value is the structure value address unless this is
6394 passed as an "invisible" first argument. */
6395 if (struct_value_rtx)
6396 size += GET_MODE_SIZE (Pmode);
6398 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6399 if (FUNCTION_ARG_REGNO_P (regno))
6401 /* Search for the proper mode for copying this register's
6402 value. I'm not sure this is right, but it works so far. */
6403 enum machine_mode best_mode = VOIDmode;
6405 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6407 mode = GET_MODE_WIDER_MODE (mode))
6408 if (HARD_REGNO_MODE_OK (regno, mode)
6409 && HARD_REGNO_NREGS (regno, mode) == 1)
6412 if (best_mode == VOIDmode)
6413 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6415 mode = GET_MODE_WIDER_MODE (mode))
6416 if (HARD_REGNO_MODE_OK (regno, mode)
6417 && (mov_optab->handlers[(int) mode].insn_code
6418 != CODE_FOR_nothing))
6422 if (mode == VOIDmode)
6425 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6426 if (size % align != 0)
6427 size = CEIL (size, align) * align;
6428 size += GET_MODE_SIZE (mode);
6429 apply_args_mode[regno] = mode;
6432 apply_args_mode[regno] = VOIDmode;
6437 /* Return the size required for the block returned by __builtin_apply,
6438 and initialize apply_result_mode. */
6440 apply_result_size ()
6442 static int size = -1;
6444 enum machine_mode mode;
6446 /* The values computed by this function never change. */
6451 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6452 if (FUNCTION_VALUE_REGNO_P (regno))
6454 /* Search for the proper mode for copying this register's
6455 value. I'm not sure this is right, but it works so far. */
6456 enum machine_mode best_mode = VOIDmode;
6458 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6460 mode = GET_MODE_WIDER_MODE (mode))
6461 if (HARD_REGNO_MODE_OK (regno, mode))
6464 if (best_mode == VOIDmode)
6465 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6467 mode = GET_MODE_WIDER_MODE (mode))
6468 if (HARD_REGNO_MODE_OK (regno, mode)
6469 && (mov_optab->handlers[(int) mode].insn_code
6470 != CODE_FOR_nothing))
6474 if (mode == VOIDmode)
6477 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6478 if (size % align != 0)
6479 size = CEIL (size, align) * align;
6480 size += GET_MODE_SIZE (mode);
6481 apply_result_mode[regno] = mode;
6484 apply_result_mode[regno] = VOIDmode;
6486 /* Allow targets that use untyped_call and untyped_return to override
6487 the size so that machine-specific information can be stored here. */
6488 #ifdef APPLY_RESULT_SIZE
6489 size = APPLY_RESULT_SIZE;
6495 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6496 /* Create a vector describing the result block RESULT. If SAVEP is true,
6497 the result block is used to save the values; otherwise it is used to
6498 restore the values. */
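/* Illustrative sketch of the rtl produced: with SAVEP nonzero each element
   of the PARALLEL is a (set (mem:M <slot in RESULT>) (reg:M <return reg>)),
   and with SAVEP zero the sets run the other way, restoring each return
   register from its slot.  The modes and register numbers are whatever
   apply_result_mode recorded for the target.  */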
6500 result_vector (savep, result)
6504 int regno, size, align, nelts;
6505 enum machine_mode mode;
6507 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6510 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6511 if ((mode = apply_result_mode[regno]) != VOIDmode)
6513 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6514 if (size % align != 0)
6515 size = CEIL (size, align) * align;
6516 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6517 mem = change_address (result, mode,
6518 plus_constant (XEXP (result, 0), size));
6519 savevec[nelts++] = (savep
6520 ? gen_rtx (SET, VOIDmode, mem, reg)
6521 : gen_rtx (SET, VOIDmode, reg, mem));
6522 size += GET_MODE_SIZE (mode);
6524 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6526 #endif /* HAVE_untyped_call or HAVE_untyped_return */
6529 /* Save the state required to perform an untyped call with the same
6530 arguments as were passed to the current function. */
6532 expand_builtin_apply_args ()
6535 int size, align, regno;
6536 enum machine_mode mode;
6538 /* Create a block where the arg-pointer, structure value address,
6539 and argument registers can be saved. */
6540 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6542 /* Walk past the arg-pointer and structure value address. */
6543 size = GET_MODE_SIZE (Pmode);
6544 if (struct_value_rtx)
6545 size += GET_MODE_SIZE (Pmode);
6547 /* Save each register used in calling a function to the block. */
6548 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6549 if ((mode = apply_args_mode[regno]) != VOIDmode)
6551 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6552 if (size % align != 0)
6553 size = CEIL (size, align) * align;
6554 emit_move_insn (change_address (registers, mode,
6555 plus_constant (XEXP (registers, 0),
6557 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6558 size += GET_MODE_SIZE (mode);
6561 /* Save the arg pointer to the block. */
6562 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6563 copy_to_reg (virtual_incoming_args_rtx));
6564 size = GET_MODE_SIZE (Pmode);
6566 /* Save the structure value address unless this is passed as an
6567 "invisible" first argument. */
6568 if (struct_value_incoming_rtx)
6570 emit_move_insn (change_address (registers, Pmode,
6571 plus_constant (XEXP (registers, 0),
6573 copy_to_reg (struct_value_incoming_rtx));
6574 size += GET_MODE_SIZE (Pmode);
6577 /* Return the address of the block. */
6578 return copy_addr_to_reg (XEXP (registers, 0));
6581 /* Perform an untyped call and save the state required to perform an
6582 untyped return of whatever value was returned by the given function. */
6584 expand_builtin_apply (function, arguments, argsize)
6585 rtx function, arguments, argsize;
6587 int size, align, regno;
6588 enum machine_mode mode;
6589 rtx incoming_args, result, reg, dest, call_insn;
6590 rtx old_stack_level = 0;
6593 /* Create a block where the return registers can be saved. */
6594 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6596 /* ??? The argsize value should be adjusted here. */
6598 /* Fetch the arg pointer from the ARGUMENTS block. */
6599 incoming_args = gen_reg_rtx (Pmode);
6600 emit_move_insn (incoming_args,
6601 gen_rtx (MEM, Pmode, arguments));
6602 #ifndef STACK_GROWS_DOWNWARD
6603 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6604 incoming_args, 0, OPTAB_LIB_WIDEN);
6607 /* Perform postincrements before actually calling the function. */
6610 /* Push a new argument block and copy the arguments. */
6611 do_pending_stack_adjust ();
6612 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6614 /* Push a block of memory onto the stack to store the memory arguments.
6615 Save the address in a register, and copy the memory arguments. ??? I
6616 haven't figured out how the calling convention macros affect this,
6617 but it's likely that the source and/or destination addresses in
6618 the block copy will need updating in machine specific ways. */
6619 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6620 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6621 gen_rtx (MEM, BLKmode, incoming_args),
6623 PARM_BOUNDARY / BITS_PER_UNIT);
6625 /* Refer to the argument block. */
6627 arguments = gen_rtx (MEM, BLKmode, arguments);
6629 /* Walk past the arg-pointer and structure value address. */
6630 size = GET_MODE_SIZE (Pmode);
6631 if (struct_value_rtx)
6632 size += GET_MODE_SIZE (Pmode);
6634 /* Restore each of the registers previously saved. Make USE insns
6635 for each of these registers for use in making the call. */
6636 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6637 if ((mode = apply_args_mode[regno]) != VOIDmode)
6639 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6640 if (size % align != 0)
6641 size = CEIL (size, align) * align;
6642 reg = gen_rtx (REG, mode, regno);
6643 emit_move_insn (reg,
6644 change_address (arguments, mode,
6645 plus_constant (XEXP (arguments, 0),
6648 push_to_sequence (use_insns);
6649 emit_insn (gen_rtx (USE, VOIDmode, reg));
6650 use_insns = get_insns ();
6652 size += GET_MODE_SIZE (mode);
6655 /* Restore the structure value address unless this is passed as an
6656 "invisible" first argument. */
6657 size = GET_MODE_SIZE (Pmode);
6658 if (struct_value_rtx)
6660 rtx value = gen_reg_rtx (Pmode);
6661 emit_move_insn (value,
6662 change_address (arguments, Pmode,
6663 plus_constant (XEXP (arguments, 0),
6665 emit_move_insn (struct_value_rtx, value);
6666 if (GET_CODE (struct_value_rtx) == REG)
6668 push_to_sequence (use_insns);
6669 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6670 use_insns = get_insns ();
6673 size += GET_MODE_SIZE (Pmode);
6676 /* All arguments and registers used for the call are set up by now! */
6677 function = prepare_call_address (function, NULL_TREE, &use_insns);
6679 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
6680 and we don't want to load it into a register as an optimization,
6681 because prepare_call_address already did it if it should be done. */
6682 if (GET_CODE (function) != SYMBOL_REF)
6683 function = memory_address (FUNCTION_MODE, function);
6685 /* Generate the actual call instruction and save the return value. */
6686 #ifdef HAVE_untyped_call
6687 if (HAVE_untyped_call)
6688 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6689 result, result_vector (1, result)));
6692 #ifdef HAVE_call_value
6693 if (HAVE_call_value)
6697 /* Locate the unique return register. It is not possible to
6698 express a call that sets more than one return register using
6699 call_value; use untyped_call for that. In fact, untyped_call
6700 only needs to save the return registers in the given block. */
6701 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6702 if ((mode = apply_result_mode[regno]) != VOIDmode)
6705 abort (); /* HAVE_untyped_call required. */
6706 valreg = gen_rtx (REG, mode, regno);
6709 emit_call_insn (gen_call_value (valreg,
6710 gen_rtx (MEM, FUNCTION_MODE, function),
6711 const0_rtx, NULL_RTX, const0_rtx));
6713 emit_move_insn (change_address (result, GET_MODE (valreg),
6721 /* Find the CALL insn we just emitted and write the USE insns before it. */
6722 for (call_insn = get_last_insn ();
6723 call_insn && GET_CODE (call_insn) != CALL_INSN;
6724 call_insn = PREV_INSN (call_insn))
6730 /* Put the USE insns before the CALL. */
6731 emit_insns_before (use_insns, call_insn);
6733 /* Restore the stack. */
6734 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6736 /* Return the address of the result block. */
6737 return copy_addr_to_reg (XEXP (result, 0));
6740 /* Perform an untyped return. */
6742 expand_builtin_return (result)
6745 int size, align, regno;
6746 enum machine_mode mode;
6750 apply_result_size ();
6751 result = gen_rtx (MEM, BLKmode, result);
6753 #ifdef HAVE_untyped_return
6754 if (HAVE_untyped_return)
6756 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6762 /* Restore the return value and note that each value is used. */
6764 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6765 if ((mode = apply_result_mode[regno]) != VOIDmode)
6767 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6768 if (size % align != 0)
6769 size = CEIL (size, align) * align;
6770 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6771 emit_move_insn (reg,
6772 change_address (result, mode,
6773 plus_constant (XEXP (result, 0),
6776 push_to_sequence (use_insns);
6777 emit_insn (gen_rtx (USE, VOIDmode, reg));
6778 use_insns = get_insns ();
6780 size += GET_MODE_SIZE (mode);
6783 /* Put the USE insns before the return. */
6784 emit_insns (use_insns);
6786 /* Return whatever values were restored by jumping directly to the end of the function.  */
6788 expand_null_return ();
6791 /* Expand code for a post- or pre- increment or decrement
6792 and return the RTX for the result.
6793 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
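/* Illustrative examples: for x++ POST is 1 and the rtx returned holds the
   old value of x, while for --x POST is 0 and the rtx returned holds the
   newly decremented value.  */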
6796 expand_increment (exp, post)
6800 register rtx op0, op1;
6801 register rtx temp, value;
6802 register tree incremented = TREE_OPERAND (exp, 0);
6803 optab this_optab = add_optab;
6805 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6806 int op0_is_copy = 0;
6808 /* Stabilize any component ref that might need to be
6809 evaluated more than once below. */
6811 || TREE_CODE (incremented) == BIT_FIELD_REF
6812 || (TREE_CODE (incremented) == COMPONENT_REF
6813 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6814 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6815 incremented = stabilize_reference (incremented);
6817 /* Compute the operands as RTX.
6818 Note whether OP0 is the actual lvalue or a copy of it:
6819 I believe it is a copy iff it is a register or subreg
6820 and insns were generated in computing it. */
6822 temp = get_last_insn ();
6823 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6825 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6826 in place but instead must do sign- or zero-extension during assignment,
6827 so we copy it into a new register and let the code below use it as a copy.
6830 Note that we can safely modify this SUBREG since it is known not to be
6831 shared (it was made by the expand_expr call above). */
6833 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6834 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6836 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6837 && temp != get_last_insn ());
6838 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6840 /* Decide whether incrementing or decrementing. */
6841 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6842 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6843 this_optab = sub_optab;
6845 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6846 then we cannot just increment OP0. We must therefore contrive to
6847 increment the original value. Then, for postincrement, we can return
6848 OP0 since it is a copy of the old value. For preincrement, we want
6849 to always expand here, since this generates better or equivalent code. */
6850 if (!post || op0_is_copy)
6852 /* This is the easiest way to increment the value wherever it is.
6853 Problems with multiple evaluation of INCREMENTED are prevented
6854 because either (1) it is a component_ref or preincrement,
6855 in which case it was stabilized above, or (2) it is an array_ref
6856 with constant index in an array in a register, which is
6857 safe to reevaluate. */
6858 tree newexp = build ((this_optab == add_optab
6859 ? PLUS_EXPR : MINUS_EXPR),
6862 TREE_OPERAND (exp, 1));
6863 temp = expand_assignment (incremented, newexp, ! post, 0);
6864 return post ? op0 : temp;
6867 /* Convert decrement by a constant into a negative increment. */
6868 if (this_optab == sub_optab
6869 && GET_CODE (op1) == CONST_INT)
6871 op1 = GEN_INT (- INTVAL (op1));
6872 this_optab = add_optab;
6877 /* We have a true reference to the value in OP0.
6878 If there is an insn to add or subtract in this mode, queue it. */
6880 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6881 op0 = stabilize (op0);
6884 icode = (int) this_optab->handlers[(int) mode].insn_code;
6885 if (icode != (int) CODE_FOR_nothing
6886 /* Make sure that OP0 is valid for operands 0 and 1
6887 of the insn we want to queue. */
6888 && (*insn_operand_predicate[icode][0]) (op0, mode)
6889 && (*insn_operand_predicate[icode][1]) (op0, mode))
6891 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6892 op1 = force_reg (mode, op1);
6894 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6898 /* Preincrement, or we can't increment with one simple insn. */
6900 /* Save a copy of the value before inc or dec, to return it later. */
6901 temp = value = copy_to_reg (op0);
6903 /* Arrange to return the incremented value. */
6904 /* Copy the rtx because expand_binop will protect from the queue,
6905 and the results of that would be invalid for us to return
6906 if our caller does emit_queue before using our result. */
6907 temp = copy_rtx (value = op0);
6909 /* Increment however we can. */
6910 op1 = expand_binop (mode, this_optab, value, op1, op0,
6911 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6912 /* Make sure the value is stored into OP0. */
6914 emit_move_insn (op0, op1);
6919 /* Expand all function calls contained within EXP, innermost ones first.
6920 But don't look within expressions that have sequence points.
6921 For each CALL_EXPR, record the rtx for its value
6922 in the CALL_EXPR_RTL field. */
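/* Illustrative example: when expanding f (x) + g (y), both calls are
   expanded here first and their values recorded, so that by the time the
   addition itself is expanded each CALL_EXPR simply yields the rtx already
   saved in its CALL_EXPR_RTL field.  */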
6925 preexpand_calls (exp)
6928 register int nops, i;
6929 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6931 if (! do_preexpand_calls)
6934 /* Only expressions and references can contain calls. */
6936 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6939 switch (TREE_CODE (exp))
6942 /* Do nothing if already expanded. */
6943 if (CALL_EXPR_RTL (exp) != 0)
6946 /* Do nothing to built-in functions. */
6947 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6948 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6949 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6950 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6955 case TRUTH_ANDIF_EXPR:
6956 case TRUTH_ORIF_EXPR:
6957 /* If we find one of these, then we can be sure
6958 the adjust will be done for it (since it makes jumps).
6959 Do it now, so that if this is inside an argument
6960 of a function, we don't get the stack adjustment
6961 after some other args have already been pushed. */
6962 do_pending_stack_adjust ();
6967 case WITH_CLEANUP_EXPR:
6971 if (SAVE_EXPR_RTL (exp) != 0)
6975 nops = tree_code_length[(int) TREE_CODE (exp)];
6976 for (i = 0; i < nops; i++)
6977 if (TREE_OPERAND (exp, i) != 0)
6979 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6980 if (type == 'e' || type == '<' || type == '1' || type == '2'
6982 preexpand_calls (TREE_OPERAND (exp, i));
6986 /* At the start of a function, record that we have no previously-pushed
6987 arguments waiting to be popped. */
6990 init_pending_stack_adjust ()
6992 pending_stack_adjust = 0;
6995 /* When exiting from function, if safe, clear out any pending stack adjust
6996 so the adjustment won't get done. */
6999 clear_pending_stack_adjust ()
7001 #ifdef EXIT_IGNORE_STACK
7002 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
7003 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
7004 && ! flag_inline_functions)
7005 pending_stack_adjust = 0;
7009 /* Pop any previously-pushed arguments that have not been popped yet. */
7012 do_pending_stack_adjust ()
7014 if (inhibit_defer_pop == 0)
7016 if (pending_stack_adjust != 0)
7017 adjust_stack (GEN_INT (pending_stack_adjust));
7018 pending_stack_adjust = 0;
7022 /* Expand all cleanups up to OLD_CLEANUPS.
7023 Needed here, and also for language-dependent calls. */
7026 expand_cleanups_to (old_cleanups)
7029 while (cleanups_this_call != old_cleanups)
7031 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
7032 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
7036 /* Expand conditional expressions. */
7038 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
7039 LABEL is an rtx of code CODE_LABEL, in this function and all the
7043 jumpifnot (exp, label)
7047 do_jump (exp, label, NULL_RTX);
7050 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7057 do_jump (exp, NULL_RTX, label);
7060 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7061 the result is zero, or IF_TRUE_LABEL if the result is one.
7062 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7063 meaning fall through in that case.
7065 do_jump always does any pending stack adjust except when it does not
7066 actually perform a jump. An example where there is no jump
7067 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7069 This function is responsible for optimizing cases such as
7070 &&, || and comparison operators in EXP. */
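/* Illustrative example: for the condition in `if (a && b)', EXP is a
   TRUTH_ANDIF_EXPR; the code below jumps to IF_FALSE_LABEL as soon as A is
   found to be zero and only then evaluates and tests B, so B is never
   evaluated when A is false.  */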
7073 do_jump (exp, if_false_label, if_true_label)
7075 rtx if_false_label, if_true_label;
7077 register enum tree_code code = TREE_CODE (exp);
7078 /* Some cases need to create a label to jump to
7079 in order to properly fall through.
7080 These cases set DROP_THROUGH_LABEL nonzero. */
7081 rtx drop_through_label = 0;
7095 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7101 /* This is not true with #pragma weak */
7103 /* The address of something can never be zero. */
7105 emit_jump (if_true_label);
7110 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7111 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7112 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7115 /* If we are narrowing the operand, we have to do the compare in the narrower type.  */
7117 if ((TYPE_PRECISION (TREE_TYPE (exp))
7118 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7120 case NON_LVALUE_EXPR:
7121 case REFERENCE_EXPR:
7126 /* These cannot change zero->non-zero or vice versa. */
7127 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7131 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
7132 a test and can be longer if the test is eliminated. */
7134 /* Reduce to minus. */
7135 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7136 TREE_OPERAND (exp, 0),
7137 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7138 TREE_OPERAND (exp, 1))));
7139 /* Process as MINUS. */
7143 /* Non-zero iff operands of minus differ. */
7144 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7145 TREE_OPERAND (exp, 0),
7146 TREE_OPERAND (exp, 1)),
7151 /* If we are AND'ing with a small constant, do this comparison in the
7152 smallest type that fits. If the machine doesn't have comparisons
7153 that small, it will be converted back to the wider comparison.
7154 This helps if we are testing the sign bit of a narrower object.
7155 combine can't do this for us because it can't know whether a
7156 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
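/* Illustrative example: for a test like `if (x & 0x80)' with X an int, I is
   7, so TYPE is an 8-bit unsigned type; on a machine with a QImode compare
   insn the test is then done in QImode rather than in the full width of X.  */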
7158 if (! SLOW_BYTE_ACCESS
7159 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7160 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7161 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7162 && (type = type_for_size (i + 1, 1)) != 0
7163 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7164 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7165 != CODE_FOR_nothing))
7167 do_jump (convert (type, exp), if_false_label, if_true_label);
7172 case TRUTH_NOT_EXPR:
7173 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7176 case TRUTH_ANDIF_EXPR:
7177 if (if_false_label == 0)
7178 if_false_label = drop_through_label = gen_label_rtx ();
7179 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7180 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7183 case TRUTH_ORIF_EXPR:
7184 if (if_true_label == 0)
7185 if_true_label = drop_through_label = gen_label_rtx ();
7186 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7187 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7191 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7194 do_pending_stack_adjust ();
7195 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7202 int bitsize, bitpos, unsignedp;
7203 enum machine_mode mode;
7208 /* Get description of this reference. We don't actually care
7209 about the underlying object here. */
7210 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7211 &mode, &unsignedp, &volatilep);
7213 type = type_for_size (bitsize, unsignedp);
7214 if (! SLOW_BYTE_ACCESS
7215 && type != 0 && bitsize >= 0
7216 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7217 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7218 != CODE_FOR_nothing))
7220 do_jump (convert (type, exp), if_false_label, if_true_label);
7227 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7228 if (integer_onep (TREE_OPERAND (exp, 1))
7229 && integer_zerop (TREE_OPERAND (exp, 2)))
7230 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7232 else if (integer_zerop (TREE_OPERAND (exp, 1))
7233 && integer_onep (TREE_OPERAND (exp, 2)))
7234 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7238 register rtx label1 = gen_label_rtx ();
7239 drop_through_label = gen_label_rtx ();
7240 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7241 /* Now the THEN-expression. */
7242 do_jump (TREE_OPERAND (exp, 1),
7243 if_false_label ? if_false_label : drop_through_label,
7244 if_true_label ? if_true_label : drop_through_label);
7245 /* In case the do_jump just above never jumps. */
7246 do_pending_stack_adjust ();
7247 emit_label (label1);
7248 /* Now the ELSE-expression. */
7249 do_jump (TREE_OPERAND (exp, 2),
7250 if_false_label ? if_false_label : drop_through_label,
7251 if_true_label ? if_true_label : drop_through_label);
7256 if (integer_zerop (TREE_OPERAND (exp, 1)))
7257 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7258 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7261 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7262 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7264 comparison = compare (exp, EQ, EQ);
7268 if (integer_zerop (TREE_OPERAND (exp, 1)))
7269 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7270 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7273 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7274 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7276 comparison = compare (exp, NE, NE);
7280 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7282 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7283 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7285 comparison = compare (exp, LT, LTU);
7289 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7291 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7292 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7294 comparison = compare (exp, LE, LEU);
7298 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7300 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7301 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7303 comparison = compare (exp, GT, GTU);
7307 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7309 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7310 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7312 comparison = compare (exp, GE, GEU);
7317 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7319 /* This is not needed any more and causes poor code since it causes
7320 comparisons and tests from non-SI objects to have different code sequences.  */
7322 /* Copy to register to avoid generating bad insns by cse
7323 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7324 if (!cse_not_expected && GET_CODE (temp) == MEM)
7325 temp = copy_to_reg (temp);
7327 do_pending_stack_adjust ();
7328 if (GET_CODE (temp) == CONST_INT)
7329 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7330 else if (GET_CODE (temp) == LABEL_REF)
7331 comparison = const_true_rtx;
7332 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7333 && !can_compare_p (GET_MODE (temp)))
7334 /* Note swapping the labels gives us not-equal. */
7335 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7336 else if (GET_MODE (temp) != VOIDmode)
7337 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7338 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7339 GET_MODE (temp), NULL_RTX, 0);
7344 /* Do any postincrements in the expression that was tested. */
7347 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7348 straight into a conditional jump instruction as the jump condition.
7349 Otherwise, all the work has been done already. */
7351 if (comparison == const_true_rtx)
7354 emit_jump (if_true_label);
7356 else if (comparison == const0_rtx)
7359 emit_jump (if_false_label);
7361 else if (comparison)
7362 do_jump_for_compare (comparison, if_false_label, if_true_label);
7366 if (drop_through_label)
7368 /* If do_jump produces code that might be jumped around,
7369 do any stack adjusts from that code, before the place
7370 where control merges in. */
7371 do_pending_stack_adjust ();
7372 emit_label (drop_through_label);
7376 /* Given a comparison expression EXP for values too wide to be compared
7377 with one insn, test the comparison and jump to the appropriate label.
7378 The code of EXP is ignored; we always test GT if SWAP is 0,
7379 and LT if SWAP is 1. */
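/* Illustrative example: on a 32-bit target, comparing two DImode (64-bit)
   values is done as two SImode word comparisons; the high-order words are
   compared first, and the low-order words are examined only when the high
   words turn out to be equal.  */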
7382 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7385 rtx if_false_label, if_true_label;
7387 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7388 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7389 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7390 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7391 rtx drop_through_label = 0;
7392 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7395 if (! if_true_label || ! if_false_label)
7396 drop_through_label = gen_label_rtx ();
7397 if (! if_true_label)
7398 if_true_label = drop_through_label;
7399 if (! if_false_label)
7400 if_false_label = drop_through_label;
7402 /* Compare a word at a time, high order first. */
7403 for (i = 0; i < nwords; i++)
7406 rtx op0_word, op1_word;
7408 if (WORDS_BIG_ENDIAN)
7410 op0_word = operand_subword_force (op0, i, mode);
7411 op1_word = operand_subword_force (op1, i, mode);
7415 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7416 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7419 /* All but high-order word must be compared as unsigned. */
7420 comp = compare_from_rtx (op0_word, op1_word,
7421 (unsignedp || i > 0) ? GTU : GT,
7422 unsignedp, word_mode, NULL_RTX, 0);
7423 if (comp == const_true_rtx)
7424 emit_jump (if_true_label);
7425 else if (comp != const0_rtx)
7426 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7428 /* Consider lower words only if these are equal. */
7429 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7431 if (comp == const_true_rtx)
7432 emit_jump (if_false_label);
7433 else if (comp != const0_rtx)
7434 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7438 emit_jump (if_false_label);
7439 if (drop_through_label)
7440 emit_label (drop_through_label);
7443 /* Compare OP0 with OP1, word at a time, in mode MODE.
7444 UNSIGNEDP says to do unsigned comparison.
7445 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7448 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7449 enum machine_mode mode;
7452 rtx if_false_label, if_true_label;
7454 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7455 rtx drop_through_label = 0;
7458 if (! if_true_label || ! if_false_label)
7459 drop_through_label = gen_label_rtx ();
7460 if (! if_true_label)
7461 if_true_label = drop_through_label;
7462 if (! if_false_label)
7463 if_false_label = drop_through_label;
7465 /* Compare a word at a time, high order first. */
7466 for (i = 0; i < nwords; i++)
7469 rtx op0_word, op1_word;
7471 if (WORDS_BIG_ENDIAN)
7473 op0_word = operand_subword_force (op0, i, mode);
7474 op1_word = operand_subword_force (op1, i, mode);
7478 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7479 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7482 /* All but high-order word must be compared as unsigned. */
7483 comp = compare_from_rtx (op0_word, op1_word,
7484 (unsignedp || i > 0) ? GTU : GT,
7485 unsignedp, word_mode, NULL_RTX, 0);
7486 if (comp == const_true_rtx)
7487 emit_jump (if_true_label);
7488 else if (comp != const0_rtx)
7489 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7491 /* Consider lower words only if these are equal. */
7492 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7494 if (comp == const_true_rtx)
7495 emit_jump (if_false_label);
7496 else if (comp != const0_rtx)
7497 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7501 emit_jump (if_false_label);
7502 if (drop_through_label)
7503 emit_label (drop_through_label);
7506 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7507 with one insn, test the comparison and jump to the appropriate label. */
7510 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7512 rtx if_false_label, if_true_label;
7514 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7515 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7516 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7517 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7519 rtx drop_through_label = 0;
7521 if (! if_false_label)
7522 drop_through_label = if_false_label = gen_label_rtx ();
7524 for (i = 0; i < nwords; i++)
7526 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7527 operand_subword_force (op1, i, mode),
7528 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7529 word_mode, NULL_RTX, 0);
7530 if (comp == const_true_rtx)
7531 emit_jump (if_false_label);
7532 else if (comp != const0_rtx)
7533 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7537 emit_jump (if_true_label);
7538 if (drop_through_label)
7539 emit_label (drop_through_label);
7542 /* Jump according to whether OP0 is 0.
7543 We assume that OP0 has an integer mode that is too wide
7544 for the available compare insns. */
7547 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7549 rtx if_false_label, if_true_label;
7551 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7553 rtx drop_through_label = 0;
7555 if (! if_false_label)
7556 drop_through_label = if_false_label = gen_label_rtx ();
7558 for (i = 0; i < nwords; i++)
7560 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7562 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7563 if (comp == const_true_rtx)
7564 emit_jump (if_false_label);
7565 else if (comp != const0_rtx)
7566 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7570 emit_jump (if_true_label);
7571 if (drop_through_label)
7572 emit_label (drop_through_label);
7575 /* Given a comparison expression in rtl form, output conditional branches to
7576 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7579 do_jump_for_compare (comparison, if_false_label, if_true_label)
7580 rtx comparison, if_false_label, if_true_label;
7584 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7585 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7590 emit_jump (if_false_label);
7592 else if (if_false_label)
7595 rtx prev = PREV_INSN (get_last_insn ());
7598 /* Output the branch with the opposite condition. Then try to invert
7599 what is generated. If more than one insn is a branch, or if the
7600 branch is not the last insn written, abort. If we can't invert
7601 the branch, make a true label, redirect this jump to it,
7602 emit a jump to the false label and define the true label. */
7604 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7605 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7609 /* Here we get the insn before what was just emitted.
7610 On some machines, emitting the branch can discard
7611 the previous compare insn and emit a replacement. */
7613 /* If there's only one preceding insn... */
7614 insn = get_insns ();
7616 insn = NEXT_INSN (prev);
7618 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7619 if (GET_CODE (insn) == JUMP_INSN)
7626 if (branch != get_last_insn ())
7629 if (! invert_jump (branch, if_false_label))
7631 if_true_label = gen_label_rtx ();
7632 redirect_jump (branch, if_true_label);
7633 emit_jump (if_false_label);
7634 emit_label (if_true_label);
7639 /* Generate code for a comparison expression EXP
7640 (including code to compute the values to be compared)
7641 and set (CC0) according to the result.
7642 SIGNED_CODE should be the rtx operation for this comparison for
7643 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7645 We force a stack adjustment unless there are currently
7646 things pushed on the stack that aren't yet used. */
7649 compare (exp, signed_code, unsigned_code)
7651 enum rtx_code signed_code, unsigned_code;
7654 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7656 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7657 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7658 register enum machine_mode mode = TYPE_MODE (type);
7659 int unsignedp = TREE_UNSIGNED (type);
7660 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7662 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7664 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7665 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7668 /* Like compare but expects the values to compare as two rtx's.
7669 The decision as to signed or unsigned comparison must be made by the caller.
7671 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.
7674 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7675 size of MODE should be used. */
7678 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7679 register rtx op0, op1;
7682 enum machine_mode mode;
7688 /* If one operand is constant, make it the second one. Only do this
7689 if the other operand is not constant as well. */
7691 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7692 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7697 code = swap_condition (code);
7702 op0 = force_not_mem (op0);
7703 op1 = force_not_mem (op1);
7706 do_pending_stack_adjust ();
7708 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7709 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7713 /* There's no need to do this now that combine.c can eliminate lots of
7714 sign extensions. This can be less efficient in certain cases on other machines.  */
7717 /* If this is a signed equality comparison, we can do it as an
7718 unsigned comparison since zero-extension is cheaper than sign
7719 extension and comparisons with zero are done as unsigned. This is
7720 the case even on machines that can do fast sign extension, since
7721 zero-extension is easier to combine with other operations than
7722 sign-extension is. If we are comparing against a constant, we must
7723 convert it to what it would look like unsigned. */
7724 if ((code == EQ || code == NE) && ! unsignedp
7725 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7727 if (GET_CODE (op1) == CONST_INT
7728 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7729 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7734 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7736 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7739 /* Generate code to calculate EXP using a store-flag instruction
7740 and return an rtx for the result. EXP is either a comparison
7741 or a TRUTH_NOT_EXPR whose operand is a comparison.
7743 If TARGET is nonzero, store the result there if convenient.
7745 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
7748 Return zero if there is no suitable set-flag instruction
7749 available on this machine.
7751 Once expand_expr has been called on the arguments of the comparison,
7752 we are committed to doing the store flag, since it is not safe to
7753 re-evaluate the expression. We emit the store-flag insn by calling
7754 emit_store_flag, but only expand the arguments if we have a reason
7755 to believe that emit_store_flag will be successful. If we think that
7756 it will, but it isn't, we have to simulate the store-flag with a
7757 set/jump/set sequence. */
7760 do_store_flag (exp, target, mode, only_cheap)
7763 enum machine_mode mode;
7767 tree arg0, arg1, type;
7769 enum machine_mode operand_mode;
7773 enum insn_code icode;
7774 rtx subtarget = target;
7775 rtx result, label, pattern, jump_pat;
7777 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7778 result at the end. We can't simply invert the test since it would
7779 have already been inverted if it were valid. This case occurs for
7780 some floating-point comparisons. */
7782 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7783 invert = 1, exp = TREE_OPERAND (exp, 0);
7785 arg0 = TREE_OPERAND (exp, 0);
7786 arg1 = TREE_OPERAND (exp, 1);
7787 type = TREE_TYPE (arg0);
7788 operand_mode = TYPE_MODE (type);
7789 unsignedp = TREE_UNSIGNED (type);
7791 /* We won't bother with BLKmode store-flag operations because it would mean
7792 passing a lot of information to emit_store_flag. */
7793 if (operand_mode == BLKmode)
7799 /* Get the rtx comparison code to use. We know that EXP is a comparison
7800 operation of some type. Some comparisons against 1 and -1 can be
7801 converted to comparisons with zero. Do so here so that the tests
7802 below will be aware that we have a comparison with zero. These
7803 tests will not catch constants in the first operand, but constants
7804 are rarely passed as the first operand. */
7806 switch (TREE_CODE (exp))
7815 if (integer_onep (arg1))
7816 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7818 code = unsignedp ? LTU : LT;
7821 if (! unsignedp && integer_all_onesp (arg1))
7822 arg1 = integer_zero_node, code = LT;
7824 code = unsignedp ? LEU : LE;
7827 if (! unsignedp && integer_all_onesp (arg1))
7828 arg1 = integer_zero_node, code = GE;
7830 code = unsignedp ? GTU : GT;
7833 if (integer_onep (arg1))
7834 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7836 code = unsignedp ? GEU : GE;
7842 /* Put a constant second. */
7843 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7845 tem = arg0; arg0 = arg1; arg1 = tem;
7846 code = swap_condition (code);
7849 /* If this is an equality or inequality test of a single bit, we can
7850 do this by shifting the bit being tested to the low-order bit and
7851 masking the result with the constant 1. If the condition was EQ,
7852 we xor it with 1. This does not require an scc insn and is faster
7853 than an scc insn even if we have it. */
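/* Illustrative example: `(x & 4) != 0' is expanded as (x >> 2) & 1, and
   `(x & 4) == 0' as ((x >> 2) & 1) ^ 1, with no scc insn or branch needed
   in either case.  */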
7855 if ((code == NE || code == EQ)
7856 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7857 && integer_pow2p (TREE_OPERAND (arg0, 1))
7858 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7860 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7861 NULL_RTX, VOIDmode, 0)));
7863 if (subtarget == 0 || GET_CODE (subtarget) != REG
7864 || GET_MODE (subtarget) != operand_mode
7865 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7868 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7871 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7872 size_int (bitnum), target, 1);
7874 if (GET_MODE (op0) != mode)
7875 op0 = convert_to_mode (mode, op0, 1);
7877 if (bitnum != TYPE_PRECISION (type) - 1)
7878 op0 = expand_and (op0, const1_rtx, target);
7880 if ((code == EQ && ! invert) || (code == NE && invert))
7881 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7887 /* Now see if we are likely to be able to do this. Return if not. */
7888 if (! can_compare_p (operand_mode))
7890 icode = setcc_gen_code[(int) code];
7891 if (icode == CODE_FOR_nothing
7892 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7894 /* We can only do this if it is one of the special cases that
7895 can be handled without an scc insn. */
7896 if ((code == LT && integer_zerop (arg1))
7897 || (! only_cheap && code == GE && integer_zerop (arg1)))
7899 else if (BRANCH_COST >= 0
7900 && ! only_cheap && (code == NE || code == EQ)
7901 && TREE_CODE (type) != REAL_TYPE
7902 && ((abs_optab->handlers[(int) operand_mode].insn_code
7903 != CODE_FOR_nothing)
7904 || (ffs_optab->handlers[(int) operand_mode].insn_code
7905 != CODE_FOR_nothing)))
7911 preexpand_calls (exp);
7912 if (subtarget == 0 || GET_CODE (subtarget) != REG
7913 || GET_MODE (subtarget) != operand_mode
7914 || ! safe_from_p (subtarget, arg1))
7917 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7918 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7921 target = gen_reg_rtx (mode);
7923 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7924 because, if the emit_store_flag does anything it will succeed and
7925 OP0 and OP1 will not be used subsequently. */
7927 result = emit_store_flag (target, code,
7928 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7929 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7930 operand_mode, unsignedp, 1);
7935 result = expand_binop (mode, xor_optab, result, const1_rtx,
7936 result, 0, OPTAB_LIB_WIDEN);
7940 /* If this failed, we have to do this with set/compare/jump/set code. */
7941 if (target == 0 || GET_CODE (target) != REG
7942 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7943 target = gen_reg_rtx (GET_MODE (target));
7945 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7946 result = compare_from_rtx (op0, op1, code, unsignedp,
7947 operand_mode, NULL_RTX, 0);
7948 if (GET_CODE (result) == CONST_INT)
7949 return (((result == const0_rtx && ! invert)
7950 || (result != const0_rtx && invert))
7951 ? const0_rtx : const1_rtx);
7953 label = gen_label_rtx ();
7954 if (bcc_gen_fctn[(int) code] == 0)
7957 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7958 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7964 /* Generate a tablejump instruction (used for switch statements). */
7966 #ifdef HAVE_tablejump
7968 /* INDEX is the value being switched on, with the lowest value
7969 in the table already subtracted.
7970 MODE is its expected mode (needed if INDEX is constant).
7971 RANGE is the length of the jump table.
7972 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7974 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7975 index value is out of range. */
7978 do_tablejump (index, mode, range, table_label, default_label)
7979 rtx index, range, table_label, default_label;
7980 enum machine_mode mode;
7982 register rtx temp, vector;
7984 /* Do an unsigned comparison (in the proper mode) between the index
7985 expression and the value which represents the length of the range.
7986 Since we just finished subtracting the lower bound of the range
7987 from the index expression, this comparison allows us to simultaneously
7988 check that the original index expression value is both greater than
7989 or equal to the minimum value of the range and less than or equal to
7990 the maximum value of the range. */
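/* Illustrative example: for a switch whose case values run from 5 to 10,
   INDEX is the switch value minus 5 and RANGE is 5.  A switch value below 5
   wraps around to a huge unsigned INDEX, so the single unsigned comparison
   below sends both too-small and too-large values to DEFAULT_LABEL.  */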
7992 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
7993 emit_jump_insn (gen_bltu (default_label));
7995 /* If index is in range, it must fit in Pmode.
7996 Convert to Pmode so we can index with it. */
7998 index = convert_to_mode (Pmode, index, 1);
8000 /* If flag_force_addr were to affect this address
8001 it could interfere with the tricky assumptions made
8002 about addresses that contain label-refs,
8003 which may be valid only very near the tablejump itself. */
8004 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8005 GET_MODE_SIZE, because this indicates how large insns are. The other
8006 uses should all be Pmode, because they are addresses. This code
8007 could fail if addresses and insns are not the same size. */
8008 index = memory_address_noforce
8010 gen_rtx (PLUS, Pmode,
8011 gen_rtx (MULT, Pmode, index,
8012 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8013 gen_rtx (LABEL_REF, Pmode, table_label)));
8014 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8015 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
8016 RTX_UNCHANGING_P (vector) = 1;
8017 convert_move (temp, vector, 0);
8019 emit_jump_insn (gen_tablejump (temp, table_label));
8021 #ifndef CASE_VECTOR_PC_RELATIVE
8022 /* If we are generating PIC code or if the table is PC-relative, the
8023 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8029 #endif /* HAVE_tablejump */