1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
32 #include "typeclass.h"
34 #define CEIL(x,y) (((x) + (y) - 1) / (y))
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
50 #ifndef STACK_PUSH_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_PUSH_CODE PRE_DEC
54 #define STACK_PUSH_CODE PRE_INC
58 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
59 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
61 /* If this is nonzero, we do not bother generating VOLATILE
62 around volatile memory references, and we are willing to
63 output indirect addresses. If cse is to follow, we reject
64 indirect addresses so a useful potential cse is generated;
65 if it is used only once, instruction combination will produce
66 the same indirect address eventually. */
69 /* Nonzero to generate code for all the subroutines within an
70 expression before generating the upper levels of the expression.
71 Nowadays this is never zero. */
72 int do_preexpand_calls = 1;
74 /* Number of units that we should eventually pop off the stack.
75 These are the arguments to function calls that have already returned. */
76 int pending_stack_adjust;
78 /* Nonzero means stack pops must not be deferred, and deferred stack
79 pops must not be output. It is nonzero inside a function call,
80 inside a conditional expression, inside a statement expression,
81 and in other cases as well. */
82 int inhibit_defer_pop;
84 /* A list of all cleanups which belong to the arguments of
85 function calls being expanded by expand_call. */
86 tree cleanups_this_call;
88 /* Nonzero means __builtin_saveregs has already been done in this function.
89 The value is the pseudoreg containing the value __builtin_saveregs
91 static rtx saveregs_value;
93 /* Similarly for __builtin_apply_args. */
94 static rtx apply_args_value;
96 /* This structure is used by move_by_pieces to describe the move to
108 int explicit_inc_from;
114 static rtx enqueue_insn PROTO((rtx, rtx));
115 static int queued_subexp_p PROTO((rtx));
116 static void init_queue PROTO((void));
117 static void move_by_pieces PROTO((rtx, rtx, int, int));
118 static int move_by_pieces_ninsns PROTO((unsigned int, int));
119 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
120 struct move_by_pieces *));
121 static void group_insns PROTO((rtx));
122 static void store_constructor PROTO((tree, rtx));
123 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
124 enum machine_mode, int, int, int));
125 static tree save_noncopied_parts PROTO((tree, tree));
126 static tree init_noncopied_parts PROTO((tree, tree));
127 static int safe_from_p PROTO((rtx, tree));
128 static int fixed_type_p PROTO((tree));
129 static int get_pointer_alignment PROTO((tree, unsigned));
130 static tree string_constant PROTO((tree, tree *));
131 static tree c_strlen PROTO((tree));
132 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
133 static int apply_args_size PROTO((void));
134 static int apply_result_size PROTO((void));
135 static rtx result_vector PROTO((int, rtx));
136 static rtx expand_builtin_apply_args PROTO((void));
137 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
138 static void expand_builtin_return PROTO((rtx));
139 static rtx expand_increment PROTO((tree, int));
140 static void preexpand_calls PROTO((tree));
141 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
142 static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
143 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
144 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
145 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
146 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
147 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
149 /* Record for each mode whether we can move a register directly to or
150 from an object of that mode in memory. If we can't, we won't try
151 to use that mode directly when accessing a field of that mode. */
153 static char direct_load[NUM_MACHINE_MODES];
154 static char direct_store[NUM_MACHINE_MODES];
156 /* MOVE_RATIO is the number of move instructions that is better than
160 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
163 /* A value of around 6 would minimize code size; infinity would minimize
165 #define MOVE_RATIO 15
169 /* This array records the insn_code of insns to perform block moves. */
170 enum insn_code movstr_optab[NUM_MACHINE_MODES];
172 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
174 #ifndef SLOW_UNALIGNED_ACCESS
175 #define SLOW_UNALIGNED_ACCESS 0
178 /* Register mappings for target machines without register windows. */
179 #ifndef INCOMING_REGNO
180 #define INCOMING_REGNO(OUT) (OUT)
182 #ifndef OUTGOING_REGNO
183 #define OUTGOING_REGNO(IN) (IN)
186 /* This is run once per compilation to set up which modes can be used
187 directly in memory and to initialize the block move optab. */
193 enum machine_mode mode;
194 /* Try indexing by frame ptr and try by stack ptr.
195 It is known that on the Convex the stack ptr isn't a valid index.
196 With luck, one or the other is valid on any machine. */
197 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
198 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
201 insn = emit_insn (gen_rtx (SET, 0, 0));
202 pat = PATTERN (insn);
204 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
205 mode = (enum machine_mode) ((int) mode + 1))
211 direct_load[(int) mode] = direct_store[(int) mode] = 0;
212 PUT_MODE (mem, mode);
213 PUT_MODE (mem1, mode);
215 /* See if there is some register that can be used in this mode and
216 directly loaded or stored from memory. */
218 if (mode != VOIDmode && mode != BLKmode)
219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
220 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
223 if (! HARD_REGNO_MODE_OK (regno, mode))
226 reg = gen_rtx (REG, mode, regno);
229 SET_DEST (pat) = reg;
230 if (recog (pat, insn, &num_clobbers) >= 0)
231 direct_load[(int) mode] = 1;
233 SET_SRC (pat) = mem1;
234 SET_DEST (pat) = reg;
235 if (recog (pat, insn, &num_clobbers) >= 0)
236 direct_load[(int) mode] = 1;
239 SET_DEST (pat) = mem;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_store[(int) mode] = 1;
244 SET_DEST (pat) = mem1;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_store[(int) mode] = 1;
253 /* This is run at the start of compiling a function. */
260 pending_stack_adjust = 0;
261 inhibit_defer_pop = 0;
262 cleanups_this_call = 0;
264 apply_args_value = 0;
268 /* Save all variables describing the current status into the structure *P.
269 This is used before starting a nested function. */
275 /* Instead of saving the postincrement queue, empty it. */
278 p->pending_stack_adjust = pending_stack_adjust;
279 p->inhibit_defer_pop = inhibit_defer_pop;
280 p->cleanups_this_call = cleanups_this_call;
281 p->saveregs_value = saveregs_value;
282 p->apply_args_value = apply_args_value;
283 p->forced_labels = forced_labels;
285 pending_stack_adjust = 0;
286 inhibit_defer_pop = 0;
287 cleanups_this_call = 0;
289 apply_args_value = 0;
293 /* Restore all variables describing the current status from the structure *P.
294 This is used after a nested function. */
297 restore_expr_status (p)
300 pending_stack_adjust = p->pending_stack_adjust;
301 inhibit_defer_pop = p->inhibit_defer_pop;
302 cleanups_this_call = p->cleanups_this_call;
303 saveregs_value = p->saveregs_value;
304 apply_args_value = p->apply_args_value;
305 forced_labels = p->forced_labels;
308 /* Manage the queue of increment instructions to be output
309 for POSTINCREMENT_EXPR expressions, etc. */
311 static rtx pending_chain;
313 /* Queue up to increment (or change) VAR later. BODY says how:
314 BODY should be the same thing you would pass to emit_insn
315 to increment right away. It will go to emit_insn later on.
317 The value is a QUEUED expression to be used in place of VAR
318 where you want to guarantee the pre-incrementation value of VAR. */
321 enqueue_insn (var, body)
324 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
325 var, NULL_RTX, NULL_RTX, body, pending_chain);
326 return pending_chain;
329 /* Use protect_from_queue to convert a QUEUED expression
330 into something that you can put immediately into an instruction.
331 If the queued incrementation has not happened yet,
332 protect_from_queue returns the variable itself.
333 If the incrementation has happened, protect_from_queue returns a temp
334 that contains a copy of the old value of the variable.
336 Any time an rtx which might possibly be a QUEUED is to be put
337 into an instruction, it must be passed through protect_from_queue first.
338 QUEUED expressions are not meaningful in instructions.
340 Do not pass a value through protect_from_queue and then hold
341 on to it for a while before putting it in an instruction!
342 If the queue is flushed in between, incorrect code will result. */
345 protect_from_queue (x, modify)
349 register RTX_CODE code = GET_CODE (x);
351 #if 0 /* A QUEUED can hang around after the queue is forced out. */
352 /* Shortcut for most common case. */
353 if (pending_chain == 0)
359 /* A special hack for read access to (MEM (QUEUED ...))
360 to facilitate use of autoincrement.
361 Make a copy of the contents of the memory location
362 rather than a copy of the address, but not
363 if the value is of mode BLKmode. */
364 if (code == MEM && GET_MODE (x) != BLKmode
365 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
367 register rtx y = XEXP (x, 0);
368 XEXP (x, 0) = QUEUED_VAR (y);
371 register rtx temp = gen_reg_rtx (GET_MODE (x));
372 emit_insn_before (gen_move_insn (temp, x),
378 /* Otherwise, recursively protect the subexpressions of all
379 the kinds of rtx's that can contain a QUEUED. */
381 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
382 else if (code == PLUS || code == MULT)
384 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
385 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
389 /* If the increment has not happened, use the variable itself. */
390 if (QUEUED_INSN (x) == 0)
391 return QUEUED_VAR (x);
392 /* If the increment has happened and a pre-increment copy exists,
394 if (QUEUED_COPY (x) != 0)
395 return QUEUED_COPY (x);
396 /* The increment has happened but we haven't set up a pre-increment copy.
397 Set one up now, and use it. */
398 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
399 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
401 return QUEUED_COPY (x);
404 /* Return nonzero if X contains a QUEUED expression:
405 if it contains anything that will be altered by a queued increment.
406 We handle only combinations of MEM, PLUS, MINUS and MULT operators
407 since memory addresses generally contain only those. */
413 register enum rtx_code code = GET_CODE (x);
419 return queued_subexp_p (XEXP (x, 0));
423 return queued_subexp_p (XEXP (x, 0))
424 || queued_subexp_p (XEXP (x, 1));
429 /* Perform all the pending incrementations. */
435 while (p = pending_chain)
437 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
438 pending_chain = QUEUED_NEXT (p);
449 /* Copy data from FROM to TO, where the machine modes are not the same.
450 Both modes may be integer, or both may be floating.
451 UNSIGNEDP should be nonzero if FROM is an unsigned type.
452 This causes zero-extension instead of sign-extension. */
455 convert_move (to, from, unsignedp)
456 register rtx to, from;
459 enum machine_mode to_mode = GET_MODE (to);
460 enum machine_mode from_mode = GET_MODE (from);
461 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
462 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
466 /* rtx code for making an equivalent value. */
467 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
469 to = protect_from_queue (to, 1);
470 from = protect_from_queue (from, 0);
472 if (to_real != from_real)
475 /* If FROM is a SUBREG that indicates that we have already done at least
476 the required extension, strip it. We don't handle such SUBREGs as
479 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
480 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
481 >= GET_MODE_SIZE (to_mode))
482 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
483 from = gen_lowpart (to_mode, from), from_mode = to_mode;
485 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
488 if (to_mode == from_mode
489 || (from_mode == VOIDmode && CONSTANT_P (from)))
491 emit_move_insn (to, from);
497 #ifdef HAVE_extendqfhf2
498 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
500 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
504 #ifdef HAVE_extendqfsf2
505 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
507 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
511 #ifdef HAVE_extendqfdf2
512 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
514 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
518 #ifdef HAVE_extendqfxf2
519 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
521 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
525 #ifdef HAVE_extendqftf2
526 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
528 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
533 #ifdef HAVE_extendhfsf2
534 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
536 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
540 #ifdef HAVE_extendhfdf2
541 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
543 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
547 #ifdef HAVE_extendhfxf2
548 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
550 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
554 #ifdef HAVE_extendhftf2
555 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
557 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
562 #ifdef HAVE_extendsfdf2
563 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
565 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
569 #ifdef HAVE_extendsfxf2
570 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
572 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
576 #ifdef HAVE_extendsftf2
577 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
579 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
583 #ifdef HAVE_extenddfxf2
584 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
586 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
590 #ifdef HAVE_extenddftf2
591 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
593 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
598 #ifdef HAVE_trunchfqf2
599 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
601 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
605 #ifdef HAVE_truncsfqf2
606 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
608 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
612 #ifdef HAVE_truncdfqf2
613 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
615 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
619 #ifdef HAVE_truncxfqf2
620 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
622 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
626 #ifdef HAVE_trunctfqf2
627 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
629 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncsfhf2
634 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
636 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
640 #ifdef HAVE_truncdfhf2
641 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
643 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxfhf2
648 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
650 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctfhf2
655 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
657 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
661 #ifdef HAVE_truncdfsf2
662 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
664 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncxfsf2
669 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
671 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
675 #ifdef HAVE_trunctfsf2
676 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
678 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
682 #ifdef HAVE_truncxfdf2
683 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
685 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
689 #ifdef HAVE_trunctfdf2
690 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
692 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
704 libcall = extendsfdf2_libfunc;
708 libcall = extendsfxf2_libfunc;
712 libcall = extendsftf2_libfunc;
721 libcall = truncdfsf2_libfunc;
725 libcall = extenddfxf2_libfunc;
729 libcall = extenddftf2_libfunc;
738 libcall = truncxfsf2_libfunc;
742 libcall = truncxfdf2_libfunc;
751 libcall = trunctfsf2_libfunc;
755 libcall = trunctfdf2_libfunc;
761 if (libcall == (rtx) 0)
762 /* This conversion is not implemented yet. */
765 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
766 emit_move_insn (to, hard_libcall_value (to_mode));
770 /* Now both modes are integers. */
772 /* Handle expanding beyond a word. */
773 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
774 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
781 enum machine_mode lowpart_mode;
782 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
784 /* Try converting directly if the insn is supported. */
785 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
788 /* If FROM is a SUBREG, put it into a register. Do this
789 so that we always generate the same set of insns for
790 better cse'ing; if an intermediate assignment occurred,
791 we won't be doing the operation directly on the SUBREG. */
792 if (optimize > 0 && GET_CODE (from) == SUBREG)
793 from = force_reg (from_mode, from);
794 emit_unop_insn (code, to, from, equiv_code);
797 /* Next, try converting via full word. */
798 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
799 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
800 != CODE_FOR_nothing))
802 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
803 emit_unop_insn (code, to,
804 gen_lowpart (word_mode, to), equiv_code);
808 /* No special multiword conversion insn; do it by hand. */
811 /* Get a copy of FROM widened to a word, if necessary. */
812 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
813 lowpart_mode = word_mode;
815 lowpart_mode = from_mode;
817 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
819 lowpart = gen_lowpart (lowpart_mode, to);
820 emit_move_insn (lowpart, lowfrom);
822 /* Compute the value to put in each remaining word. */
824 fill_value = const0_rtx;
829 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
830 && STORE_FLAG_VALUE == -1)
832 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
834 fill_value = gen_reg_rtx (word_mode);
835 emit_insn (gen_slt (fill_value));
841 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
842 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
844 fill_value = convert_to_mode (word_mode, fill_value, 1);
848 /* Fill the remaining words. */
849 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
851 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
852 rtx subword = operand_subword (to, index, 1, to_mode);
857 if (fill_value != subword)
858 emit_move_insn (subword, fill_value);
861 insns = get_insns ();
864 emit_no_conflict_block (insns, to, from, NULL_RTX,
865 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
869 /* Truncating multi-word to a word or less. */
870 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
871 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
873 convert_move (to, gen_lowpart (word_mode, from), 0);
877 /* Handle pointer conversion */ /* SPEE 900220 */
878 if (to_mode == PSImode)
880 if (from_mode != SImode)
881 from = convert_to_mode (SImode, from, unsignedp);
883 #ifdef HAVE_truncsipsi
886 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
889 #endif /* HAVE_truncsipsi */
893 if (from_mode == PSImode)
895 if (to_mode != SImode)
897 from = convert_to_mode (SImode, from, unsignedp);
902 #ifdef HAVE_extendpsisi
903 if (HAVE_extendpsisi)
905 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
908 #endif /* HAVE_extendpsisi */
913 /* Now follow all the conversions between integers
914 no more than a word long. */
916 /* For truncation, usually we can just refer to FROM in a narrower mode. */
917 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
918 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
919 GET_MODE_BITSIZE (from_mode)))
921 if (!((GET_CODE (from) == MEM
922 && ! MEM_VOLATILE_P (from)
923 && direct_load[(int) to_mode]
924 && ! mode_dependent_address_p (XEXP (from, 0)))
925 || GET_CODE (from) == REG
926 || GET_CODE (from) == SUBREG))
927 from = force_reg (from_mode, from);
928 emit_move_insn (to, gen_lowpart (to_mode, from));
932 /* Handle extension. */
933 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
935 /* Convert directly if that works. */
936 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
939 /* If FROM is a SUBREG, put it into a register. Do this
940 so that we always generate the same set of insns for
941 better cse'ing; if an intermediate assignment occurred,
942 we won't be doing the operation directly on the SUBREG. */
943 if (optimize > 0 && GET_CODE (from) == SUBREG)
944 from = force_reg (from_mode, from);
945 emit_unop_insn (code, to, from, equiv_code);
950 enum machine_mode intermediate;
952 /* Search for a mode to convert via. */
953 for (intermediate = from_mode; intermediate != VOIDmode;
954 intermediate = GET_MODE_WIDER_MODE (intermediate))
955 if ((can_extend_p (to_mode, intermediate, unsignedp)
957 && (can_extend_p (intermediate, from_mode, unsignedp)
958 != CODE_FOR_nothing))
960 convert_move (to, convert_to_mode (intermediate, from,
961 unsignedp), unsignedp);
965 /* No suitable intermediate mode. */
970 /* Support special truncate insns for certain modes. */
972 if (from_mode == DImode && to_mode == SImode)
974 #ifdef HAVE_truncdisi2
977 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
981 convert_move (to, force_reg (from_mode, from), unsignedp);
985 if (from_mode == DImode && to_mode == HImode)
987 #ifdef HAVE_truncdihi2
990 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
994 convert_move (to, force_reg (from_mode, from), unsignedp);
998 if (from_mode == DImode && to_mode == QImode)
1000 #ifdef HAVE_truncdiqi2
1001 if (HAVE_truncdiqi2)
1003 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1007 convert_move (to, force_reg (from_mode, from), unsignedp);
1011 if (from_mode == SImode && to_mode == HImode)
1013 #ifdef HAVE_truncsihi2
1014 if (HAVE_truncsihi2)
1016 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1020 convert_move (to, force_reg (from_mode, from), unsignedp);
1024 if (from_mode == SImode && to_mode == QImode)
1026 #ifdef HAVE_truncsiqi2
1027 if (HAVE_truncsiqi2)
1029 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1033 convert_move (to, force_reg (from_mode, from), unsignedp);
1037 if (from_mode == HImode && to_mode == QImode)
1039 #ifdef HAVE_trunchiqi2
1040 if (HAVE_trunchiqi2)
1042 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1046 convert_move (to, force_reg (from_mode, from), unsignedp);
1050 /* Handle truncation of volatile memrefs, and so on;
1051 the things that couldn't be truncated directly,
1052 and for which there was no special instruction. */
1053 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1055 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1056 emit_move_insn (to, temp);
1060 /* Mode combination is not recognized. */
1064 /* Return an rtx for a value that would result
1065 from converting X to mode MODE.
1066 Both X and MODE may be floating, or both integer.
1067 UNSIGNEDP is nonzero if X is an unsigned value.
1068 This can be done by referring to a part of X in place
1069 or by copying to a new temporary with conversion.
1071 This function *must not* call protect_from_queue
1072 except when putting X into an insn (in which case convert_move does it). */
1075 convert_to_mode (mode, x, unsignedp)
1076 enum machine_mode mode;
1082 /* If FROM is a SUBREG that indicates that we have already done at least
1083 the required extension, strip it. */
1085 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1086 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1087 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1088 x = gen_lowpart (mode, x);
1090 if (mode == GET_MODE (x))
1093 /* There is one case that we must handle specially: If we are converting
1094 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1095 we are to interpret the constant as unsigned, gen_lowpart will do
1096 the wrong if the constant appears negative. What we want to do is
1097 make the high-order word of the constant zero, not all ones. */
1099 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1100 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1101 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1102 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1104 /* We can do this with a gen_lowpart if both desired and current modes
1105 are integer, and this is either a constant integer, a register, or a
1106 non-volatile MEM. Except for the constant case, we must be narrowing
1109 if (GET_CODE (x) == CONST_INT
1110 || (GET_MODE_CLASS (mode) == MODE_INT
1111 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1112 && (GET_CODE (x) == CONST_DOUBLE
1113 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1114 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1115 && direct_load[(int) mode]
1116 || GET_CODE (x) == REG)))))
1117 return gen_lowpart (mode, x);
1119 temp = gen_reg_rtx (mode);
1120 convert_move (temp, x, unsignedp);
1124 /* Generate several move instructions to copy LEN bytes
1125 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1126 The caller must pass FROM and TO
1127 through protect_from_queue before calling.
1128 ALIGN (in bytes) is maximum alignment we can assume. */
1131 move_by_pieces (to, from, len, align)
1135 struct move_by_pieces data;
1136 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1137 int max_size = MOVE_MAX + 1;
1140 data.to_addr = to_addr;
1141 data.from_addr = from_addr;
1145 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1146 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1148 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1149 || GET_CODE (from_addr) == POST_INC
1150 || GET_CODE (from_addr) == POST_DEC);
1152 data.explicit_inc_from = 0;
1153 data.explicit_inc_to = 0;
1155 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1156 if (data.reverse) data.offset = len;
1159 /* If copying requires more than two move insns,
1160 copy addresses to registers (to make displacements shorter)
1161 and use post-increment if available. */
1162 if (!(data.autinc_from && data.autinc_to)
1163 && move_by_pieces_ninsns (len, align) > 2)
1165 #ifdef HAVE_PRE_DECREMENT
1166 if (data.reverse && ! data.autinc_from)
1168 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1169 data.autinc_from = 1;
1170 data.explicit_inc_from = -1;
1173 #ifdef HAVE_POST_INCREMENT
1174 if (! data.autinc_from)
1176 data.from_addr = copy_addr_to_reg (from_addr);
1177 data.autinc_from = 1;
1178 data.explicit_inc_from = 1;
1181 if (!data.autinc_from && CONSTANT_P (from_addr))
1182 data.from_addr = copy_addr_to_reg (from_addr);
1183 #ifdef HAVE_PRE_DECREMENT
1184 if (data.reverse && ! data.autinc_to)
1186 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1188 data.explicit_inc_to = -1;
1191 #ifdef HAVE_POST_INCREMENT
1192 if (! data.reverse && ! data.autinc_to)
1194 data.to_addr = copy_addr_to_reg (to_addr);
1196 data.explicit_inc_to = 1;
1199 if (!data.autinc_to && CONSTANT_P (to_addr))
1200 data.to_addr = copy_addr_to_reg (to_addr);
1203 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1204 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1207 /* First move what we can in the largest integer mode, then go to
1208 successively smaller modes. */
1210 while (max_size > 1)
1212 enum machine_mode mode = VOIDmode, tmode;
1213 enum insn_code icode;
1215 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1216 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1217 if (GET_MODE_SIZE (tmode) < max_size)
1220 if (mode == VOIDmode)
1223 icode = mov_optab->handlers[(int) mode].insn_code;
1224 if (icode != CODE_FOR_nothing
1225 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1226 GET_MODE_SIZE (mode)))
1227 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1229 max_size = GET_MODE_SIZE (mode);
1232 /* The code above should have handled everything. */
1237 /* Return number of insns required to move L bytes by pieces.
1238 ALIGN (in bytes) is maximum alignment we can assume. */
1241 move_by_pieces_ninsns (l, align)
1245 register int n_insns = 0;
1246 int max_size = MOVE_MAX + 1;
1248 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1249 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1252 while (max_size > 1)
1254 enum machine_mode mode = VOIDmode, tmode;
1255 enum insn_code icode;
1257 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1258 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1259 if (GET_MODE_SIZE (tmode) < max_size)
1262 if (mode == VOIDmode)
1265 icode = mov_optab->handlers[(int) mode].insn_code;
1266 if (icode != CODE_FOR_nothing
1267 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1268 GET_MODE_SIZE (mode)))
1269 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1271 max_size = GET_MODE_SIZE (mode);
1277 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1278 with move instructions for mode MODE. GENFUN is the gen_... function
1279 to make a move insn for that mode. DATA has all the other info. */
1282 move_by_pieces_1 (genfun, mode, data)
1284 enum machine_mode mode;
1285 struct move_by_pieces *data;
1287 register int size = GET_MODE_SIZE (mode);
1288 register rtx to1, from1;
1290 while (data->len >= size)
1292 if (data->reverse) data->offset -= size;
1294 to1 = (data->autinc_to
1295 ? gen_rtx (MEM, mode, data->to_addr)
1296 : change_address (data->to, mode,
1297 plus_constant (data->to_addr, data->offset)));
1300 ? gen_rtx (MEM, mode, data->from_addr)
1301 : change_address (data->from, mode,
1302 plus_constant (data->from_addr, data->offset)));
1304 #ifdef HAVE_PRE_DECREMENT
1305 if (data->explicit_inc_to < 0)
1306 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1307 if (data->explicit_inc_from < 0)
1308 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1311 emit_insn ((*genfun) (to1, from1));
1312 #ifdef HAVE_POST_INCREMENT
1313 if (data->explicit_inc_to > 0)
1314 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1315 if (data->explicit_inc_from > 0)
1316 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1319 if (! data->reverse) data->offset += size;
1325 /* Emit code to move a block Y to a block X.
1326 This may be done with string-move instructions,
1327 with multiple scalar move instructions, or with a library call.
1329 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1331 SIZE is an rtx that says how long they are.
1332 ALIGN is the maximum alignment we can assume they have,
1333 measured in bytes. */
1336 emit_block_move (x, y, size, align)
1341 if (GET_MODE (x) != BLKmode)
1344 if (GET_MODE (y) != BLKmode)
1347 x = protect_from_queue (x, 1);
1348 y = protect_from_queue (y, 0);
1349 size = protect_from_queue (size, 0);
1351 if (GET_CODE (x) != MEM)
1353 if (GET_CODE (y) != MEM)
1358 if (GET_CODE (size) == CONST_INT
1359 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1360 move_by_pieces (x, y, INTVAL (size), align);
1363 /* Try the most limited insn first, because there's no point
1364 including more than one in the machine description unless
1365 the more limited one has some advantage. */
1367 rtx opalign = GEN_INT (align);
1368 enum machine_mode mode;
1370 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1371 mode = GET_MODE_WIDER_MODE (mode))
1373 enum insn_code code = movstr_optab[(int) mode];
1375 if (code != CODE_FOR_nothing
1376 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1377 here because if SIZE is less than the mode mask, as it is
1378 returned by the macro, it will definitely be less than the
1379 actual mode mask. */
1380 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
1381 && (insn_operand_predicate[(int) code][0] == 0
1382 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1383 && (insn_operand_predicate[(int) code][1] == 0
1384 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1385 && (insn_operand_predicate[(int) code][3] == 0
1386 || (*insn_operand_predicate[(int) code][3]) (opalign,
1390 rtx last = get_last_insn ();
1393 op2 = convert_to_mode (mode, size, 1);
1394 if (insn_operand_predicate[(int) code][2] != 0
1395 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1396 op2 = copy_to_mode_reg (mode, op2);
1398 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1405 delete_insns_since (last);
1409 #ifdef TARGET_MEM_FUNCTIONS
1410 emit_library_call (memcpy_libfunc, 0,
1411 VOIDmode, 3, XEXP (x, 0), Pmode,
1413 convert_to_mode (TYPE_MODE (sizetype), size,
1414 TREE_UNSIGNED (sizetype)),
1415 TYPE_MODE (sizetype));
1417 emit_library_call (bcopy_libfunc, 0,
1418 VOIDmode, 3, XEXP (y, 0), Pmode,
1420 convert_to_mode (TYPE_MODE (sizetype), size,
1421 TREE_UNSIGNED (sizetype)),
1422 TYPE_MODE (sizetype));
1427 /* Copy all or part of a value X into registers starting at REGNO.
1428 The number of registers to be filled is NREGS. */
1431 move_block_to_reg (regno, x, nregs, mode)
1435 enum machine_mode mode;
1440 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1441 x = validize_mem (force_const_mem (mode, x));
1443 /* See if the machine can do this with a load multiple insn. */
1444 #ifdef HAVE_load_multiple
1445 last = get_last_insn ();
1446 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1454 delete_insns_since (last);
1457 for (i = 0; i < nregs; i++)
1458 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1459 operand_subword_force (x, i, mode));
1462 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1463 The number of registers to be filled is NREGS. */
1466 move_block_from_reg (regno, x, nregs)
1474 /* See if the machine can do this with a store multiple insn. */
1475 #ifdef HAVE_store_multiple
1476 last = get_last_insn ();
1477 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1485 delete_insns_since (last);
1488 for (i = 0; i < nregs; i++)
1490 rtx tem = operand_subword (x, i, 1, BLKmode);
1495 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1499 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1502 use_regs (regno, nregs)
1508 for (i = 0; i < nregs; i++)
1509 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1512 /* Mark the instructions since PREV as a libcall block.
1513 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1522 /* Find the instructions to mark */
1524 insn_first = NEXT_INSN (prev);
1526 insn_first = get_insns ();
1528 insn_last = get_last_insn ();
1530 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1531 REG_NOTES (insn_last));
1533 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1534 REG_NOTES (insn_first));
1537 /* Write zeros through the storage of OBJECT.
1538 If OBJECT has BLKmode, SIZE is its length in bytes. */
1541 clear_storage (object, size)
1545 if (GET_MODE (object) == BLKmode)
1547 #ifdef TARGET_MEM_FUNCTIONS
1548 emit_library_call (memset_libfunc, 0,
1550 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1551 GEN_INT (size), Pmode);
1553 emit_library_call (bzero_libfunc, 0,
1555 XEXP (object, 0), Pmode,
1556 GEN_INT (size), Pmode);
1560 emit_move_insn (object, const0_rtx);
1563 /* Generate code to copy Y into X.
1564 Both Y and X must have the same mode, except that
1565 Y can be a constant with VOIDmode.
1566 This mode cannot be BLKmode; use emit_block_move for that.
1568 Return the last instruction emitted. */
1571 emit_move_insn (x, y)
1574 enum machine_mode mode = GET_MODE (x);
1575 enum machine_mode submode;
1576 enum mode_class class = GET_MODE_CLASS (mode);
1579 x = protect_from_queue (x, 1);
1580 y = protect_from_queue (y, 0);
1582 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1585 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1586 y = force_const_mem (mode, y);
1588 /* If X or Y are memory references, verify that their addresses are valid
1590 if (GET_CODE (x) == MEM
1591 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1592 && ! push_operand (x, GET_MODE (x)))
1594 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1595 x = change_address (x, VOIDmode, XEXP (x, 0));
1597 if (GET_CODE (y) == MEM
1598 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1600 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1601 y = change_address (y, VOIDmode, XEXP (y, 0));
1603 if (mode == BLKmode)
1606 return emit_move_insn_1 (x, y);
1609 /* Low level part of emit_move_insn.
1610 Called just like emit_move_insn, but assumes X and Y
1611 are basically valid. */
1614 emit_move_insn_1 (x, y)
1617 enum machine_mode mode = GET_MODE (x);
1618 enum machine_mode submode;
1619 enum mode_class class = GET_MODE_CLASS (mode);
1622 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1623 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1624 (class == MODE_COMPLEX_INT
1625 ? MODE_INT : MODE_FLOAT),
1628 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1630 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1632 /* Expand complex moves by moving real part and imag part, if possible. */
1633 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1634 && submode != BLKmode
1635 && (mov_optab->handlers[(int) submode].insn_code
1636 != CODE_FOR_nothing))
1638 /* Don't split destination if it is a stack push. */
1639 int stack = push_operand (x, GET_MODE (x));
1640 rtx prev = get_last_insn ();
1642 /* Tell flow that the whole of the destination is being set. */
1643 if (GET_CODE (x) == REG)
1644 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1646 /* If this is a stack, push the highpart first, so it
1647 will be in the argument order.
1649 In that case, change_address is used only to convert
1650 the mode, not to change the address. */
1651 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1652 ((stack ? change_address (x, submode, (rtx) 0)
1653 : gen_highpart (submode, x)),
1654 gen_highpart (submode, y)));
1655 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1656 ((stack ? change_address (x, submode, (rtx) 0)
1657 : gen_lowpart (submode, x)),
1658 gen_lowpart (submode, y)));
1662 return get_last_insn ();
1665 /* This will handle any multi-word mode that lacks a move_insn pattern.
1666 However, you will get better code if you define such patterns,
1667 even if they must turn into multiple assembler instructions. */
1668 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1671 rtx prev_insn = get_last_insn ();
1674 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1677 rtx xpart = operand_subword (x, i, 1, mode);
1678 rtx ypart = operand_subword (y, i, 1, mode);
1680 /* If we can't get a part of Y, put Y into memory if it is a
1681 constant. Otherwise, force it into a register. If we still
1682 can't get a part of Y, abort. */
1683 if (ypart == 0 && CONSTANT_P (y))
1685 y = force_const_mem (mode, y);
1686 ypart = operand_subword (y, i, 1, mode);
1688 else if (ypart == 0)
1689 ypart = operand_subword_force (y, i, mode);
1691 if (xpart == 0 || ypart == 0)
1694 last_insn = emit_move_insn (xpart, ypart);
1696 /* Mark these insns as a libcall block. */
1697 group_insns (prev_insn);
1705 /* Pushing data onto the stack. */
1707 /* Push a block of length SIZE (perhaps variable)
1708 and return an rtx to address the beginning of the block.
1709 Note that it is not possible for the value returned to be a QUEUED.
1710 The value may be virtual_outgoing_args_rtx.
1712 EXTRA is the number of bytes of padding to push in addition to SIZE.
1713 BELOW nonzero means this padding comes at low addresses;
1714 otherwise, the padding comes at high addresses. */
1717 push_block (size, extra, below)
1722 if (CONSTANT_P (size))
1723 anti_adjust_stack (plus_constant (size, extra));
1724 else if (GET_CODE (size) == REG && extra == 0)
1725 anti_adjust_stack (size);
1728 rtx temp = copy_to_mode_reg (Pmode, size);
1730 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1731 temp, 0, OPTAB_LIB_WIDEN);
1732 anti_adjust_stack (temp);
1735 #ifdef STACK_GROWS_DOWNWARD
1736 temp = virtual_outgoing_args_rtx;
1737 if (extra != 0 && below)
1738 temp = plus_constant (temp, extra);
1740 if (GET_CODE (size) == CONST_INT)
1741 temp = plus_constant (virtual_outgoing_args_rtx,
1742 - INTVAL (size) - (below ? 0 : extra));
1743 else if (extra != 0 && !below)
1744 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1745 negate_rtx (Pmode, plus_constant (size, extra)));
1747 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1748 negate_rtx (Pmode, size));
1751 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1757 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1760 /* Generate code to push X onto the stack, assuming it has mode MODE and
1762 MODE is redundant except when X is a CONST_INT (since they don't
1764 SIZE is an rtx for the size of data to be copied (in bytes),
1765 needed only if X is BLKmode.
1767 ALIGN (in bytes) is maximum alignment we can assume.
1769 If PARTIAL and REG are both nonzero, then copy that many of the first
1770 words of X into registers starting with REG, and push the rest of X.
1771 The amount of space pushed is decreased by PARTIAL words,
1772 rounded *down* to a multiple of PARM_BOUNDARY.
1773 REG must be a hard register in this case.
1774 If REG is zero but PARTIAL is not, take any all others actions for an
1775 argument partially in registers, but do not actually load any
1778 EXTRA is the amount in bytes of extra space to leave next to this arg.
1779 This is ignored if an argument block has already been allocated.
1781 On a machine that lacks real push insns, ARGS_ADDR is the address of
1782 the bottom of the argument block for this call. We use indexing off there
1783 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1784 argument block has not been preallocated.
1786 ARGS_SO_FAR is the size of args previously pushed for this call. */
1789 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1790 args_addr, args_so_far)
1792 enum machine_mode mode;
1803 enum direction stack_direction
1804 #ifdef STACK_GROWS_DOWNWARD
1810 /* Decide where to pad the argument: `downward' for below,
1811 `upward' for above, or `none' for don't pad it.
1812 Default is below for small data on big-endian machines; else above. */
1813 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1815 /* Invert direction if stack is post-update. */
1816 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1817 if (where_pad != none)
1818 where_pad = (where_pad == downward ? upward : downward);
1820 xinner = x = protect_from_queue (x, 0);
1822 if (mode == BLKmode)
1824 /* Copy a block into the stack, entirely or partially. */
1827 int used = partial * UNITS_PER_WORD;
1828 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1836 /* USED is now the # of bytes we need not copy to the stack
1837 because registers will take care of them. */
1840 xinner = change_address (xinner, BLKmode,
1841 plus_constant (XEXP (xinner, 0), used));
1843 /* If the partial register-part of the arg counts in its stack size,
1844 skip the part of stack space corresponding to the registers.
1845 Otherwise, start copying to the beginning of the stack space,
1846 by setting SKIP to 0. */
1847 #ifndef REG_PARM_STACK_SPACE
1853 #ifdef PUSH_ROUNDING
1854 /* Do it with several push insns if that doesn't take lots of insns
1855 and if there is no difficulty with push insns that skip bytes
1856 on the stack for alignment purposes. */
1858 && GET_CODE (size) == CONST_INT
1860 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1862 /* Here we avoid the case of a structure whose weak alignment
1863 forces many pushes of a small amount of data,
1864 and such small pushes do rounding that causes trouble. */
1865 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1866 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1867 || PUSH_ROUNDING (align) == align)
1868 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1870 /* Push padding now if padding above and stack grows down,
1871 or if padding below and stack grows up.
1872 But if space already allocated, this has already been done. */
1873 if (extra && args_addr == 0
1874 && where_pad != none && where_pad != stack_direction)
1875 anti_adjust_stack (GEN_INT (extra));
1877 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1878 INTVAL (size) - used, align);
1881 #endif /* PUSH_ROUNDING */
1883 /* Otherwise make space on the stack and copy the data
1884 to the address of that space. */
1886 /* Deduct words put into registers from the size we must copy. */
1889 if (GET_CODE (size) == CONST_INT)
1890 size = GEN_INT (INTVAL (size) - used);
1892 size = expand_binop (GET_MODE (size), sub_optab, size,
1893 GEN_INT (used), NULL_RTX, 0,
1897 /* Get the address of the stack space.
1898 In this case, we do not deal with EXTRA separately.
1899 A single stack adjust will do. */
1902 temp = push_block (size, extra, where_pad == downward);
1905 else if (GET_CODE (args_so_far) == CONST_INT)
1906 temp = memory_address (BLKmode,
1907 plus_constant (args_addr,
1908 skip + INTVAL (args_so_far)));
1910 temp = memory_address (BLKmode,
1911 plus_constant (gen_rtx (PLUS, Pmode,
1912 args_addr, args_so_far),
1915 /* TEMP is the address of the block. Copy the data there. */
1916 if (GET_CODE (size) == CONST_INT
1917 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1920 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1921 INTVAL (size), align);
1924 /* Try the most limited insn first, because there's no point
1925 including more than one in the machine description unless
1926 the more limited one has some advantage. */
1927 #ifdef HAVE_movstrqi
1929 && GET_CODE (size) == CONST_INT
1930 && ((unsigned) INTVAL (size)
1931 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1933 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1934 xinner, size, GEN_INT (align));
1942 #ifdef HAVE_movstrhi
1944 && GET_CODE (size) == CONST_INT
1945 && ((unsigned) INTVAL (size)
1946 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1948 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1949 xinner, size, GEN_INT (align));
1957 #ifdef HAVE_movstrsi
1960 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1961 xinner, size, GEN_INT (align));
1969 #ifdef HAVE_movstrdi
1972 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1973 xinner, size, GEN_INT (align));
1982 #ifndef ACCUMULATE_OUTGOING_ARGS
1983 /* If the source is referenced relative to the stack pointer,
1984 copy it to another register to stabilize it. We do not need
1985 to do this if we know that we won't be changing sp. */
1987 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1988 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1989 temp = copy_to_reg (temp);
1992 /* Make inhibit_defer_pop nonzero around the library call
1993 to force it to pop the bcopy-arguments right away. */
1995 #ifdef TARGET_MEM_FUNCTIONS
1996 emit_library_call (memcpy_libfunc, 0,
1997 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1998 convert_to_mode (TYPE_MODE (sizetype),
1999 size, TREE_UNSIGNED (sizetype)),
2000 TYPE_MODE (sizetype));
2002 emit_library_call (bcopy_libfunc, 0,
2003 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2004 convert_to_mode (TYPE_MODE (sizetype),
2005 size, TREE_UNSIGNED (sizetype)),
2006 TYPE_MODE (sizetype));
2011 else if (partial > 0)
2013 /* Scalar partly in registers. */
2015 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2018 /* # words of start of argument
2019 that we must make space for but need not store. */
2020 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2021 int args_offset = INTVAL (args_so_far);
2024 /* Push padding now if padding above and stack grows down,
2025 or if padding below and stack grows up.
2026 But if space already allocated, this has already been done. */
2027 if (extra && args_addr == 0
2028 && where_pad != none && where_pad != stack_direction)
2029 anti_adjust_stack (GEN_INT (extra));
2031 /* If we make space by pushing it, we might as well push
2032 the real data. Otherwise, we can leave OFFSET nonzero
2033 and leave the space uninitialized. */
2037 /* Now NOT_STACK gets the number of words that we don't need to
2038 allocate on the stack. */
2039 not_stack = partial - offset;
2041 /* If the partial register-part of the arg counts in its stack size,
2042 skip the part of stack space corresponding to the registers.
2043 Otherwise, start copying to the beginning of the stack space,
2044 by setting SKIP to 0. */
2045 #ifndef REG_PARM_STACK_SPACE
2051 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2052 x = validize_mem (force_const_mem (mode, x));
2054 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2055 SUBREGs of such registers are not allowed. */
2056 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2057 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2058 x = copy_to_reg (x);
2060 /* Loop over all the words allocated on the stack for this arg. */
2061 /* We can do it by words, because any scalar bigger than a word
2062 has a size a multiple of a word. */
2063 #ifndef PUSH_ARGS_REVERSED
2064 for (i = not_stack; i < size; i++)
2066 for (i = size - 1; i >= not_stack; i--)
2068 if (i >= not_stack + offset)
2069 emit_push_insn (operand_subword_force (x, i, mode),
2070 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2072 GEN_INT (args_offset + ((i - not_stack + skip)
2073 * UNITS_PER_WORD)));
2079 /* Push padding now if padding above and stack grows down,
2080 or if padding below and stack grows up.
2081 But if space already allocated, this has already been done. */
2082 if (extra && args_addr == 0
2083 && where_pad != none && where_pad != stack_direction)
2084 anti_adjust_stack (GEN_INT (extra));
2086 #ifdef PUSH_ROUNDING
2088 addr = gen_push_operand ();
2091 if (GET_CODE (args_so_far) == CONST_INT)
2093 = memory_address (mode,
2094 plus_constant (args_addr, INTVAL (args_so_far)));
2096 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2099 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2103 /* If part should go in registers, copy that part
2104 into the appropriate registers. Do this now, at the end,
2105 since mem-to-mem copies above may do function calls. */
2106 if (partial > 0 && reg != 0)
2107 move_block_to_reg (REGNO (reg), x, partial, mode);
2109 if (extra && args_addr == 0 && where_pad == stack_direction)
2110 anti_adjust_stack (GEN_INT (extra));
2113 /* Expand an assignment that stores the value of FROM into TO.
2114 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2115 (This may contain a QUEUED rtx.)
2116 Otherwise, the returned value is not meaningful.
2118 SUGGEST_REG is no longer actually used.
2119 It used to mean, copy the value through a register
2120 and return that register, if that is possible.
2121 But now we do this if WANT_VALUE.
2123 If the value stored is a constant, we return the constant. */
2126 expand_assignment (to, from, want_value, suggest_reg)
2131 register rtx to_rtx = 0;
2134 /* Don't crash if the lhs of the assignment was erroneous. */
2136 if (TREE_CODE (to) == ERROR_MARK)
2137 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2139 /* Assignment of a structure component needs special treatment
2140 if the structure component's rtx is not simply a MEM.
2141 Assignment of an array element at a constant index
2142 has the same problem. */
2144 if (TREE_CODE (to) == COMPONENT_REF
2145 || TREE_CODE (to) == BIT_FIELD_REF
2146 || (TREE_CODE (to) == ARRAY_REF
2147 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2148 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2150 enum machine_mode mode1;
2156 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2157 &mode1, &unsignedp, &volatilep);
2159 /* If we are going to use store_bit_field and extract_bit_field,
2160 make sure to_rtx will be safe for multiple use. */
2162 if (mode1 == VOIDmode && want_value)
2163 tem = stabilize_reference (tem);
2165 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2168 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2170 if (GET_CODE (to_rtx) != MEM)
2172 to_rtx = change_address (to_rtx, VOIDmode,
2173 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2174 force_reg (Pmode, offset_rtx)));
2178 if (GET_CODE (to_rtx) == MEM)
2179 MEM_VOLATILE_P (to_rtx) = 1;
2180 #if 0 /* This was turned off because, when a field is volatile
2181 in an object which is not volatile, the object may be in a register,
2182 and then we would abort over here. */
2188 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2190 /* Spurious cast makes HPUX compiler happy. */
2191 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2194 /* Required alignment of containing datum. */
2195 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2196 int_size_in_bytes (TREE_TYPE (tem)));
2197 preserve_temp_slots (result);
2203 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2204 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2207 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2209 /* Don't move directly into a return register. */
2210 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2212 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2213 emit_move_insn (to_rtx, temp);
2214 preserve_temp_slots (to_rtx);
2219 /* In case we are returning the contents of an object which overlaps
2220 the place the value is being stored, use a safe function when copying
2221 a value through a pointer into a structure value return block. */
2222 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2223 && current_function_returns_struct
2224 && !current_function_returns_pcc_struct)
2226 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2227 rtx size = expr_size (from);
2229 #ifdef TARGET_MEM_FUNCTIONS
2230 emit_library_call (memcpy_libfunc, 0,
2231 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2232 XEXP (from_rtx, 0), Pmode,
2233 convert_to_mode (TYPE_MODE (sizetype),
2234 size, TREE_UNSIGNED (sizetype)),
2235 TYPE_MODE (sizetype));
2237 emit_library_call (bcopy_libfunc, 0,
2238 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2239 XEXP (to_rtx, 0), Pmode,
2240 convert_to_mode (TYPE_MODE (sizetype),
2241 size, TREE_UNSIGNED (sizetype)),
2242 TYPE_MODE (sizetype));
2245 preserve_temp_slots (to_rtx);
2250 /* Compute FROM and store the value in the rtx we got. */
2252 result = store_expr (from, to_rtx, want_value);
2253 preserve_temp_slots (result);
2258 /* Generate code for computing expression EXP,
2259 and storing the value into TARGET.
2260 Returns TARGET or an equivalent value.
2261 TARGET may contain a QUEUED rtx.
2263 If SUGGEST_REG is nonzero, copy the value through a register
2264 and return that register, if that is possible.
2266 If the value stored is a constant, we return the constant. */
2269 store_expr (exp, target, suggest_reg)
2271 register rtx target;
2275 int dont_return_target = 0;
2277 if (TREE_CODE (exp) == COMPOUND_EXPR)
2279 /* Perform first part of compound expression, then assign from second
2281 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2283 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2285 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2287 /* For conditional expression, get safe form of the target. Then
2288 test the condition, doing the appropriate assignment on either
2289 side. This avoids the creation of unnecessary temporaries.
2290 For non-BLKmode, it is more efficient not to do this. */
2292 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2295 target = protect_from_queue (target, 1);
2298 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2299 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2301 emit_jump_insn (gen_jump (lab2));
2304 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2310 else if (suggest_reg && GET_CODE (target) == MEM
2311 && GET_MODE (target) != BLKmode)
2312 /* If target is in memory and caller wants value in a register instead,
2313 arrange that. Pass TARGET as target for expand_expr so that,
2314 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2315 We know expand_expr will not use the target in that case. */
2317 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2318 GET_MODE (target), 0);
2319 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2320 temp = copy_to_reg (temp);
2321 dont_return_target = 1;
2323 else if (queued_subexp_p (target))
2324 /* If target contains a postincrement, it is not safe
2325 to use as the returned value. It would access the wrong
2326 place by the time the queued increment gets output.
2327 So copy the value through a temporary and use that temp
2330 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2332 /* Expand EXP into a new pseudo. */
2333 temp = gen_reg_rtx (GET_MODE (target));
2334 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2337 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2338 dont_return_target = 1;
2340 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2341 /* If this is an scalar in a register that is stored in a wider mode
2342 than the declared mode, compute the result into its declared mode
2343 and then convert to the wider mode. Our value is the computed
2346 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2347 convert_move (SUBREG_REG (target), temp,
2348 SUBREG_PROMOTED_UNSIGNED_P (target));
2353 temp = expand_expr (exp, target, GET_MODE (target), 0);
2354 /* DO return TARGET if it's a specified hardware register.
2355 expand_return relies on this. */
2356 if (!(target && GET_CODE (target) == REG
2357 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2358 && CONSTANT_P (temp))
2359 dont_return_target = 1;
2362 /* If value was not generated in the target, store it there.
2363 Convert the value to TARGET's type first if nec. */
2365 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2367 target = protect_from_queue (target, 1);
2368 if (GET_MODE (temp) != GET_MODE (target)
2369 && GET_MODE (temp) != VOIDmode)
2371 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2372 if (dont_return_target)
2374 /* In this case, we will return TEMP,
2375 so make sure it has the proper mode.
2376 But don't forget to store the value into TARGET. */
2377 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2378 emit_move_insn (target, temp);
2381 convert_move (target, temp, unsignedp);
2384 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2386 /* Handle copying a string constant into an array.
2387 The string constant may be shorter than the array.
2388 So copy just the string's actual length, and clear the rest. */
2391 /* Get the size of the data type of the string,
2392 which is actually the size of the target. */
2393 size = expr_size (exp);
2394 if (GET_CODE (size) == CONST_INT
2395 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2396 emit_block_move (target, temp, size,
2397 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2400 /* Compute the size of the data to copy from the string. */
2402 = size_binop (MIN_EXPR,
2403 size_binop (CEIL_DIV_EXPR,
2404 TYPE_SIZE (TREE_TYPE (exp)),
2405 size_int (BITS_PER_UNIT)),
2407 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2408 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2412 /* Copy that much. */
2413 emit_block_move (target, temp, copy_size_rtx,
2414 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2416 /* Figure out how much is left in TARGET
2417 that we have to clear. */
2418 if (GET_CODE (copy_size_rtx) == CONST_INT)
2420 temp = plus_constant (XEXP (target, 0),
2421 TREE_STRING_LENGTH (exp));
2422 size = plus_constant (size,
2423 - TREE_STRING_LENGTH (exp));
2427 enum machine_mode size_mode = Pmode;
2429 temp = force_reg (Pmode, XEXP (target, 0));
2430 temp = expand_binop (size_mode, add_optab, temp,
2431 copy_size_rtx, NULL_RTX, 0,
2434 size = expand_binop (size_mode, sub_optab, size,
2435 copy_size_rtx, NULL_RTX, 0,
2438 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2439 GET_MODE (size), 0, 0);
2440 label = gen_label_rtx ();
2441 emit_jump_insn (gen_blt (label));
2444 if (size != const0_rtx)
2446 #ifdef TARGET_MEM_FUNCTIONS
2447 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2448 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2450 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2451 temp, Pmode, size, Pmode);
2458 else if (GET_MODE (temp) == BLKmode)
2459 emit_block_move (target, temp, expr_size (exp),
2460 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2462 emit_move_insn (target, temp);
2464 if (dont_return_target)
2469 /* Store the value of constructor EXP into the rtx TARGET.
2470 TARGET is either a REG or a MEM. */
2473 store_constructor (exp, target)
2477 tree type = TREE_TYPE (exp);
2479 /* We know our target cannot conflict, since safe_from_p has been called. */
2481 /* Don't try copying piece by piece into a hard register
2482 since that is vulnerable to being clobbered by EXP.
2483 Instead, construct in a pseudo register and then copy it all. */
2484 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2486 rtx temp = gen_reg_rtx (GET_MODE (target));
2487 store_constructor (exp, temp);
2488 emit_move_insn (target, temp);
2493 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2497 /* Inform later passes that the whole union value is dead. */
2498 if (TREE_CODE (type) == UNION_TYPE)
2499 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2501 /* If we are building a static constructor into a register,
2502 set the initial value as zero so we can fold the value into
2504 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2505 emit_move_insn (target, const0_rtx);
2507 /* If the constructor has fewer fields than the structure,
2508 clear the whole structure first. */
2509 else if (list_length (CONSTRUCTOR_ELTS (exp))
2510 != list_length (TYPE_FIELDS (type)))
2511 clear_storage (target, int_size_in_bytes (type));
2513 /* Inform later passes that the old value is dead. */
2514 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2516 /* Store each element of the constructor into
2517 the corresponding field of TARGET. */
2519 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2521 register tree field = TREE_PURPOSE (elt);
2522 register enum machine_mode mode;
2527 /* Just ignore missing fields.
2528 We cleared the whole structure, above,
2529 if any fields are missing. */
2533 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2534 unsignedp = TREE_UNSIGNED (field);
2535 mode = DECL_MODE (field);
2536 if (DECL_BIT_FIELD (field))
2539 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2540 /* ??? This case remains to be written. */
2543 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2545 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2546 /* The alignment of TARGET is
2547 at least what its type requires. */
2549 TYPE_ALIGN (type) / BITS_PER_UNIT,
2550 int_size_in_bytes (type));
2553 else if (TREE_CODE (type) == ARRAY_TYPE)
2557 tree domain = TYPE_DOMAIN (type);
2558 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2559 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2560 tree elttype = TREE_TYPE (type);
2562 /* If the constructor has fewer fields than the structure,
2563 clear the whole structure first. Similarly if this this is
2564 static constructor of a non-BLKmode object. */
2566 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2567 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2568 clear_storage (target, int_size_in_bytes (type));
2570 /* Inform later passes that the old value is dead. */
2571 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2573 /* Store each element of the constructor into
2574 the corresponding element of TARGET, determined
2575 by counting the elements. */
2576 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2578 elt = TREE_CHAIN (elt), i++)
2580 register enum machine_mode mode;
2585 mode = TYPE_MODE (elttype);
2586 bitsize = GET_MODE_BITSIZE (mode);
2587 unsignedp = TREE_UNSIGNED (elttype);
2589 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2591 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2592 /* The alignment of TARGET is
2593 at least what its type requires. */
2595 TYPE_ALIGN (type) / BITS_PER_UNIT,
2596 int_size_in_bytes (type));
2604 /* Store the value of EXP (an expression tree)
2605 into a subfield of TARGET which has mode MODE and occupies
2606 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2607 If MODE is VOIDmode, it means that we are storing into a bit-field.
2609 If VALUE_MODE is VOIDmode, return nothing in particular.
2610 UNSIGNEDP is not used in this case.
2612 Otherwise, return an rtx for the value stored. This rtx
2613 has mode VALUE_MODE if that is convenient to do.
2614 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2616 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2617 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2620 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2621 unsignedp, align, total_size)
2623 int bitsize, bitpos;
2624 enum machine_mode mode;
2626 enum machine_mode value_mode;
2631 HOST_WIDE_INT width_mask = 0;
2633 if (bitsize < HOST_BITS_PER_WIDE_INT)
2634 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2636 /* If we are storing into an unaligned field of an aligned union that is
2637 in a register, we may have the mode of TARGET being an integer mode but
2638 MODE == BLKmode. In that case, get an aligned object whose size and
2639 alignment are the same as TARGET and store TARGET into it (we can avoid
2640 the store if the field being stored is the entire width of TARGET). Then
2641 call ourselves recursively to store the field into a BLKmode version of
2642 that object. Finally, load from the object into TARGET. This is not
2643 very efficient in general, but should only be slightly more expensive
2644 than the otherwise-required unaligned accesses. Perhaps this can be
2645 cleaned up later. */
2648 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2650 rtx object = assign_stack_temp (GET_MODE (target),
2651 GET_MODE_SIZE (GET_MODE (target)), 0);
2652 rtx blk_object = copy_rtx (object);
2654 PUT_MODE (blk_object, BLKmode);
2656 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2657 emit_move_insn (object, target);
2659 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2662 emit_move_insn (target, object);
2667 /* If the structure is in a register or if the component
2668 is a bit field, we cannot use addressing to access it.
2669 Use bit-field techniques or SUBREG to store in it. */
2671 if (mode == VOIDmode
2672 || (mode != BLKmode && ! direct_store[(int) mode])
2673 || GET_CODE (target) == REG
2674 || GET_CODE (target) == SUBREG)
2676 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2677 /* Store the value in the bitfield. */
2678 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2679 if (value_mode != VOIDmode)
2681 /* The caller wants an rtx for the value. */
2682 /* If possible, avoid refetching from the bitfield itself. */
2684 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2687 enum machine_mode tmode;
2690 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2691 tmode = GET_MODE (temp);
2692 if (tmode == VOIDmode)
2694 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2695 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2696 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2698 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2699 NULL_RTX, value_mode, 0, align,
2706 rtx addr = XEXP (target, 0);
2709 /* If a value is wanted, it must be the lhs;
2710 so make the address stable for multiple use. */
2712 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2713 && ! CONSTANT_ADDRESS_P (addr)
2714 /* A frame-pointer reference is already stable. */
2715 && ! (GET_CODE (addr) == PLUS
2716 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2717 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2718 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2719 addr = copy_to_reg (addr);
2721 /* Now build a reference to just the desired component. */
2723 to_rtx = change_address (target, mode,
2724 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2725 MEM_IN_STRUCT_P (to_rtx) = 1;
2727 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2731 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2732 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2733 ARRAY_REFs and find the ultimate containing object, which we return.
2735 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2736 bit position, and *PUNSIGNEDP to the signedness of the field.
2737 If the position of the field is variable, we store a tree
2738 giving the variable offset (in units) in *POFFSET.
2739 This offset is in addition to the bit position.
2740 If the position is not variable, we store 0 in *POFFSET.
2742 If any of the extraction expressions is volatile,
2743 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2745 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2746 is a mode that can be used to access the field. In that case, *PBITSIZE
2749 If the field describes a variable-sized object, *PMODE is set to
2750 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2751 this case, but the address of the object can be found. */
2754 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2755 punsignedp, pvolatilep)
2760 enum machine_mode *pmode;
2765 enum machine_mode mode = VOIDmode;
2766 tree offset = integer_zero_node;
2768 if (TREE_CODE (exp) == COMPONENT_REF)
2770 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2771 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2772 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2773 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2775 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2777 size_tree = TREE_OPERAND (exp, 1);
2778 *punsignedp = TREE_UNSIGNED (exp);
2782 mode = TYPE_MODE (TREE_TYPE (exp));
2783 *pbitsize = GET_MODE_BITSIZE (mode);
2784 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2789 if (TREE_CODE (size_tree) != INTEGER_CST)
2790 mode = BLKmode, *pbitsize = -1;
2792 *pbitsize = TREE_INT_CST_LOW (size_tree);
2795 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2796 and find the ultimate containing object. */
2802 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2804 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2805 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2806 : TREE_OPERAND (exp, 2));
2808 /* If this field hasn't been filled in yet, don't go
2809 past it. This should only happen when folding expressions
2810 made during type construction. */
2814 if (TREE_CODE (pos) == PLUS_EXPR)
2817 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2819 constant = TREE_OPERAND (pos, 0);
2820 var = TREE_OPERAND (pos, 1);
2822 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2824 constant = TREE_OPERAND (pos, 1);
2825 var = TREE_OPERAND (pos, 0);
2830 *pbitpos += TREE_INT_CST_LOW (constant);
2831 offset = size_binop (PLUS_EXPR, offset,
2832 size_binop (FLOOR_DIV_EXPR, var,
2833 size_int (BITS_PER_UNIT)));
2835 else if (TREE_CODE (pos) == INTEGER_CST)
2836 *pbitpos += TREE_INT_CST_LOW (pos);
2839 /* Assume here that the offset is a multiple of a unit.
2840 If not, there should be an explicitly added constant. */
2841 offset = size_binop (PLUS_EXPR, offset,
2842 size_binop (FLOOR_DIV_EXPR, pos,
2843 size_int (BITS_PER_UNIT)));
2847 else if (TREE_CODE (exp) == ARRAY_REF)
2849 /* This code is based on the code in case ARRAY_REF in expand_expr
2850 below. We assume here that the size of an array element is
2851 always an integral multiple of BITS_PER_UNIT. */
2853 tree index = TREE_OPERAND (exp, 1);
2854 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2856 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2857 tree index_type = TREE_TYPE (index);
2859 if (! integer_zerop (low_bound))
2860 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2862 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2864 index = convert (type_for_size (POINTER_SIZE, 0), index);
2865 index_type = TREE_TYPE (index);
2868 index = fold (build (MULT_EXPR, index_type, index,
2869 TYPE_SIZE (TREE_TYPE (exp))));
2871 if (TREE_CODE (index) == INTEGER_CST
2872 && TREE_INT_CST_HIGH (index) == 0)
2873 *pbitpos += TREE_INT_CST_LOW (index);
2875 offset = size_binop (PLUS_EXPR, offset,
2876 size_binop (FLOOR_DIV_EXPR, index,
2877 size_int (BITS_PER_UNIT)));
2879 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2880 && ! ((TREE_CODE (exp) == NOP_EXPR
2881 || TREE_CODE (exp) == CONVERT_EXPR)
2882 && (TYPE_MODE (TREE_TYPE (exp))
2883 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2886 /* If any reference in the chain is volatile, the effect is volatile. */
2887 if (TREE_THIS_VOLATILE (exp))
2889 exp = TREE_OPERAND (exp, 0);
2892 /* If this was a bit-field, see if there is a mode that allows direct
2893 access in case EXP is in memory. */
2894 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
2896 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2897 if (mode == BLKmode)
2901 if (integer_zerop (offset))
2907 /* We aren't finished fixing the callers to really handle nonzero offset. */
2915 /* Given an rtx VALUE that may contain additions and multiplications,
2916 return an equivalent value that just refers to a register or memory.
2917 This is done by generating instructions to perform the arithmetic
2918 and returning a pseudo-register containing the value.
2920 The returned value may be a REG, SUBREG, MEM or constant. */
2923 force_operand (value, target)
2926 register optab binoptab = 0;
2927 /* Use a temporary to force order of execution of calls to
2931 /* Use subtarget as the target for operand 0 of a binary operation. */
2932 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2934 if (GET_CODE (value) == PLUS)
2935 binoptab = add_optab;
2936 else if (GET_CODE (value) == MINUS)
2937 binoptab = sub_optab;
2938 else if (GET_CODE (value) == MULT)
2940 op2 = XEXP (value, 1);
2941 if (!CONSTANT_P (op2)
2942 && !(GET_CODE (op2) == REG && op2 != subtarget))
2944 tmp = force_operand (XEXP (value, 0), subtarget);
2945 return expand_mult (GET_MODE (value), tmp,
2946 force_operand (op2, NULL_RTX),
2952 op2 = XEXP (value, 1);
2953 if (!CONSTANT_P (op2)
2954 && !(GET_CODE (op2) == REG && op2 != subtarget))
2956 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2958 binoptab = add_optab;
2959 op2 = negate_rtx (GET_MODE (value), op2);
2962 /* Check for an addition with OP2 a constant integer and our first
2963 operand a PLUS of a virtual register and something else. In that
2964 case, we want to emit the sum of the virtual register and the
2965 constant first and then add the other value. This allows virtual
2966 register instantiation to simply modify the constant rather than
2967 creating another one around this addition. */
2968 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2969 && GET_CODE (XEXP (value, 0)) == PLUS
2970 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2971 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2972 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2974 rtx temp = expand_binop (GET_MODE (value), binoptab,
2975 XEXP (XEXP (value, 0), 0), op2,
2976 subtarget, 0, OPTAB_LIB_WIDEN);
2977 return expand_binop (GET_MODE (value), binoptab, temp,
2978 force_operand (XEXP (XEXP (value, 0), 1), 0),
2979 target, 0, OPTAB_LIB_WIDEN);
2982 tmp = force_operand (XEXP (value, 0), subtarget);
2983 return expand_binop (GET_MODE (value), binoptab, tmp,
2984 force_operand (op2, NULL_RTX),
2985 target, 0, OPTAB_LIB_WIDEN);
2986 /* We give UNSIGNEDP = 0 to expand_binop
2987 because the only operations we are expanding here are signed ones. */
2992 /* Subroutine of expand_expr:
2993 save the non-copied parts (LIST) of an expr (LHS), and return a list
2994 which can restore these values to their previous values,
2995 should something modify their storage. */
2998 save_noncopied_parts (lhs, list)
3005 for (tail = list; tail; tail = TREE_CHAIN (tail))
3006 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3007 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3010 tree part = TREE_VALUE (tail);
3011 tree part_type = TREE_TYPE (part);
3012 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3013 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3014 int_size_in_bytes (part_type), 0);
3015 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3016 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3017 parts = tree_cons (to_be_saved,
3018 build (RTL_EXPR, part_type, NULL_TREE,
3021 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3026 /* Subroutine of expand_expr:
3027 record the non-copied parts (LIST) of an expr (LHS), and return a list
3028 which specifies the initial values of these parts. */
3031 init_noncopied_parts (lhs, list)
3038 for (tail = list; tail; tail = TREE_CHAIN (tail))
3039 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3040 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3043 tree part = TREE_VALUE (tail);
3044 tree part_type = TREE_TYPE (part);
3045 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3046 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3051 /* Subroutine of expand_expr: return nonzero iff there is no way that
3052 EXP can reference X, which is being modified. */
3055 safe_from_p (x, exp)
3065 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3066 find the underlying pseudo. */
3067 if (GET_CODE (x) == SUBREG)
3070 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3074 /* If X is a location in the outgoing argument area, it is always safe. */
3075 if (GET_CODE (x) == MEM
3076 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3077 || (GET_CODE (XEXP (x, 0)) == PLUS
3078 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3081 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3084 exp_rtl = DECL_RTL (exp);
3091 if (TREE_CODE (exp) == TREE_LIST)
3092 return ((TREE_VALUE (exp) == 0
3093 || safe_from_p (x, TREE_VALUE (exp)))
3094 && (TREE_CHAIN (exp) == 0
3095 || safe_from_p (x, TREE_CHAIN (exp))));
3100 return safe_from_p (x, TREE_OPERAND (exp, 0));
3104 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3105 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3109 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3110 the expression. If it is set, we conflict iff we are that rtx or
3111 both are in memory. Otherwise, we check all operands of the
3112 expression recursively. */
3114 switch (TREE_CODE (exp))
3117 return staticp (TREE_OPERAND (exp, 0));
3120 if (GET_CODE (x) == MEM)
3125 exp_rtl = CALL_EXPR_RTL (exp);
3128 /* Assume that the call will clobber all hard registers and
3130 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3131 || GET_CODE (x) == MEM)
3138 exp_rtl = RTL_EXPR_RTL (exp);
3140 /* We don't know what this can modify. */
3145 case WITH_CLEANUP_EXPR:
3146 exp_rtl = RTL_EXPR_RTL (exp);
3150 exp_rtl = SAVE_EXPR_RTL (exp);
3154 /* The only operand we look at is operand 1. The rest aren't
3155 part of the expression. */
3156 return safe_from_p (x, TREE_OPERAND (exp, 1));
3158 case METHOD_CALL_EXPR:
3159 /* This takes a rtx argument, but shouldn't appear here. */
3163 /* If we have an rtx, we do not need to scan our operands. */
3167 nops = tree_code_length[(int) TREE_CODE (exp)];
3168 for (i = 0; i < nops; i++)
3169 if (TREE_OPERAND (exp, i) != 0
3170 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3174 /* If we have an rtl, find any enclosed object. Then see if we conflict
3178 if (GET_CODE (exp_rtl) == SUBREG)
3180 exp_rtl = SUBREG_REG (exp_rtl);
3181 if (GET_CODE (exp_rtl) == REG
3182 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3186 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3187 are memory and EXP is not readonly. */
3188 return ! (rtx_equal_p (x, exp_rtl)
3189 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3190 && ! TREE_READONLY (exp)));
3193 /* If we reach here, it is safe. */
3197 /* Subroutine of expand_expr: return nonzero iff EXP is an
3198 expression whose type is statically determinable. */
3204 if (TREE_CODE (exp) == PARM_DECL
3205 || TREE_CODE (exp) == VAR_DECL
3206 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3207 || TREE_CODE (exp) == COMPONENT_REF
3208 || TREE_CODE (exp) == ARRAY_REF)
3213 /* expand_expr: generate code for computing expression EXP.
3214 An rtx for the computed value is returned. The value is never null.
3215 In the case of a void EXP, const0_rtx is returned.
3217 The value may be stored in TARGET if TARGET is nonzero.
3218 TARGET is just a suggestion; callers must assume that
3219 the rtx returned may not be the same as TARGET.
3221 If TARGET is CONST0_RTX, it means that the value will be ignored.
3223 If TMODE is not VOIDmode, it suggests generating the
3224 result in mode TMODE. But this is done only when convenient.
3225 Otherwise, TMODE is ignored and the value generated in its natural mode.
3226 TMODE is just a suggestion; callers must assume that
3227 the rtx returned may not have mode TMODE.
3229 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3230 with a constant address even if that address is not normally legitimate.
3231 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3233 If MODIFIER is EXPAND_SUM then when EXP is an addition
3234 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3235 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3236 products as above, or REG or MEM, or constant.
3237 Ordinarily in such cases we would output mul or add instructions
3238 and then return a pseudo reg containing the sum.
3240 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3241 it also marks a label as absolutely required (it can't be dead).
3242 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3243 This is used for outputting expressions used in initializers. */
3246 expand_expr (exp, target, tmode, modifier)
3249 enum machine_mode tmode;
3250 enum expand_modifier modifier;
3252 register rtx op0, op1, temp;
3253 tree type = TREE_TYPE (exp);
3254 int unsignedp = TREE_UNSIGNED (type);
3255 register enum machine_mode mode = TYPE_MODE (type);
3256 register enum tree_code code = TREE_CODE (exp);
3258 /* Use subtarget as the target for operand 0 of a binary operation. */
3259 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3260 rtx original_target = target;
3261 int ignore = target == const0_rtx;
3264 /* Don't use hard regs as subtargets, because the combiner
3265 can only handle pseudo regs. */
3266 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3268 /* Avoid subtargets inside loops,
3269 since they hide some invariant expressions. */
3270 if (preserve_subexpressions_p ())
3273 if (ignore) target = 0, original_target = 0;
3275 /* If will do cse, generate all results into pseudo registers
3276 since 1) that allows cse to find more things
3277 and 2) otherwise cse could produce an insn the machine
3280 if (! cse_not_expected && mode != BLKmode && target
3281 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3284 /* Ensure we reference a volatile object even if value is ignored. */
3285 if (ignore && TREE_THIS_VOLATILE (exp)
3286 && TREE_CODE (exp) != FUNCTION_DECL
3287 && mode != VOIDmode && mode != BLKmode)
3289 target = gen_reg_rtx (mode);
3290 temp = expand_expr (exp, target, VOIDmode, modifier);
3292 emit_move_insn (target, temp);
3300 tree function = decl_function_context (exp);
3301 /* Handle using a label in a containing function. */
3302 if (function != current_function_decl && function != 0)
3304 struct function *p = find_function_data (function);
3305 /* Allocate in the memory associated with the function
3306 that the label is in. */
3307 push_obstacks (p->function_obstack,
3308 p->function_maybepermanent_obstack);
3310 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3311 label_rtx (exp), p->forced_labels);
3314 else if (modifier == EXPAND_INITIALIZER)
3315 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3316 label_rtx (exp), forced_labels);
3317 temp = gen_rtx (MEM, FUNCTION_MODE,
3318 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3319 if (function != current_function_decl && function != 0)
3320 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3325 if (DECL_RTL (exp) == 0)
3327 error_with_decl (exp, "prior parameter's size depends on `%s'");
3328 return CONST0_RTX (mode);
3334 if (DECL_RTL (exp) == 0)
3336 /* Ensure variable marked as used
3337 even if it doesn't go through a parser. */
3338 TREE_USED (exp) = 1;
3339 /* Handle variables inherited from containing functions. */
3340 context = decl_function_context (exp);
3342 /* We treat inline_function_decl as an alias for the current function
3343 because that is the inline function whose vars, types, etc.
3344 are being merged into the current function.
3345 See expand_inline_function. */
3346 if (context != 0 && context != current_function_decl
3347 && context != inline_function_decl
3348 /* If var is static, we don't need a static chain to access it. */
3349 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3350 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3354 /* Mark as non-local and addressable. */
3355 DECL_NONLOCAL (exp) = 1;
3356 mark_addressable (exp);
3357 if (GET_CODE (DECL_RTL (exp)) != MEM)
3359 addr = XEXP (DECL_RTL (exp), 0);
3360 if (GET_CODE (addr) == MEM)
3361 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3363 addr = fix_lexical_addr (addr, exp);
3364 return change_address (DECL_RTL (exp), mode, addr);
3367 /* This is the case of an array whose size is to be determined
3368 from its initializer, while the initializer is still being parsed.
3370 if (GET_CODE (DECL_RTL (exp)) == MEM
3371 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3372 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3373 XEXP (DECL_RTL (exp), 0));
3374 if (GET_CODE (DECL_RTL (exp)) == MEM
3375 && modifier != EXPAND_CONST_ADDRESS
3376 && modifier != EXPAND_SUM
3377 && modifier != EXPAND_INITIALIZER)
3379 /* DECL_RTL probably contains a constant address.
3380 On RISC machines where a constant address isn't valid,
3381 make some insns to get that address into a register. */
3382 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3384 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3385 return change_address (DECL_RTL (exp), VOIDmode,
3386 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3389 /* If the mode of DECL_RTL does not match that of the decl, it
3390 must be a promoted value. We return a SUBREG of the wanted mode,
3391 but mark it so that we know that it was already extended. */
3393 if (GET_CODE (DECL_RTL (exp)) == REG
3394 && GET_MODE (DECL_RTL (exp)) != mode)
3396 enum machine_mode decl_mode = DECL_MODE (exp);
3398 /* Get the signedness used for this variable. Ensure we get the
3399 same mode we got when the variable was declared. */
3401 PROMOTE_MODE (decl_mode, unsignedp, type);
3403 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3406 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3407 SUBREG_PROMOTED_VAR_P (temp) = 1;
3408 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3412 return DECL_RTL (exp);
3415 return immed_double_const (TREE_INT_CST_LOW (exp),
3416 TREE_INT_CST_HIGH (exp),
3420 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3423 /* If optimized, generate immediate CONST_DOUBLE
3424 which will be turned into memory by reload if necessary.
3426 We used to force a register so that loop.c could see it. But
3427 this does not allow gen_* patterns to perform optimizations with
3428 the constants. It also produces two insns in cases like "x = 1.0;".
3429 On most machines, floating-point constants are not permitted in
3430 many insns, so we'd end up copying it to a register in any case.
3432 Now, we do the copying in expand_binop, if appropriate. */
3433 return immed_real_const (exp);
3437 if (! TREE_CST_RTL (exp))
3438 output_constant_def (exp);
3440 /* TREE_CST_RTL probably contains a constant address.
3441 On RISC machines where a constant address isn't valid,
3442 make some insns to get that address into a register. */
3443 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3444 && modifier != EXPAND_CONST_ADDRESS
3445 && modifier != EXPAND_INITIALIZER
3446 && modifier != EXPAND_SUM
3447 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3448 return change_address (TREE_CST_RTL (exp), VOIDmode,
3449 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3450 return TREE_CST_RTL (exp);
3453 context = decl_function_context (exp);
3454 /* We treat inline_function_decl as an alias for the current function
3455 because that is the inline function whose vars, types, etc.
3456 are being merged into the current function.
3457 See expand_inline_function. */
3458 if (context == current_function_decl || context == inline_function_decl)
3461 /* If this is non-local, handle it. */
3464 temp = SAVE_EXPR_RTL (exp);
3465 if (temp && GET_CODE (temp) == REG)
3467 put_var_into_stack (exp);
3468 temp = SAVE_EXPR_RTL (exp);
3470 if (temp == 0 || GET_CODE (temp) != MEM)
3472 return change_address (temp, mode,
3473 fix_lexical_addr (XEXP (temp, 0), exp));
3475 if (SAVE_EXPR_RTL (exp) == 0)
3477 if (mode == BLKmode)
3479 = assign_stack_temp (mode,
3480 int_size_in_bytes (TREE_TYPE (exp)), 0);
3483 enum machine_mode var_mode = mode;
3485 if (TREE_CODE (type) == INTEGER_TYPE
3486 || TREE_CODE (type) == ENUMERAL_TYPE
3487 || TREE_CODE (type) == BOOLEAN_TYPE
3488 || TREE_CODE (type) == CHAR_TYPE
3489 || TREE_CODE (type) == REAL_TYPE
3490 || TREE_CODE (type) == POINTER_TYPE
3491 || TREE_CODE (type) == OFFSET_TYPE)
3493 PROMOTE_MODE (var_mode, unsignedp, type);
3496 temp = gen_reg_rtx (var_mode);
3499 SAVE_EXPR_RTL (exp) = temp;
3500 if (!optimize && GET_CODE (temp) == REG)
3501 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3504 /* If the mode of TEMP does not match that of the expression, it
3505 must be a promoted value. We pass store_expr a SUBREG of the
3506 wanted mode but mark it so that we know that it was already
3507 extended. Note that `unsignedp' was modified above in
3510 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3512 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3513 SUBREG_PROMOTED_VAR_P (temp) = 1;
3514 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3517 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3520 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3521 must be a promoted value. We return a SUBREG of the wanted mode,
3522 but mark it so that we know that it was already extended. Note
3523 that `unsignedp' was modified above in this case. */
3525 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3526 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3528 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3529 SUBREG_PROMOTED_VAR_P (temp) = 1;
3530 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3534 return SAVE_EXPR_RTL (exp);
3537 /* Exit the current loop if the body-expression is true. */
3539 rtx label = gen_label_rtx ();
3540 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3541 expand_exit_loop (NULL_PTR);
3547 expand_start_loop (1);
3548 expand_expr_stmt (TREE_OPERAND (exp, 0));
3555 tree vars = TREE_OPERAND (exp, 0);
3556 int vars_need_expansion = 0;
3558 /* Need to open a binding contour here because
3559 if there are any cleanups they most be contained here. */
3560 expand_start_bindings (0);
3562 /* Mark the corresponding BLOCK for output in its proper place. */
3563 if (TREE_OPERAND (exp, 2) != 0
3564 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3565 insert_block (TREE_OPERAND (exp, 2));
3567 /* If VARS have not yet been expanded, expand them now. */
3570 if (DECL_RTL (vars) == 0)
3572 vars_need_expansion = 1;
3575 expand_decl_init (vars);
3576 vars = TREE_CHAIN (vars);
3579 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3581 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3587 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3589 emit_insns (RTL_EXPR_SEQUENCE (exp));
3590 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3591 return RTL_EXPR_RTL (exp);
3594 /* All elts simple constants => refer to a constant in memory. But
3595 if this is a non-BLKmode mode, let it store a field at a time
3596 since that should make a CONST_INT or CONST_DOUBLE when we
3598 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3600 rtx constructor = output_constant_def (exp);
3601 if (modifier != EXPAND_CONST_ADDRESS
3602 && modifier != EXPAND_INITIALIZER
3603 && modifier != EXPAND_SUM
3604 && !memory_address_p (GET_MODE (constructor),
3605 XEXP (constructor, 0)))
3606 constructor = change_address (constructor, VOIDmode,
3607 XEXP (constructor, 0));
3614 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3615 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3620 if (target == 0 || ! safe_from_p (target, exp))
3622 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3623 target = gen_reg_rtx (mode);
3626 enum tree_code c = TREE_CODE (type);
3628 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3629 if (c == RECORD_TYPE || c == UNION_TYPE
3630 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3631 MEM_IN_STRUCT_P (target) = 1;
3634 store_constructor (exp, target);
3640 tree exp1 = TREE_OPERAND (exp, 0);
3643 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3644 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3645 This code has the same general effect as simply doing
3646 expand_expr on the save expr, except that the expression PTR
3647 is computed for use as a memory address. This means different
3648 code, suitable for indexing, may be generated. */
3649 if (TREE_CODE (exp1) == SAVE_EXPR
3650 && SAVE_EXPR_RTL (exp1) == 0
3651 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3652 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3653 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3655 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3656 VOIDmode, EXPAND_SUM);
3657 op0 = memory_address (mode, temp);
3658 op0 = copy_all_regs (op0);
3659 SAVE_EXPR_RTL (exp1) = op0;
3663 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3664 op0 = memory_address (mode, op0);
3667 temp = gen_rtx (MEM, mode, op0);
3668 /* If address was computed by addition,
3669 mark this as an element of an aggregate. */
3670 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3671 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3672 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3673 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3674 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3675 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3676 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3677 || (TREE_CODE (exp1) == ADDR_EXPR
3678 && (exp2 = TREE_OPERAND (exp1, 0))
3679 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3680 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3681 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3682 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
3683 MEM_IN_STRUCT_P (temp) = 1;
3684 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3685 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3686 a location is accessed through a pointer to const does not mean
3687 that the value there can never change. */
3688 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3694 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3698 tree array = TREE_OPERAND (exp, 0);
3699 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3700 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3701 tree index = TREE_OPERAND (exp, 1);
3702 tree index_type = TREE_TYPE (index);
3705 /* Optimize the special-case of a zero lower bound. */
3706 if (! integer_zerop (low_bound))
3707 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3709 if (TREE_CODE (index) != INTEGER_CST
3710 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3712 /* Nonconstant array index or nonconstant element size.
3713 Generate the tree for *(&array+index) and expand that,
3714 except do it in a language-independent way
3715 and don't complain about non-lvalue arrays.
3716 `mark_addressable' should already have been called
3717 for any array for which this case will be reached. */
3719 /* Don't forget the const or volatile flag from the array
3721 tree variant_type = build_type_variant (type,
3722 TREE_READONLY (exp),
3723 TREE_THIS_VOLATILE (exp));
3724 tree array_adr = build1 (ADDR_EXPR,
3725 build_pointer_type (variant_type), array);
3728 /* Convert the integer argument to a type the same size as a
3729 pointer so the multiply won't overflow spuriously. */
3730 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3731 index = convert (type_for_size (POINTER_SIZE, 0), index);
3733 /* Don't think the address has side effects
3734 just because the array does.
3735 (In some cases the address might have side effects,
3736 and we fail to record that fact here. However, it should not
3737 matter, since expand_expr should not care.) */
3738 TREE_SIDE_EFFECTS (array_adr) = 0;
3740 elt = build1 (INDIRECT_REF, type,
3741 fold (build (PLUS_EXPR,
3742 TYPE_POINTER_TO (variant_type),
3744 fold (build (MULT_EXPR,
3745 TYPE_POINTER_TO (variant_type),
3747 size_in_bytes (type))))));
3749 /* Volatility, etc., of new expression is same as old
3751 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3752 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3753 TREE_READONLY (elt) = TREE_READONLY (exp);
3755 return expand_expr (elt, target, tmode, modifier);
3758 /* Fold an expression like: "foo"[2].
3759 This is not done in fold so it won't happen inside &. */
3761 if (TREE_CODE (array) == STRING_CST
3762 && TREE_CODE (index) == INTEGER_CST
3763 && !TREE_INT_CST_HIGH (index)
3764 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3766 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3768 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3769 TREE_TYPE (exp) = integer_type_node;
3770 return expand_expr (exp, target, tmode, modifier);
3772 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3774 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3775 TREE_TYPE (exp) = integer_type_node;
3776 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3778 target, tmode, modifier);
3782 /* If this is a constant index into a constant array,
3783 just get the value from the array. Handle both the cases when
3784 we have an explicit constructor and when our operand is a variable
3785 that was declared const. */
3787 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3789 if (TREE_CODE (index) == INTEGER_CST
3790 && TREE_INT_CST_HIGH (index) == 0)
3792 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3794 i = TREE_INT_CST_LOW (index);
3796 elem = TREE_CHAIN (elem);
3798 return expand_expr (fold (TREE_VALUE (elem)), target,
3803 else if (optimize >= 1
3804 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3805 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3806 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3808 if (TREE_CODE (index) == INTEGER_CST
3809 && TREE_INT_CST_HIGH (index) == 0)
3811 tree init = DECL_INITIAL (array);
3813 i = TREE_INT_CST_LOW (index);
3814 if (TREE_CODE (init) == CONSTRUCTOR)
3816 tree elem = CONSTRUCTOR_ELTS (init);
3819 elem = TREE_CHAIN (elem);
3821 return expand_expr (fold (TREE_VALUE (elem)), target,
3824 else if (TREE_CODE (init) == STRING_CST
3825 && i < TREE_STRING_LENGTH (init))
3827 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3828 return convert_to_mode (mode, temp, 0);
3834 /* Treat array-ref with constant index as a component-ref. */
3838 /* If the operand is a CONSTRUCTOR, we can just extract the
3839 appropriate field if it is present. */
3840 if (code != ARRAY_REF
3841 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3845 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3846 elt = TREE_CHAIN (elt))
3847 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3848 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3852 enum machine_mode mode1;
3857 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3858 &mode1, &unsignedp, &volatilep);
3860 /* If we got back the original object, something is wrong. Perhaps
3861 we are evaluating an expression too early. In any event, don't
3862 infinitely recurse. */
3866 /* In some cases, we will be offsetting OP0's address by a constant.
3867 So get it as a sum, if possible. If we will be using it
3868 directly in an insn, we validate it. */
3869 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3871 /* If this is a constant, put it into a register if it is a
3872 legitimate constant and memory if it isn't. */
3873 if (CONSTANT_P (op0))
3875 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3876 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3877 op0 = force_reg (mode, op0);
3879 op0 = validize_mem (force_const_mem (mode, op0));
3884 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3886 if (GET_CODE (op0) != MEM)
3888 op0 = change_address (op0, VOIDmode,
3889 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3890 force_reg (Pmode, offset_rtx)));
3893 /* Don't forget about volatility even if this is a bitfield. */
3894 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3896 op0 = copy_rtx (op0);
3897 MEM_VOLATILE_P (op0) = 1;
3900 if (mode1 == VOIDmode
3901 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3902 && modifier != EXPAND_CONST_ADDRESS
3903 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3904 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3906 /* In cases where an aligned union has an unaligned object
3907 as a field, we might be extracting a BLKmode value from
3908 an integer-mode (e.g., SImode) object. Handle this case
3909 by doing the extract into an object as wide as the field
3910 (which we know to be the width of a basic mode), then
3911 storing into memory, and changing the mode to BLKmode. */
3912 enum machine_mode ext_mode = mode;
3914 if (ext_mode == BLKmode)
3915 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3917 if (ext_mode == BLKmode)
3920 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3921 unsignedp, target, ext_mode, ext_mode,
3922 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3923 int_size_in_bytes (TREE_TYPE (tem)));
3924 if (mode == BLKmode)
3926 rtx new = assign_stack_temp (ext_mode,
3927 bitsize / BITS_PER_UNIT, 0);
3929 emit_move_insn (new, op0);
3930 op0 = copy_rtx (new);
3931 PUT_MODE (op0, BLKmode);
3937 /* Get a reference to just this component. */
3938 if (modifier == EXPAND_CONST_ADDRESS
3939 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3940 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3941 (bitpos / BITS_PER_UNIT)));
3943 op0 = change_address (op0, mode1,
3944 plus_constant (XEXP (op0, 0),
3945 (bitpos / BITS_PER_UNIT)));
3946 MEM_IN_STRUCT_P (op0) = 1;
3947 MEM_VOLATILE_P (op0) |= volatilep;
3948 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3951 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3952 convert_move (target, op0, unsignedp);
3958 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
3959 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3960 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3961 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3962 MEM_IN_STRUCT_P (temp) = 1;
3963 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3964 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3965 a location is accessed through a pointer to const does not mean
3966 that the value there can never change. */
3967 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3972 /* Intended for a reference to a buffer of a file-object in Pascal.
3973 But it's not certain that a special tree code will really be
3974 necessary for these. INDIRECT_REF might work for them. */
3978 /* IN_EXPR: Inlined pascal set IN expression.
3981 rlo = set_low - (set_low%bits_per_word);
3982 the_word = set [ (index - rlo)/bits_per_word ];
3983 bit_index = index % bits_per_word;
3984 bitmask = 1 << bit_index;
3985 return !!(the_word & bitmask); */
3987 preexpand_calls (exp);
3989 tree set = TREE_OPERAND (exp, 0);
3990 tree index = TREE_OPERAND (exp, 1);
3991 tree set_type = TREE_TYPE (set);
3993 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3994 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4000 rtx diff, quo, rem, addr, bit, result;
4001 rtx setval, setaddr;
4002 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4005 target = gen_reg_rtx (mode);
4007 /* If domain is empty, answer is no. */
4008 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4011 index_val = expand_expr (index, 0, VOIDmode, 0);
4012 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4013 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4014 setval = expand_expr (set, 0, VOIDmode, 0);
4015 setaddr = XEXP (setval, 0);
4017 /* Compare index against bounds, if they are constant. */
4018 if (GET_CODE (index_val) == CONST_INT
4019 && GET_CODE (lo_r) == CONST_INT
4020 && INTVAL (index_val) < INTVAL (lo_r))
4023 if (GET_CODE (index_val) == CONST_INT
4024 && GET_CODE (hi_r) == CONST_INT
4025 && INTVAL (hi_r) < INTVAL (index_val))
4028 /* If we get here, we have to generate the code for both cases
4029 (in range and out of range). */
4031 op0 = gen_label_rtx ();
4032 op1 = gen_label_rtx ();
4034 if (! (GET_CODE (index_val) == CONST_INT
4035 && GET_CODE (lo_r) == CONST_INT))
4037 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4038 GET_MODE (index_val), 0, 0);
4039 emit_jump_insn (gen_blt (op1));
4042 if (! (GET_CODE (index_val) == CONST_INT
4043 && GET_CODE (hi_r) == CONST_INT))
4045 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4046 GET_MODE (index_val), 0, 0);
4047 emit_jump_insn (gen_bgt (op1));
4050 /* Calculate the element number of bit zero in the first word
4052 if (GET_CODE (lo_r) == CONST_INT)
4053 rlow = GEN_INT (INTVAL (lo_r)
4054 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4056 rlow = expand_binop (index_mode, and_optab, lo_r,
4057 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4058 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4060 diff = expand_binop (index_mode, sub_optab,
4061 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4063 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4064 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4065 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4066 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4067 addr = memory_address (byte_mode,
4068 expand_binop (index_mode, add_optab,
4069 diff, setaddr, NULL_RTX, 0,
4071 /* Extract the bit we want to examine */
4072 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4073 gen_rtx (MEM, byte_mode, addr),
4074 make_tree (TREE_TYPE (index), rem),
4076 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4077 GET_MODE (target) == byte_mode ? target : 0,
4078 1, OPTAB_LIB_WIDEN);
4080 if (result != target)
4081 convert_move (target, result, 1);
4083 /* Output the code to handle the out-of-range case. */
4086 emit_move_insn (target, const0_rtx);
4091 case WITH_CLEANUP_EXPR:
4092 if (RTL_EXPR_RTL (exp) == 0)
4095 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4097 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4098 /* That's it for this cleanup. */
4099 TREE_OPERAND (exp, 2) = 0;
4101 return RTL_EXPR_RTL (exp);
4104 /* Check for a built-in function. */
4105 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4106 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4107 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4108 return expand_builtin (exp, target, subtarget, tmode, ignore);
4109 /* If this call was expanded already by preexpand_calls,
4110 just return the result we got. */
4111 if (CALL_EXPR_RTL (exp) != 0)
4112 return CALL_EXPR_RTL (exp);
4113 return expand_call (exp, target, ignore);
4115 case NON_LVALUE_EXPR:
4118 case REFERENCE_EXPR:
4119 if (TREE_CODE (type) == VOID_TYPE || ignore)
4121 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4124 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4125 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4126 if (TREE_CODE (type) == UNION_TYPE)
4128 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4131 if (mode == BLKmode)
4133 if (TYPE_SIZE (type) == 0
4134 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4136 target = assign_stack_temp (BLKmode,
4137 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4138 + BITS_PER_UNIT - 1)
4139 / BITS_PER_UNIT, 0);
4142 target = gen_reg_rtx (mode);
4144 if (GET_CODE (target) == MEM)
4145 /* Store data into beginning of memory target. */
4146 store_expr (TREE_OPERAND (exp, 0),
4147 change_address (target, TYPE_MODE (valtype), 0), 0);
4149 else if (GET_CODE (target) == REG)
4150 /* Store this field into a union of the proper type. */
4151 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4152 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4154 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4158 /* Return the entire union. */
4161 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4162 if (GET_MODE (op0) == mode)
4164 /* If arg is a constant integer being extended from a narrower mode,
4165 we must really truncate to get the extended bits right. Otherwise
4166 (unsigned long) (unsigned char) ("\377"[0])
4167 would come out as ffffffff. */
4168 if (GET_MODE (op0) == VOIDmode
4169 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4170 < GET_MODE_BITSIZE (mode)))
4172 /* MODE must be narrower than HOST_BITS_PER_INT. */
4173 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4175 if (width < HOST_BITS_PER_WIDE_INT)
4177 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4178 : CONST_DOUBLE_LOW (op0));
4179 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4180 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4181 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4183 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4185 op0 = GEN_INT (val);
4189 op0 = (simplify_unary_operation
4190 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4191 ? ZERO_EXTEND : SIGN_EXTEND),
4193 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4198 if (GET_MODE (op0) == VOIDmode)
4200 if (modifier == EXPAND_INITIALIZER)
4201 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4202 if (flag_force_mem && GET_CODE (op0) == MEM)
4203 op0 = copy_to_reg (op0);
4206 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4208 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4212 /* We come here from MINUS_EXPR when the second operand is a constant. */
4214 this_optab = add_optab;
4216 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4217 something else, make sure we add the register to the constant and
4218 then to the other thing. This case can occur during strength
4219 reduction and doing it this way will produce better code if the
4220 frame pointer or argument pointer is eliminated.
4222 fold-const.c will ensure that the constant is always in the inner
4223 PLUS_EXPR, so the only case we need to do anything about is if
4224 sp, ap, or fp is our second argument, in which case we must swap
4225 the innermost first argument and our second argument. */
4227 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4228 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4229 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4230 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4231 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4232 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4234 tree t = TREE_OPERAND (exp, 1);
4236 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4237 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4240 /* If the result is to be Pmode and we are adding an integer to
4241 something, we might be forming a constant. So try to use
4242 plus_constant. If it produces a sum and we can't accept it,
4243 use force_operand. This allows P = &ARR[const] to generate
4244 efficient code on machines where a SYMBOL_REF is not a valid
4247 If this is an EXPAND_SUM call, always return the sum. */
4248 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4249 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4250 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4253 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4255 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4256 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4257 op1 = force_operand (op1, target);
4261 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4262 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4263 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4266 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4268 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4269 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4270 op0 = force_operand (op0, target);
4274 /* No sense saving up arithmetic to be done
4275 if it's all in the wrong mode to form part of an address.
4276 And force_operand won't know whether to sign-extend or
4278 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4279 || mode != Pmode) goto binop;
4281 preexpand_calls (exp);
4282 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4285 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4286 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4288 /* Make sure any term that's a sum with a constant comes last. */
4289 if (GET_CODE (op0) == PLUS
4290 && CONSTANT_P (XEXP (op0, 1)))
4296 /* If adding to a sum including a constant,
4297 associate it to put the constant outside. */
4298 if (GET_CODE (op1) == PLUS
4299 && CONSTANT_P (XEXP (op1, 1)))
4301 rtx constant_term = const0_rtx;
4303 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4306 /* Ensure that MULT comes first if there is one. */
4307 else if (GET_CODE (op0) == MULT)
4308 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4310 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4312 /* Let's also eliminate constants from op0 if possible. */
4313 op0 = eliminate_constant_term (op0, &constant_term);
4315 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4316 their sum should be a constant. Form it into OP1, since the
4317 result we want will then be OP0 + OP1. */
4319 temp = simplify_binary_operation (PLUS, mode, constant_term,
4324 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4327 /* Put a constant term last and put a multiplication first. */
4328 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4329 temp = op1, op1 = op0, op0 = temp;
4331 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4332 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4335 /* Handle difference of two symbolic constants,
4336 for the sake of an initializer. */
4337 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4338 && really_constant_p (TREE_OPERAND (exp, 0))
4339 && really_constant_p (TREE_OPERAND (exp, 1)))
4341 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4342 VOIDmode, modifier);
4343 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4344 VOIDmode, modifier);
4345 return gen_rtx (MINUS, mode, op0, op1);
4347 /* Convert A - const to A + (-const). */
4348 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4350 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4351 fold (build1 (NEGATE_EXPR, type,
4352 TREE_OPERAND (exp, 1))));
4355 this_optab = sub_optab;
4359 preexpand_calls (exp);
4360 /* If first operand is constant, swap them.
4361 Thus the following special case checks need only
4362 check the second operand. */
4363 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4365 register tree t1 = TREE_OPERAND (exp, 0);
4366 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4367 TREE_OPERAND (exp, 1) = t1;
4370 /* Attempt to return something suitable for generating an
4371 indexed address, for machines that support that. */
4373 if (modifier == EXPAND_SUM && mode == Pmode
4374 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4375 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4377 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4379 /* Apply distributive law if OP0 is x+c. */
4380 if (GET_CODE (op0) == PLUS
4381 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4382 return gen_rtx (PLUS, mode,
4383 gen_rtx (MULT, mode, XEXP (op0, 0),
4384 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4385 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4386 * INTVAL (XEXP (op0, 1))));
4388 if (GET_CODE (op0) != REG)
4389 op0 = force_operand (op0, NULL_RTX);
4390 if (GET_CODE (op0) != REG)
4391 op0 = copy_to_mode_reg (mode, op0);
4393 return gen_rtx (MULT, mode, op0,
4394 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4397 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4400 /* Check for multiplying things that have been extended
4401 from a narrower type. If this machine supports multiplying
4402 in that narrower type with a result in the desired type,
4403 do it that way, and avoid the explicit type-conversion. */
4404 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4405 && TREE_CODE (type) == INTEGER_TYPE
4406 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4407 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4408 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4409 && int_fits_type_p (TREE_OPERAND (exp, 1),
4410 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4411 /* Don't use a widening multiply if a shift will do. */
4412 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4413 > HOST_BITS_PER_WIDE_INT)
4414 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4416 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4417 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4419 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4420 /* If both operands are extended, they must either both
4421 be zero-extended or both be sign-extended. */
4422 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4424 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4426 enum machine_mode innermode
4427 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4428 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4429 ? umul_widen_optab : smul_widen_optab);
4430 if (mode == GET_MODE_WIDER_MODE (innermode)
4431 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4433 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4434 NULL_RTX, VOIDmode, 0);
4435 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4436 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4439 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4440 NULL_RTX, VOIDmode, 0);
4444 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4445 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4446 return expand_mult (mode, op0, op1, target, unsignedp);
4448 case TRUNC_DIV_EXPR:
4449 case FLOOR_DIV_EXPR:
4451 case ROUND_DIV_EXPR:
4452 case EXACT_DIV_EXPR:
4453 preexpand_calls (exp);
4454 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4456 /* Possible optimization: compute the dividend with EXPAND_SUM
4457 then if the divisor is constant can optimize the case
4458 where some terms of the dividend have coeffs divisible by it. */
4459 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4460 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4461 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4464 this_optab = flodiv_optab;
4467 case TRUNC_MOD_EXPR:
4468 case FLOOR_MOD_EXPR:
4470 case ROUND_MOD_EXPR:
4471 preexpand_calls (exp);
4472 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4474 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4475 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4476 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4478 case FIX_ROUND_EXPR:
4479 case FIX_FLOOR_EXPR:
4481 abort (); /* Not used for C. */
4483 case FIX_TRUNC_EXPR:
4484 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4486 target = gen_reg_rtx (mode);
4487 expand_fix (target, op0, unsignedp);
4491 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4493 target = gen_reg_rtx (mode);
4494 /* expand_float can't figure out what to do if FROM has VOIDmode.
4495 So give it the correct mode. With -O, cse will optimize this. */
4496 if (GET_MODE (op0) == VOIDmode)
4497 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4499 expand_float (target, op0,
4500 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4504 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4505 temp = expand_unop (mode, neg_optab, op0, target, 0);
4511 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4513 /* Handle complex values specially. */
4515 enum machine_mode opmode
4516 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4518 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4519 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4520 return expand_complex_abs (opmode, op0, target, unsignedp);
4523 /* Unsigned abs is simply the operand. Testing here means we don't
4524 risk generating incorrect code below. */
4525 if (TREE_UNSIGNED (type))
4528 /* First try to do it with a special abs instruction. */
4529 temp = expand_unop (mode, abs_optab, op0, target, 0);
4533 /* If this machine has expensive jumps, we can do integer absolute
4534 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4535 where W is the width of MODE. */
4537 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4539 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4540 size_int (GET_MODE_BITSIZE (mode) - 1),
4543 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4546 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4553 /* If that does not win, use conditional jump and negate. */
4554 target = original_target;
4555 temp = gen_label_rtx ();
4556 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4557 || (GET_CODE (target) == REG
4558 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4559 target = gen_reg_rtx (mode);
4560 emit_move_insn (target, op0);
4561 emit_cmp_insn (target,
4562 expand_expr (convert (type, integer_zero_node),
4563 NULL_RTX, VOIDmode, 0),
4564 GE, NULL_RTX, mode, 0, 0);
4566 emit_jump_insn (gen_bge (temp));
4567 op0 = expand_unop (mode, neg_optab, target, target, 0);
4569 emit_move_insn (target, op0);
4576 target = original_target;
4577 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4578 || (GET_CODE (target) == REG
4579 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4580 target = gen_reg_rtx (mode);
4581 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4582 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4584 /* First try to do it with a special MIN or MAX instruction.
4585 If that does not win, use a conditional jump to select the proper
4587 this_optab = (TREE_UNSIGNED (type)
4588 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4589 : (code == MIN_EXPR ? smin_optab : smax_optab));
4591 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4597 emit_move_insn (target, op0);
4598 op0 = gen_label_rtx ();
4599 /* If this mode is an integer too wide to compare properly,
4600 compare word by word. Rely on cse to optimize constant cases. */
4601 if (GET_MODE_CLASS (mode) == MODE_INT
4602 && !can_compare_p (mode))
4604 if (code == MAX_EXPR)
4605 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
4607 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4608 emit_move_insn (target, op1);
4612 if (code == MAX_EXPR)
4613 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4614 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4615 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4617 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4618 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4619 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4620 if (temp == const0_rtx)
4621 emit_move_insn (target, op1);
4622 else if (temp != const_true_rtx)
4624 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4625 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4628 emit_move_insn (target, op1);
4634 /* ??? Can optimize when the operand of this is a bitwise operation,
4635 by using a different bitwise operation. */
4637 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4638 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4644 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4645 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4650 /* ??? Can optimize bitwise operations with one arg constant.
4651 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4652 and (a bitwise1 b) bitwise2 b (etc)
4653 but that is probably not worth while. */
4655 /* BIT_AND_EXPR is for bitwise anding.
4656 TRUTH_AND_EXPR is for anding two boolean values
4657 when we want in all cases to compute both of them.
4658 In general it is fastest to do TRUTH_AND_EXPR by
4659 computing both operands as actual zero-or-1 values
4660 and then bitwise anding. In cases where there cannot
4661 be any side effects, better code would be made by
4662 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4663 but the question is how to recognize those cases. */
4665 case TRUTH_AND_EXPR:
4667 this_optab = and_optab;
4670 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4673 this_optab = ior_optab;
4676 case TRUTH_XOR_EXPR:
4678 this_optab = xor_optab;
4685 preexpand_calls (exp);
4686 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4688 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4689 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4692 /* Could determine the answer when only additive constants differ.
4693 Also, the addition of one can be handled by changing the condition. */
4700 preexpand_calls (exp);
4701 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4704 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4705 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4707 && GET_CODE (original_target) == REG
4708 && (GET_MODE (original_target)
4709 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4711 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4712 if (temp != original_target)
4713 temp = copy_to_reg (temp);
4714 op1 = gen_label_rtx ();
4715 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4716 GET_MODE (temp), unsignedp, 0);
4717 emit_jump_insn (gen_beq (op1));
4718 emit_move_insn (temp, const1_rtx);
4722 /* If no set-flag instruction, must generate a conditional
4723 store into a temporary variable. Drop through
4724 and handle this like && and ||. */
4726 case TRUTH_ANDIF_EXPR:
4727 case TRUTH_ORIF_EXPR:
4728 if (target == 0 || ! safe_from_p (target, exp)
4729 /* Make sure we don't have a hard reg (such as function's return
4730 value) live across basic blocks, if not optimizing. */
4731 || (!optimize && GET_CODE (target) == REG
4732 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4733 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4734 emit_clr_insn (target);
4735 op1 = gen_label_rtx ();
4736 jumpifnot (exp, op1);
4737 emit_0_to_1_insn (target);
4741 case TRUTH_NOT_EXPR:
4742 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4743 /* The parser is careful to generate TRUTH_NOT_EXPR
4744 only with operands that are always zero or one. */
4745 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4746 target, 1, OPTAB_LIB_WIDEN);
4752 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4754 return expand_expr (TREE_OPERAND (exp, 1),
4755 (ignore ? const0_rtx : target),
4760 /* Note that COND_EXPRs whose type is a structure or union
4761 are required to be constructed to contain assignments of
4762 a temporary variable, so that we can evaluate them here
4763 for side effect only. If type is void, we must do likewise. */
4765 /* If an arm of the branch requires a cleanup,
4766 only that cleanup is performed. */
4769 tree binary_op = 0, unary_op = 0;
4770 tree old_cleanups = cleanups_this_call;
4771 cleanups_this_call = 0;
4773 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4774 convert it to our mode, if necessary. */
4775 if (integer_onep (TREE_OPERAND (exp, 1))
4776 && integer_zerop (TREE_OPERAND (exp, 2))
4777 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4779 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4780 if (GET_MODE (op0) == mode)
4783 target = gen_reg_rtx (mode);
4784 convert_move (target, op0, unsignedp);
4788 /* If we are not to produce a result, we have no target. Otherwise,
4789 if a target was specified use it; it will not be used as an
4790 intermediate target unless it is safe. If no target, use a
4793 if (mode == VOIDmode || ignore)
4795 else if (original_target
4796 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4797 temp = original_target;
4798 else if (mode == BLKmode)
4800 if (TYPE_SIZE (type) == 0
4801 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4803 temp = assign_stack_temp (BLKmode,
4804 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4805 + BITS_PER_UNIT - 1)
4806 / BITS_PER_UNIT, 0);
4809 temp = gen_reg_rtx (mode);
4811 /* Check for X ? A + B : A. If we have this, we can copy
4812 A to the output and conditionally add B. Similarly for unary
4813 operations. Don't do this if X has side-effects because
4814 those side effects might affect A or B and the "?" operation is
4815 a sequence point in ANSI. (We test for side effects later.) */
4817 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4818 && operand_equal_p (TREE_OPERAND (exp, 2),
4819 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4820 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4821 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4822 && operand_equal_p (TREE_OPERAND (exp, 1),
4823 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4824 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4825 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4826 && operand_equal_p (TREE_OPERAND (exp, 2),
4827 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4828 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4829 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4830 && operand_equal_p (TREE_OPERAND (exp, 1),
4831 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4832 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4834 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4835 operation, do this as A + (X != 0). Similarly for other simple
4836 binary operators. */
4837 if (singleton && binary_op
4838 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4839 && (TREE_CODE (binary_op) == PLUS_EXPR
4840 || TREE_CODE (binary_op) == MINUS_EXPR
4841 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4842 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4843 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4844 && integer_onep (TREE_OPERAND (binary_op, 1))
4845 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4848 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4849 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4850 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4851 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4854 /* If we had X ? A : A + 1, do this as A + (X == 0).
4856 We have to invert the truth value here and then put it
4857 back later if do_store_flag fails. We cannot simply copy
4858 TREE_OPERAND (exp, 0) to another variable and modify that
4859 because invert_truthvalue can modify the tree pointed to
4861 if (singleton == TREE_OPERAND (exp, 1))
4862 TREE_OPERAND (exp, 0)
4863 = invert_truthvalue (TREE_OPERAND (exp, 0));
4865 result = do_store_flag (TREE_OPERAND (exp, 0),
4866 (safe_from_p (temp, singleton)
4868 mode, BRANCH_COST <= 1);
4872 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4873 return expand_binop (mode, boptab, op1, result, temp,
4874 unsignedp, OPTAB_LIB_WIDEN);
4876 else if (singleton == TREE_OPERAND (exp, 1))
4877 TREE_OPERAND (exp, 0)
4878 = invert_truthvalue (TREE_OPERAND (exp, 0));
4882 op0 = gen_label_rtx ();
4884 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4888 /* If the target conflicts with the other operand of the
4889 binary op, we can't use it. Also, we can't use the target
4890 if it is a hard register, because evaluating the condition
4891 might clobber it. */
4893 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4894 || (GET_CODE (temp) == REG
4895 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4896 temp = gen_reg_rtx (mode);
4897 store_expr (singleton, temp, 0);
4900 expand_expr (singleton,
4901 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4902 if (cleanups_this_call)
4904 sorry ("aggregate value in COND_EXPR");
4905 cleanups_this_call = 0;
4907 if (singleton == TREE_OPERAND (exp, 1))
4908 jumpif (TREE_OPERAND (exp, 0), op0);
4910 jumpifnot (TREE_OPERAND (exp, 0), op0);
4912 if (binary_op && temp == 0)
4913 /* Just touch the other operand. */
4914 expand_expr (TREE_OPERAND (binary_op, 1),
4915 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4917 store_expr (build (TREE_CODE (binary_op), type,
4918 make_tree (type, temp),
4919 TREE_OPERAND (binary_op, 1)),
4922 store_expr (build1 (TREE_CODE (unary_op), type,
4923 make_tree (type, temp)),
4928 /* This is now done in jump.c and is better done there because it
4929 produces shorter register lifetimes. */
4931 /* Check for both possibilities either constants or variables
4932 in registers (but not the same as the target!). If so, can
4933 save branches by assigning one, branching, and assigning the
4935 else if (temp && GET_MODE (temp) != BLKmode
4936 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4937 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4938 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4939 && DECL_RTL (TREE_OPERAND (exp, 1))
4940 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4941 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4942 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4943 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4944 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4945 && DECL_RTL (TREE_OPERAND (exp, 2))
4946 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4947 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4949 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4950 temp = gen_reg_rtx (mode);
4951 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4952 jumpifnot (TREE_OPERAND (exp, 0), op0);
4953 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4957 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4958 comparison operator. If we have one of these cases, set the
4959 output to A, branch on A (cse will merge these two references),
4960 then set the output to FOO. */
4962 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4963 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4964 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4965 TREE_OPERAND (exp, 1), 0)
4966 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4967 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4969 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4970 temp = gen_reg_rtx (mode);
4971 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4972 jumpif (TREE_OPERAND (exp, 0), op0);
4973 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4977 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4978 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4979 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4980 TREE_OPERAND (exp, 2), 0)
4981 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4982 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4984 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4985 temp = gen_reg_rtx (mode);
4986 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4987 jumpifnot (TREE_OPERAND (exp, 0), op0);
4988 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4993 op1 = gen_label_rtx ();
4994 jumpifnot (TREE_OPERAND (exp, 0), op0);
4996 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4998 expand_expr (TREE_OPERAND (exp, 1),
4999 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5000 if (cleanups_this_call)
5002 sorry ("aggregate value in COND_EXPR");
5003 cleanups_this_call = 0;
5007 emit_jump_insn (gen_jump (op1));
5011 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5013 expand_expr (TREE_OPERAND (exp, 2),
5014 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5017 if (cleanups_this_call)
5019 sorry ("aggregate value in COND_EXPR");
5020 cleanups_this_call = 0;
5026 cleanups_this_call = old_cleanups;
5032 /* Something needs to be initialized, but we didn't know
5033 where that thing was when building the tree. For example,
5034 it could be the return value of a function, or a parameter
5035 to a function which lays down in the stack, or a temporary
5036 variable which must be passed by reference.
5038 We guarantee that the expression will either be constructed
5039 or copied into our original target. */
5041 tree slot = TREE_OPERAND (exp, 0);
5044 if (TREE_CODE (slot) != VAR_DECL)
5049 if (DECL_RTL (slot) != 0)
5051 target = DECL_RTL (slot);
5052 /* If we have already expanded the slot, so don't do
5054 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5059 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5060 /* All temp slots at this level must not conflict. */
5061 preserve_temp_slots (target);
5062 DECL_RTL (slot) = target;
5066 /* I bet this needs to be done, and I bet that it needs to
5067 be above, inside the else clause. The reason is
5068 simple, how else is it going to get cleaned up? (mrs)
5070 The reason is probably did not work before, and was
5071 commented out is because this was re-expanding already
5072 expanded target_exprs (target == 0 and DECL_RTL (slot)
5073 != 0) also cleaning them up many times as well. :-( */
5075 /* Since SLOT is not known to the called function
5076 to belong to its stack frame, we must build an explicit
5077 cleanup. This case occurs when we must build up a reference
5078 to pass the reference as an argument. In this case,
5079 it is very likely that such a reference need not be
5082 if (TREE_OPERAND (exp, 2) == 0)
5083 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5084 if (TREE_OPERAND (exp, 2))
5085 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5086 cleanups_this_call);
5091 /* This case does occur, when expanding a parameter which
5092 needs to be constructed on the stack. The target
5093 is the actual stack address that we want to initialize.
5094 The function we call will perform the cleanup in this case. */
5096 DECL_RTL (slot) = target;
5099 exp1 = TREE_OPERAND (exp, 1);
5100 /* Mark it as expanded. */
5101 TREE_OPERAND (exp, 1) = NULL_TREE;
5103 return expand_expr (exp1, target, tmode, modifier);
5108 tree lhs = TREE_OPERAND (exp, 0);
5109 tree rhs = TREE_OPERAND (exp, 1);
5110 tree noncopied_parts = 0;
5111 tree lhs_type = TREE_TYPE (lhs);
5113 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5114 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5115 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5116 TYPE_NONCOPIED_PARTS (lhs_type));
5117 while (noncopied_parts != 0)
5119 expand_assignment (TREE_VALUE (noncopied_parts),
5120 TREE_PURPOSE (noncopied_parts), 0, 0);
5121 noncopied_parts = TREE_CHAIN (noncopied_parts);
5128 /* If lhs is complex, expand calls in rhs before computing it.
5129 That's so we don't compute a pointer and save it over a call.
5130 If lhs is simple, compute it first so we can give it as a
5131 target if the rhs is just a call. This avoids an extra temp and copy
5132 and that prevents a partial-subsumption which makes bad code.
5133 Actually we could treat component_ref's of vars like vars. */
5135 tree lhs = TREE_OPERAND (exp, 0);
5136 tree rhs = TREE_OPERAND (exp, 1);
5137 tree noncopied_parts = 0;
5138 tree lhs_type = TREE_TYPE (lhs);
5142 if (TREE_CODE (lhs) != VAR_DECL
5143 && TREE_CODE (lhs) != RESULT_DECL
5144 && TREE_CODE (lhs) != PARM_DECL)
5145 preexpand_calls (exp);
5147 /* Check for |= or &= of a bitfield of size one into another bitfield
5148 of size 1. In this case, (unless we need the result of the
5149 assignment) we can do this more efficiently with a
5150 test followed by an assignment, if necessary.
5152 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5153 things change so we do, this code should be enhanced to
5156 && TREE_CODE (lhs) == COMPONENT_REF
5157 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5158 || TREE_CODE (rhs) == BIT_AND_EXPR)
5159 && TREE_OPERAND (rhs, 0) == lhs
5160 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5161 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5162 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5164 rtx label = gen_label_rtx ();
5166 do_jump (TREE_OPERAND (rhs, 1),
5167 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5168 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5169 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5170 (TREE_CODE (rhs) == BIT_IOR_EXPR
5172 : integer_zero_node)),
5174 do_pending_stack_adjust ();
5179 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5180 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5181 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5182 TYPE_NONCOPIED_PARTS (lhs_type));
5184 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5185 while (noncopied_parts != 0)
5187 expand_assignment (TREE_PURPOSE (noncopied_parts),
5188 TREE_VALUE (noncopied_parts), 0, 0);
5189 noncopied_parts = TREE_CHAIN (noncopied_parts);
5194 case PREINCREMENT_EXPR:
5195 case PREDECREMENT_EXPR:
5196 return expand_increment (exp, 0);
5198 case POSTINCREMENT_EXPR:
5199 case POSTDECREMENT_EXPR:
5200 /* Faster to treat as pre-increment if result is not used. */
5201 return expand_increment (exp, ! ignore);
5204 /* Are we taking the address of a nested function? */
5205 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5206 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5208 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5209 op0 = force_operand (op0, target);
5213 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5214 (modifier == EXPAND_INITIALIZER
5215 ? modifier : EXPAND_CONST_ADDRESS));
5217 /* We would like the object in memory. If it is a constant,
5218 we can have it be statically allocated into memory. For
5219 a non-constant (REG or SUBREG), we need to allocate some
5220 memory and store the value into it. */
5222 if (CONSTANT_P (op0))
5223 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5226 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
5228 /* If this object is in a register, it must be not
5230 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5231 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5233 = assign_stack_temp (inner_mode,
5234 int_size_in_bytes (inner_type), 1);
5236 emit_move_insn (memloc, op0);
5240 if (GET_CODE (op0) != MEM)
5243 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5244 return XEXP (op0, 0);
5245 op0 = force_operand (XEXP (op0, 0), target);
5247 if (flag_force_addr && GET_CODE (op0) != REG)
5248 return force_reg (Pmode, op0);
5251 case ENTRY_VALUE_EXPR:
5254 /* COMPLEX type for Extended Pascal & Fortran */
5257 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5261 /* Get the rtx code of the operands. */
5262 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5263 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5266 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5268 prev = get_last_insn ();
5270 /* Tell flow that the whole of the destination is being set. */
5271 if (GET_CODE (target) == REG)
5272 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5274 /* Move the real (op0) and imaginary (op1) parts to their location. */
5275 emit_move_insn (gen_realpart (mode, target), op0);
5276 emit_move_insn (gen_imagpart (mode, target), op1);
5278 /* Complex construction should appear as a single unit. */
5285 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5286 return gen_realpart (mode, op0);
5289 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5290 return gen_imagpart (mode, op0);
5294 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5298 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5301 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5303 prev = get_last_insn ();
5305 /* Tell flow that the whole of the destination is being set. */
5306 if (GET_CODE (target) == REG)
5307 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5309 /* Store the realpart and the negated imagpart to target. */
5310 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5312 imag_t = gen_imagpart (mode, target);
5313 temp = expand_unop (mode, neg_optab,
5314 gen_imagpart (mode, op0), imag_t, 0);
5316 emit_move_insn (imag_t, temp);
5318 /* Conjugate should appear as a single unit */
5325 op0 = CONST0_RTX (tmode);
5331 return (*lang_expand_expr) (exp, target, tmode, modifier);
5334 /* Here to do an ordinary binary operator, generating an instruction
5335 from the optab already placed in `this_optab'. */
5337 preexpand_calls (exp);
5338 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5340 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5341 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5343 temp = expand_binop (mode, this_optab, op0, op1, target,
5344 unsignedp, OPTAB_LIB_WIDEN);
5350 /* Return the alignment in bits of EXP, a pointer valued expression.
5351 But don't return more than MAX_ALIGN no matter what.
5352 The alignment returned is, by default, the alignment of the thing that
5353 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5355 Otherwise, look at the expression to see if we can do better, i.e., if the
5356 expression is actually pointing at an object whose alignment is tighter. */
5359 get_pointer_alignment (exp, max_align)
5363 unsigned align, inner;
5365 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5368 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5369 align = MIN (align, max_align);
5373 switch (TREE_CODE (exp))
5377 case NON_LVALUE_EXPR:
5378 exp = TREE_OPERAND (exp, 0);
5379 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5381 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5382 inner = MIN (inner, max_align);
5383 align = MAX (align, inner);
5387 /* If sum of pointer + int, restrict our maximum alignment to that
5388 imposed by the integer. If not, we can't do any better than
5390 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5393 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5398 exp = TREE_OPERAND (exp, 0);
5402 /* See what we are pointing at and look at its alignment. */
5403 exp = TREE_OPERAND (exp, 0);
5404 if (TREE_CODE (exp) == FUNCTION_DECL)
5405 align = MAX (align, FUNCTION_BOUNDARY);
5406 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5407 align = MAX (align, DECL_ALIGN (exp));
5408 #ifdef CONSTANT_ALIGNMENT
5409 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5410 align = CONSTANT_ALIGNMENT (exp, align);
5412 return MIN (align, max_align);
5420 /* Return the tree node and offset if a given argument corresponds to
5421 a string constant. */
5424 string_constant (arg, ptr_offset)
5430 if (TREE_CODE (arg) == ADDR_EXPR
5431 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5433 *ptr_offset = integer_zero_node;
5434 return TREE_OPERAND (arg, 0);
5436 else if (TREE_CODE (arg) == PLUS_EXPR)
5438 tree arg0 = TREE_OPERAND (arg, 0);
5439 tree arg1 = TREE_OPERAND (arg, 1);
5444 if (TREE_CODE (arg0) == ADDR_EXPR
5445 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5448 return TREE_OPERAND (arg0, 0);
5450 else if (TREE_CODE (arg1) == ADDR_EXPR
5451 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5454 return TREE_OPERAND (arg1, 0);
5461 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5462 way, because it could contain a zero byte in the middle.
5463 TREE_STRING_LENGTH is the size of the character array, not the string.
5465 Unfortunately, string_constant can't access the values of const char
5466 arrays with initializers, so neither can we do so here. */
5476 src = string_constant (src, &offset_node);
5479 max = TREE_STRING_LENGTH (src);
5480 ptr = TREE_STRING_POINTER (src);
5481 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5483 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5484 compute the offset to the following null if we don't know where to
5485 start searching for it. */
5487 for (i = 0; i < max; i++)
5490 /* We don't know the starting offset, but we do know that the string
5491 has no internal zero bytes. We can assume that the offset falls
5492 within the bounds of the string; otherwise, the programmer deserves
5493 what he gets. Subtract the offset from the length of the string,
5495 /* This would perhaps not be valid if we were dealing with named
5496 arrays in addition to literal string constants. */
5497 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5500 /* We have a known offset into the string. Start searching there for
5501 a null character. */
5502 if (offset_node == 0)
5506 /* Did we get a long long offset? If so, punt. */
5507 if (TREE_INT_CST_HIGH (offset_node) != 0)
5509 offset = TREE_INT_CST_LOW (offset_node);
5511 /* If the offset is known to be out of bounds, warn, and call strlen at
5513 if (offset < 0 || offset > max)
5515 warning ("offset outside bounds of constant string");
5518 /* Use strlen to search for the first zero byte. Since any strings
5519 constructed with build_string will have nulls appended, we win even
5520 if we get handed something like (char[4])"abcd".
5522 Since OFFSET is our starting index into the string, no further
5523 calculation is needed. */
5524 return size_int (strlen (ptr + offset));
5527 /* Expand an expression EXP that calls a built-in function,
5528 with result going to TARGET if that's convenient
5529 (and in mode MODE if that's convenient).
5530 SUBTARGET may be used as the target for computing one of EXP's operands.
5531 IGNORE is nonzero if the value is to be ignored. */
5534 expand_builtin (exp, target, subtarget, mode, ignore)
5538 enum machine_mode mode;
5541 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5542 tree arglist = TREE_OPERAND (exp, 1);
5545 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5546 optab builtin_optab;
5548 switch (DECL_FUNCTION_CODE (fndecl))
5553 /* build_function_call changes these into ABS_EXPR. */
5558 case BUILT_IN_FSQRT:
5559 /* If not optimizing, call the library function. */
5564 /* Arg could be wrong type if user redeclared this fcn wrong. */
5565 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5566 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5568 /* Stabilize and compute the argument. */
5569 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5570 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5572 exp = copy_node (exp);
5573 arglist = copy_node (arglist);
5574 TREE_OPERAND (exp, 1) = arglist;
5575 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5577 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5579 /* Make a suitable register to place result in. */
5580 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5585 switch (DECL_FUNCTION_CODE (fndecl))
5588 builtin_optab = sin_optab; break;
5590 builtin_optab = cos_optab; break;
5591 case BUILT_IN_FSQRT:
5592 builtin_optab = sqrt_optab; break;
5597 /* Compute into TARGET.
5598 Set TARGET to wherever the result comes back. */
5599 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5600 builtin_optab, op0, target, 0);
5602 /* If we were unable to expand via the builtin, stop the
5603 sequence (without outputting the insns) and break, causing
5604 a call the the library function. */
5611 /* Check the results by default. But if flag_fast_math is turned on,
5612 then assume sqrt will always be called with valid arguments. */
5614 if (! flag_fast_math)
5616 /* Don't define the builtin FP instructions
5617 if your machine is not IEEE. */
5618 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5621 lab1 = gen_label_rtx ();
5623 /* Test the result; if it is NaN, set errno=EDOM because
5624 the argument was not in the domain. */
5625 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5626 emit_jump_insn (gen_beq (lab1));
5630 #ifdef GEN_ERRNO_RTX
5631 rtx errno_rtx = GEN_ERRNO_RTX;
5634 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5637 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5640 /* We can't set errno=EDOM directly; let the library call do it.
5641 Pop the arguments right away in case the call gets deleted. */
5643 expand_call (exp, target, 0);
5650 /* Output the entire sequence. */
5651 insns = get_insns ();
5657 /* __builtin_apply_args returns block of memory allocated on
5658 the stack into which is stored the arg pointer, structure
5659 value address, static chain, and all the registers that might
5660 possibly be used in performing a function call. The code is
5661 moved to the start of the function so the incoming values are
5663 case BUILT_IN_APPLY_ARGS:
5664 /* Don't do __builtin_apply_args more than once in a function.
5665 Save the result of the first call and reuse it. */
5666 if (apply_args_value != 0)
5667 return apply_args_value;
5669 /* When this function is called, it means that registers must be
5670 saved on entry to this function. So we migrate the
5671 call to the first insn of this function. */
5676 temp = expand_builtin_apply_args ();
5680 apply_args_value = temp;
5682 /* Put the sequence after the NOTE that starts the function.
5683 If this is inside a SEQUENCE, make the outer-level insn
5684 chain current, so the code is placed at the start of the
5686 push_topmost_sequence ();
5687 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5688 pop_topmost_sequence ();
5692 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5693 FUNCTION with a copy of the parameters described by
5694 ARGUMENTS, and ARGSIZE. It returns a block of memory
5695 allocated on the stack into which is stored all the registers
5696 that might possibly be used for returning the result of a
5697 function. ARGUMENTS is the value returned by
5698 __builtin_apply_args. ARGSIZE is the number of bytes of
5699 arguments that must be copied. ??? How should this value be
5700 computed? We'll also need a safe worst case value for varargs
5702 case BUILT_IN_APPLY:
5704 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5705 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5706 || TREE_CHAIN (arglist) == 0
5707 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5708 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5709 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5717 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5718 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5720 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5723 /* __builtin_return (RESULT) causes the function to return the
5724 value described by RESULT. RESULT is address of the block of
5725 memory returned by __builtin_apply. */
5726 case BUILT_IN_RETURN:
5728 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5729 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5730 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5731 NULL_RTX, VOIDmode, 0));
5734 case BUILT_IN_SAVEREGS:
5735 /* Don't do __builtin_saveregs more than once in a function.
5736 Save the result of the first call and reuse it. */
5737 if (saveregs_value != 0)
5738 return saveregs_value;
5740 /* When this function is called, it means that registers must be
5741 saved on entry to this function. So we migrate the
5742 call to the first insn of this function. */
5745 rtx valreg, saved_valreg;
5747 /* Now really call the function. `expand_call' does not call
5748 expand_builtin, so there is no danger of infinite recursion here. */
5751 #ifdef EXPAND_BUILTIN_SAVEREGS
5752 /* Do whatever the machine needs done in this case. */
5753 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5755 /* The register where the function returns its value
5756 is likely to have something else in it, such as an argument.
5757 So preserve that register around the call. */
5758 if (value_mode != VOIDmode)
5760 valreg = hard_libcall_value (value_mode);
5761 saved_valreg = gen_reg_rtx (value_mode);
5762 emit_move_insn (saved_valreg, valreg);
5765 /* Generate the call, putting the value in a pseudo. */
5766 temp = expand_call (exp, target, ignore);
5768 if (value_mode != VOIDmode)
5769 emit_move_insn (valreg, saved_valreg);
5775 saveregs_value = temp;
5777 /* Put the sequence after the NOTE that starts the function.
5778 If this is inside a SEQUENCE, make the outer-level insn
5779 chain current, so the code is placed at the start of the
5781 push_topmost_sequence ();
5782 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5783 pop_topmost_sequence ();
5787 /* __builtin_args_info (N) returns word N of the arg space info
5788 for the current function. The number and meanings of words
5789 is controlled by the definition of CUMULATIVE_ARGS. */
5790 case BUILT_IN_ARGS_INFO:
5792 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5794 int *word_ptr = (int *) ¤t_function_args_info;
5795 tree type, elts, result;
5797 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5798 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5799 __FILE__, __LINE__);
5803 tree arg = TREE_VALUE (arglist);
5804 if (TREE_CODE (arg) != INTEGER_CST)
5805 error ("argument of `__builtin_args_info' must be constant");
5808 int wordnum = TREE_INT_CST_LOW (arg);
5810 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5811 error ("argument of `__builtin_args_info' out of range");
5813 return GEN_INT (word_ptr[wordnum]);
5817 error ("missing argument in `__builtin_args_info'");
5822 for (i = 0; i < nwords; i++)
5823 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5825 type = build_array_type (integer_type_node,
5826 build_index_type (build_int_2 (nwords, 0)));
5827 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5828 TREE_CONSTANT (result) = 1;
5829 TREE_STATIC (result) = 1;
5830 result = build (INDIRECT_REF, build_pointer_type (type), result);
5831 TREE_CONSTANT (result) = 1;
5832 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5836 /* Return the address of the first anonymous stack arg. */
5837 case BUILT_IN_NEXT_ARG:
5839 tree fntype = TREE_TYPE (current_function_decl);
5840 if (!(TYPE_ARG_TYPES (fntype) != 0
5841 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5842 != void_type_node)))
5844 error ("`va_start' used in function with fixed args");
5849 return expand_binop (Pmode, add_optab,
5850 current_function_internal_arg_pointer,
5851 current_function_arg_offset_rtx,
5852 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5854 case BUILT_IN_CLASSIFY_TYPE:
5857 tree type = TREE_TYPE (TREE_VALUE (arglist));
5858 enum tree_code code = TREE_CODE (type);
5859 if (code == VOID_TYPE)
5860 return GEN_INT (void_type_class);
5861 if (code == INTEGER_TYPE)
5862 return GEN_INT (integer_type_class);
5863 if (code == CHAR_TYPE)
5864 return GEN_INT (char_type_class);
5865 if (code == ENUMERAL_TYPE)
5866 return GEN_INT (enumeral_type_class);
5867 if (code == BOOLEAN_TYPE)
5868 return GEN_INT (boolean_type_class);
5869 if (code == POINTER_TYPE)
5870 return GEN_INT (pointer_type_class);
5871 if (code == REFERENCE_TYPE)
5872 return GEN_INT (reference_type_class);
5873 if (code == OFFSET_TYPE)
5874 return GEN_INT (offset_type_class);
5875 if (code == REAL_TYPE)
5876 return GEN_INT (real_type_class);
5877 if (code == COMPLEX_TYPE)
5878 return GEN_INT (complex_type_class);
5879 if (code == FUNCTION_TYPE)
5880 return GEN_INT (function_type_class);
5881 if (code == METHOD_TYPE)
5882 return GEN_INT (method_type_class);
5883 if (code == RECORD_TYPE)
5884 return GEN_INT (record_type_class);
5885 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
5886 return GEN_INT (union_type_class);
5887 if (code == ARRAY_TYPE)
5888 return GEN_INT (array_type_class);
5889 if (code == STRING_TYPE)
5890 return GEN_INT (string_type_class);
5891 if (code == SET_TYPE)
5892 return GEN_INT (set_type_class);
5893 if (code == FILE_TYPE)
5894 return GEN_INT (file_type_class);
5895 if (code == LANG_TYPE)
5896 return GEN_INT (lang_type_class);
5898 return GEN_INT (no_type_class);
5900 case BUILT_IN_CONSTANT_P:
5904 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5905 ? const1_rtx : const0_rtx);
5907 case BUILT_IN_FRAME_ADDRESS:
5908 /* The argument must be a nonnegative integer constant.
5909 It counts the number of frames to scan up the stack.
5910 The value is the address of that frame. */
5911 case BUILT_IN_RETURN_ADDRESS:
5912 /* The argument must be a nonnegative integer constant.
5913 It counts the number of frames to scan up the stack.
5914 The value is the return address saved in that frame. */
5916 /* Warning about missing arg was already issued. */
5918 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5920 error ("invalid arg to `__builtin_return_address'");
5923 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5925 error ("invalid arg to `__builtin_return_address'");
5930 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5931 rtx tem = frame_pointer_rtx;
5934 /* Some machines need special handling before we can access arbitrary
5935 frames. For example, on the sparc, we must first flush all
5936 register windows to the stack. */
5937 #ifdef SETUP_FRAME_ADDRESSES
5938 SETUP_FRAME_ADDRESSES ();
5941 /* On the sparc, the return address is not in the frame, it is
5942 in a register. There is no way to access it off of the current
5943 frame pointer, but it can be accessed off the previous frame
5944 pointer by reading the value from the register window save
5946 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
5947 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
5951 /* Scan back COUNT frames to the specified frame. */
5952 for (i = 0; i < count; i++)
5954 /* Assume the dynamic chain pointer is in the word that
5955 the frame address points to, unless otherwise specified. */
5956 #ifdef DYNAMIC_CHAIN_ADDRESS
5957 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5959 tem = memory_address (Pmode, tem);
5960 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5963 /* For __builtin_frame_address, return what we've got. */
5964 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5967 /* For __builtin_return_address,
5968 Get the return address from that frame. */
5969 #ifdef RETURN_ADDR_RTX
5970 return RETURN_ADDR_RTX (count, tem);
5972 tem = memory_address (Pmode,
5973 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5974 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5978 case BUILT_IN_ALLOCA:
5980 /* Arg could be non-integer if user redeclared this fcn wrong. */
5981 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5983 current_function_calls_alloca = 1;
5984 /* Compute the argument. */
5985 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5987 /* Allocate the desired space. */
5988 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5990 /* Record the new stack level for nonlocal gotos. */
5991 if (nonlocal_goto_handler_slot != 0)
5992 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5996 /* If not optimizing, call the library function. */
6001 /* Arg could be non-integer if user redeclared this fcn wrong. */
6002 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6005 /* Compute the argument. */
6006 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6007 /* Compute ffs, into TARGET if possible.
6008 Set TARGET to wherever the result comes back. */
6009 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6010 ffs_optab, op0, target, 1);
6015 case BUILT_IN_STRLEN:
6016 /* If not optimizing, call the library function. */
6021 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6022 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6026 tree src = TREE_VALUE (arglist);
6027 tree len = c_strlen (src);
6030 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6032 rtx result, src_rtx, char_rtx;
6033 enum machine_mode insn_mode = value_mode, char_mode;
6034 enum insn_code icode;
6036 /* If the length is known, just return it. */
6038 return expand_expr (len, target, mode, 0);
6040 /* If SRC is not a pointer type, don't do this operation inline. */
6044 /* Call a function if we can't compute strlen in the right mode. */
6046 while (insn_mode != VOIDmode)
6048 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6049 if (icode != CODE_FOR_nothing)
6052 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6054 if (insn_mode == VOIDmode)
6057 /* Make a place to write the result of the instruction. */
6060 && GET_CODE (result) == REG
6061 && GET_MODE (result) == insn_mode
6062 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6063 result = gen_reg_rtx (insn_mode);
6065 /* Make sure the operands are acceptable to the predicates. */
6067 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6068 result = gen_reg_rtx (insn_mode);
6070 src_rtx = memory_address (BLKmode,
6071 expand_expr (src, NULL_RTX, Pmode,
6073 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6074 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6076 char_rtx = const0_rtx;
6077 char_mode = insn_operand_mode[(int)icode][2];
6078 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6079 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6081 emit_insn (GEN_FCN (icode) (result,
6082 gen_rtx (MEM, BLKmode, src_rtx),
6083 char_rtx, GEN_INT (align)));
6085 /* Return the value in the proper mode for this function. */
6086 if (GET_MODE (result) == value_mode)
6088 else if (target != 0)
6090 convert_move (target, result, 0);
6094 return convert_to_mode (value_mode, result, 0);
6097 case BUILT_IN_STRCPY:
6098 /* If not optimizing, call the library function. */
6103 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6104 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6105 || TREE_CHAIN (arglist) == 0
6106 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6110 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6115 len = size_binop (PLUS_EXPR, len, integer_one_node);
6117 chainon (arglist, build_tree_list (NULL_TREE, len));
6121 case BUILT_IN_MEMCPY:
6122 /* If not optimizing, call the library function. */
6127 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6128 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6129 || TREE_CHAIN (arglist) == 0
6130 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6131 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6132 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6136 tree dest = TREE_VALUE (arglist);
6137 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6138 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6141 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6143 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6144 rtx dest_rtx, dest_mem, src_mem;
6146 /* If either SRC or DEST is not a pointer type, don't do
6147 this operation in-line. */
6148 if (src_align == 0 || dest_align == 0)
6150 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6151 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6155 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6156 dest_mem = gen_rtx (MEM, BLKmode,
6157 memory_address (BLKmode, dest_rtx));
6158 src_mem = gen_rtx (MEM, BLKmode,
6159 memory_address (BLKmode,
6160 expand_expr (src, NULL_RTX,
6164 /* Copy word part most expediently. */
6165 emit_block_move (dest_mem, src_mem,
6166 expand_expr (len, NULL_RTX, VOIDmode, 0),
6167 MIN (src_align, dest_align));
6171 /* These comparison functions need an instruction that returns an actual
6172 index. An ordinary compare that just sets the condition codes
6174 #ifdef HAVE_cmpstrsi
6175 case BUILT_IN_STRCMP:
6176 /* If not optimizing, call the library function. */
6181 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6182 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6183 || TREE_CHAIN (arglist) == 0
6184 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6186 else if (!HAVE_cmpstrsi)
6189 tree arg1 = TREE_VALUE (arglist);
6190 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6194 len = c_strlen (arg1);
6196 len = size_binop (PLUS_EXPR, integer_one_node, len);
6197 len2 = c_strlen (arg2);
6199 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6201 /* If we don't have a constant length for the first, use the length
6202 of the second, if we know it. We don't require a constant for
6203 this case; some cost analysis could be done if both are available
6204 but neither is constant. For now, assume they're equally cheap.
6206 If both strings have constant lengths, use the smaller. This
6207 could arise if optimization results in strcpy being called with
6208 two fixed strings, or if the code was machine-generated. We should
6209 add some code to the `memcmp' handler below to deal with such
6210 situations, someday. */
6211 if (!len || TREE_CODE (len) != INTEGER_CST)
6218 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6220 if (tree_int_cst_lt (len2, len))
6224 chainon (arglist, build_tree_list (NULL_TREE, len));
6228 case BUILT_IN_MEMCMP:
6229 /* If not optimizing, call the library function. */
6234 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6235 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6236 || TREE_CHAIN (arglist) == 0
6237 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6238 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6239 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6241 else if (!HAVE_cmpstrsi)
6244 tree arg1 = TREE_VALUE (arglist);
6245 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6246 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6250 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6252 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6253 enum machine_mode insn_mode
6254 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6256 /* If we don't have POINTER_TYPE, call the function. */
6257 if (arg1_align == 0 || arg2_align == 0)
6259 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6260 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6264 /* Make a place to write the result of the instruction. */
6267 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6268 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6269 result = gen_reg_rtx (insn_mode);
6271 emit_insn (gen_cmpstrsi (result,
6272 gen_rtx (MEM, BLKmode,
6273 expand_expr (arg1, NULL_RTX, Pmode,
6275 gen_rtx (MEM, BLKmode,
6276 expand_expr (arg2, NULL_RTX, Pmode,
6278 expand_expr (len, NULL_RTX, VOIDmode, 0),
6279 GEN_INT (MIN (arg1_align, arg2_align))));
6281 /* Return the value in the proper mode for this function. */
6282 mode = TYPE_MODE (TREE_TYPE (exp));
6283 if (GET_MODE (result) == mode)
6285 else if (target != 0)
6287 convert_move (target, result, 0);
6291 return convert_to_mode (mode, result, 0);
6294 case BUILT_IN_STRCMP:
6295 case BUILT_IN_MEMCMP:
6299 default: /* just do library call, if unknown builtin */
6300 error ("built-in function `%s' not currently supported",
6301 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6304 /* The switch statement above can drop through to cause the function
6305 to be called normally. */
6307 return expand_call (exp, target, ignore);
6310 /* Built-in functions to perform an untyped call and return. */
6312 /* For each register that may be used for calling a function, this
6313 gives a mode used to copy the register's value. VOIDmode indicates
6314 the register is not used for calling a function. If the machine
6315 has register windows, this gives only the outbound registers.
6316 INCOMING_REGNO gives the corresponding inbound register. */
6317 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6319 /* For each register that may be used for returning values, this gives
6320 a mode used to copy the register's value. VOIDmode indicates the
6321 register is not used for returning values. If the machine has
6322 register windows, this gives only the outbound registers.
6323 INCOMING_REGNO gives the corresponding inbound register. */
6324 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6326 /* Return the size required for the block returned by __builtin_apply_args,
6327 and initialize apply_args_mode. */
6331 static int size = -1;
6333 enum machine_mode mode;
6335 /* The values computed by this function never change. */
6338 /* The first value is the incoming arg-pointer. */
6339 size = GET_MODE_SIZE (Pmode);
6341 /* The second value is the structure value address unless this is
6342 passed as an "invisible" first argument. */
6343 if (struct_value_rtx)
6344 size += GET_MODE_SIZE (Pmode);
6346 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6347 if (FUNCTION_ARG_REGNO_P (regno))
6349 /* Search for the proper mode for copying this register's
6350 value. I'm not sure this is right, but it works so far. */
6351 enum machine_mode best_mode = VOIDmode;
6353 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6355 mode = GET_MODE_WIDER_MODE (mode))
6356 if (HARD_REGNO_MODE_OK (regno, mode)
6357 && HARD_REGNO_NREGS (regno, mode) == 1)
6360 if (best_mode == VOIDmode)
6361 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6363 mode = GET_MODE_WIDER_MODE (mode))
6364 if (HARD_REGNO_MODE_OK (regno, mode)
6365 && (mov_optab->handlers[(int) mode].insn_code
6366 != CODE_FOR_nothing))
6370 if (mode == VOIDmode)
6373 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6374 if (size % align != 0)
6375 size = CEIL (size, align) * align;
6376 size += GET_MODE_SIZE (mode);
6377 apply_args_mode[regno] = mode;
6380 apply_args_mode[regno] = VOIDmode;
6385 /* Return the size required for the block returned by __builtin_apply,
6386 and initialize apply_result_mode. */
6388 apply_result_size ()
6390 static int size = -1;
6392 enum machine_mode mode;
6394 /* The values computed by this function never change. */
6399 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6400 if (FUNCTION_VALUE_REGNO_P (regno))
6402 /* Search for the proper mode for copying this register's
6403 value. I'm not sure this is right, but it works so far. */
6404 enum machine_mode best_mode = VOIDmode;
6406 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6408 mode = GET_MODE_WIDER_MODE (mode))
6409 if (HARD_REGNO_MODE_OK (regno, mode))
6412 if (best_mode == VOIDmode)
6413 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6415 mode = GET_MODE_WIDER_MODE (mode))
6416 if (HARD_REGNO_MODE_OK (regno, mode)
6417 && (mov_optab->handlers[(int) mode].insn_code
6418 != CODE_FOR_nothing))
6422 if (mode == VOIDmode)
6425 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6426 if (size % align != 0)
6427 size = CEIL (size, align) * align;
6428 size += GET_MODE_SIZE (mode);
6429 apply_result_mode[regno] = mode;
6432 apply_result_mode[regno] = VOIDmode;
6434 /* Allow targets that use untyped_call and untyped_return to override
6435 the size so that machine-specific information can be stored here. */
6436 #ifdef APPLY_RESULT_SIZE
6437 size = APPLY_RESULT_SIZE;
6443 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6444 /* Create a vector describing the result block RESULT. If SAVEP is true,
6445 the result block is used to save the values; otherwise it is used to
6446 restore the values. */
6448 result_vector (savep, result)
6452 int regno, size, align, nelts;
6453 enum machine_mode mode;
6455 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6458 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6459 if ((mode = apply_result_mode[regno]) != VOIDmode)
6461 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6462 if (size % align != 0)
6463 size = CEIL (size, align) * align;
6464 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6465 mem = change_address (result, mode,
6466 plus_constant (XEXP (result, 0), size));
6467 savevec[nelts++] = (savep
6468 ? gen_rtx (SET, VOIDmode, mem, reg)
6469 : gen_rtx (SET, VOIDmode, reg, mem));
6470 size += GET_MODE_SIZE (mode);
6472 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6474 #endif /* HAVE_untyped_call or HAVE_untyped_return */
6477 /* Save the state required to perform an untyped call with the same
6478 arguments as were passed to the current function. */
6480 expand_builtin_apply_args ()
6483 int size, align, regno;
6484 enum machine_mode mode;
6486 /* Create a block where the arg-pointer, structure value address,
6487 and argument registers can be saved. */
6488 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6490 /* Walk past the arg-pointer and structure value address. */
6491 size = GET_MODE_SIZE (Pmode);
6492 if (struct_value_rtx)
6493 size += GET_MODE_SIZE (Pmode);
6495 /* Save each register used in calling a function to the block. */
6496 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6497 if ((mode = apply_args_mode[regno]) != VOIDmode)
6499 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6500 if (size % align != 0)
6501 size = CEIL (size, align) * align;
6502 emit_move_insn (change_address (registers, mode,
6503 plus_constant (XEXP (registers, 0),
6505 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6506 size += GET_MODE_SIZE (mode);
6509 /* Save the arg pointer to the block. */
6510 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6511 copy_to_reg (virtual_incoming_args_rtx));
6512 size = GET_MODE_SIZE (Pmode);
6514 /* Save the structure value address unless this is passed as an
6515 "invisible" first argument. */
6516 if (struct_value_incoming_rtx)
6518 emit_move_insn (change_address (registers, Pmode,
6519 plus_constant (XEXP (registers, 0),
6521 copy_to_reg (struct_value_incoming_rtx));
6522 size += GET_MODE_SIZE (Pmode);
6525 /* Return the address of the block. */
6526 return copy_addr_to_reg (XEXP (registers, 0));
6529 /* Perform an untyped call and save the state required to perform an
6530 untyped return of whatever value was returned by the given function. */
6532 expand_builtin_apply (function, arguments, argsize)
6533 rtx function, arguments, argsize;
6535 int size, align, regno;
6536 enum machine_mode mode;
6537 rtx incoming_args, result, reg, dest, call_insn;
6538 rtx old_stack_level = 0;
6541 /* Create a block where the return registers can be saved. */
6542 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6544 /* ??? The argsize value should be adjusted here. */
6546 /* Fetch the arg pointer from the ARGUMENTS block. */
6547 incoming_args = gen_reg_rtx (Pmode);
6548 emit_move_insn (incoming_args,
6549 gen_rtx (MEM, Pmode, arguments));
6550 #ifndef STACK_GROWS_DOWNWARD
6551 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6552 incoming_args, 0, OPTAB_LIB_WIDEN);
6555 /* Perform postincrements before actually calling the function. */
6558 /* Push a new argument block and copy the arguments. */
6559 do_pending_stack_adjust ();
6560 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6562 /* Push a block of memory onto the stack to store the memory arguments.
6563 Save the address in a register, and copy the memory arguments. ??? I
6564 haven't figured out how the calling convention macros effect this,
6565 but it's likely that the source and/or destination addresses in
6566 the block copy will need updating in machine specific ways. */
6567 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6568 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6569 gen_rtx (MEM, BLKmode, incoming_args),
6571 PARM_BOUNDARY / BITS_PER_UNIT);
6573 /* Refer to the argument block. */
6575 arguments = gen_rtx (MEM, BLKmode, arguments);
6577 /* Walk past the arg-pointer and structure value address. */
6578 size = GET_MODE_SIZE (Pmode);
6579 if (struct_value_rtx)
6580 size += GET_MODE_SIZE (Pmode);
6582 /* Restore each of the registers previously saved. Make USE insns
6583 for each of these registers for use in making the call. */
6584 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6585 if ((mode = apply_args_mode[regno]) != VOIDmode)
6587 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6588 if (size % align != 0)
6589 size = CEIL (size, align) * align;
6590 reg = gen_rtx (REG, mode, regno);
6591 emit_move_insn (reg,
6592 change_address (arguments, mode,
6593 plus_constant (XEXP (arguments, 0),
6596 push_to_sequence (use_insns);
6597 emit_insn (gen_rtx (USE, VOIDmode, reg));
6598 use_insns = get_insns ();
6600 size += GET_MODE_SIZE (mode);
6603 /* Restore the structure value address unless this is passed as an
6604 "invisible" first argument. */
6605 size = GET_MODE_SIZE (Pmode);
6606 if (struct_value_rtx)
6608 rtx value = gen_reg_rtx (Pmode);
6609 emit_move_insn (value,
6610 change_address (arguments, Pmode,
6611 plus_constant (XEXP (arguments, 0),
6613 emit_move_insn (struct_value_rtx, value);
6614 if (GET_CODE (struct_value_rtx) == REG)
6616 push_to_sequence (use_insns);
6617 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6618 use_insns = get_insns ();
6621 size += GET_MODE_SIZE (Pmode);
6624 /* All arguments and registers used for the call are set up by now! */
6625 function = prepare_call_address (function, NULL_TREE, &use_insns);
6627 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
6628 and we don't want to load it into a register as an optimization,
6629 because prepare_call_address already did it if it should be done. */
6630 if (GET_CODE (function) != SYMBOL_REF)
6631 function = memory_address (FUNCTION_MODE, function);
6633 /* Generate the actual call instruction and save the return value. */
6634 #ifdef HAVE_untyped_call
6635 if (HAVE_untyped_call)
6636 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6637 result, result_vector (1, result)));
6640 #ifdef HAVE_call_value
6641 if (HAVE_call_value)
6645 /* Locate the unique return register. It is not possible to
6646 express a call that sets more than one return register using
6647 call_value; use untyped_call for that. In fact, untyped_call
6648 only needs to save the return registers in the given block. */
6649 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6650 if ((mode = apply_result_mode[regno]) != VOIDmode)
6653 abort (); /* HAVE_untyped_call required. */
6654 valreg = gen_rtx (REG, mode, regno);
6657 emit_call_insn (gen_call_value (valreg,
6658 gen_rtx (MEM, FUNCTION_MODE, function),
6659 const0_rtx, NULL_RTX, const0_rtx));
6661 emit_move_insn (change_address (result, GET_MODE (valreg),
6669 /* Find the CALL insn we just emitted and write the USE insns before it. */
6670 for (call_insn = get_last_insn ();
6671 call_insn && GET_CODE (call_insn) != CALL_INSN;
6672 call_insn = PREV_INSN (call_insn))
6678 /* Put the USE insns before the CALL. */
6679 emit_insns_before (use_insns, call_insn);
6681 /* Restore the stack. */
6682 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6684 /* Return the address of the result block. */
6685 return copy_addr_to_reg (XEXP (result, 0));
6688 /* Perform an untyped return. */
6690 expand_builtin_return (result)
6693 int size, align, regno;
6694 enum machine_mode mode;
6698 apply_result_size ();
6699 result = gen_rtx (MEM, BLKmode, result);
6701 #ifdef HAVE_untyped_return
6702 if (HAVE_untyped_return)
6704 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6710 /* Restore the return value and note that each value is used. */
6712 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6713 if ((mode = apply_result_mode[regno]) != VOIDmode)
6715 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6716 if (size % align != 0)
6717 size = CEIL (size, align) * align;
6718 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6719 emit_move_insn (reg,
6720 change_address (result, mode,
6721 plus_constant (XEXP (result, 0),
6724 push_to_sequence (use_insns);
6725 emit_insn (gen_rtx (USE, VOIDmode, reg));
6726 use_insns = get_insns ();
6728 size += GET_MODE_SIZE (mode);
6731 /* Put the USE insns before the return. */
6732 emit_insns (use_insns);
6734 /* Return whatever values was restored by jumping directly to the end
6736 expand_null_return ();
6739 /* Expand code for a post- or pre- increment or decrement
6740 and return the RTX for the result.
6741 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6744 expand_increment (exp, post)
6748 register rtx op0, op1;
6749 register rtx temp, value;
6750 register tree incremented = TREE_OPERAND (exp, 0);
6751 optab this_optab = add_optab;
6753 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6754 int op0_is_copy = 0;
6756 /* Stabilize any component ref that might need to be
6757 evaluated more than once below. */
6759 || TREE_CODE (incremented) == BIT_FIELD_REF
6760 || (TREE_CODE (incremented) == COMPONENT_REF
6761 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6762 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6763 incremented = stabilize_reference (incremented);
6765 /* Compute the operands as RTX.
6766 Note whether OP0 is the actual lvalue or a copy of it:
6767 I believe it is a copy iff it is a register or subreg
6768 and insns were generated in computing it. */
6770 temp = get_last_insn ();
6771 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6773 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6774 in place but intead must do sign- or zero-extension during assignment,
6775 so we copy it into a new register and let the code below use it as
6778 Note that we can safely modify this SUBREG since it is know not to be
6779 shared (it was made by the expand_expr call above). */
6781 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6782 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6784 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6785 && temp != get_last_insn ());
6786 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6788 /* Decide whether incrementing or decrementing. */
6789 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6790 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6791 this_optab = sub_optab;
6793 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6794 then we cannot just increment OP0. We must therefore contrive to
6795 increment the original value. Then, for postincrement, we can return
6796 OP0 since it is a copy of the old value. For preincrement, we want
6797 to always expand here, since this generates better or equivalent code. */
6798 if (!post || op0_is_copy)
6800 /* This is the easiest way to increment the value wherever it is.
6801 Problems with multiple evaluation of INCREMENTED are prevented
6802 because either (1) it is a component_ref or preincrement,
6803 in which case it was stabilized above, or (2) it is an array_ref
6804 with constant index in an array in a register, which is
6805 safe to reevaluate. */
6806 tree newexp = build ((this_optab == add_optab
6807 ? PLUS_EXPR : MINUS_EXPR),
6810 TREE_OPERAND (exp, 1));
6811 temp = expand_assignment (incremented, newexp, ! post, 0);
6812 return post ? op0 : temp;
6815 /* Convert decrement by a constant into a negative increment. */
6816 if (this_optab == sub_optab
6817 && GET_CODE (op1) == CONST_INT)
6819 op1 = GEN_INT (- INTVAL (op1));
6820 this_optab = add_optab;
6825 /* We have a true reference to the value in OP0.
6826 If there is an insn to add or subtract in this mode, queue it. */
6828 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6829 op0 = stabilize (op0);
6832 icode = (int) this_optab->handlers[(int) mode].insn_code;
6833 if (icode != (int) CODE_FOR_nothing
6834 /* Make sure that OP0 is valid for operands 0 and 1
6835 of the insn we want to queue. */
6836 && (*insn_operand_predicate[icode][0]) (op0, mode)
6837 && (*insn_operand_predicate[icode][1]) (op0, mode))
6839 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6840 op1 = force_reg (mode, op1);
6842 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6846 /* Preincrement, or we can't increment with one simple insn. */
6848 /* Save a copy of the value before inc or dec, to return it later. */
6849 temp = value = copy_to_reg (op0);
6851 /* Arrange to return the incremented value. */
6852 /* Copy the rtx because expand_binop will protect from the queue,
6853 and the results of that would be invalid for us to return
6854 if our caller does emit_queue before using our result. */
6855 temp = copy_rtx (value = op0);
6857 /* Increment however we can. */
6858 op1 = expand_binop (mode, this_optab, value, op1, op0,
6859 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6860 /* Make sure the value is stored into OP0. */
6862 emit_move_insn (op0, op1);
6867 /* Expand all function calls contained within EXP, innermost ones first.
6868 But don't look within expressions that have sequence points.
6869 For each CALL_EXPR, record the rtx for its value
6870 in the CALL_EXPR_RTL field. */
6873 preexpand_calls (exp)
6876 register int nops, i;
6877 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6879 if (! do_preexpand_calls)
6882 /* Only expressions and references can contain calls. */
6884 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6887 switch (TREE_CODE (exp))
6890 /* Do nothing if already expanded. */
6891 if (CALL_EXPR_RTL (exp) != 0)
6894 /* Do nothing to built-in functions. */
6895 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6896 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6897 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6898 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6903 case TRUTH_ANDIF_EXPR:
6904 case TRUTH_ORIF_EXPR:
6905 /* If we find one of these, then we can be sure
6906 the adjust will be done for it (since it makes jumps).
6907 Do it now, so that if this is inside an argument
6908 of a function, we don't get the stack adjustment
6909 after some other args have already been pushed. */
6910 do_pending_stack_adjust ();
6915 case WITH_CLEANUP_EXPR:
6919 if (SAVE_EXPR_RTL (exp) != 0)
6923 nops = tree_code_length[(int) TREE_CODE (exp)];
6924 for (i = 0; i < nops; i++)
6925 if (TREE_OPERAND (exp, i) != 0)
6927 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6928 if (type == 'e' || type == '<' || type == '1' || type == '2'
6930 preexpand_calls (TREE_OPERAND (exp, i));
6934 /* At the start of a function, record that we have no previously-pushed
6935 arguments waiting to be popped. */
6938 init_pending_stack_adjust ()
6940 pending_stack_adjust = 0;
6943 /* When exiting from function, if safe, clear out any pending stack adjust
6944 so the adjustment won't get done. */
6947 clear_pending_stack_adjust ()
6949 #ifdef EXIT_IGNORE_STACK
6950 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6951 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6952 && ! flag_inline_functions)
6953 pending_stack_adjust = 0;
6957 /* Pop any previously-pushed arguments that have not been popped yet. */
6960 do_pending_stack_adjust ()
6962 if (inhibit_defer_pop == 0)
6964 if (pending_stack_adjust != 0)
6965 adjust_stack (GEN_INT (pending_stack_adjust));
6966 pending_stack_adjust = 0;
6970 /* Expand all cleanups up to OLD_CLEANUPS.
6971 Needed here, and also for language-dependent calls. */
6974 expand_cleanups_to (old_cleanups)
6977 while (cleanups_this_call != old_cleanups)
6979 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6980 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6984 /* Expand conditional expressions. */
6986 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6987 LABEL is an rtx of code CODE_LABEL, in this function and all the
6991 jumpifnot (exp, label)
6995 do_jump (exp, label, NULL_RTX);
6998 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7005 do_jump (exp, NULL_RTX, label);
7008 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7009 the result is zero, or IF_TRUE_LABEL if the result is one.
7010 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7011 meaning fall through in that case.
7013 do_jump always does any pending stack adjust except when it does not
7014 actually perform a jump. An example where there is no jump
7015 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7017 This function is responsible for optimizing cases such as
7018 &&, || and comparison operators in EXP. */
7021 do_jump (exp, if_false_label, if_true_label)
7023 rtx if_false_label, if_true_label;
7025 register enum tree_code code = TREE_CODE (exp);
7026 /* Some cases need to create a label to jump to
7027 in order to properly fall through.
7028 These cases set DROP_THROUGH_LABEL nonzero. */
7029 rtx drop_through_label = 0;
7043 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7049 /* This is not true with #pragma weak */
7051 /* The address of something can never be zero. */
7053 emit_jump (if_true_label);
7058 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7059 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7060 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7063 /* If we are narrowing the operand, we have to do the compare in the
7065 if ((TYPE_PRECISION (TREE_TYPE (exp))
7066 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7068 case NON_LVALUE_EXPR:
7069 case REFERENCE_EXPR:
7074 /* These cannot change zero->non-zero or vice versa. */
7075 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7079 /* This is never less insns than evaluating the PLUS_EXPR followed by
7080 a test and can be longer if the test is eliminated. */
7082 /* Reduce to minus. */
7083 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7084 TREE_OPERAND (exp, 0),
7085 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7086 TREE_OPERAND (exp, 1))));
7087 /* Process as MINUS. */
7091 /* Non-zero iff operands of minus differ. */
7092 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7093 TREE_OPERAND (exp, 0),
7094 TREE_OPERAND (exp, 1)),
7099 /* If we are AND'ing with a small constant, do this comparison in the
7100 smallest type that fits. If the machine doesn't have comparisons
7101 that small, it will be converted back to the wider comparison.
7102 This helps if we are testing the sign bit of a narrower object.
7103 combine can't do this for us because it can't know whether a
7104 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7106 if (! SLOW_BYTE_ACCESS
7107 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7108 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7109 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7110 && (type = type_for_size (i + 1, 1)) != 0
7111 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7112 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7113 != CODE_FOR_nothing))
7115 do_jump (convert (type, exp), if_false_label, if_true_label);
7120 case TRUTH_NOT_EXPR:
7121 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7124 case TRUTH_ANDIF_EXPR:
7125 if (if_false_label == 0)
7126 if_false_label = drop_through_label = gen_label_rtx ();
7127 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7128 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7131 case TRUTH_ORIF_EXPR:
7132 if (if_true_label == 0)
7133 if_true_label = drop_through_label = gen_label_rtx ();
7134 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7135 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7139 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7142 do_pending_stack_adjust ();
7143 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7150 int bitsize, bitpos, unsignedp;
7151 enum machine_mode mode;
7156 /* Get description of this reference. We don't actually care
7157 about the underlying object here. */
7158 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7159 &mode, &unsignedp, &volatilep);
7161 type = type_for_size (bitsize, unsignedp);
7162 if (! SLOW_BYTE_ACCESS
7163 && type != 0 && bitsize >= 0
7164 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7165 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7166 != CODE_FOR_nothing))
7168 do_jump (convert (type, exp), if_false_label, if_true_label);
7175 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7176 if (integer_onep (TREE_OPERAND (exp, 1))
7177 && integer_zerop (TREE_OPERAND (exp, 2)))
7178 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7180 else if (integer_zerop (TREE_OPERAND (exp, 1))
7181 && integer_onep (TREE_OPERAND (exp, 2)))
7182 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7186 register rtx label1 = gen_label_rtx ();
7187 drop_through_label = gen_label_rtx ();
7188 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7189 /* Now the THEN-expression. */
7190 do_jump (TREE_OPERAND (exp, 1),
7191 if_false_label ? if_false_label : drop_through_label,
7192 if_true_label ? if_true_label : drop_through_label);
7193 /* In case the do_jump just above never jumps. */
7194 do_pending_stack_adjust ();
7195 emit_label (label1);
7196 /* Now the ELSE-expression. */
7197 do_jump (TREE_OPERAND (exp, 2),
7198 if_false_label ? if_false_label : drop_through_label,
7199 if_true_label ? if_true_label : drop_through_label);
7204 if (integer_zerop (TREE_OPERAND (exp, 1)))
7205 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7206 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7209 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7210 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7212 comparison = compare (exp, EQ, EQ);
7216 if (integer_zerop (TREE_OPERAND (exp, 1)))
7217 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7218 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7221 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7222 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7224 comparison = compare (exp, NE, NE);
7228 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7230 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7231 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7233 comparison = compare (exp, LT, LTU);
7237 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7239 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7240 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7242 comparison = compare (exp, LE, LEU);
7246 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7248 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7249 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7251 comparison = compare (exp, GT, GTU);
7255 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7257 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7258 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7260 comparison = compare (exp, GE, GEU);
7265 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7267 /* This is not needed any more and causes poor code since it causes
7268 comparisons and tests from non-SI objects to have different code
7270 /* Copy to register to avoid generating bad insns by cse
7271 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7272 if (!cse_not_expected && GET_CODE (temp) == MEM)
7273 temp = copy_to_reg (temp);
7275 do_pending_stack_adjust ();
7276 if (GET_CODE (temp) == CONST_INT)
7277 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7278 else if (GET_CODE (temp) == LABEL_REF)
7279 comparison = const_true_rtx;
7280 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7281 && !can_compare_p (GET_MODE (temp)))
7282 /* Note swapping the labels gives us not-equal. */
7283 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7284 else if (GET_MODE (temp) != VOIDmode)
7285 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7286 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7287 GET_MODE (temp), NULL_RTX, 0);
7292 /* Do any postincrements in the expression that was tested. */
7295 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7296 straight into a conditional jump instruction as the jump condition.
7297 Otherwise, all the work has been done already. */
7299 if (comparison == const_true_rtx)
7302 emit_jump (if_true_label);
7304 else if (comparison == const0_rtx)
7307 emit_jump (if_false_label);
7309 else if (comparison)
7310 do_jump_for_compare (comparison, if_false_label, if_true_label);
7314 if (drop_through_label)
7316 /* If do_jump produces code that might be jumped around,
7317 do any stack adjusts from that code, before the place
7318 where control merges in. */
7319 do_pending_stack_adjust ();
7320 emit_label (drop_through_label);
7324 /* Given a comparison expression EXP for values too wide to be compared
7325 with one insn, test the comparison and jump to the appropriate label.
7326 The code of EXP is ignored; we always test GT if SWAP is 0,
7327 and LT if SWAP is 1. */
7330 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7333 rtx if_false_label, if_true_label;
7335 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7336 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7337 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7338 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7339 rtx drop_through_label = 0;
7340 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7343 if (! if_true_label || ! if_false_label)
7344 drop_through_label = gen_label_rtx ();
7345 if (! if_true_label)
7346 if_true_label = drop_through_label;
7347 if (! if_false_label)
7348 if_false_label = drop_through_label;
7350 /* Compare a word at a time, high order first. */
7351 for (i = 0; i < nwords; i++)
7354 rtx op0_word, op1_word;
7356 if (WORDS_BIG_ENDIAN)
7358 op0_word = operand_subword_force (op0, i, mode);
7359 op1_word = operand_subword_force (op1, i, mode);
7363 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7364 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7367 /* All but high-order word must be compared as unsigned. */
7368 comp = compare_from_rtx (op0_word, op1_word,
7369 (unsignedp || i > 0) ? GTU : GT,
7370 unsignedp, word_mode, NULL_RTX, 0);
7371 if (comp == const_true_rtx)
7372 emit_jump (if_true_label);
7373 else if (comp != const0_rtx)
7374 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7376 /* Consider lower words only if these are equal. */
7377 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7379 if (comp == const_true_rtx)
7380 emit_jump (if_false_label);
7381 else if (comp != const0_rtx)
7382 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7386 emit_jump (if_false_label);
7387 if (drop_through_label)
7388 emit_label (drop_through_label);
7391 /* Compare OP0 with OP1, word at a time, in mode MODE.
7392 UNSIGNEDP says to do unsigned comparison.
7393 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7396 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7397 enum machine_mode mode;
7400 rtx if_false_label, if_true_label;
7402 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7403 rtx drop_through_label = 0;
7406 if (! if_true_label || ! if_false_label)
7407 drop_through_label = gen_label_rtx ();
7408 if (! if_true_label)
7409 if_true_label = drop_through_label;
7410 if (! if_false_label)
7411 if_false_label = drop_through_label;
7413 /* Compare a word at a time, high order first. */
7414 for (i = 0; i < nwords; i++)
7417 rtx op0_word, op1_word;
7419 if (WORDS_BIG_ENDIAN)
7421 op0_word = operand_subword_force (op0, i, mode);
7422 op1_word = operand_subword_force (op1, i, mode);
7426 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7427 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7430 /* All but high-order word must be compared as unsigned. */
7431 comp = compare_from_rtx (op0_word, op1_word,
7432 (unsignedp || i > 0) ? GTU : GT,
7433 unsignedp, word_mode, NULL_RTX, 0);
7434 if (comp == const_true_rtx)
7435 emit_jump (if_true_label);
7436 else if (comp != const0_rtx)
7437 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7439 /* Consider lower words only if these are equal. */
7440 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7442 if (comp == const_true_rtx)
7443 emit_jump (if_false_label);
7444 else if (comp != const0_rtx)
7445 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7449 emit_jump (if_false_label);
7450 if (drop_through_label)
7451 emit_label (drop_through_label);
7454 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7455 with one insn, test the comparison and jump to the appropriate label. */
7458 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7460 rtx if_false_label, if_true_label;
7462 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7463 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7464 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7465 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7467 rtx drop_through_label = 0;
7469 if (! if_false_label)
7470 drop_through_label = if_false_label = gen_label_rtx ();
7472 for (i = 0; i < nwords; i++)
7474 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7475 operand_subword_force (op1, i, mode),
7476 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7477 word_mode, NULL_RTX, 0);
7478 if (comp == const_true_rtx)
7479 emit_jump (if_false_label);
7480 else if (comp != const0_rtx)
7481 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7485 emit_jump (if_true_label);
7486 if (drop_through_label)
7487 emit_label (drop_through_label);
7490 /* Jump according to whether OP0 is 0.
7491 We assume that OP0 has an integer mode that is too wide
7492 for the available compare insns. */
7495 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7497 rtx if_false_label, if_true_label;
7499 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7501 rtx drop_through_label = 0;
7503 if (! if_false_label)
7504 drop_through_label = if_false_label = gen_label_rtx ();
7506 for (i = 0; i < nwords; i++)
7508 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7510 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7511 if (comp == const_true_rtx)
7512 emit_jump (if_false_label);
7513 else if (comp != const0_rtx)
7514 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7518 emit_jump (if_true_label);
7519 if (drop_through_label)
7520 emit_label (drop_through_label);
7523 /* Given a comparison expression in rtl form, output conditional branches to
7524 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7527 do_jump_for_compare (comparison, if_false_label, if_true_label)
7528 rtx comparison, if_false_label, if_true_label;
7532 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7533 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7538 emit_jump (if_false_label);
7540 else if (if_false_label)
7543 rtx prev = PREV_INSN (get_last_insn ());
7546 /* Output the branch with the opposite condition. Then try to invert
7547 what is generated. If more than one insn is a branch, or if the
7548 branch is not the last insn written, abort. If we can't invert
7549 the branch, emit make a true label, redirect this jump to that,
7550 emit a jump to the false label and define the true label. */
7552 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7553 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7557 /* Here we get the insn before what was just emitted.
7558 On some machines, emitting the branch can discard
7559 the previous compare insn and emit a replacement. */
7561 /* If there's only one preceding insn... */
7562 insn = get_insns ();
7564 insn = NEXT_INSN (prev);
7566 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7567 if (GET_CODE (insn) == JUMP_INSN)
7574 if (branch != get_last_insn ())
7577 if (! invert_jump (branch, if_false_label))
7579 if_true_label = gen_label_rtx ();
7580 redirect_jump (branch, if_true_label);
7581 emit_jump (if_false_label);
7582 emit_label (if_true_label);
7587 /* Generate code for a comparison expression EXP
7588 (including code to compute the values to be compared)
7589 and set (CC0) according to the result.
7590 SIGNED_CODE should be the rtx operation for this comparison for
7591 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7593 We force a stack adjustment unless there are currently
7594 things pushed on the stack that aren't yet used. */
7597 compare (exp, signed_code, unsigned_code)
7599 enum rtx_code signed_code, unsigned_code;
7602 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7604 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7605 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7606 register enum machine_mode mode = TYPE_MODE (type);
7607 int unsignedp = TREE_UNSIGNED (type);
7608 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7610 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7612 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7613 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7616 /* Like compare but expects the values to compare as two rtx's.
7617 The decision as to signed or unsigned comparison must be made by the caller.
7619 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7622 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7623 size of MODE should be used. */
7626 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7627 register rtx op0, op1;
7630 enum machine_mode mode;
7636 /* If one operand is constant, make it the second one. Only do this
7637 if the other operand is not constant as well. */
7639 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7640 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7645 code = swap_condition (code);
7650 op0 = force_not_mem (op0);
7651 op1 = force_not_mem (op1);
7654 do_pending_stack_adjust ();
7656 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7657 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7661 /* There's no need to do this now that combine.c can eliminate lots of
7662 sign extensions. This can be less efficient in certain cases on other
7665 /* If this is a signed equality comparison, we can do it as an
7666 unsigned comparison since zero-extension is cheaper than sign
7667 extension and comparisons with zero are done as unsigned. This is
7668 the case even on machines that can do fast sign extension, since
7669 zero-extension is easier to combine with other operations than
7670 sign-extension is. If we are comparing against a constant, we must
7671 convert it to what it would look like unsigned. */
7672 if ((code == EQ || code == NE) && ! unsignedp
7673 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7675 if (GET_CODE (op1) == CONST_INT
7676 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7677 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7682 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7684 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7687 /* Generate code to calculate EXP using a store-flag instruction
7688 and return an rtx for the result. EXP is either a comparison
7689 or a TRUTH_NOT_EXPR whose operand is a comparison.
7691 If TARGET is nonzero, store the result there if convenient.
7693 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7696 Return zero if there is no suitable set-flag instruction
7697 available on this machine.
7699 Once expand_expr has been called on the arguments of the comparison,
7700 we are committed to doing the store flag, since it is not safe to
7701 re-evaluate the expression. We emit the store-flag insn by calling
7702 emit_store_flag, but only expand the arguments if we have a reason
7703 to believe that emit_store_flag will be successful. If we think that
7704 it will, but it isn't, we have to simulate the store-flag with a
7705 set/jump/set sequence. */
7708 do_store_flag (exp, target, mode, only_cheap)
7711 enum machine_mode mode;
7715 tree arg0, arg1, type;
7717 enum machine_mode operand_mode;
7721 enum insn_code icode;
7722 rtx subtarget = target;
7723 rtx result, label, pattern, jump_pat;
7725 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7726 result at the end. We can't simply invert the test since it would
7727 have already been inverted if it were valid. This case occurs for
7728 some floating-point comparisons. */
7730 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7731 invert = 1, exp = TREE_OPERAND (exp, 0);
7733 arg0 = TREE_OPERAND (exp, 0);
7734 arg1 = TREE_OPERAND (exp, 1);
7735 type = TREE_TYPE (arg0);
7736 operand_mode = TYPE_MODE (type);
7737 unsignedp = TREE_UNSIGNED (type);
7739 /* We won't bother with BLKmode store-flag operations because it would mean
7740 passing a lot of information to emit_store_flag. */
7741 if (operand_mode == BLKmode)
7747 /* Get the rtx comparison code to use. We know that EXP is a comparison
7748 operation of some type. Some comparisons against 1 and -1 can be
7749 converted to comparisons with zero. Do so here so that the tests
7750 below will be aware that we have a comparison with zero. These
7751 tests will not catch constants in the first operand, but constants
7752 are rarely passed as the first operand. */
7754 switch (TREE_CODE (exp))
7763 if (integer_onep (arg1))
7764 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7766 code = unsignedp ? LTU : LT;
7769 if (! unsignedp && integer_all_onesp (arg1))
7770 arg1 = integer_zero_node, code = LT;
7772 code = unsignedp ? LEU : LE;
7775 if (! unsignedp && integer_all_onesp (arg1))
7776 arg1 = integer_zero_node, code = GE;
7778 code = unsignedp ? GTU : GT;
7781 if (integer_onep (arg1))
7782 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7784 code = unsignedp ? GEU : GE;
7790 /* Put a constant second. */
7791 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7793 tem = arg0; arg0 = arg1; arg1 = tem;
7794 code = swap_condition (code);
7797 /* If this is an equality or inequality test of a single bit, we can
7798 do this by shifting the bit being tested to the low-order bit and
7799 masking the result with the constant 1. If the condition was EQ,
7800 we xor it with 1. This does not require an scc insn and is faster
7801 than an scc insn even if we have it. */
7803 if ((code == NE || code == EQ)
7804 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7805 && integer_pow2p (TREE_OPERAND (arg0, 1))
7806 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7808 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7809 NULL_RTX, VOIDmode, 0)));
7811 if (subtarget == 0 || GET_CODE (subtarget) != REG
7812 || GET_MODE (subtarget) != operand_mode
7813 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7816 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7819 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7820 size_int (bitnum), target, 1);
7822 if (GET_MODE (op0) != mode)
7823 op0 = convert_to_mode (mode, op0, 1);
7825 if (bitnum != TYPE_PRECISION (type) - 1)
7826 op0 = expand_and (op0, const1_rtx, target);
7828 if ((code == EQ && ! invert) || (code == NE && invert))
7829 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7835 /* Now see if we are likely to be able to do this. Return if not. */
7836 if (! can_compare_p (operand_mode))
7838 icode = setcc_gen_code[(int) code];
7839 if (icode == CODE_FOR_nothing
7840 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7842 /* We can only do this if it is one of the special cases that
7843 can be handled without an scc insn. */
7844 if ((code == LT && integer_zerop (arg1))
7845 || (! only_cheap && code == GE && integer_zerop (arg1)))
7847 else if (BRANCH_COST >= 0
7848 && ! only_cheap && (code == NE || code == EQ)
7849 && TREE_CODE (type) != REAL_TYPE
7850 && ((abs_optab->handlers[(int) operand_mode].insn_code
7851 != CODE_FOR_nothing)
7852 || (ffs_optab->handlers[(int) operand_mode].insn_code
7853 != CODE_FOR_nothing)))
7859 preexpand_calls (exp);
7860 if (subtarget == 0 || GET_CODE (subtarget) != REG
7861 || GET_MODE (subtarget) != operand_mode
7862 || ! safe_from_p (subtarget, arg1))
7865 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7866 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7869 target = gen_reg_rtx (mode);
7871 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7872 because, if the emit_store_flag does anything it will succeed and
7873 OP0 and OP1 will not be used subsequently. */
7875 result = emit_store_flag (target, code,
7876 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7877 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7878 operand_mode, unsignedp, 1);
7883 result = expand_binop (mode, xor_optab, result, const1_rtx,
7884 result, 0, OPTAB_LIB_WIDEN);
7888 /* If this failed, we have to do this with set/compare/jump/set code. */
7889 if (target == 0 || GET_CODE (target) != REG
7890 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7891 target = gen_reg_rtx (GET_MODE (target));
7893 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7894 result = compare_from_rtx (op0, op1, code, unsignedp,
7895 operand_mode, NULL_RTX, 0);
7896 if (GET_CODE (result) == CONST_INT)
7897 return (((result == const0_rtx && ! invert)
7898 || (result != const0_rtx && invert))
7899 ? const0_rtx : const1_rtx);
7901 label = gen_label_rtx ();
7902 if (bcc_gen_fctn[(int) code] == 0)
7905 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7906 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7912 /* Generate a tablejump instruction (used for switch statements). */
7914 #ifdef HAVE_tablejump
7916 /* INDEX is the value being switched on, with the lowest value
7917 in the table already subtracted.
7918 MODE is its expected mode (needed if INDEX is constant).
7919 RANGE is the length of the jump table.
7920 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7922 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7923 index value is out of range. */
7926 do_tablejump (index, mode, range, table_label, default_label)
7927 rtx index, range, table_label, default_label;
7928 enum machine_mode mode;
7930 register rtx temp, vector;
7932 /* Do an unsigned comparison (in the proper mode) between the index
7933 expression and the value which represents the length of the range.
7934 Since we just finished subtracting the lower bound of the range
7935 from the index expression, this comparison allows us to simultaneously
7936 check that the original index expression value is both greater than
7937 or equal to the minimum value of the range and less than or equal to
7938 the maximum value of the range. */
7940 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
7941 emit_jump_insn (gen_bltu (default_label));
7943 /* If index is in range, it must fit in Pmode.
7944 Convert to Pmode so we can index with it. */
7946 index = convert_to_mode (Pmode, index, 1);
7948 /* If flag_force_addr were to affect this address
7949 it could interfere with the tricky assumptions made
7950 about addresses that contain label-refs,
7951 which may be valid only very near the tablejump itself. */
7952 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7953 GET_MODE_SIZE, because this indicates how large insns are. The other
7954 uses should all be Pmode, because they are addresses. This code
7955 could fail if addresses and insns are not the same size. */
7956 index = memory_address_noforce
7958 gen_rtx (PLUS, Pmode,
7959 gen_rtx (MULT, Pmode, index,
7960 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7961 gen_rtx (LABEL_REF, Pmode, table_label)));
7962 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7963 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7964 RTX_UNCHANGING_P (vector) = 1;
7965 convert_move (temp, vector, 0);
7967 emit_jump_insn (gen_tablejump (temp, table_label));
7969 #ifndef CASE_VECTOR_PC_RELATIVE
7970 /* If we are generating PIC code or if the table is PC-relative, the
7971 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7977 #endif /* HAVE_tablejump */