/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "typeclass.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
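/* For example, CEIL (9, 4) is (9 + 3) / 4 == 3: nine bytes occupy three
   4-byte units, where plain integer division would yield only 2.  */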
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to, to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from, from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
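/* So, for example, direct_load[(int) SImode] == 1 means this target can
   load an SImode register straight from memory, so the expander may
   reference an SImode field in place instead of through a wider mode.  */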
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
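/* A minimal usage sketch (hypothetical operands, for illustration only):
   to queue the side effect of VAR++ so that it comes out at the next
   emit_queue, a caller would write something like

	rtx body = gen_rtx (SET, VOIDmode, var,
			    gen_rtx (PLUS, GET_MODE (var), var, const1_rtx));
	rtx q = enqueue_insn (var, body);

   and then use Q, via protect_from_queue, wherever the pre-increment
   value of VAR is wanted.  */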
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
	{
	  XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
	  XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
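/* Putting the two together (hypothetical example): given the QUEUED rtx Q
   from the enqueue_insn sketch above,

	rtx safe = protect_from_queue (q, 0);

   yields QUEUED_VAR (q) while the increment is still pending, and a
   temporary holding the old value once the increment has been emitted;
   SAFE must go into an insn immediately, before the queue is flushed.  */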
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);
  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
	  libcall = extendsfdf2_libfunc;

	  libcall = extendsfxf2_libfunc;

	  libcall = extendsftf2_libfunc;

	  libcall = truncdfsf2_libfunc;

	  libcall = extenddfxf2_libfunc;

	  libcall = extenddftf2_libfunc;

	  libcall = truncxfsf2_libfunc;

	  libcall = truncxfdf2_libfunc;

	  libcall = trunctfsf2_libfunc;

	  libcall = trunctfdf2_libfunc;
      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, from));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
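/* A minimal usage sketch (hypothetical registers, for illustration):

	rtx narrow = gen_reg_rtx (QImode);
	rtx wide = gen_reg_rtx (SImode);
	convert_move (wide, narrow, 0);

   emits a QImode-to-SImode sign extension; passing 1 for UNSIGNEDP would
   emit a zero extension instead.  */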
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
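/* Unlike convert_move, this returns the converted value, so a caller may
   write (hypothetical example)

	rtx val = convert_to_mode (SImode, x, 1);

   and VAL may be X itself, a low part of X, or a fresh pseudo, depending
   on which case above applied.  */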
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }
  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }
  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
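/* A worked example (assuming MOVE_MAX == 4 and ALIGN == 4): L == 10
   decomposes into two SImode moves (8 bytes) plus one HImode move
   (2 bytes), so the function returns 3.  */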
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
	       ? gen_rtx (MEM, mode, data->from_addr)
	       : change_address (data->from, mode,
				 plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */
void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
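/* A minimal usage sketch (hypothetical operands): for two word-aligned
   BLKmode MEMs DST and SRC,

	emit_block_move (dst, src, GEN_INT (32), UNITS_PER_WORD);

   copies 32 bytes by pieces, by a movstr pattern, or by the library
   call, whichever of the cases above applies first.  */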
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
/* Mark the instructions since PREV as a libcall block.
   Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
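/* For instance (hypothetical operand), clear_storage (tem, 16) zeros a
   16-byte BLKmode temporary TEM through the library call above, while a
   non-BLKmode object is simply assigned const0_rtx.  */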
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
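/* A minimal usage sketch (hypothetical operands): to copy a constant
   into a fresh pseudo,

	rtx reg = gen_reg_rtx (SImode);
	emit_move_insn (reg, GEN_INT (42));

   the constant is legitimized (forced to memory if necessary) and any
   MEM addresses are validated before the move itself is emitted.  */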
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  register int i;

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
			     (class == MODE_COMPLEX_INT
			      ? MODE_INT : MODE_FLOAT),
			     0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && submode != BLKmode
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_highpart (submode, x)),
		  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_lowpart (submode, x)),
		  gen_lowpart (submode, y)));

      /* Mark these insns as a libcall block.  */
      group_insns (prev);

      return get_last_insn ();
    }
  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
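/* Thus on a machine whose STACK_PUSH_CODE is PRE_DEC, a push of X in
   mode M can be written as (hypothetical sketch)

	emit_move_insn (gen_rtx (MEM, m, gen_push_operand ()), x);

   which is the form emit_push_insn below uses when no argument block
   has been preallocated.  */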
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);
  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr, args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  else
	    {
	      /* Try the most limited insn first, because there's no point
		 including more than one in the machine description unless
		 the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	      if (HAVE_movstrqi
		  && GET_CODE (size) == CONST_INT
		  && ((unsigned) INTVAL (size)
		      < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
		{
		  rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
					  xinner, size, GEN_INT (align));
		  if (pat)
		    {
		      emit_insn (pat);
		      goto ret;
		    }
		}
#endif
#ifdef HAVE_movstrhi
	      if (HAVE_movstrhi
		  && GET_CODE (size) == CONST_INT
		  && ((unsigned) INTVAL (size)
		      < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
		{
		  rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
					  xinner, size, GEN_INT (align));
		  if (pat)
		    {
		      emit_insn (pat);
		      goto ret;
		    }
		}
#endif
#ifdef HAVE_movstrsi
	      if (HAVE_movstrsi)
		{
		  rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
					  xinner, size, GEN_INT (align));
		  if (pat)
		    {
		      emit_insn (pat);
		      goto ret;
		    }
		}
#endif
#ifdef HAVE_movstrdi
	      if (HAVE_movstrdi)
		{
		  rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
					  xinner, size, GEN_INT (align));
		  if (pat)
		    {
		      emit_insn (pat);
		      goto ret;
		    }
		}
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
#endif

	      /* Make inhibit_defer_pop nonzero around the library call
		 to force it to pop the bcopy-arguments right away.  */
	      NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	      emit_library_call (memcpy_libfunc, 0,
				 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
				 convert_to_mode (TYPE_MODE (sizetype),
						  size, TREE_UNSIGNED (sizetype)),
				 TYPE_MODE (sizetype));
#else
	      emit_library_call (bcopy_libfunc, 0,
				 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
				 convert_to_mode (TYPE_MODE (sizetype),
						  size, TREE_UNSIGNED (sizetype)),
				 TYPE_MODE (sizetype));
#endif
	      OK_DEFER_POP;
	    }
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr, INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
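/* A minimal call sketch (hypothetical operands, not a definitive recipe):
   pushing a plain SImode value X with no preallocated argument block and
   no partial-register part might look like

	emit_push_insn (x, SImode, NULL_TREE, NULL_RTX,
			GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
			NULL_RTX, const0_rtx);

   SIZE is unused for non-BLKmode values, PARTIAL and EXTRA are 0, and a
   zero ARGS_ADDR selects real push insns where the target has them.  */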
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */
rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, NULL_RTX, VOIDmode, 0);

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
					    force_reg (Pmode, offset_rtx)));
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    MEM_VOLATILE_P (to_rtx) = 1;
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();

      return result;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }
  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
      rtx size = expr_size (from);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }
2250 /* Compute FROM and store the value in the rtx we got. */
2252 result = store_expr (from, to_rtx, want_value);
2253 preserve_temp_slots (result);
2258 /* Generate code for computing expression EXP,
2259 and storing the value into TARGET.
2260 Returns TARGET or an equivalent value.
2261 TARGET may contain a QUEUED rtx.
2263 If SUGGEST_REG is nonzero, copy the value through a register
2264 and return that register, if that is possible.
2266 If the value stored is a constant, we return the constant. */
2269 store_expr (exp, target, suggest_reg)
2271 register rtx target;
2275 int dont_return_target = 0;
2277 if (TREE_CODE (exp) == COMPOUND_EXPR)
2279 /* Perform first part of compound expression, then assign from second part. */
2281 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2283 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2285 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2287 /* For conditional expression, get safe form of the target. Then
2288 test the condition, doing the appropriate assignment on either
2289 side. This avoids the creation of unnecessary temporaries.
2290 For non-BLKmode, it is more efficient not to do this. */
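/* Illustrative sketch, not from the original source: a BLKmode
conditional store of the sort this path handles.  Both arms are
stored directly into the one target around a branch, instead of
each arm being built in its own temporary.  */
#if 0
struct big { int a[8]; };
struct big x, y, z;
int cond;

void
example ()
{
  x = cond ? y : z;		/* test COND; store Y or Z into X */
}
#endif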
2292 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2295 target = protect_from_queue (target, 1);
2298 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2299 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2301 emit_jump_insn (gen_jump (lab2));
2304 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2310 else if (suggest_reg && GET_CODE (target) == MEM
2311 && GET_MODE (target) != BLKmode)
2312 /* If target is in memory and caller wants value in a register instead,
2313 arrange that. Pass TARGET as target for expand_expr so that,
2314 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2315 We know expand_expr will not use the target in that case. */
2317 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2318 GET_MODE (target), 0);
2319 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2320 temp = copy_to_reg (temp);
2321 dont_return_target = 1;
2323 else if (queued_subexp_p (target))
2324 /* If target contains a postincrement, it is not safe
2325 to use as the returned value. It would access the wrong
2326 place by the time the queued increment gets output.
2327 So copy the value through a temporary and use that temp as the result. */
2330 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2332 /* Expand EXP into a new pseudo. */
2333 temp = gen_reg_rtx (GET_MODE (target));
2334 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2337 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2338 dont_return_target = 1;
2340 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2341 /* If this is a scalar in a register that is stored in a wider mode
2342 than the declared mode, compute the result into its declared mode
2343 and then convert to the wider mode. Our value is the computed expression. */
2346 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2347 convert_move (SUBREG_REG (target), temp,
2348 SUBREG_PROMOTED_UNSIGNED_P (target));
2353 temp = expand_expr (exp, target, GET_MODE (target), 0);
2354 /* DO return TARGET if it's a specified hardware register.
2355 expand_return relies on this. */
2356 if (!(target && GET_CODE (target) == REG
2357 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2358 && CONSTANT_P (temp))
2359 dont_return_target = 1;
2362 /* If value was not generated in the target, store it there.
2363 Convert the value to TARGET's type first if necessary. */
2365 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2367 target = protect_from_queue (target, 1);
2368 if (GET_MODE (temp) != GET_MODE (target)
2369 && GET_MODE (temp) != VOIDmode)
2371 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2372 if (dont_return_target)
2374 /* In this case, we will return TEMP,
2375 so make sure it has the proper mode.
2376 But don't forget to store the value into TARGET. */
2377 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2378 emit_move_insn (target, temp);
2381 convert_move (target, temp, unsignedp);
2384 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2386 /* Handle copying a string constant into an array.
2387 The string constant may be shorter than the array.
2388 So copy just the string's actual length, and clear the rest. */
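/* Illustrative sketch, not from the original source: an array
initialized from a shorter string constant.  The block move covers
the string's own bytes; the trailing bytes are cleared separately
below.  */
#if 0
void
example ()
{
  char buf[16] = "abc";		/* copy 4 bytes, clear the other 12 */
}
#endif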
2391 /* Get the size of the data type of the string,
2392 which is actually the size of the target. */
2393 size = expr_size (exp);
2394 if (GET_CODE (size) == CONST_INT
2395 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2396 emit_block_move (target, temp, size,
2397 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2400 /* Compute the size of the data to copy from the string. */
2402 = fold (build (MIN_EXPR, sizetype,
2403 size_binop (CEIL_DIV_EXPR,
2404 TYPE_SIZE (TREE_TYPE (exp)),
2405 size_int (BITS_PER_UNIT)),
2407 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2408 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2412 /* Copy that much. */
2413 emit_block_move (target, temp, copy_size_rtx,
2414 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2416 /* Figure out how much is left in TARGET
2417 that we have to clear. */
2418 if (GET_CODE (copy_size_rtx) == CONST_INT)
2420 temp = plus_constant (XEXP (target, 0),
2421 TREE_STRING_LENGTH (exp));
2422 size = plus_constant (size,
2423 - TREE_STRING_LENGTH (exp));
2427 enum machine_mode size_mode = Pmode;
2429 temp = force_reg (Pmode, XEXP (target, 0));
2430 temp = expand_binop (size_mode, add_optab, temp,
2431 copy_size_rtx, NULL_RTX, 0,
2434 size = expand_binop (size_mode, sub_optab, size,
2435 copy_size_rtx, NULL_RTX, 0,
2438 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2439 GET_MODE (size), 0, 0);
2440 label = gen_label_rtx ();
2441 emit_jump_insn (gen_blt (label));
2444 if (size != const0_rtx)
2446 #ifdef TARGET_MEM_FUNCTIONS
2447 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2448 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2450 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2451 temp, Pmode, size, Pmode);
2458 else if (GET_MODE (temp) == BLKmode)
2459 emit_block_move (target, temp, expr_size (exp),
2460 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2462 emit_move_insn (target, temp);
2464 if (dont_return_target)
2469 /* Store the value of constructor EXP into the rtx TARGET.
2470 TARGET is either a REG or a MEM. */
2473 store_constructor (exp, target)
2477 tree type = TREE_TYPE (exp);
2479 /* We know our target cannot conflict, since safe_from_p has been called. */
2481 /* Don't try copying piece by piece into a hard register
2482 since that is vulnerable to being clobbered by EXP.
2483 Instead, construct in a pseudo register and then copy it all. */
2484 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2486 rtx temp = gen_reg_rtx (GET_MODE (target));
2487 store_constructor (exp, temp);
2488 emit_move_insn (target, temp);
2493 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2497 /* Inform later passes that the whole union value is dead. */
2498 if (TREE_CODE (type) == UNION_TYPE)
2499 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2501 /* If we are building a static constructor into a register,
2502 set the initial value as zero so we can fold the value into a constant. */
2504 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2505 emit_move_insn (target, const0_rtx);
2507 /* If the constructor has fewer fields than the structure,
2508 clear the whole structure first. */
2509 else if (list_length (CONSTRUCTOR_ELTS (exp))
2510 != list_length (TYPE_FIELDS (type)))
2511 clear_storage (target, int_size_in_bytes (type));
2513 /* Inform later passes that the old value is dead. */
2514 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2516 /* Store each element of the constructor into
2517 the corresponding field of TARGET. */
2519 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2521 register tree field = TREE_PURPOSE (elt);
2522 register enum machine_mode mode;
2527 /* Just ignore missing fields.
2528 We cleared the whole structure, above,
2529 if any fields are missing. */
2533 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2534 unsignedp = TREE_UNSIGNED (field);
2535 mode = DECL_MODE (field);
2536 if (DECL_BIT_FIELD (field))
2539 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2540 /* ??? This case remains to be written. */
2543 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2545 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2546 /* The alignment of TARGET is
2547 at least what its type requires. */
2549 TYPE_ALIGN (type) / BITS_PER_UNIT,
2550 int_size_in_bytes (type));
2553 else if (TREE_CODE (type) == ARRAY_TYPE)
2557 tree domain = TYPE_DOMAIN (type);
2558 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2559 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2560 tree elttype = TREE_TYPE (type);
2562 /* If the constructor has fewer fields than the structure,
2563 clear the whole structure first. Similarly if this is a
2564 static constructor of a non-BLKmode object. */
2566 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2567 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2568 clear_storage (target, maxelt - minelt + 1);
2570 /* Inform later passes that the old value is dead. */
2571 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2573 /* Store each element of the constructor into
2574 the corresponding element of TARGET, determined
2575 by counting the elements. */
2576 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2578 elt = TREE_CHAIN (elt), i++)
2580 register enum machine_mode mode;
2585 mode = TYPE_MODE (elttype);
2586 bitsize = GET_MODE_BITSIZE (mode);
2587 unsignedp = TREE_UNSIGNED (elttype);
2589 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2591 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2592 /* The alignment of TARGET is
2593 at least what its type requires. */
2595 TYPE_ALIGN (type) / BITS_PER_UNIT,
2596 int_size_in_bytes (type));
2604 /* Store the value of EXP (an expression tree)
2605 into a subfield of TARGET which has mode MODE and occupies
2606 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2607 If MODE is VOIDmode, it means that we are storing into a bit-field.
2609 If VALUE_MODE is VOIDmode, return nothing in particular.
2610 UNSIGNEDP is not used in this case.
2612 Otherwise, return an rtx for the value stored. This rtx
2613 has mode VALUE_MODE if that is convenient to do.
2614 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2616 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2617 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2620 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2621 unsignedp, align, total_size)
2623 int bitsize, bitpos;
2624 enum machine_mode mode;
2626 enum machine_mode value_mode;
2631 HOST_WIDE_INT width_mask = 0;
2633 if (bitsize < HOST_BITS_PER_WIDE_INT)
2634 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2636 /* If we are storing into an unaligned field of an aligned union that is
2637 in a register, we may have the mode of TARGET being an integer mode but
2638 MODE == BLKmode. In that case, get an aligned object whose size and
2639 alignment are the same as TARGET and store TARGET into it (we can avoid
2640 the store if the field being stored is the entire width of TARGET). Then
2641 call ourselves recursively to store the field into a BLKmode version of
2642 that object. Finally, load from the object into TARGET. This is not
2643 very efficient in general, but should only be slightly more expensive
2644 than the otherwise-required unaligned accesses. Perhaps this can be
2645 cleaned up later. */
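/* Sketch of the sequence just described, with hypothetical operands:

     object = stack temp with the mode of TARGET
     object <- TARGET              (skipped if the field fills TARGET)
     store_field (BLKmode copy of object, ...)
     TARGET <- object

   so the unaligned BLKmode store only ever touches the stack temp.  */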
2648 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2650 rtx object = assign_stack_temp (GET_MODE (target),
2651 GET_MODE_SIZE (GET_MODE (target)), 0);
2652 rtx blk_object = copy_rtx (object);
2654 PUT_MODE (blk_object, BLKmode);
2656 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2657 emit_move_insn (object, target);
2659 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2662 emit_move_insn (target, object);
2667 /* If the structure is in a register or if the component
2668 is a bit field, we cannot use addressing to access it.
2669 Use bit-field techniques or SUBREG to store in it. */
2671 if (mode == VOIDmode
2672 || (mode != BLKmode && ! direct_store[(int) mode])
2673 || GET_CODE (target) == REG
2674 || GET_CODE (target) == SUBREG)
2676 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2677 /* Store the value in the bitfield. */
2678 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2679 if (value_mode != VOIDmode)
2681 /* The caller wants an rtx for the value. */
2682 /* If possible, avoid refetching from the bitfield itself. */
2684 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2687 enum machine_mode tmode;
2690 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2691 tmode = GET_MODE (temp);
2692 if (tmode == VOIDmode)
2694 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2695 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2696 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2698 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2699 NULL_RTX, value_mode, 0, align,
2706 rtx addr = XEXP (target, 0);
2709 /* If a value is wanted, it must be the lhs;
2710 so make the address stable for multiple use. */
2712 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2713 && ! CONSTANT_ADDRESS_P (addr)
2714 /* A frame-pointer reference is already stable. */
2715 && ! (GET_CODE (addr) == PLUS
2716 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2717 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2718 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2719 addr = copy_to_reg (addr);
2721 /* Now build a reference to just the desired component. */
2723 to_rtx = change_address (target, mode,
2724 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2725 MEM_IN_STRUCT_P (to_rtx) = 1;
2727 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2731 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2732 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2733 ARRAY_REFs and find the ultimate containing object, which we return.
2735 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2736 bit position, and *PUNSIGNEDP to the signedness of the field.
2737 If the position of the field is variable, we store a tree
2738 giving the variable offset (in units) in *POFFSET.
2739 This offset is in addition to the bit position.
2740 If the position is not variable, we store 0 in *POFFSET.
2742 If any of the extraction expressions is volatile,
2743 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2745 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2746 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
2749 If the field describes a variable-sized object, *PMODE is set to
2750 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2751 this case, but the address of the object can be found. */
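/* Illustrative sketch, not from the original source: for a reference
such as the hypothetical

     struct s { int w; int f : 5; } *p;
     ... p->f ...

   this function returns *P as the containing object and sets
   *PBITSIZE to 5, *PBITPOS to the bit offset of F, *POFFSET to 0,
   and *PMODE to VOIDmode, since F is a bit-field.  */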
2754 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2755 punsignedp, pvolatilep)
2760 enum machine_mode *pmode;
2765 enum machine_mode mode = VOIDmode;
2766 tree offset = integer_zero_node;
2768 if (TREE_CODE (exp) == COMPONENT_REF)
2770 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2771 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2772 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2773 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2775 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2777 size_tree = TREE_OPERAND (exp, 1);
2778 *punsignedp = TREE_UNSIGNED (exp);
2782 mode = TYPE_MODE (TREE_TYPE (exp));
2783 *pbitsize = GET_MODE_BITSIZE (mode);
2784 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2789 if (TREE_CODE (size_tree) != INTEGER_CST)
2790 mode = BLKmode, *pbitsize = -1;
2792 *pbitsize = TREE_INT_CST_LOW (size_tree);
2795 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2796 and find the ultimate containing object. */
2802 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2804 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2805 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2806 : TREE_OPERAND (exp, 2));
2808 if (TREE_CODE (pos) == PLUS_EXPR)
2811 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2813 constant = TREE_OPERAND (pos, 0);
2814 var = TREE_OPERAND (pos, 1);
2816 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2818 constant = TREE_OPERAND (pos, 1);
2819 var = TREE_OPERAND (pos, 0);
2824 *pbitpos += TREE_INT_CST_LOW (constant);
2825 offset = size_binop (PLUS_EXPR, offset,
2826 size_binop (FLOOR_DIV_EXPR, var,
2827 size_int (BITS_PER_UNIT)));
2829 else if (TREE_CODE (pos) == INTEGER_CST)
2830 *pbitpos += TREE_INT_CST_LOW (pos);
2833 /* Assume here that the offset is a multiple of a unit.
2834 If not, there should be an explicitly added constant. */
2835 offset = size_binop (PLUS_EXPR, offset,
2836 size_binop (FLOOR_DIV_EXPR, pos,
2837 size_int (BITS_PER_UNIT)));
2841 else if (TREE_CODE (exp) == ARRAY_REF)
2843 /* This code is based on the code in case ARRAY_REF in expand_expr
2844 below. We assume here that the size of an array element is
2845 always an integral multiple of BITS_PER_UNIT. */
2847 tree index = TREE_OPERAND (exp, 1);
2848 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2850 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2851 tree index_type = TREE_TYPE (index);
2853 if (! integer_zerop (low_bound))
2854 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2856 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2858 index = convert (type_for_size (POINTER_SIZE, 0), index);
2859 index_type = TREE_TYPE (index);
2862 index = fold (build (MULT_EXPR, index_type, index,
2863 TYPE_SIZE (TREE_TYPE (exp))));
2865 if (TREE_CODE (index) == INTEGER_CST
2866 && TREE_INT_CST_HIGH (index) == 0)
2867 *pbitpos += TREE_INT_CST_LOW (index);
2869 offset = size_binop (PLUS_EXPR, offset,
2870 size_binop (FLOOR_DIV_EXPR, index,
2871 size_int (BITS_PER_UNIT)));
2873 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2874 && ! ((TREE_CODE (exp) == NOP_EXPR
2875 || TREE_CODE (exp) == CONVERT_EXPR)
2876 && (TYPE_MODE (TREE_TYPE (exp))
2877 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2880 /* If any reference in the chain is volatile, the effect is volatile. */
2881 if (TREE_THIS_VOLATILE (exp))
2883 exp = TREE_OPERAND (exp, 0);
2886 /* If this was a bit-field, see if there is a mode that allows direct
2887 access in case EXP is in memory. */
2888 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2890 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2891 if (mode == BLKmode)
2895 if (integer_zerop (offset))
2901 /* We aren't finished fixing the callers to really handle nonzero offset. */
2909 /* Given an rtx VALUE that may contain additions and multiplications,
2910 return an equivalent value that just refers to a register or memory.
2911 This is done by generating instructions to perform the arithmetic
2912 and returning a pseudo-register containing the value.
2914 The returned value may be a REG, SUBREG, MEM or constant. */
2917 force_operand (value, target)
2920 register optab binoptab = 0;
2921 /* Use a temporary to force order of execution of calls to `force_operand'. */
2925 /* Use subtarget as the target for operand 0 of a binary operation. */
2926 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2928 if (GET_CODE (value) == PLUS)
2929 binoptab = add_optab;
2930 else if (GET_CODE (value) == MINUS)
2931 binoptab = sub_optab;
2932 else if (GET_CODE (value) == MULT)
2934 op2 = XEXP (value, 1);
2935 if (!CONSTANT_P (op2)
2936 && !(GET_CODE (op2) == REG && op2 != subtarget))
2938 tmp = force_operand (XEXP (value, 0), subtarget);
2939 return expand_mult (GET_MODE (value), tmp,
2940 force_operand (op2, NULL_RTX),
2946 op2 = XEXP (value, 1);
2947 if (!CONSTANT_P (op2)
2948 && !(GET_CODE (op2) == REG && op2 != subtarget))
2950 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2952 binoptab = add_optab;
2953 op2 = negate_rtx (GET_MODE (value), op2);
2956 /* Check for an addition with OP2 a constant integer and our first
2957 operand a PLUS of a virtual register and something else. In that
2958 case, we want to emit the sum of the virtual register and the
2959 constant first and then add the other value. This allows virtual
2960 register instantiation to simply modify the constant rather than
2961 creating another one around this addition. */
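/* Sketch of that reassociation, with hypothetical operands:

     (plus (plus (reg virtual-stack-vars) (reg 70)) (const_int 8))

   is computed as (plus (reg virtual-stack-vars) (const_int 8)) first,
   which instantiation can rewrite by adjusting the constant, and
   (reg 70) is added afterward.  */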
2962 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2963 && GET_CODE (XEXP (value, 0)) == PLUS
2964 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2965 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2966 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2968 rtx temp = expand_binop (GET_MODE (value), binoptab,
2969 XEXP (XEXP (value, 0), 0), op2,
2970 subtarget, 0, OPTAB_LIB_WIDEN);
2971 return expand_binop (GET_MODE (value), binoptab, temp,
2972 force_operand (XEXP (XEXP (value, 0), 1), 0),
2973 target, 0, OPTAB_LIB_WIDEN);
2976 tmp = force_operand (XEXP (value, 0), subtarget);
2977 return expand_binop (GET_MODE (value), binoptab, tmp,
2978 force_operand (op2, NULL_RTX),
2979 target, 0, OPTAB_LIB_WIDEN);
2980 /* We give UNSIGNEDP = 0 to expand_binop
2981 because the only operations we are expanding here are signed ones. */
2986 /* Subroutine of expand_expr:
2987 save the non-copied parts (LIST) of an expr (LHS), and return a list
2988 which can restore these values to their previous values,
2989 should something modify their storage. */
2992 save_noncopied_parts (lhs, list)
2999 for (tail = list; tail; tail = TREE_CHAIN (tail))
3000 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3001 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3004 tree part = TREE_VALUE (tail);
3005 tree part_type = TREE_TYPE (part);
3006 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3007 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3008 int_size_in_bytes (part_type), 0);
3009 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3010 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3011 parts = tree_cons (to_be_saved,
3012 build (RTL_EXPR, part_type, NULL_TREE,
3015 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3020 /* Subroutine of expand_expr:
3021 record the non-copied parts (LIST) of an expr (LHS), and return a list
3022 which specifies the initial values of these parts. */
3025 init_noncopied_parts (lhs, list)
3032 for (tail = list; tail; tail = TREE_CHAIN (tail))
3033 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3034 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3037 tree part = TREE_VALUE (tail);
3038 tree part_type = TREE_TYPE (part);
3039 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3040 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3045 /* Subroutine of expand_expr: return nonzero iff there is no way that
3046 EXP can reference X, which is being modified. */
3049 safe_from_p (x, exp)
3059 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
3060 find the underlying pseudo. */
3061 if (GET_CODE (x) == SUBREG)
3064 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3068 /* If X is a location in the outgoing argument area, it is always safe. */
3069 if (GET_CODE (x) == MEM
3070 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3071 || (GET_CODE (XEXP (x, 0)) == PLUS
3072 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3075 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3078 exp_rtl = DECL_RTL (exp);
3085 if (TREE_CODE (exp) == TREE_LIST)
3086 return ((TREE_VALUE (exp) == 0
3087 || safe_from_p (x, TREE_VALUE (exp)))
3088 && (TREE_CHAIN (exp) == 0
3089 || safe_from_p (x, TREE_CHAIN (exp))));
3094 return safe_from_p (x, TREE_OPERAND (exp, 0));
3098 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3099 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3103 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3104 the expression. If it is set, we conflict iff we are that rtx or
3105 both are in memory. Otherwise, we check all operands of the
3106 expression recursively. */
3108 switch (TREE_CODE (exp))
3111 return staticp (TREE_OPERAND (exp, 0));
3114 if (GET_CODE (x) == MEM)
3119 exp_rtl = CALL_EXPR_RTL (exp);
3122 /* Assume that the call will clobber all hard registers and all memory. */
3124 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3125 || GET_CODE (x) == MEM)
3132 exp_rtl = RTL_EXPR_RTL (exp);
3134 /* We don't know what this can modify. */
3139 case WITH_CLEANUP_EXPR:
3140 exp_rtl = RTL_EXPR_RTL (exp);
3144 exp_rtl = SAVE_EXPR_RTL (exp);
3148 /* The only operand we look at is operand 1. The rest aren't
3149 part of the expression. */
3150 return safe_from_p (x, TREE_OPERAND (exp, 1));
3152 case METHOD_CALL_EXPR:
3153 /* This takes a rtx argument, but shouldn't appear here. */
3157 /* If we have an rtx, we do not need to scan our operands. */
3161 nops = tree_code_length[(int) TREE_CODE (exp)];
3162 for (i = 0; i < nops; i++)
3163 if (TREE_OPERAND (exp, i) != 0
3164 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3168 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
3172 if (GET_CODE (exp_rtl) == SUBREG)
3174 exp_rtl = SUBREG_REG (exp_rtl);
3175 if (GET_CODE (exp_rtl) == REG
3176 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3180 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3181 are memory and EXP is not readonly. */
3182 return ! (rtx_equal_p (x, exp_rtl)
3183 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3184 && ! TREE_READONLY (exp)));
3187 /* If we reach here, it is safe. */
3191 /* Subroutine of expand_expr: return nonzero iff EXP is an
3192 expression whose type is statically determinable. */
3198 if (TREE_CODE (exp) == PARM_DECL
3199 || TREE_CODE (exp) == VAR_DECL
3200 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3201 || TREE_CODE (exp) == COMPONENT_REF
3202 || TREE_CODE (exp) == ARRAY_REF)
3207 /* expand_expr: generate code for computing expression EXP.
3208 An rtx for the computed value is returned. The value is never null.
3209 In the case of a void EXP, const0_rtx is returned.
3211 The value may be stored in TARGET if TARGET is nonzero.
3212 TARGET is just a suggestion; callers must assume that
3213 the rtx returned may not be the same as TARGET.
3215 If TARGET is CONST0_RTX, it means that the value will be ignored.
3217 If TMODE is not VOIDmode, it suggests generating the
3218 result in mode TMODE. But this is done only when convenient.
3219 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3220 TMODE is just a suggestion; callers must assume that
3221 the rtx returned may not have mode TMODE.
3223 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3224 with a constant address even if that address is not normally legitimate.
3225 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3227 If MODIFIER is EXPAND_SUM then when EXP is an addition
3228 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3229 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3230 products as above, or REG or MEM, or constant.
3231 Ordinarily in such cases we would output mul or add instructions
3232 and then return a pseudo reg containing the sum.
3234 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3235 it also marks a label as absolutely required (it can't be dead).
3236 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3237 This is used for outputting expressions used in initializers. */
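/* Illustrative sketch, not from the original source: under EXPAND_SUM,
an address computation for the hypothetical

     int a[10];
     ... a[i] ...

   may be returned as (plus (mult (reg i) (const_int 4)) (symbol_ref a))
   rather than as a pseudo register holding the finished sum,
   assuming 4-byte ints.  */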
3240 expand_expr (exp, target, tmode, modifier)
3243 enum machine_mode tmode;
3244 enum expand_modifier modifier;
3246 register rtx op0, op1, temp;
3247 tree type = TREE_TYPE (exp);
3248 int unsignedp = TREE_UNSIGNED (type);
3249 register enum machine_mode mode = TYPE_MODE (type);
3250 register enum tree_code code = TREE_CODE (exp);
3252 /* Use subtarget as the target for operand 0 of a binary operation. */
3253 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3254 rtx original_target = target;
3255 int ignore = target == const0_rtx;
3258 /* Don't use hard regs as subtargets, because the combiner
3259 can only handle pseudo regs. */
3260 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3262 /* Avoid subtargets inside loops,
3263 since they hide some invariant expressions. */
3264 if (preserve_subexpressions_p ())
3267 if (ignore) target = 0, original_target = 0;
3269 /* If we will do cse, generate all results into pseudo registers
3270 since 1) that allows cse to find more things
3271 and 2) otherwise cse could produce an insn the machine cannot support. */
3274 if (! cse_not_expected && mode != BLKmode && target
3275 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3278 /* Ensure we reference a volatile object even if value is ignored. */
3279 if (ignore && TREE_THIS_VOLATILE (exp)
3280 && TREE_CODE (exp) != FUNCTION_DECL
3281 && mode != VOIDmode && mode != BLKmode)
3283 target = gen_reg_rtx (mode);
3284 temp = expand_expr (exp, target, VOIDmode, modifier);
3286 emit_move_insn (target, temp);
3294 tree function = decl_function_context (exp);
3295 /* Handle using a label in a containing function. */
3296 if (function != current_function_decl && function != 0)
3298 struct function *p = find_function_data (function);
3299 /* Allocate in the memory associated with the function
3300 that the label is in. */
3301 push_obstacks (p->function_obstack,
3302 p->function_maybepermanent_obstack);
3304 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3305 label_rtx (exp), p->forced_labels);
3308 else if (modifier == EXPAND_INITIALIZER)
3309 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3310 label_rtx (exp), forced_labels);
3311 temp = gen_rtx (MEM, FUNCTION_MODE,
3312 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3313 if (function != current_function_decl && function != 0)
3314 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3319 if (DECL_RTL (exp) == 0)
3321 error_with_decl (exp, "prior parameter's size depends on `%s'");
3322 return CONST0_RTX (mode);
3328 if (DECL_RTL (exp) == 0)
3330 /* Ensure the variable is marked as used
3331 even if it doesn't go through a parser. */
3332 TREE_USED (exp) = 1;
3333 /* Handle variables inherited from containing functions. */
3334 context = decl_function_context (exp);
3336 /* We treat inline_function_decl as an alias for the current function
3337 because that is the inline function whose vars, types, etc.
3338 are being merged into the current function.
3339 See expand_inline_function. */
3340 if (context != 0 && context != current_function_decl
3341 && context != inline_function_decl
3342 /* If var is static, we don't need a static chain to access it. */
3343 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3344 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3348 /* Mark as non-local and addressable. */
3349 DECL_NONLOCAL (exp) = 1;
3350 mark_addressable (exp);
3351 if (GET_CODE (DECL_RTL (exp)) != MEM)
3353 addr = XEXP (DECL_RTL (exp), 0);
3354 if (GET_CODE (addr) == MEM)
3355 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3357 addr = fix_lexical_addr (addr, exp);
3358 return change_address (DECL_RTL (exp), mode, addr);
3361 /* This is the case of an array whose size is to be determined
3362 from its initializer, while the initializer is still being parsed.
3364 if (GET_CODE (DECL_RTL (exp)) == MEM
3365 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3366 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3367 XEXP (DECL_RTL (exp), 0));
3368 if (GET_CODE (DECL_RTL (exp)) == MEM
3369 && modifier != EXPAND_CONST_ADDRESS
3370 && modifier != EXPAND_SUM
3371 && modifier != EXPAND_INITIALIZER)
3373 /* DECL_RTL probably contains a constant address.
3374 On RISC machines where a constant address isn't valid,
3375 make some insns to get that address into a register. */
3376 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3378 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3379 return change_address (DECL_RTL (exp), VOIDmode,
3380 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3383 /* If the mode of DECL_RTL does not match that of the decl, it
3384 must be a promoted value. We return a SUBREG of the wanted mode,
3385 but mark it so that we know that it was already extended. */
3387 if (GET_CODE (DECL_RTL (exp)) == REG
3388 && GET_MODE (DECL_RTL (exp)) != mode)
3390 enum machine_mode decl_mode = DECL_MODE (exp);
3392 /* Get the signedness used for this variable. Ensure we get the
3393 same mode we got when the variable was declared. */
3395 PROMOTE_MODE (decl_mode, unsignedp, type);
3397 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3400 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3401 SUBREG_PROMOTED_VAR_P (temp) = 1;
3402 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3406 return DECL_RTL (exp);
3409 return immed_double_const (TREE_INT_CST_LOW (exp),
3410 TREE_INT_CST_HIGH (exp),
3414 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3417 /* If optimized, generate immediate CONST_DOUBLE
3418 which will be turned into memory by reload if necessary.
3420 We used to force a register so that loop.c could see it. But
3421 this does not allow gen_* patterns to perform optimizations with
3422 the constants. It also produces two insns in cases like "x = 1.0;".
3423 On most machines, floating-point constants are not permitted in
3424 many insns, so we'd end up copying it to a register in any case.
3426 Now, we do the copying in expand_binop, if appropriate. */
3427 return immed_real_const (exp);
3431 if (! TREE_CST_RTL (exp))
3432 output_constant_def (exp);
3434 /* TREE_CST_RTL probably contains a constant address.
3435 On RISC machines where a constant address isn't valid,
3436 make some insns to get that address into a register. */
3437 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3438 && modifier != EXPAND_CONST_ADDRESS
3439 && modifier != EXPAND_INITIALIZER
3440 && modifier != EXPAND_SUM
3441 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3442 return change_address (TREE_CST_RTL (exp), VOIDmode,
3443 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3444 return TREE_CST_RTL (exp);
3447 context = decl_function_context (exp);
3448 /* We treat inline_function_decl as an alias for the current function
3449 because that is the inline function whose vars, types, etc.
3450 are being merged into the current function.
3451 See expand_inline_function. */
3452 if (context == current_function_decl || context == inline_function_decl)
3455 /* If this is non-local, handle it. */
3458 temp = SAVE_EXPR_RTL (exp);
3459 if (temp && GET_CODE (temp) == REG)
3461 put_var_into_stack (exp);
3462 temp = SAVE_EXPR_RTL (exp);
3464 if (temp == 0 || GET_CODE (temp) != MEM)
3466 return change_address (temp, mode,
3467 fix_lexical_addr (XEXP (temp, 0), exp));
3469 if (SAVE_EXPR_RTL (exp) == 0)
3471 if (mode == BLKmode)
3473 = assign_stack_temp (mode,
3474 int_size_in_bytes (TREE_TYPE (exp)), 0);
3477 enum machine_mode var_mode = mode;
3479 if (TREE_CODE (type) == INTEGER_TYPE
3480 || TREE_CODE (type) == ENUMERAL_TYPE
3481 || TREE_CODE (type) == BOOLEAN_TYPE
3482 || TREE_CODE (type) == CHAR_TYPE
3483 || TREE_CODE (type) == REAL_TYPE
3484 || TREE_CODE (type) == POINTER_TYPE
3485 || TREE_CODE (type) == OFFSET_TYPE)
3487 PROMOTE_MODE (var_mode, unsignedp, type);
3490 temp = gen_reg_rtx (var_mode);
3493 SAVE_EXPR_RTL (exp) = temp;
3494 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3495 if (!optimize && GET_CODE (temp) == REG)
3496 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3500 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3501 must be a promoted value. We return a SUBREG of the wanted mode,
3502 but mark it so that we know that it was already extended. Note
3503 that `unsignedp' was modified above in this case. */
3505 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3506 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3508 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3509 SUBREG_PROMOTED_VAR_P (temp) = 1;
3510 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3514 return SAVE_EXPR_RTL (exp);
3517 /* Exit the current loop if the body-expression is true. */
3519 rtx label = gen_label_rtx ();
3520 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3521 expand_exit_loop (NULL_PTR);
3527 expand_start_loop (1);
3528 expand_expr_stmt (TREE_OPERAND (exp, 0));
3535 tree vars = TREE_OPERAND (exp, 0);
3536 int vars_need_expansion = 0;
3538 /* Need to open a binding contour here because
3539 if there are any cleanups, they must be contained here. */
3540 expand_start_bindings (0);
3542 /* Mark the corresponding BLOCK for output in its proper place. */
3543 if (TREE_OPERAND (exp, 2) != 0
3544 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3545 insert_block (TREE_OPERAND (exp, 2));
3547 /* If VARS have not yet been expanded, expand them now. */
3550 if (DECL_RTL (vars) == 0)
3552 vars_need_expansion = 1;
3555 expand_decl_init (vars);
3556 vars = TREE_CHAIN (vars);
3559 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3561 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3567 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3569 emit_insns (RTL_EXPR_SEQUENCE (exp));
3570 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3571 return RTL_EXPR_RTL (exp);
3574 /* All elts simple constants => refer to a constant in memory. But
3575 if this is a non-BLKmode mode, let it store a field at a time
3576 since that should make a CONST_INT or CONST_DOUBLE when we
3578 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3580 rtx constructor = output_constant_def (exp);
3581 if (modifier != EXPAND_CONST_ADDRESS
3582 && modifier != EXPAND_INITIALIZER
3583 && modifier != EXPAND_SUM
3584 && !memory_address_p (GET_MODE (constructor),
3585 XEXP (constructor, 0)))
3586 constructor = change_address (constructor, VOIDmode,
3587 XEXP (constructor, 0));
3594 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3595 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3600 if (target == 0 || ! safe_from_p (target, exp))
3602 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3603 target = gen_reg_rtx (mode);
3606 enum tree_code c = TREE_CODE (type);
3608 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3609 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
3610 MEM_IN_STRUCT_P (target) = 1;
3613 store_constructor (exp, target);
3619 tree exp1 = TREE_OPERAND (exp, 0);
3622 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3623 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3624 This code has the same general effect as simply doing
3625 expand_expr on the save expr, except that the expression PTR
3626 is computed for use as a memory address. This means different
3627 code, suitable for indexing, may be generated. */
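/* Illustrative sketch, not from the original source: the kind of
construct this path serves.  */
#if 0
void
example (ptr)
     int *ptr;
{
  *ptr += 1;			/* PTR sits inside a SAVE_EXPR; it is
				   expanded once, as a memory address */
}
#endif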
3628 if (TREE_CODE (exp1) == SAVE_EXPR
3629 && SAVE_EXPR_RTL (exp1) == 0
3630 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3631 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3632 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3634 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3635 VOIDmode, EXPAND_SUM);
3636 op0 = memory_address (mode, temp);
3637 op0 = copy_all_regs (op0);
3638 SAVE_EXPR_RTL (exp1) = op0;
3642 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3643 op0 = memory_address (mode, op0);
3646 temp = gen_rtx (MEM, mode, op0);
3647 /* If address was computed by addition,
3648 mark this as an element of an aggregate. */
3649 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3650 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3651 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3652 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3653 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3654 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3655 || (TREE_CODE (exp1) == ADDR_EXPR
3656 && (exp2 = TREE_OPERAND (exp1, 0))
3657 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3658 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3659 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3660 MEM_IN_STRUCT_P (temp) = 1;
3661 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3662 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3663 a location is accessed through a pointer to const does not mean
3664 that the value there can never change. */
3665 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3671 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3675 tree array = TREE_OPERAND (exp, 0);
3676 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3677 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3678 tree index = TREE_OPERAND (exp, 1);
3679 tree index_type = TREE_TYPE (index);
3682 /* Optimize the special-case of a zero lower bound. */
3683 if (! integer_zerop (low_bound))
3684 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3686 if (TREE_CODE (index) != INTEGER_CST
3687 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3689 /* Nonconstant array index or nonconstant element size.
3690 Generate the tree for *(&array+index) and expand that,
3691 except do it in a language-independent way
3692 and don't complain about non-lvalue arrays.
3693 `mark_addressable' should already have been called
3694 for any array for which this case will be reached. */
3696 /* Don't forget the const or volatile flag from the array element type. */
3698 tree variant_type = build_type_variant (type,
3699 TREE_READONLY (exp),
3700 TREE_THIS_VOLATILE (exp));
3701 tree array_adr = build1 (ADDR_EXPR,
3702 build_pointer_type (variant_type), array);
3705 /* Convert the integer argument to a type the same size as a
3706 pointer so the multiply won't overflow spuriously. */
3707 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3708 index = convert (type_for_size (POINTER_SIZE, 0), index);
3710 /* Don't think the address has side effects
3711 just because the array does.
3712 (In some cases the address might have side effects,
3713 and we fail to record that fact here. However, it should not
3714 matter, since expand_expr should not care.) */
3715 TREE_SIDE_EFFECTS (array_adr) = 0;
3717 elt = build1 (INDIRECT_REF, type,
3718 fold (build (PLUS_EXPR,
3719 TYPE_POINTER_TO (variant_type),
3721 fold (build (MULT_EXPR,
3722 TYPE_POINTER_TO (variant_type),
3724 size_in_bytes (type))))));
3726 /* Volatility, etc., of new expression is same as old expression. */
3728 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3729 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3730 TREE_READONLY (elt) = TREE_READONLY (exp);
3732 return expand_expr (elt, target, tmode, modifier);
3735 /* Fold an expression like: "foo"[2].
3736 This is not done in fold so it won't happen inside &. */
3738 if (TREE_CODE (array) == STRING_CST
3739 && TREE_CODE (index) == INTEGER_CST
3740 && !TREE_INT_CST_HIGH (index)
3741 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3743 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3745 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3746 TREE_TYPE (exp) = integer_type_node;
3747 return expand_expr (exp, target, tmode, modifier);
3749 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3751 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3752 TREE_TYPE (exp) = integer_type_node;
3753 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3755 target, tmode, modifier);
3759 /* If this is a constant index into a constant array,
3760 just get the value from the array. Handle both the cases when
3761 we have an explicit constructor and when our operand is a variable
3762 that was declared const. */
3764 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3766 if (TREE_CODE (index) == INTEGER_CST
3767 && TREE_INT_CST_HIGH (index) == 0)
3769 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3771 i = TREE_INT_CST_LOW (index);
3773 elem = TREE_CHAIN (elem);
3775 return expand_expr (fold (TREE_VALUE (elem)), target,
3780 else if (optimize >= 1
3781 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3782 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3783 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3785 if (TREE_CODE (index) == INTEGER_CST
3786 && TREE_INT_CST_HIGH (index) == 0)
3788 tree init = DECL_INITIAL (array);
3790 i = TREE_INT_CST_LOW (index);
3791 if (TREE_CODE (init) == CONSTRUCTOR)
3793 tree elem = CONSTRUCTOR_ELTS (init);
3796 elem = TREE_CHAIN (elem);
3798 return expand_expr (fold (TREE_VALUE (elem)), target,
3801 else if (TREE_CODE (init) == STRING_CST
3802 && i < TREE_STRING_LENGTH (init))
3804 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3805 return convert_to_mode (mode, temp, 0);
3811 /* Treat array-ref with constant index as a component-ref. */
3815 /* If the operand is a CONSTRUCTOR, we can just extract the
3816 appropriate field if it is present. */
3817 if (code != ARRAY_REF
3818 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3822 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3823 elt = TREE_CHAIN (elt))
3824 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3825 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3829 enum machine_mode mode1;
3834 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3835 &mode1, &unsignedp, &volatilep);
3837 /* In some cases, we will be offsetting OP0's address by a constant.
3838 So get it as a sum, if possible. If we will be using it
3839 directly in an insn, we validate it. */
3840 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3842 /* If this is a constant, put it into a register if it is a
3843 legitimate constant and memory if it isn't. */
3844 if (CONSTANT_P (op0))
3846 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3847 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3848 op0 = force_reg (mode, op0);
3850 op0 = validize_mem (force_const_mem (mode, op0));
3855 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3857 if (GET_CODE (op0) != MEM)
3859 op0 = change_address (op0, VOIDmode,
3860 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3861 force_reg (Pmode, offset_rtx)));
3864 /* Don't forget about volatility even if this is a bitfield. */
3865 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3867 op0 = copy_rtx (op0);
3868 MEM_VOLATILE_P (op0) = 1;
3871 if (mode1 == VOIDmode
3872 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3873 && modifier != EXPAND_CONST_ADDRESS
3874 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3875 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3877 /* In cases where an aligned union has an unaligned object
3878 as a field, we might be extracting a BLKmode value from
3879 an integer-mode (e.g., SImode) object. Handle this case
3880 by doing the extract into an object as wide as the field
3881 (which we know to be the width of a basic mode), then
3882 storing into memory, and changing the mode to BLKmode. */
3883 enum machine_mode ext_mode = mode;
3885 if (ext_mode == BLKmode)
3886 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3888 if (ext_mode == BLKmode)
3891 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3892 unsignedp, target, ext_mode, ext_mode,
3893 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3894 int_size_in_bytes (TREE_TYPE (tem)));
3895 if (mode == BLKmode)
3897 rtx new = assign_stack_temp (ext_mode,
3898 bitsize / BITS_PER_UNIT, 0);
3900 emit_move_insn (new, op0);
3901 op0 = copy_rtx (new);
3902 PUT_MODE (op0, BLKmode);
3908 /* Get a reference to just this component. */
3909 if (modifier == EXPAND_CONST_ADDRESS
3910 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3911 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3912 (bitpos / BITS_PER_UNIT)));
3914 op0 = change_address (op0, mode1,
3915 plus_constant (XEXP (op0, 0),
3916 (bitpos / BITS_PER_UNIT)));
3917 MEM_IN_STRUCT_P (op0) = 1;
3918 MEM_VOLATILE_P (op0) |= volatilep;
3919 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3922 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3923 convert_move (target, op0, unsignedp);
3929 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3930 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3931 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3932 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3933 MEM_IN_STRUCT_P (temp) = 1;
3934 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3935 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3936 a location is accessed through a pointer to const does not mean
3937 that the value there can never change. */
3938 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3943 /* Intended for a reference to a buffer of a file-object in Pascal.
3944 But it's not certain that a special tree code will really be
3945 necessary for these. INDIRECT_REF might work for them. */
3949 /* IN_EXPR: Inlined Pascal set IN expression.
3952 rlo = set_low - (set_low%bits_per_word);
3953 the_word = set [ (index - rlo)/bits_per_word ];
3954 bit_index = index % bits_per_word;
3955 bitmask = 1 << bit_index;
3956 return !!(the_word & bitmask); */
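/* A minimal C rendering of the algorithm above, assuming 8 bits per
unit; hypothetical, since the expansion below works on rtx.  */
#if 0
int
set_member (set, index, set_low)
     unsigned char *set;
     int index, set_low;
{
  int rlo = set_low - (set_low % 8);
  unsigned char the_word = set[(index - rlo) / 8];
  return (the_word >> (index % 8)) & 1;
}
#endif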
3958 preexpand_calls (exp);
3960 tree set = TREE_OPERAND (exp, 0);
3961 tree index = TREE_OPERAND (exp, 1);
3962 tree set_type = TREE_TYPE (set);
3964 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3965 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3971 rtx diff, quo, rem, addr, bit, result;
3972 rtx setval, setaddr;
3973 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
3976 target = gen_reg_rtx (mode);
3978 /* If domain is empty, answer is no. */
3979 if (tree_int_cst_lt (set_high_bound, set_low_bound))
3982 index_val = expand_expr (index, 0, VOIDmode, 0);
3983 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
3984 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
3985 setval = expand_expr (set, 0, VOIDmode, 0);
3986 setaddr = XEXP (setval, 0);
3988 /* Compare index against bounds, if they are constant. */
3989 if (GET_CODE (index_val) == CONST_INT
3990 && GET_CODE (lo_r) == CONST_INT
3991 && INTVAL (index_val) < INTVAL (lo_r))
3994 if (GET_CODE (index_val) == CONST_INT
3995 && GET_CODE (hi_r) == CONST_INT
3996 && INTVAL (hi_r) < INTVAL (index_val))
3999 /* If we get here, we have to generate the code for both cases
4000 (in range and out of range). */
4002 op0 = gen_label_rtx ();
4003 op1 = gen_label_rtx ();
4005 if (! (GET_CODE (index_val) == CONST_INT
4006 && GET_CODE (lo_r) == CONST_INT))
4008 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4009 GET_MODE (index_val), 0, 0);
4010 emit_jump_insn (gen_blt (op1));
4013 if (! (GET_CODE (index_val) == CONST_INT
4014 && GET_CODE (hi_r) == CONST_INT))
4016 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4017 GET_MODE (index_val), 0, 0);
4018 emit_jump_insn (gen_bgt (op1));
4021 /* Calculate the element number of bit zero in the first word of the set. */
4023 if (GET_CODE (lo_r) == CONST_INT)
4024 rlow = GEN_INT (INTVAL (lo_r)
4025 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4027 rlow = expand_binop (index_mode, and_optab, lo_r,
4028 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4029 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4031 diff = expand_binop (index_mode, sub_optab,
4032 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4034 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4035 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4036 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4037 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4038 addr = memory_address (byte_mode,
4039 expand_binop (index_mode, add_optab,
4040 diff, setaddr, NULL_RTX, 0,
4042 /* Extract the bit we want to examine. */
4043 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4044 gen_rtx (MEM, byte_mode, addr),
4045 make_tree (TREE_TYPE (index), rem),
4047 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4048 GET_MODE (target) == byte_mode ? target : 0,
4049 1, OPTAB_LIB_WIDEN);
4051 if (result != target)
4052 convert_move (target, result, 1);
4054 /* Output the code to handle the out-of-range case. */
4057 emit_move_insn (target, const0_rtx);
4062 case WITH_CLEANUP_EXPR:
4063 if (RTL_EXPR_RTL (exp) == 0)
4066 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4068 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4069 /* That's it for this cleanup. */
4070 TREE_OPERAND (exp, 2) = 0;
4072 return RTL_EXPR_RTL (exp);
4075 /* Check for a built-in function. */
4076 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4077 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4078 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4079 return expand_builtin (exp, target, subtarget, tmode, ignore);
4080 /* If this call was expanded already by preexpand_calls,
4081 just return the result we got. */
4082 if (CALL_EXPR_RTL (exp) != 0)
4083 return CALL_EXPR_RTL (exp);
4084 return expand_call (exp, target, ignore);
4086 case NON_LVALUE_EXPR:
4089 case REFERENCE_EXPR:
4090 if (TREE_CODE (type) == VOID_TYPE || ignore)
4092 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4095 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4096 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4097 if (TREE_CODE (type) == UNION_TYPE)
4099 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4102 if (mode == BLKmode)
4104 if (TYPE_SIZE (type) == 0
4105 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4107 target = assign_stack_temp (BLKmode,
4108 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4109 + BITS_PER_UNIT - 1)
4110 / BITS_PER_UNIT, 0);
4113 target = gen_reg_rtx (mode);
4115 if (GET_CODE (target) == MEM)
4116 /* Store data into beginning of memory target. */
4117 store_expr (TREE_OPERAND (exp, 0),
4118 change_address (target, TYPE_MODE (valtype), 0), 0);
4120 else if (GET_CODE (target) == REG)
4121 /* Store this field into a union of the proper type. */
4122 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4123 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4125 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4129 /* Return the entire union. */
4132 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4133 if (GET_MODE (op0) == mode)
4135 /* If arg is a constant integer being extended from a narrower mode,
4136 we must really truncate to get the extended bits right. Otherwise
4137 (unsigned long) (unsigned char) ("\377"[0])
4138 would come out as ffffffff. */
4139 if (GET_MODE (op0) == VOIDmode
4140 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4141 < GET_MODE_BITSIZE (mode)))
4143 /* MODE must be narrower than HOST_BITS_PER_INT. */
4144 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4146 if (width < HOST_BITS_PER_WIDE_INT)
4148 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4149 : CONST_DOUBLE_LOW (op0));
4150 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4151 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4152 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4154 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4156 op0 = GEN_INT (val);
4160 op0 = (simplify_unary_operation
4161 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4162 ? ZERO_EXTEND : SIGN_EXTEND),
4164 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4169 if (GET_MODE (op0) == VOIDmode)
4171 if (modifier == EXPAND_INITIALIZER)
4172 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4173 if (flag_force_mem && GET_CODE (op0) == MEM)
4174 op0 = copy_to_reg (op0);
4177 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4179 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4183 /* We come here from MINUS_EXPR when the second operand is a constant. */
4185 this_optab = add_optab;
4187 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4188 something else, make sure we add the register to the constant and
4189 then to the other thing. This case can occur during strength
4190 reduction and doing it this way will produce better code if the
4191 frame pointer or argument pointer is eliminated.
4193 fold-const.c will ensure that the constant is always in the inner
4194 PLUS_EXPR, so the only case we need to do anything about is if
4195 sp, ap, or fp is our second argument, in which case we must swap
4196 the innermost first argument and our second argument. */
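/* Sketch of the swap, with hypothetical trees: (PLUS (PLUS X C) FP)
becomes (PLUS (PLUS FP C) X), so the frame pointer meets the
constant first and elimination can fold the two together.  */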
4198 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4199 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4200 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4201 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4202 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4203 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4205 tree t = TREE_OPERAND (exp, 1);
4207 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4208 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4211 /* If the result is to be Pmode and we are adding an integer to
4212 something, we might be forming a constant. So try to use
4213 plus_constant. If it produces a sum and we can't accept it,
4214 use force_operand. This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
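      /* Sketch (illustrative): for  int arr[10];  p = &arr[5];  with
	 4-byte ints, plus_constant ((symbol_ref arr), 20) folds to the
	 single constant (const (plus (symbol_ref arr) 20)) instead of
	 emitting an explicit add instruction.  */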
4219 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4220 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4221 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4224 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4226 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4227 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4228 op1 = force_operand (op1, target);
4232 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4234 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4237 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4239 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4240 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4241 op0 = force_operand (op0, target);
4245 /* No sense saving up arithmetic to be done
4246 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
4249 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4250 || mode != Pmode) goto binop;
4252 preexpand_calls (exp);
4253 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4256 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4257 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4259 /* Make sure any term that's a sum with a constant comes last. */
4260 if (GET_CODE (op0) == PLUS
4261 && CONSTANT_P (XEXP (op0, 1)))
4267 /* If adding to a sum including a constant,
4268 associate it to put the constant outside. */
4269 if (GET_CODE (op1) == PLUS
4270 && CONSTANT_P (XEXP (op1, 1)))
4272 rtx constant_term = const0_rtx;
4274 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4277 /* Ensure that MULT comes first if there is one. */
4278 else if (GET_CODE (op0) == MULT)
4279 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4281 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4283 /* Let's also eliminate constants from op0 if possible. */
4284 op0 = eliminate_constant_term (op0, &constant_term);
4286 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4287 their sum should be a constant. Form it into OP1, since the
4288 result we want will then be OP0 + OP1. */
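	  /* Worked example (illustrative): expanding (a + 4) + (b + 8),
	     the constants are peeled off both operands, leaving
	     OP0 == (plus a b) and OP1 == 12, so the final sum is
	     (plus (plus a b) 12) with a single constant term.  */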
4290 temp = simplify_binary_operation (PLUS, mode, constant_term,
4295 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4298 /* Put a constant term last and put a multiplication first. */
4299 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4300 temp = op1, op1 = op0, op0 = temp;
4302 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4303 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4306 /* Handle difference of two symbolic constants,
4307 for the sake of an initializer. */
4308 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4309 && really_constant_p (TREE_OPERAND (exp, 0))
4310 && really_constant_p (TREE_OPERAND (exp, 1)))
4312 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4313 VOIDmode, modifier);
4314 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4315 VOIDmode, modifier);
4316 return gen_rtx (MINUS, mode, op0, op1);
4318 /* Convert A - const to A + (-const). */
4319 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4321 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4322 fold (build1 (NEGATE_EXPR, type,
4323 TREE_OPERAND (exp, 1))));
4326 this_optab = sub_optab;
4330 preexpand_calls (exp);
4331 /* If first operand is constant, swap them.
4332 Thus the following special case checks need only
4333 check the second operand. */
4334 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4336 register tree t1 = TREE_OPERAND (exp, 0);
4337 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4338 TREE_OPERAND (exp, 1) = t1;
4341 /* Attempt to return something suitable for generating an
4342 indexed address, for machines that support that. */
4344 if (modifier == EXPAND_SUM && mode == Pmode
4345 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4346 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4348 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4350 /* Apply distributive law if OP0 is x+c. */
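	  /* E.g. (illustrative): with OP0 == (plus x 8) and constant
	     multiplier 4, the result below is (plus (mult x 4) 32),
	     a form still usable as an indexed address.  */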
4351 if (GET_CODE (op0) == PLUS
4352 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4353 return gen_rtx (PLUS, mode,
4354 gen_rtx (MULT, mode, XEXP (op0, 0),
4355 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4356 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4357 * INTVAL (XEXP (op0, 1))));
4359 if (GET_CODE (op0) != REG)
4360 op0 = force_operand (op0, NULL_RTX);
4361 if (GET_CODE (op0) != REG)
4362 op0 = copy_to_mode_reg (mode, op0);
4364 return gen_rtx (MULT, mode, op0,
4365 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4368 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4371 /* Check for multiplying things that have been extended
4372 from a narrower type. If this machine supports multiplying
4373 in that narrower type with a result in the desired type,
4374 do it that way, and avoid the explicit type-conversion. */
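      /* Sketch (illustrative): for  short a, b;  long x = (long) a * (long) b;
	 a machine with a widening pattern such as mulhisi3 can multiply
	 the two HImode operands directly into an SImode product,
	 skipping both explicit extensions.  */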
4375 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4376 && TREE_CODE (type) == INTEGER_TYPE
4377 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4378 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4379 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4380 && int_fits_type_p (TREE_OPERAND (exp, 1),
4381 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4382 /* Don't use a widening multiply if a shift will do. */
4383 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4384 > HOST_BITS_PER_WIDE_INT)
4385 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4387 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4388 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4390 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4391 /* If both operands are extended, they must either both
4392 be zero-extended or both be sign-extended. */
4393 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4395 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4397 enum machine_mode innermode
4398 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4399 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4400 ? umul_widen_optab : smul_widen_optab);
4401 if (mode == GET_MODE_WIDER_MODE (innermode)
4402 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4404 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4405 NULL_RTX, VOIDmode, 0);
4406 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4407 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4410 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4411 NULL_RTX, VOIDmode, 0);
4415 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4416 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4417 return expand_mult (mode, op0, op1, target, unsignedp);
4419 case TRUNC_DIV_EXPR:
4420 case FLOOR_DIV_EXPR:
4422 case ROUND_DIV_EXPR:
4423 case EXACT_DIV_EXPR:
4424 preexpand_calls (exp);
4425 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
      /* Possible optimization: compute the dividend with EXPAND_SUM;
	 then, if the divisor is constant, we can optimize the case
	 where some terms of the dividend have coefficients divisible by it.  */
4430 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4431 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4432 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4435 this_optab = flodiv_optab;
4438 case TRUNC_MOD_EXPR:
4439 case FLOOR_MOD_EXPR:
4441 case ROUND_MOD_EXPR:
4442 preexpand_calls (exp);
4443 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4445 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4446 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4447 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4449 case FIX_ROUND_EXPR:
4450 case FIX_FLOOR_EXPR:
4452 abort (); /* Not used for C. */
4454 case FIX_TRUNC_EXPR:
4455 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4457 target = gen_reg_rtx (mode);
4458 expand_fix (target, op0, unsignedp);
4462 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4464 target = gen_reg_rtx (mode);
4465 /* expand_float can't figure out what to do if FROM has VOIDmode.
4466 So give it the correct mode. With -O, cse will optimize this. */
4467 if (GET_MODE (op0) == VOIDmode)
4468 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4470 expand_float (target, op0,
4471 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4475 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4476 temp = expand_unop (mode, neg_optab, op0, target, 0);
4482 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4484 /* Handle complex values specially. */
4486 enum machine_mode opmode
4487 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4489 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4490 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4491 return expand_complex_abs (opmode, op0, target, unsignedp);
4494 /* Unsigned abs is simply the operand. Testing here means we don't
4495 risk generating incorrect code below. */
4496 if (TREE_UNSIGNED (type))
4499 /* First try to do it with a special abs instruction. */
4500 temp = expand_unop (mode, abs_optab, op0, target, 0);
4504 /* If this machine has expensive jumps, we can do integer absolute
4505 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4506 where W is the width of MODE. */
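      /* Standalone sketch of that identity (illustrative, not compiler
	 code), assuming 32-bit ints and an arithmetic right shift:

	   int iabs (int x)
	   {
	     int m = x >> 31;      // all ones if x < 0, else all zeros
	     return (x ^ m) - m;   // complement-and-add-one when m == -1
	   }

	 Here (x ^ m) - m equals (((signed) x >> 31) ^ x) - ((signed) x >> 31).  */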
4508 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4510 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4511 size_int (GET_MODE_BITSIZE (mode) - 1),
4514 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4517 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4524 /* If that does not win, use conditional jump and negate. */
4525 target = original_target;
4526 temp = gen_label_rtx ();
4527 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4528 || (GET_CODE (target) == REG
4529 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4530 target = gen_reg_rtx (mode);
4531 emit_move_insn (target, op0);
4532 emit_cmp_insn (target,
4533 expand_expr (convert (type, integer_zero_node),
4534 NULL_RTX, VOIDmode, 0),
4535 GE, NULL_RTX, mode, 0, 0);
4537 emit_jump_insn (gen_bge (temp));
4538 op0 = expand_unop (mode, neg_optab, target, target, 0);
4540 emit_move_insn (target, op0);
4547 target = original_target;
4548 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4549 || (GET_CODE (target) == REG
4550 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4551 target = gen_reg_rtx (mode);
4552 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4553 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4555 /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
4558 this_optab = (TREE_UNSIGNED (type)
4559 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4560 : (code == MIN_EXPR ? smin_optab : smax_optab));
4562 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4568 emit_move_insn (target, op0);
4569 op0 = gen_label_rtx ();
4570 /* If this mode is an integer too wide to compare properly,
4571 compare word by word. Rely on cse to optimize constant cases. */
4572 if (GET_MODE_CLASS (mode) == MODE_INT
4573 && !can_compare_p (mode))
4575 if (code == MAX_EXPR)
4576 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
4578 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4579 emit_move_insn (target, op1);
4583 if (code == MAX_EXPR)
4584 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4585 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4586 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4588 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4589 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4590 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4591 if (temp == const0_rtx)
4592 emit_move_insn (target, op1);
4593 else if (temp != const_true_rtx)
4595 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4596 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4599 emit_move_insn (target, op1);
4605 /* ??? Can optimize when the operand of this is a bitwise operation,
4606 by using a different bitwise operation. */
4608 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4609 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4615 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4616 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4621 /* ??? Can optimize bitwise operations with one arg constant.
4622 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4623 and (a bitwise1 b) bitwise2 b (etc)
4624 but that is probably not worth while. */
4626 /* BIT_AND_EXPR is for bitwise anding.
4627 TRUTH_AND_EXPR is for anding two boolean values
4628 when we want in all cases to compute both of them.
4629 In general it is fastest to do TRUTH_AND_EXPR by
4630 computing both operands as actual zero-or-1 values
4631 and then bitwise anding. In cases where there cannot
4632 be any side effects, better code would be made by
4633 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4634 but the question is how to recognize those cases. */
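    /* For example (illustrative), with boolean-valued A and B,
       TRUTH_AND_EXPR evaluates both:  t = (a != 0) & (b != 0);
       while TRUTH_ANDIF_EXPR short-circuits:  t = (a != 0) && (b != 0);
       and never evaluates B when A is false.  */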
4636 case TRUTH_AND_EXPR:
4638 this_optab = and_optab;
4641 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4644 this_optab = ior_optab;
4647 case TRUTH_XOR_EXPR:
4649 this_optab = xor_optab;
4656 preexpand_calls (exp);
4657 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4659 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4660 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4663 /* Could determine the answer when only additive constants differ.
	 Also, the addition of one can be handled by changing the condition.  */
4671 preexpand_calls (exp);
4672 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4675 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4676 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4678 && GET_CODE (original_target) == REG
4679 && (GET_MODE (original_target)
4680 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4682 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4683 if (temp != original_target)
4684 temp = copy_to_reg (temp);
4685 op1 = gen_label_rtx ();
4686 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4687 GET_MODE (temp), unsignedp, 0);
4688 emit_jump_insn (gen_beq (op1));
4689 emit_move_insn (temp, const1_rtx);
4693 /* If no set-flag instruction, must generate a conditional
4694 store into a temporary variable. Drop through
4695 and handle this like && and ||. */
4697 case TRUTH_ANDIF_EXPR:
4698 case TRUTH_ORIF_EXPR:
4699 if (target == 0 || ! safe_from_p (target, exp)
4700 /* Make sure we don't have a hard reg (such as function's return
4701 value) live across basic blocks, if not optimizing. */
4702 || (!optimize && GET_CODE (target) == REG
4703 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4704 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4705 emit_clr_insn (target);
4706 op1 = gen_label_rtx ();
4707 jumpifnot (exp, op1);
4708 emit_0_to_1_insn (target);
4712 case TRUTH_NOT_EXPR:
4713 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4714 /* The parser is careful to generate TRUTH_NOT_EXPR
4715 only with operands that are always zero or one. */
4716 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4717 target, 1, OPTAB_LIB_WIDEN);
4723 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4725 return expand_expr (TREE_OPERAND (exp, 1),
4726 (ignore ? const0_rtx : target),
4731 /* Note that COND_EXPRs whose type is a structure or union
4732 are required to be constructed to contain assignments of
4733 a temporary variable, so that we can evaluate them here
4734 for side effect only. If type is void, we must do likewise. */
4736 /* If an arm of the branch requires a cleanup,
4737 only that cleanup is performed. */
4740 tree binary_op = 0, unary_op = 0;
4741 tree old_cleanups = cleanups_this_call;
4742 cleanups_this_call = 0;
4744 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4745 convert it to our mode, if necessary. */
4746 if (integer_onep (TREE_OPERAND (exp, 1))
4747 && integer_zerop (TREE_OPERAND (exp, 2))
4748 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4750 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4751 if (GET_MODE (op0) == mode)
4754 target = gen_reg_rtx (mode);
4755 convert_move (target, op0, unsignedp);
4759 /* If we are not to produce a result, we have no target. Otherwise,
4760 if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */
4764 if (mode == VOIDmode || ignore)
4766 else if (original_target
4767 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4768 temp = original_target;
4769 else if (mode == BLKmode)
4771 if (TYPE_SIZE (type) == 0
4772 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4774 temp = assign_stack_temp (BLKmode,
4775 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4776 + BITS_PER_UNIT - 1)
4777 / BITS_PER_UNIT, 0);
4780 temp = gen_reg_rtx (mode);
4782 /* Check for X ? A + B : A. If we have this, we can copy
4783 A to the output and conditionally add B. Similarly for unary
4784 operations. Don't do this if X has side-effects because
4785 those side effects might affect A or B and the "?" operation is
4786 a sequence point in ANSI. (We test for side effects later.) */
4788 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4789 && operand_equal_p (TREE_OPERAND (exp, 2),
4790 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4791 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4792 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4793 && operand_equal_p (TREE_OPERAND (exp, 1),
4794 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4795 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4796 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4797 && operand_equal_p (TREE_OPERAND (exp, 2),
4798 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4799 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4800 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4801 && operand_equal_p (TREE_OPERAND (exp, 1),
4802 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4803 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4805 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4806 operation, do this as A + (X != 0). Similarly for other simple
4807 binary operators. */
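	/* E.g. (illustrative):  y = cond ? a + 1 : a;  can become

	     t = (cond != 0);   // one store-flag insn, no branch
	     y = a + t;

	   which is what the code below arranges when do_store_flag
	   succeeds.  */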
4808 if (singleton && binary_op
4809 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4810 && (TREE_CODE (binary_op) == PLUS_EXPR
4811 || TREE_CODE (binary_op) == MINUS_EXPR
4812 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4813 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4814 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4815 && integer_onep (TREE_OPERAND (binary_op, 1))
4816 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4819 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4820 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4821 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4822 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4825 /* If we had X ? A : A + 1, do this as A + (X == 0).
4827 We have to invert the truth value here and then put it
4828 back later if do_store_flag fails. We cannot simply copy
4829 TREE_OPERAND (exp, 0) to another variable and modify that
	     because invert_truthvalue can modify the tree pointed to
	     by its argument.  */
4832 if (singleton == TREE_OPERAND (exp, 1))
4833 TREE_OPERAND (exp, 0)
4834 = invert_truthvalue (TREE_OPERAND (exp, 0));
4836 result = do_store_flag (TREE_OPERAND (exp, 0),
4837 (safe_from_p (temp, singleton)
4839 mode, BRANCH_COST <= 1);
4843 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4844 return expand_binop (mode, boptab, op1, result, temp,
4845 unsignedp, OPTAB_LIB_WIDEN);
4847 else if (singleton == TREE_OPERAND (exp, 1))
4848 TREE_OPERAND (exp, 0)
4849 = invert_truthvalue (TREE_OPERAND (exp, 0));
4853 op0 = gen_label_rtx ();
4855 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4859 /* If the target conflicts with the other operand of the
4860 binary op, we can't use it. Also, we can't use the target
4861 if it is a hard register, because evaluating the condition
4862 might clobber it. */
4864 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4865 || (GET_CODE (temp) == REG
4866 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4867 temp = gen_reg_rtx (mode);
4868 store_expr (singleton, temp, 0);
4871 expand_expr (singleton,
4872 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4873 if (cleanups_this_call)
4875 sorry ("aggregate value in COND_EXPR");
4876 cleanups_this_call = 0;
4878 if (singleton == TREE_OPERAND (exp, 1))
4879 jumpif (TREE_OPERAND (exp, 0), op0);
4881 jumpifnot (TREE_OPERAND (exp, 0), op0);
4883 if (binary_op && temp == 0)
4884 /* Just touch the other operand. */
4885 expand_expr (TREE_OPERAND (binary_op, 1),
4886 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4888 store_expr (build (TREE_CODE (binary_op), type,
4889 make_tree (type, temp),
4890 TREE_OPERAND (binary_op, 1)),
4893 store_expr (build1 (TREE_CODE (unary_op), type,
4894 make_tree (type, temp)),
4899 /* This is now done in jump.c and is better done there because it
4900 produces shorter register lifetimes. */
      /* Check for both possibilities, either constants or variables
	 in registers (but not the same as the target!).  If so, we can
	 save branches by assigning one, branching, and assigning the
	 other.  */
4906 else if (temp && GET_MODE (temp) != BLKmode
4907 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4908 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4909 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4910 && DECL_RTL (TREE_OPERAND (exp, 1))
4911 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4912 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4913 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4914 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4915 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4916 && DECL_RTL (TREE_OPERAND (exp, 2))
4917 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4918 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4920 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4921 temp = gen_reg_rtx (mode);
4922 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4923 jumpifnot (TREE_OPERAND (exp, 0), op0);
4924 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4928 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4929 comparison operator. If we have one of these cases, set the
4930 output to A, branch on A (cse will merge these two references),
4931 then set the output to FOO. */
4933 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4934 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4935 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4936 TREE_OPERAND (exp, 1), 0)
4937 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4938 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4940 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4941 temp = gen_reg_rtx (mode);
4942 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4943 jumpif (TREE_OPERAND (exp, 0), op0);
4944 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4948 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4949 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4950 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4951 TREE_OPERAND (exp, 2), 0)
4952 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4953 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4955 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4956 temp = gen_reg_rtx (mode);
4957 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4958 jumpifnot (TREE_OPERAND (exp, 0), op0);
4959 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4964 op1 = gen_label_rtx ();
4965 jumpifnot (TREE_OPERAND (exp, 0), op0);
4967 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4969 expand_expr (TREE_OPERAND (exp, 1),
4970 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4971 if (cleanups_this_call)
4973 sorry ("aggregate value in COND_EXPR");
4974 cleanups_this_call = 0;
4978 emit_jump_insn (gen_jump (op1));
4982 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4984 expand_expr (TREE_OPERAND (exp, 2),
4985 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4988 if (cleanups_this_call)
4990 sorry ("aggregate value in COND_EXPR");
4991 cleanups_this_call = 0;
4997 cleanups_this_call = old_cleanups;
5003 /* Something needs to be initialized, but we didn't know
5004 where that thing was when building the tree. For example,
5005 it could be the return value of a function, or a parameter
	 to a function which is laid out on the stack, or a temporary
5007 variable which must be passed by reference.
5009 We guarantee that the expression will either be constructed
5010 or copied into our original target. */
5012 tree slot = TREE_OPERAND (exp, 0);
5015 if (TREE_CODE (slot) != VAR_DECL)
5020 if (DECL_RTL (slot) != 0)
5022 target = DECL_RTL (slot);
	      /* If we have already expanded the slot, don't do it again.  */
5025 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5030 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5031 /* All temp slots at this level must not conflict. */
5032 preserve_temp_slots (target);
5033 DECL_RTL (slot) = target;
	  /* I bet this needs to be done, and I bet that it needs to
	     be above, inside the else clause.  The reason is simple:
	     how else is it going to get cleaned up? (mrs)

	     The reason it probably did not work before, and was
	     commented out, is that this was re-expanding already
	     expanded target_exprs (target == 0 and DECL_RTL (slot)
	     != 0), also cleaning them up many times as well. :-( */
5046 /* Since SLOT is not known to the called function
5047 to belong to its stack frame, we must build an explicit
5048 cleanup. This case occurs when we must build up a reference
5049 to pass the reference as an argument. In this case,
	     it is very likely that such a reference need not be
	     built here.  */
5053 if (TREE_OPERAND (exp, 2) == 0)
5054 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5055 if (TREE_OPERAND (exp, 2))
5056 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5057 cleanups_this_call);
5062 /* This case does occur, when expanding a parameter which
5063 needs to be constructed on the stack. The target
5064 is the actual stack address that we want to initialize.
5065 The function we call will perform the cleanup in this case. */
5067 DECL_RTL (slot) = target;
5070 exp1 = TREE_OPERAND (exp, 1);
5071 /* Mark it as expanded. */
5072 TREE_OPERAND (exp, 1) = NULL_TREE;
5074 return expand_expr (exp1, target, tmode, modifier);
5079 tree lhs = TREE_OPERAND (exp, 0);
5080 tree rhs = TREE_OPERAND (exp, 1);
5081 tree noncopied_parts = 0;
5082 tree lhs_type = TREE_TYPE (lhs);
5084 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5085 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5086 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5087 TYPE_NONCOPIED_PARTS (lhs_type));
5088 while (noncopied_parts != 0)
5090 expand_assignment (TREE_VALUE (noncopied_parts),
5091 TREE_PURPOSE (noncopied_parts), 0, 0);
5092 noncopied_parts = TREE_CHAIN (noncopied_parts);
5099 /* If lhs is complex, expand calls in rhs before computing it.
5100 That's so we don't compute a pointer and save it over a call.
5101 If lhs is simple, compute it first so we can give it as a
5102 target if the rhs is just a call. This avoids an extra temp and copy
5103 and that prevents a partial-subsumption which makes bad code.
5104 Actually we could treat component_ref's of vars like vars. */
5106 tree lhs = TREE_OPERAND (exp, 0);
5107 tree rhs = TREE_OPERAND (exp, 1);
5108 tree noncopied_parts = 0;
5109 tree lhs_type = TREE_TYPE (lhs);
5113 if (TREE_CODE (lhs) != VAR_DECL
5114 && TREE_CODE (lhs) != RESULT_DECL
5115 && TREE_CODE (lhs) != PARM_DECL)
5116 preexpand_calls (exp);
5118 /* Check for |= or &= of a bitfield of size one into another bitfield
5119 of size 1. In this case, (unless we need the result of the
5120 assignment) we can do this more efficiently with a
5121 test followed by an assignment, if necessary.
5123 ??? At this point, we can't get a BIT_FIELD_REF here. But if
	 things change so we do, this code should be enhanced to
	 handle it.  */
5127 && TREE_CODE (lhs) == COMPONENT_REF
5128 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5129 || TREE_CODE (rhs) == BIT_AND_EXPR)
5130 && TREE_OPERAND (rhs, 0) == lhs
5131 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5132 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5133 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5135 rtx label = gen_label_rtx ();
5137 do_jump (TREE_OPERAND (rhs, 1),
5138 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5139 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5140 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5141 (TREE_CODE (rhs) == BIT_IOR_EXPR
5143 : integer_zero_node)),
5145 do_pending_stack_adjust ();
5150 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5151 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5152 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5153 TYPE_NONCOPIED_PARTS (lhs_type));
5155 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5156 while (noncopied_parts != 0)
5158 expand_assignment (TREE_PURPOSE (noncopied_parts),
5159 TREE_VALUE (noncopied_parts), 0, 0);
5160 noncopied_parts = TREE_CHAIN (noncopied_parts);
5165 case PREINCREMENT_EXPR:
5166 case PREDECREMENT_EXPR:
5167 return expand_increment (exp, 0);
5169 case POSTINCREMENT_EXPR:
5170 case POSTDECREMENT_EXPR:
5171 /* Faster to treat as pre-increment if result is not used. */
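      /* E.g. (illustrative): in  for (i = 0; i < n; i++)  the value of
	 i++ is discarded, so it is expanded like ++i, saving a copy of
	 the old value.  */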
5172 return expand_increment (exp, ! ignore);
5175 /* Are we taking the address of a nested function? */
5176 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5177 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5179 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5180 op0 = force_operand (op0, target);
5184 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5185 (modifier == EXPAND_INITIALIZER
5186 ? modifier : EXPAND_CONST_ADDRESS));
5188 /* We would like the object in memory. If it is a constant,
5189 we can have it be statically allocated into memory. For
5190 a non-constant (REG or SUBREG), we need to allocate some
5191 memory and store the value into it. */
5193 if (CONSTANT_P (op0))
5194 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5197 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
	  /* If this object is in a register, copy it to a stack
	     temporary, so we have a memory address to return.  */
5201 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5202 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5204 = assign_stack_temp (inner_mode,
5205 int_size_in_bytes (inner_type), 1);
5207 emit_move_insn (memloc, op0);
5211 if (GET_CODE (op0) != MEM)
5214 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5215 return XEXP (op0, 0);
5216 op0 = force_operand (XEXP (op0, 0), target);
5218 if (flag_force_addr && GET_CODE (op0) != REG)
5219 return force_reg (Pmode, op0);
5222 case ENTRY_VALUE_EXPR:
5225 /* COMPLEX type for Extended Pascal & Fortran */
5228 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5232 /* Get the rtx code of the operands. */
5233 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5234 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5237 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5239 prev = get_last_insn ();
5241 /* Tell flow that the whole of the destination is being set. */
5242 if (GET_CODE (target) == REG)
5243 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5245 /* Move the real (op0) and imaginary (op1) parts to their location. */
5246 emit_move_insn (gen_realpart (mode, target), op0);
5247 emit_move_insn (gen_imagpart (mode, target), op1);
5249 /* Complex construction should appear as a single unit. */
5256 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5257 return gen_realpart (mode, op0);
5260 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5261 return gen_imagpart (mode, op0);
5265 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5269 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5272 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5274 prev = get_last_insn ();
5276 /* Tell flow that the whole of the destination is being set. */
5277 if (GET_CODE (target) == REG)
5278 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5280 /* Store the realpart and the negated imagpart to target. */
5281 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5283 imag_t = gen_imagpart (mode, target);
5284 temp = expand_unop (mode, neg_optab,
5285 gen_imagpart (mode, op0), imag_t, 0);
5287 emit_move_insn (imag_t, temp);
      /* Conjugate should appear as a single unit.  */
5296 op0 = CONST0_RTX (tmode);
5302 return (*lang_expand_expr) (exp, target, tmode, modifier);
5305 /* Here to do an ordinary binary operator, generating an instruction
5306 from the optab already placed in `this_optab'. */
5308 preexpand_calls (exp);
5309 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5311 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5312 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5314 temp = expand_binop (mode, this_optab, op0, op1, target,
5315 unsignedp, OPTAB_LIB_WIDEN);
5321 /* Return the alignment in bits of EXP, a pointer valued expression.
5322 But don't return more than MAX_ALIGN no matter what.
5323 The alignment returned is, by default, the alignment of the thing that
5324 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5326 Otherwise, look at the expression to see if we can do better, i.e., if the
5327 expression is actually pointing at an object whose alignment is tighter. */
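/* E.g. (illustrative): for  double d;  char *p = (char *) &d;  the type
   of P promises only byte alignment, but when the argument is the
   ADDR_EXPR &d the code below can report DECL_ALIGN (d) instead.  */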
5330 get_pointer_alignment (exp, max_align)
5334 unsigned align, inner;
5336 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5339 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5340 align = MIN (align, max_align);
5344 switch (TREE_CODE (exp))
5348 case NON_LVALUE_EXPR:
5349 exp = TREE_OPERAND (exp, 0);
5350 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5352 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5353 inner = MIN (inner, max_align);
5354 align = MAX (align, inner);
5358 /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
5361 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5364 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5369 exp = TREE_OPERAND (exp, 0);
5373 /* See what we are pointing at and look at its alignment. */
5374 exp = TREE_OPERAND (exp, 0);
5375 if (TREE_CODE (exp) == FUNCTION_DECL)
5376 align = MAX (align, FUNCTION_BOUNDARY);
5377 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5378 align = MAX (align, DECL_ALIGN (exp));
5379 #ifdef CONSTANT_ALIGNMENT
5380 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5381 align = CONSTANT_ALIGNMENT (exp, align);
5383 return MIN (align, max_align);
5391 /* Return the tree node and offset if a given argument corresponds to
5392 a string constant. */
5395 string_constant (arg, ptr_offset)
5401 if (TREE_CODE (arg) == ADDR_EXPR
5402 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5404 *ptr_offset = integer_zero_node;
5405 return TREE_OPERAND (arg, 0);
5407 else if (TREE_CODE (arg) == PLUS_EXPR)
5409 tree arg0 = TREE_OPERAND (arg, 0);
5410 tree arg1 = TREE_OPERAND (arg, 1);
5415 if (TREE_CODE (arg0) == ADDR_EXPR
5416 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5419 return TREE_OPERAND (arg0, 0);
5421 else if (TREE_CODE (arg1) == ADDR_EXPR
5422 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5425 return TREE_OPERAND (arg1, 0);
5432 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5433 way, because it could contain a zero byte in the middle.
5434 TREE_STRING_LENGTH is the size of the character array, not the string.
5436 Unfortunately, string_constant can't access the values of const char
5437 arrays with initializers, so neither can we do so here. */
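/* E.g. (illustrative): c_strlen ("hello") is 5; c_strlen ("foo\0bar" + 4)
   is 3; and for "foo\0bar" + k with non-constant K the internal zero
   byte makes the length unknowable, so 0 is returned.  */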
5447 src = string_constant (src, &offset_node);
5450 max = TREE_STRING_LENGTH (src);
5451 ptr = TREE_STRING_POINTER (src);
5452 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5454 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5455 compute the offset to the following null if we don't know where to
5456 start searching for it. */
5458 for (i = 0; i < max; i++)
5461 /* We don't know the starting offset, but we do know that the string
5462 has no internal zero bytes. We can assume that the offset falls
5463 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
5466 /* This would perhaps not be valid if we were dealing with named
5467 arrays in addition to literal string constants. */
5468 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5471 /* We have a known offset into the string. Start searching there for
5472 a null character. */
5473 if (offset_node == 0)
5477 /* Did we get a long long offset? If so, punt. */
5478 if (TREE_INT_CST_HIGH (offset_node) != 0)
5480 offset = TREE_INT_CST_LOW (offset_node);
  /* If the offset is known to be out of bounds, warn, and call strlen at
     run time.  */
5484 if (offset < 0 || offset > max)
5486 warning ("offset outside bounds of constant string");
5489 /* Use strlen to search for the first zero byte. Since any strings
5490 constructed with build_string will have nulls appended, we win even
5491 if we get handed something like (char[4])"abcd".
5493 Since OFFSET is our starting index into the string, no further
5494 calculation is needed. */
5495 return size_int (strlen (ptr + offset));
5498 /* Expand an expression EXP that calls a built-in function,
5499 with result going to TARGET if that's convenient
5500 (and in mode MODE if that's convenient).
5501 SUBTARGET may be used as the target for computing one of EXP's operands.
5502 IGNORE is nonzero if the value is to be ignored. */
5505 expand_builtin (exp, target, subtarget, mode, ignore)
5509 enum machine_mode mode;
5512 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5513 tree arglist = TREE_OPERAND (exp, 1);
5516 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5517 optab builtin_optab;
5519 switch (DECL_FUNCTION_CODE (fndecl))
5524 /* build_function_call changes these into ABS_EXPR. */
5529 case BUILT_IN_FSQRT:
5530 /* If not optimizing, call the library function. */
5535 /* Arg could be wrong type if user redeclared this fcn wrong. */
5536 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5537 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5539 /* Stabilize and compute the argument. */
5540 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5541 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5543 exp = copy_node (exp);
5544 arglist = copy_node (arglist);
5545 TREE_OPERAND (exp, 1) = arglist;
5546 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5548 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5550 /* Make a suitable register to place result in. */
5551 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5556 switch (DECL_FUNCTION_CODE (fndecl))
5559 builtin_optab = sin_optab; break;
5561 builtin_optab = cos_optab; break;
5562 case BUILT_IN_FSQRT:
5563 builtin_optab = sqrt_optab; break;
5568 /* Compute into TARGET.
5569 Set TARGET to wherever the result comes back. */
5570 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5571 builtin_optab, op0, target, 0);
5573 /* If we were unable to expand via the builtin, stop the
5574 sequence (without outputting the insns) and break, causing
	 a call to the library function.  */
5582 /* Check the results by default. But if flag_fast_math is turned on,
5583 then assume sqrt will always be called with valid arguments. */
5585 if (! flag_fast_math)
5587 /* Don't define the builtin FP instructions
5588 if your machine is not IEEE. */
5589 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5592 lab1 = gen_label_rtx ();
5594 /* Test the result; if it is NaN, set errno=EDOM because
5595 the argument was not in the domain. */
5596 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5597 emit_jump_insn (gen_beq (lab1));
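	  /* The self-comparison above relies on IEEE semantics
	     (illustrative note): a NaN compares unequal even to itself,
	     so the beq is taken for every valid result and only a NaN
	     falls through to the errno-setting code.  */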
5601 #ifdef GEN_ERRNO_RTX
5602 rtx errno_rtx = GEN_ERRNO_RTX;
5605 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5608 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5611 /* We can't set errno=EDOM directly; let the library call do it.
5612 Pop the arguments right away in case the call gets deleted. */
5614 expand_call (exp, target, 0);
5621 /* Output the entire sequence. */
5622 insns = get_insns ();
5628 /* __builtin_apply_args returns block of memory allocated on
5629 the stack into which is stored the arg pointer, structure
5630 value address, static chain, and all the registers that might
5631 possibly be used in performing a function call. The code is
5632 moved to the start of the function so the incoming values are
5634 case BUILT_IN_APPLY_ARGS:
5635 /* Don't do __builtin_apply_args more than once in a function.
5636 Save the result of the first call and reuse it. */
5637 if (apply_args_value != 0)
5638 return apply_args_value;
5640 /* When this function is called, it means that registers must be
5641 saved on entry to this function. So we migrate the
5642 call to the first insn of this function. */
5647 temp = expand_builtin_apply_args ();
5651 apply_args_value = temp;
5653 /* Put the sequence after the NOTE that starts the function.
5654 If this is inside a SEQUENCE, make the outer-level insn
	 chain current, so the code is placed at the start of the
	 function.  */
5657 push_topmost_sequence ();
5658 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5659 pop_topmost_sequence ();
5663 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5664 FUNCTION with a copy of the parameters described by
5665 ARGUMENTS, and ARGSIZE. It returns a block of memory
5666 allocated on the stack into which is stored all the registers
5667 that might possibly be used for returning the result of a
5668 function. ARGUMENTS is the value returned by
5669 __builtin_apply_args. ARGSIZE is the number of bytes of
5670 arguments that must be copied. ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
5673 case BUILT_IN_APPLY:
5675 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5676 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5677 || TREE_CHAIN (arglist) == 0
5678 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5679 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5680 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5688 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5689 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5691 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5694 /* __builtin_return (RESULT) causes the function to return the
5695 value described by RESULT. RESULT is address of the block of
5696 memory returned by __builtin_apply. */
5697 case BUILT_IN_RETURN:
5699 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5700 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5701 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5702 NULL_RTX, VOIDmode, 0));
5705 case BUILT_IN_SAVEREGS:
5706 /* Don't do __builtin_saveregs more than once in a function.
5707 Save the result of the first call and reuse it. */
5708 if (saveregs_value != 0)
5709 return saveregs_value;
5711 /* When this function is called, it means that registers must be
5712 saved on entry to this function. So we migrate the
5713 call to the first insn of this function. */
5716 rtx valreg, saved_valreg;
5718 /* Now really call the function. `expand_call' does not call
5719 expand_builtin, so there is no danger of infinite recursion here. */
5722 #ifdef EXPAND_BUILTIN_SAVEREGS
5723 /* Do whatever the machine needs done in this case. */
5724 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5726 /* The register where the function returns its value
5727 is likely to have something else in it, such as an argument.
5728 So preserve that register around the call. */
5729 if (value_mode != VOIDmode)
5731 valreg = hard_libcall_value (value_mode);
5732 saved_valreg = gen_reg_rtx (value_mode);
5733 emit_move_insn (saved_valreg, valreg);
5736 /* Generate the call, putting the value in a pseudo. */
5737 temp = expand_call (exp, target, ignore);
5739 if (value_mode != VOIDmode)
5740 emit_move_insn (valreg, saved_valreg);
5746 saveregs_value = temp;
5748 /* Put the sequence after the NOTE that starts the function.
5749 If this is inside a SEQUENCE, make the outer-level insn
	 chain current, so the code is placed at the start of the
	 function.  */
5752 push_topmost_sequence ();
5753 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5754 pop_topmost_sequence ();
5758 /* __builtin_args_info (N) returns word N of the arg space info
       for the current function.  The number and meanings of the words
       are controlled by the definition of CUMULATIVE_ARGS.  */
5761 case BUILT_IN_ARGS_INFO:
5763 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
	int *word_ptr = (int *) &current_function_args_info;
5766 tree type, elts, result;
5768 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5769 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5770 __FILE__, __LINE__);
5774 tree arg = TREE_VALUE (arglist);
5775 if (TREE_CODE (arg) != INTEGER_CST)
5776 error ("argument of `__builtin_args_info' must be constant");
5779 int wordnum = TREE_INT_CST_LOW (arg);
5781 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5782 error ("argument of `__builtin_args_info' out of range");
5784 return GEN_INT (word_ptr[wordnum]);
5788 error ("missing argument in `__builtin_args_info'");
5793 for (i = 0; i < nwords; i++)
	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
5796 type = build_array_type (integer_type_node,
5797 build_index_type (build_int_2 (nwords, 0)));
5798 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5799 TREE_CONSTANT (result) = 1;
5800 TREE_STATIC (result) = 1;
5801 result = build (INDIRECT_REF, build_pointer_type (type), result);
5802 TREE_CONSTANT (result) = 1;
5803 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5807 /* Return the address of the first anonymous stack arg. */
5808 case BUILT_IN_NEXT_ARG:
5810 tree fntype = TREE_TYPE (current_function_decl);
5811 if (!(TYPE_ARG_TYPES (fntype) != 0
5812 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5813 != void_type_node)))
5815 error ("`va_start' used in function with fixed args");
5820 return expand_binop (Pmode, add_optab,
5821 current_function_internal_arg_pointer,
5822 current_function_arg_offset_rtx,
5823 NULL_RTX, 0, OPTAB_LIB_WIDEN);
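      /* Illustrative use: on many machines va_start expands to a call
	 to this builtin, as in

	   void f (int n, ...)
	   {
	     va_list ap;
	     va_start (ap, n);   // uses __builtin_next_arg
	   }

	 hence the fixed-args check above.  */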
5825 case BUILT_IN_CLASSIFY_TYPE:
5828 tree type = TREE_TYPE (TREE_VALUE (arglist));
5829 enum tree_code code = TREE_CODE (type);
5830 if (code == VOID_TYPE)
5831 return GEN_INT (void_type_class);
5832 if (code == INTEGER_TYPE)
5833 return GEN_INT (integer_type_class);
5834 if (code == CHAR_TYPE)
5835 return GEN_INT (char_type_class);
5836 if (code == ENUMERAL_TYPE)
5837 return GEN_INT (enumeral_type_class);
5838 if (code == BOOLEAN_TYPE)
5839 return GEN_INT (boolean_type_class);
5840 if (code == POINTER_TYPE)
5841 return GEN_INT (pointer_type_class);
5842 if (code == REFERENCE_TYPE)
5843 return GEN_INT (reference_type_class);
5844 if (code == OFFSET_TYPE)
5845 return GEN_INT (offset_type_class);
5846 if (code == REAL_TYPE)
5847 return GEN_INT (real_type_class);
5848 if (code == COMPLEX_TYPE)
5849 return GEN_INT (complex_type_class);
5850 if (code == FUNCTION_TYPE)
5851 return GEN_INT (function_type_class);
5852 if (code == METHOD_TYPE)
5853 return GEN_INT (method_type_class);
5854 if (code == RECORD_TYPE)
5855 return GEN_INT (record_type_class);
5856 if (code == UNION_TYPE)
5857 return GEN_INT (union_type_class);
5858 if (code == ARRAY_TYPE)
5859 return GEN_INT (array_type_class);
5860 if (code == STRING_TYPE)
5861 return GEN_INT (string_type_class);
5862 if (code == SET_TYPE)
5863 return GEN_INT (set_type_class);
5864 if (code == FILE_TYPE)
5865 return GEN_INT (file_type_class);
5866 if (code == LANG_TYPE)
5867 return GEN_INT (lang_type_class);
5869 return GEN_INT (no_type_class);
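      /* E.g. (illustrative): __builtin_classify_type (0) yields
	 integer_type_class and __builtin_classify_type (0.0) yields
	 real_type_class; the class numbers come from typeclass.h.  */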
5871 case BUILT_IN_CONSTANT_P:
5875 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5876 ? const1_rtx : const0_rtx);
5878 case BUILT_IN_FRAME_ADDRESS:
5879 /* The argument must be a nonnegative integer constant.
5880 It counts the number of frames to scan up the stack.
5881 The value is the address of that frame. */
5882 case BUILT_IN_RETURN_ADDRESS:
5883 /* The argument must be a nonnegative integer constant.
5884 It counts the number of frames to scan up the stack.
5885 The value is the return address saved in that frame. */
5887 /* Warning about missing arg was already issued. */
5889 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5891 error ("invalid arg to `__builtin_return_address'");
5894 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5896 error ("invalid arg to `__builtin_return_address'");
5901 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5902 rtx tem = frame_pointer_rtx;
5905 /* Some machines need special handling before we can access arbitrary
5906 frames. For example, on the sparc, we must first flush all
5907 register windows to the stack. */
5908 #ifdef SETUP_FRAME_ADDRESSES
5909 SETUP_FRAME_ADDRESSES ();
5912 /* On the sparc, the return address is not in the frame, it is
5913 in a register. There is no way to access it off of the current
5914 frame pointer, but it can be accessed off the previous frame
	 pointer by reading the value from the register window save
	 area.  */
5917 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
5918 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
5922 /* Scan back COUNT frames to the specified frame. */
5923 for (i = 0; i < count; i++)
5925 /* Assume the dynamic chain pointer is in the word that
5926 the frame address points to, unless otherwise specified. */
5927 #ifdef DYNAMIC_CHAIN_ADDRESS
5928 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5930 tem = memory_address (Pmode, tem);
5931 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5934 /* For __builtin_frame_address, return what we've got. */
5935 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	/* For __builtin_return_address, get the return address from
	   that frame.  */
5940 #ifdef RETURN_ADDR_RTX
5941 return RETURN_ADDR_RTX (count, tem);
5943 tem = memory_address (Pmode,
5944 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5945 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5949 case BUILT_IN_ALLOCA:
5951 /* Arg could be non-integer if user redeclared this fcn wrong. */
5952 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5954 current_function_calls_alloca = 1;
5955 /* Compute the argument. */
5956 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5958 /* Allocate the desired space. */
5959 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5961 /* Record the new stack level for nonlocal gotos. */
5962 if (nonlocal_goto_handler_slot != 0)
5963 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5967 /* If not optimizing, call the library function. */
5972 /* Arg could be non-integer if user redeclared this fcn wrong. */
5973 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5976 /* Compute the argument. */
5977 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5978 /* Compute ffs, into TARGET if possible.
5979 Set TARGET to wherever the result comes back. */
5980 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5981 ffs_optab, op0, target, 1);
5986 case BUILT_IN_STRLEN:
5987 /* If not optimizing, call the library function. */
5992 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5993 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5997 tree src = TREE_VALUE (arglist);
5998 tree len = c_strlen (src);
6001 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6003 rtx result, src_rtx, char_rtx;
6004 enum machine_mode insn_mode = value_mode, char_mode;
6005 enum insn_code icode;
6007 /* If the length is known, just return it. */
6009 return expand_expr (len, target, mode, 0);
6011 /* If SRC is not a pointer type, don't do this operation inline. */
6015 /* Call a function if we can't compute strlen in the right mode. */
6017 while (insn_mode != VOIDmode)
6019 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6020 if (icode != CODE_FOR_nothing)
6023 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6025 if (insn_mode == VOIDmode)
6028 /* Make a place to write the result of the instruction. */
6031 && GET_CODE (result) == REG
6032 && GET_MODE (result) == insn_mode
6033 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6034 result = gen_reg_rtx (insn_mode);
6036 /* Make sure the operands are acceptable to the predicates. */
6038 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6039 result = gen_reg_rtx (insn_mode);
6041 src_rtx = memory_address (BLKmode,
6042 expand_expr (src, NULL_RTX, Pmode,
6044 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6045 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6047 char_rtx = const0_rtx;
6048 char_mode = insn_operand_mode[(int)icode][2];
6049 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6050 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6052 emit_insn (GEN_FCN (icode) (result,
6053 gen_rtx (MEM, BLKmode, src_rtx),
6054 char_rtx, GEN_INT (align)));
6056 /* Return the value in the proper mode for this function. */
6057 if (GET_MODE (result) == value_mode)
6059 else if (target != 0)
6061 convert_move (target, result, 0);
6065 return convert_to_mode (value_mode, result, 0);
6068 case BUILT_IN_STRCPY:
6069 /* If not optimizing, call the library function. */
6074 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6075 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6076 || TREE_CHAIN (arglist) == 0
6077 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6081 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6086 len = size_binop (PLUS_EXPR, len, integer_one_node);
6088 chainon (arglist, build_tree_list (NULL_TREE, len));
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize)
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        break;
      else
        {
          tree dest = TREE_VALUE (arglist);
          tree src = TREE_VALUE (TREE_CHAIN (arglist));
          tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
          int src_align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          int dest_align
            = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          rtx dest_rtx, dest_mem, src_mem;

          /* If either SRC or DEST is not a pointer type, don't do
             this operation in-line.  */
          if (src_align == 0 || dest_align == 0)
            {
              if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
                TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
              break;
            }

          dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
          dest_mem = gen_rtx (MEM, BLKmode,
                              memory_address (BLKmode, dest_rtx));
          src_mem = gen_rtx (MEM, BLKmode,
                             memory_address (BLKmode,
                                             expand_expr (src, NULL_RTX,
                                                          Pmode,
                                                          EXPAND_NORMAL)));

          /* Copy word part most expediently.  */
          emit_block_move (dest_mem, src_mem,
                           expand_expr (len, NULL_RTX, VOIDmode, 0),
                           MIN (src_align, dest_align));
          return dest_rtx;
        }
  /* These comparison functions need an instruction that returns an actual
     index.  An ordinary compare that just sets the condition codes
     is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize)
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
        break;
      else if (!HAVE_cmpstrsi)
        break;
      else
        {
          tree arg1 = TREE_VALUE (arglist);
          tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
          tree len, len2;

          len = c_strlen (arg1);
          if (len)
            len = size_binop (PLUS_EXPR, integer_one_node, len);
          len2 = c_strlen (arg2);
          if (len2)
            len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

          /* If we don't have a constant length for the first, use the length
             of the second, if we know it.  We don't require a constant for
             this case; some cost analysis could be done if both are available
             but neither is constant.  For now, assume they're equally cheap.

             If both strings have constant lengths, use the smaller.  This
             could arise if optimization results in strcpy being called with
             two fixed strings, or if the code was machine-generated.  We should
             add some code to the `memcmp' handler below to deal with such
             situations, someday.  */
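          /* Worked illustration (hypothetical user code, not from this
             file): for `strcmp ("abc", "abcdef")' the constant lengths
             found above are 4 and 7 (each includes the terminating
             null), so the code below keeps the smaller count and the
             comparison examines only those 4 bytes, which is enough
             because the shorter string's null decides the result.  */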
          if (!len || TREE_CODE (len) != INTEGER_CST)
            {
              if (len2)
                len = len2;
              else if (len == 0)
                break;
            }
          else if (len2 && TREE_CODE (len2) == INTEGER_CST)
            {
              if (tree_int_cst_lt (len2, len))
                len = len2;
            }

          chainon (arglist, build_tree_list (NULL_TREE, len));
        }

      /* Falls through to the memcmp case below.  */
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize)
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        break;
      else if (!HAVE_cmpstrsi)
        break;
      else
        {
          tree arg1 = TREE_VALUE (arglist);
          tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
          tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
          rtx result;

          int arg1_align
            = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          int arg2_align
            = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          enum machine_mode insn_mode
            = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

          /* If we don't have POINTER_TYPE, call the function.  */
          if (arg1_align == 0 || arg2_align == 0)
            {
              if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
                TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
              break;
            }

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          emit_insn (gen_cmpstrsi (result,
                                   gen_rtx (MEM, BLKmode,
                                            expand_expr (arg1, NULL_RTX, Pmode,
                                                         EXPAND_NORMAL)),
                                   gen_rtx (MEM, BLKmode,
                                            expand_expr (arg2, NULL_RTX, Pmode,
                                                         EXPAND_NORMAL)),
                                   expand_expr (len, NULL_RTX, VOIDmode, 0),
                                   GEN_INT (MIN (arg1_align, arg2_align))));

          /* Return the value in the proper mode for this function.  */
          mode = TYPE_MODE (TREE_TYPE (exp));
          if (GET_MODE (result) == mode)
            return result;
          else if (target != 0)
            {
              convert_move (target, result, 0);
              return target;
            }
          else
            return convert_to_mode (mode, result, 0);
        }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif

    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
             IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */

  return expand_call (exp, target, ignore);
}
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */
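/* A layout sketch, under illustrative assumptions (32-bit target,
   4-byte Pmode): the block computed here is

       offset 0:   incoming arg pointer
       offset 4:   structure value address (only if struct_value_rtx)
       offset 8+:  one slot per argument register, each slot first
                   rounded up to its mode's alignment by
                   size = CEIL (size, align) * align.  */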
static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (struct_value_rtx)
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != VOIDmode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode)
                  && HARD_REGNO_NREGS (regno, mode) == 1)
                best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          apply_args_mode[regno] = VOIDmode;
    }

  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != VOIDmode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode))
                best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;
      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }

  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */
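/* Illustrative shape of the vector built below, for a hypothetical
   machine returning values in registers 0 and 8:

       (parallel [(set (mem:SI (reg R))          (reg:SI 0))
                  (set (mem:DF (plus (reg R) 4)) (reg:DF 8))])

   when SAVEP is nonzero; when SAVEP is zero each SET runs the other
   way, restoring the registers from the saved memory slots.  */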
static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
        mem = change_address (result, mode,
                              plus_constant (XEXP (result, 0), size));
        savevec[nelts++] = (savep
                            ? gen_rtx (SET, VOIDmode, mem, reg)
                            : gen_rtx (SET, VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        emit_move_insn (change_address (registers, mode,
                                        plus_constant (XEXP (registers, 0),
                                                       size)),
                        gen_rtx (REG, mode, INCOMING_REGNO (regno)));
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
                  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
                                      plus_constant (XEXP (registers, 0),
                                                     size)),
                      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx use_insns = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
                  gen_rtx (MEM, Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
                                incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = copy_addr_to_reg (push_block (argsize, 0, 0));
  emit_block_move (gen_rtx (MEM, BLKmode, dest),
                   gen_rtx (MEM, BLKmode, incoming_args),
                   argsize,
                   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  arguments = gen_rtx (MEM, BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx (REG, mode, regno);
        emit_move_insn (reg,
                        change_address (arguments, mode,
                                        plus_constant (XEXP (arguments, 0),
                                                       size)));
        push_to_sequence (use_insns);
        emit_insn (gen_rtx (USE, VOIDmode, reg));
        use_insns = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
                      change_address (arguments, Pmode,
                                      plus_constant (XEXP (arguments, 0),
                                                     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
        {
          push_to_sequence (use_insns);
          emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
          use_insns = get_insns ();
          end_sequence ();
        }
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &use_insns);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            if (valreg)
              abort (); /* HAVE_untyped_call required.  */
            valreg = gen_rtx (REG, mode, regno);
          }

      emit_call_insn (gen_call_value (valreg,
                                      gen_rtx (MEM, FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
                                      XEXP (result, 0)),
                      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted and write the USE insns before it.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the USE insns before the CALL.  */
  emit_insns_before (use_insns, call_insn);

  /* Restore the stack.  */
  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx use_insns = 0;

  apply_result_size ();
  result = gen_rtx (MEM, BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
        emit_move_insn (reg,
                        change_address (result, mode,
                                        plus_constant (XEXP (result, 0),
                                                       size)));
        push_to_sequence (use_insns);
        emit_insn (gen_rtx (USE, VOIDmode, reg));
        use_insns = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (use_insns);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
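/* For instance (illustrative): for `y = x++' POST is 1 and the rtx
   returned holds the old value of x, while for `y = ++x' POST is 0
   and the rtx returned is the incremented value itself.  */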
static rtx
expand_increment (exp, post)
     register tree exp;
     int post;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, we want
     to always expand here, since this generates better or equivalent code.  */
  if (!post || op0_is_copy)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build ((this_optab == add_optab
                            ? PLUS_EXPR : MINUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));
      temp = expand_assignment (incremented, newexp, ! post, 0);
      return post ? op0 : temp;
    }

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.  */

#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode))
        {
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0)
        return;

      /* Do nothing to built-in functions.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
          || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
          || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
         the adjust will be done for it (since it makes jumps).
         Do it now, so that if this is inside an argument
         of a function, we don't get the stack adjustment
         after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case WITH_CLEANUP_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
        type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
        if (type == 'e' || type == '<' || type == '1' || type == '2'
            || type == 'r')
          preexpand_calls (TREE_OPERAND (exp, i));
      }
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand all cleanups up to OLD_CLEANUPS.
   Needed here, and also for language-dependent calls.  */

void
expand_cleanups_to (old_cleanups)
     tree old_cleanups;
{
  while (cleanups_this_call != old_cleanups)
    {
      expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */
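/* A sketch of the short-circuit case (labels are hypothetical): for
   `if (a && b) stmt;' this emits roughly

       jump to L_false if a == 0;
       jump to L_false if b == 0;
       stmt;
     L_false:

   so the TRUTH_ANDIF_EXPR never materializes a 0/1 value.  */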
void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
        goto normal;

    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;

    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
                                   TREE_OPERAND (exp, 0),
                                   TREE_OPERAND (exp, 1)),
                            NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
          && (type = type_for_size (i + 1, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      free_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
        int bitsize, bitpos, unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep);

        type = type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          register rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);
          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
        }
      break;

    case EQ_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
                == MODE_INT)
               &&
               !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_equality (exp, if_false_label, if_true_label);
      else
        comparison = compare (exp, EQ, EQ);
      break;

    case NE_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
                == MODE_INT)
               &&
               !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_equality (exp, if_true_label, if_false_label);
      else
        comparison = compare (exp, NE, NE);
      break;

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      break;
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  free_temp_slots ();

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
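/* Sketch (illustrative): comparing two DImode values on a 32-bit
   target gives nwords == 2.  The high-order words are tested first
   with GT (GTU if unsigned); only when they are equal do we go on to
   the low-order words, which are always compared with GTU.  */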
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = PREV_INSN (get_last_insn ());
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the insn before what was just emitted.
         On some machines, emitting the branch can discard
         the previous compare insn and emit a replacement.  */
      if (prev == 0)
        /* If there's only one preceding insn...  */
        insn = get_insns ();
      else
        insn = NEXT_INSN (prev);

      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  register rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
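/* For instance (illustrative): for `r = (a < b)' we first try an
   scc-style insn via emit_store_flag; if that fails, the fallback
   emitted at the end of this function is roughly

       r = 1;  compare a, b;  branch-if-true L;  r = 0;  L:

   which is the set/jump/set sequence mentioned above.  */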
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
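  /* E.g. (illustrative): a signed `x < 1' becomes `x <= 0' and a
     signed `x > -1' becomes `x >= 0', so the special cases below need
     only recognize comparisons against zero.  */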
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
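  /* Illustrative instance: for `(x & 0x10) != 0' bitnum is 4 and we
     compute `(x >> 4) & 1'; the EQ form `(x & 0x10) == 0' additionally
     XORs the result with 1.  */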
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
                                                    NULL_RTX, VOIDmode, 0)));

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), target, 1);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, 1);

      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, target);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
                            OPTAB_LIB_WIDEN);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
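  /* Illustrative numbers: for case values 10..20, RANGE is 10 and the
     lower bound has already been subtracted.  An original index of 25
     yields 15, and one of 5 wraps to a huge unsigned value; both fail
     the single unsigned test below and jump to DEFAULT_LABEL.  */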
  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
              gen_rtx (MULT, Pmode, index,
                       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
              gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */