1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
32 #include "typeclass.h"
34 #define CEIL(x,y) (((x) + (y) - 1) / (y))
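/* Illustrative sketch, not part of the original source: CEIL rounds a
   quotient upward, e.g. the number of whole words needed to hold a given
   number of bytes.  The 4-byte word size below is an assumption made only
   for this example; in the compiler the divisor is usually UNITS_PER_WORD.  */
#if 0
#include <stdio.h>

int
main ()
{
  printf ("%d\n", CEIL (10, 4));	/* 10 bytes need 3 whole 4-byte words */
  printf ("%d\n", CEIL (8, 4));		/* an exact multiple is not rounded up: 2 */
  return 0;
}
#endif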
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
50 #ifndef STACK_PUSH_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_PUSH_CODE PRE_DEC
54 #define STACK_PUSH_CODE PRE_INC
58 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
59 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
61 /* If this is nonzero, we do not bother generating VOLATILE
62 around volatile memory references, and we are willing to
63 output indirect addresses. If cse is to follow, we reject
64 indirect addresses so a useful potential cse is generated;
65 if it is used only once, instruction combination will produce
66 the same indirect address eventually. */
69 /* Nonzero to generate code for all the subroutines within an
70 expression before generating the upper levels of the expression.
71 Nowadays this is never zero. */
72 int do_preexpand_calls = 1;
74 /* Number of units that we should eventually pop off the stack.
75 These are the arguments to function calls that have already returned. */
76 int pending_stack_adjust;
78 /* Nonzero means stack pops must not be deferred, and deferred stack
79 pops must not be output. It is nonzero inside a function call,
80 inside a conditional expression, inside a statement expression,
81 and in other cases as well. */
82 int inhibit_defer_pop;
84 /* A list of all cleanups which belong to the arguments of
85 function calls being expanded by expand_call. */
86 tree cleanups_this_call;
88 /* Nonzero means __builtin_saveregs has already been done in this function.
89 The value is the pseudoreg containing the value __builtin_saveregs returned. */
91 static rtx saveregs_value;
93 /* Similarly for __builtin_apply_args. */
94 static rtx apply_args_value;
96 /* This structure is used by move_by_pieces to describe the move to be performed. */
108 int explicit_inc_from;
114 static rtx enqueue_insn PROTO((rtx, rtx));
115 static int queued_subexp_p PROTO((rtx));
116 static void init_queue PROTO((void));
117 static void move_by_pieces PROTO((rtx, rtx, int, int));
118 static int move_by_pieces_ninsns PROTO((unsigned int, int));
119 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
120 struct move_by_pieces *));
121 static void group_insns PROTO((rtx));
122 static void store_constructor PROTO((tree, rtx));
123 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
124 enum machine_mode, int, int, int));
125 static tree save_noncopied_parts PROTO((tree, tree));
126 static tree init_noncopied_parts PROTO((tree, tree));
127 static int safe_from_p PROTO((rtx, tree));
128 static int fixed_type_p PROTO((tree));
129 static int get_pointer_alignment PROTO((tree, unsigned));
130 static tree string_constant PROTO((tree, tree *));
131 static tree c_strlen PROTO((tree));
132 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
133 static int apply_args_size PROTO((void));
134 static int apply_result_size PROTO((void));
135 static rtx result_vector PROTO((int, rtx));
136 static rtx expand_builtin_apply_args PROTO((void));
137 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
138 static void expand_builtin_return PROTO((rtx));
139 static rtx expand_increment PROTO((tree, int));
140 static void preexpand_calls PROTO((tree));
141 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
142 static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
143 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
144 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
145 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
146 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
147 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
149 /* Record for each mode whether we can move a register directly to or
150 from an object of that mode in memory. If we can't, we won't try
151 to use that mode directly when accessing a field of that mode. */
153 static char direct_load[NUM_MACHINE_MODES];
154 static char direct_store[NUM_MACHINE_MODES];
156 /* MOVE_RATIO is the number of move instructions that is better than a block move. */
160 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
163 /* A value of around 6 would minimize code size; infinity would minimize execution time. */
165 #define MOVE_RATIO 15
169 /* This array records the insn_code of insns to perform block moves. */
170 enum insn_code movstr_optab[NUM_MACHINE_MODES];
172 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
174 #ifndef SLOW_UNALIGNED_ACCESS
175 #define SLOW_UNALIGNED_ACCESS 0
178 /* Register mappings for target machines without register windows. */
179 #ifndef INCOMING_REGNO
180 #define INCOMING_REGNO(OUT) (OUT)
182 #ifndef OUTGOING_REGNO
183 #define OUTGOING_REGNO(IN) (IN)
186 /* This is run once per compilation to set up which modes can be used
187 directly in memory and to initialize the block move optab. */
193 enum machine_mode mode;
194 /* Try indexing by frame ptr and try by stack ptr.
195 It is known that on the Convex the stack ptr isn't a valid index.
196 With luck, one or the other is valid on any machine. */
197 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
198 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
201 insn = emit_insn (gen_rtx (SET, 0, 0));
202 pat = PATTERN (insn);
204 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
205 mode = (enum machine_mode) ((int) mode + 1))
211 direct_load[(int) mode] = direct_store[(int) mode] = 0;
212 PUT_MODE (mem, mode);
213 PUT_MODE (mem1, mode);
215 /* See if there is some register that can be used in this mode and
216 directly loaded or stored from memory. */
218 if (mode != VOIDmode && mode != BLKmode)
219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
220 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
223 if (! HARD_REGNO_MODE_OK (regno, mode))
226 reg = gen_rtx (REG, mode, regno);
229 SET_DEST (pat) = reg;
230 if (recog (pat, insn, &num_clobbers) >= 0)
231 direct_load[(int) mode] = 1;
233 SET_SRC (pat) = mem1;
234 SET_DEST (pat) = reg;
235 if (recog (pat, insn, &num_clobbers) >= 0)
236 direct_load[(int) mode] = 1;
239 SET_DEST (pat) = mem;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_store[(int) mode] = 1;
244 SET_DEST (pat) = mem1;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_store[(int) mode] = 1;
253 /* This is run at the start of compiling a function. */
260 pending_stack_adjust = 0;
261 inhibit_defer_pop = 0;
262 cleanups_this_call = 0;
264 apply_args_value = 0;
268 /* Save all variables describing the current status into the structure *P.
269 This is used before starting a nested function. */
275 /* Instead of saving the postincrement queue, empty it. */
278 p->pending_stack_adjust = pending_stack_adjust;
279 p->inhibit_defer_pop = inhibit_defer_pop;
280 p->cleanups_this_call = cleanups_this_call;
281 p->saveregs_value = saveregs_value;
282 p->apply_args_value = apply_args_value;
283 p->forced_labels = forced_labels;
285 pending_stack_adjust = 0;
286 inhibit_defer_pop = 0;
287 cleanups_this_call = 0;
289 apply_args_value = 0;
293 /* Restore all variables describing the current status from the structure *P.
294 This is used after a nested function. */
297 restore_expr_status (p)
300 pending_stack_adjust = p->pending_stack_adjust;
301 inhibit_defer_pop = p->inhibit_defer_pop;
302 cleanups_this_call = p->cleanups_this_call;
303 saveregs_value = p->saveregs_value;
304 apply_args_value = p->apply_args_value;
305 forced_labels = p->forced_labels;
308 /* Manage the queue of increment instructions to be output
309 for POSTINCREMENT_EXPR expressions, etc. */
311 static rtx pending_chain;
313 /* Queue up to increment (or change) VAR later. BODY says how:
314 BODY should be the same thing you would pass to emit_insn
315 to increment right away. It will go to emit_insn later on.
317 The value is a QUEUED expression to be used in place of VAR
318 where you want to guarantee the pre-incrementation value of VAR. */
321 enqueue_insn (var, body)
324 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
325 var, NULL_RTX, NULL_RTX, body, pending_chain);
326 return pending_chain;
329 /* Use protect_from_queue to convert a QUEUED expression
330 into something that you can put immediately into an instruction.
331 If the queued incrementation has not happened yet,
332 protect_from_queue returns the variable itself.
333 If the incrementation has happened, protect_from_queue returns a temp
334 that contains a copy of the old value of the variable.
336 Any time an rtx which might possibly be a QUEUED is to be put
337 into an instruction, it must be passed through protect_from_queue first.
338 QUEUED expressions are not meaningful in instructions.
340 Do not pass a value through protect_from_queue and then hold
341 on to it for a while before putting it in an instruction!
342 If the queue is flushed in between, incorrect code will result. */
345 protect_from_queue (x, modify)
349 register RTX_CODE code = GET_CODE (x);
351 #if 0 /* A QUEUED can hang around after the queue is forced out. */
352 /* Shortcut for most common case. */
353 if (pending_chain == 0)
359 /* A special hack for read access to (MEM (QUEUED ...))
360 to facilitate use of autoincrement.
361 Make a copy of the contents of the memory location
362 rather than a copy of the address, but not
363 if the value is of mode BLKmode. */
364 if (code == MEM && GET_MODE (x) != BLKmode
365 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
367 register rtx y = XEXP (x, 0);
368 XEXP (x, 0) = QUEUED_VAR (y);
371 register rtx temp = gen_reg_rtx (GET_MODE (x));
372 emit_insn_before (gen_move_insn (temp, x),
378 /* Otherwise, recursively protect the subexpressions of all
379 the kinds of rtx's that can contain a QUEUED. */
381 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
382 else if (code == PLUS || code == MULT)
384 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
385 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
389 /* If the increment has not happened, use the variable itself. */
390 if (QUEUED_INSN (x) == 0)
391 return QUEUED_VAR (x);
392 /* If the increment has happened and a pre-increment copy exists,
394 if (QUEUED_COPY (x) != 0)
395 return QUEUED_COPY (x);
396 /* The increment has happened but we haven't set up a pre-increment copy.
397 Set one up now, and use it. */
398 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
399 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
401 return QUEUED_COPY (x);
404 /* Return nonzero if X contains a QUEUED expression:
405 if it contains anything that will be altered by a queued increment.
406 We handle only combinations of MEM, PLUS, MINUS and MULT operators
407 since memory addresses generally contain only those. */
413 register enum rtx_code code = GET_CODE (x);
419 return queued_subexp_p (XEXP (x, 0));
423 return queued_subexp_p (XEXP (x, 0))
424 || queued_subexp_p (XEXP (x, 1));
429 /* Perform all the pending incrementations. */
435 while (p = pending_chain)
437 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
438 pending_chain = QUEUED_NEXT (p);
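/* Illustrative sketch, not part of the original source: the calling
   discipline around the increment queue.  An operand produced while
   expanding a post-increment may be a QUEUED rtx; it must be passed
   through protect_from_queue just before being placed in an insn, and
   the queue is flushed afterwards so the deferred increments are output.
   The function name example_store is hypothetical.  */
#if 0
static void
example_store (target, op0)
     rtx target, op0;
{
  op0 = protect_from_queue (op0, 0);		/* OP0 is only read */
  target = protect_from_queue (target, 1);	/* TARGET will be modified */
  emit_move_insn (target, op0);
  emit_queue ();				/* output any pending increments */
}
#endif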
449 /* Copy data from FROM to TO, where the machine modes are not the same.
450 Both modes may be integer, or both may be floating.
451 UNSIGNEDP should be nonzero if FROM is an unsigned type.
452 This causes zero-extension instead of sign-extension. */
455 convert_move (to, from, unsignedp)
456 register rtx to, from;
459 enum machine_mode to_mode = GET_MODE (to);
460 enum machine_mode from_mode = GET_MODE (from);
461 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
462 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
466 /* rtx code for making an equivalent value. */
467 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
469 to = protect_from_queue (to, 1);
470 from = protect_from_queue (from, 0);
472 if (to_real != from_real)
475 /* If FROM is a SUBREG that indicates that we have already done at least
476 the required extension, strip it. We don't handle such SUBREGs as TO here. */
479 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
480 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
481 >= GET_MODE_SIZE (to_mode))
482 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
483 from = gen_lowpart (to_mode, from), from_mode = to_mode;
485 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
488 if (to_mode == from_mode
489 || (from_mode == VOIDmode && CONSTANT_P (from)))
491 emit_move_insn (to, from);
497 #ifdef HAVE_extendqfhf2
498 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
500 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
504 #ifdef HAVE_extendqfsf2
505 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
507 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
511 #ifdef HAVE_extendqfdf2
512 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
514 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
518 #ifdef HAVE_extendqfxf2
519 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
521 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
525 #ifdef HAVE_extendqftf2
526 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
528 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
533 #ifdef HAVE_extendhfsf2
534 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
536 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
540 #ifdef HAVE_extendhfdf2
541 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
543 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
547 #ifdef HAVE_extendhfxf2
548 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
550 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
554 #ifdef HAVE_extendhftf2
555 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
557 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
562 #ifdef HAVE_extendsfdf2
563 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
565 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
569 #ifdef HAVE_extendsfxf2
570 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
572 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
576 #ifdef HAVE_extendsftf2
577 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
579 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
583 #ifdef HAVE_extenddfxf2
584 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
586 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
590 #ifdef HAVE_extenddftf2
591 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
593 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
598 #ifdef HAVE_trunchfqf2
599 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
601 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
605 #ifdef HAVE_truncsfqf2
606 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
608 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
612 #ifdef HAVE_truncdfqf2
613 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
615 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
619 #ifdef HAVE_truncxfqf2
620 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
622 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
626 #ifdef HAVE_trunctfqf2
627 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
629 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncsfhf2
634 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
636 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
640 #ifdef HAVE_truncdfhf2
641 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
643 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxfhf2
648 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
650 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctfhf2
655 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
657 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
661 #ifdef HAVE_truncdfsf2
662 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
664 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncxfsf2
669 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
671 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
675 #ifdef HAVE_trunctfsf2
676 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
678 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
682 #ifdef HAVE_truncxfdf2
683 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
685 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
689 #ifdef HAVE_trunctfdf2
690 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
692 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
704 libcall = extendsfdf2_libfunc;
708 libcall = extendsfxf2_libfunc;
712 libcall = extendsftf2_libfunc;
721 libcall = truncdfsf2_libfunc;
725 libcall = extenddfxf2_libfunc;
729 libcall = extenddftf2_libfunc;
738 libcall = truncxfsf2_libfunc;
742 libcall = truncxfdf2_libfunc;
751 libcall = trunctfsf2_libfunc;
755 libcall = trunctfdf2_libfunc;
761 if (libcall == (rtx) 0)
762 /* This conversion is not implemented yet. */
765 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
766 emit_move_insn (to, hard_libcall_value (to_mode));
770 /* Now both modes are integers. */
772 /* Handle expanding beyond a word. */
773 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
774 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
781 enum machine_mode lowpart_mode;
782 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
784 /* Try converting directly if the insn is supported. */
785 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
788 /* If FROM is a SUBREG, put it into a register. Do this
789 so that we always generate the same set of insns for
790 better cse'ing; if an intermediate assignment occurred,
791 we won't be doing the operation directly on the SUBREG. */
792 if (optimize > 0 && GET_CODE (from) == SUBREG)
793 from = force_reg (from_mode, from);
794 emit_unop_insn (code, to, from, equiv_code);
797 /* Next, try converting via full word. */
798 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
799 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
800 != CODE_FOR_nothing))
802 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
803 emit_unop_insn (code, to,
804 gen_lowpart (word_mode, to), equiv_code);
808 /* No special multiword conversion insn; do it by hand. */
811 /* Get a copy of FROM widened to a word, if necessary. */
812 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
813 lowpart_mode = word_mode;
815 lowpart_mode = from_mode;
817 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
819 lowpart = gen_lowpart (lowpart_mode, to);
820 emit_move_insn (lowpart, lowfrom);
822 /* Compute the value to put in each remaining word. */
824 fill_value = const0_rtx;
829 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
830 && STORE_FLAG_VALUE == -1)
832 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
834 fill_value = gen_reg_rtx (word_mode);
835 emit_insn (gen_slt (fill_value));
841 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
842 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
844 fill_value = convert_to_mode (word_mode, fill_value, 1);
848 /* Fill the remaining words. */
849 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
851 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
852 rtx subword = operand_subword (to, index, 1, to_mode);
857 if (fill_value != subword)
858 emit_move_insn (subword, fill_value);
861 insns = get_insns ();
864 emit_no_conflict_block (insns, to, from, NULL_RTX,
865 gen_rtx (equiv_code, to_mode, from));
869 /* Truncating multi-word to a word or less. */
870 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
871 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
873 convert_move (to, gen_lowpart (word_mode, from), 0);
877 /* Handle pointer conversion */ /* SPEE 900220 */
878 if (to_mode == PSImode)
880 if (from_mode != SImode)
881 from = convert_to_mode (SImode, from, unsignedp);
883 #ifdef HAVE_truncsipsi
886 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
889 #endif /* HAVE_truncsipsi */
893 if (from_mode == PSImode)
895 if (to_mode != SImode)
897 from = convert_to_mode (SImode, from, unsignedp);
902 #ifdef HAVE_extendpsisi
903 if (HAVE_extendpsisi)
905 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
908 #endif /* HAVE_extendpsisi */
913 /* Now follow all the conversions between integers
914 no more than a word long. */
916 /* For truncation, usually we can just refer to FROM in a narrower mode. */
917 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
918 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
919 GET_MODE_BITSIZE (from_mode)))
921 if (!((GET_CODE (from) == MEM
922 && ! MEM_VOLATILE_P (from)
923 && direct_load[(int) to_mode]
924 && ! mode_dependent_address_p (XEXP (from, 0)))
925 || GET_CODE (from) == REG
926 || GET_CODE (from) == SUBREG))
927 from = force_reg (from_mode, from);
928 emit_move_insn (to, gen_lowpart (to_mode, from));
932 /* Handle extension. */
933 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
935 /* Convert directly if that works. */
936 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
939 /* If FROM is a SUBREG, put it into a register. Do this
940 so that we always generate the same set of insns for
941 better cse'ing; if an intermediate assignment occurred,
942 we won't be doing the operation directly on the SUBREG. */
943 if (optimize > 0 && GET_CODE (from) == SUBREG)
944 from = force_reg (from_mode, from);
945 emit_unop_insn (code, to, from, equiv_code);
950 enum machine_mode intermediate;
952 /* Search for a mode to convert via. */
953 for (intermediate = from_mode; intermediate != VOIDmode;
954 intermediate = GET_MODE_WIDER_MODE (intermediate))
955 if ((can_extend_p (to_mode, intermediate, unsignedp)
957 && (can_extend_p (intermediate, from_mode, unsignedp)
958 != CODE_FOR_nothing))
960 convert_move (to, convert_to_mode (intermediate, from,
961 unsignedp), unsignedp);
965 /* No suitable intermediate mode. */
970 /* Support special truncate insns for certain modes. */
972 if (from_mode == DImode && to_mode == SImode)
974 #ifdef HAVE_truncdisi2
977 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
981 convert_move (to, force_reg (from_mode, from), unsignedp);
985 if (from_mode == DImode && to_mode == HImode)
987 #ifdef HAVE_truncdihi2
990 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
994 convert_move (to, force_reg (from_mode, from), unsignedp);
998 if (from_mode == DImode && to_mode == QImode)
1000 #ifdef HAVE_truncdiqi2
1001 if (HAVE_truncdiqi2)
1003 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1007 convert_move (to, force_reg (from_mode, from), unsignedp);
1011 if (from_mode == SImode && to_mode == HImode)
1013 #ifdef HAVE_truncsihi2
1014 if (HAVE_truncsihi2)
1016 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1020 convert_move (to, force_reg (from_mode, from), unsignedp);
1024 if (from_mode == SImode && to_mode == QImode)
1026 #ifdef HAVE_truncsiqi2
1027 if (HAVE_truncsiqi2)
1029 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1033 convert_move (to, force_reg (from_mode, from), unsignedp);
1037 if (from_mode == HImode && to_mode == QImode)
1039 #ifdef HAVE_trunchiqi2
1040 if (HAVE_trunchiqi2)
1042 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1046 convert_move (to, force_reg (from_mode, from), unsignedp);
1050 /* Handle truncation of volatile memrefs, and so on;
1051 the things that couldn't be truncated directly,
1052 and for which there was no special instruction. */
1053 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1055 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1056 emit_move_insn (to, temp);
1060 /* Mode combination is not recognized. */
1064 /* Return an rtx for a value that would result
1065 from converting X to mode MODE.
1066 Both X and MODE may be floating, or both integer.
1067 UNSIGNEDP is nonzero if X is an unsigned value.
1068 This can be done by referring to a part of X in place
1069 or by copying to a new temporary with conversion.
1071 This function *must not* call protect_from_queue
1072 except when putting X into an insn (in which case convert_move does it). */
1075 convert_to_mode (mode, x, unsignedp)
1076 enum machine_mode mode;
1082 /* If FROM is a SUBREG that indicates that we have already done at least
1083 the required extension, strip it. */
1085 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1086 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1087 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1088 x = gen_lowpart (mode, x);
1090 if (mode == GET_MODE (x))
1093 /* There is one case that we must handle specially: If we are converting
1094 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1095 we are to interpret the constant as unsigned, gen_lowpart will do
1096 the wrong thing if the constant appears negative. What we want to do is
1097 make the high-order word of the constant zero, not all ones. */
1099 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1100 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1101 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1102 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1104 /* We can do this with a gen_lowpart if both desired and current modes
1105 are integer, and this is either a constant integer, a register, or a
1106 non-volatile MEM. Except for the constant case, we must be narrowing the operand. */
1109 if (GET_CODE (x) == CONST_INT
1110 || (GET_MODE_CLASS (mode) == MODE_INT
1111 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1112 && (GET_CODE (x) == CONST_DOUBLE
1113 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1114 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1115 && direct_load[(int) mode]
1116 || GET_CODE (x) == REG)))))
1117 return gen_lowpart (mode, x);
1119 temp = gen_reg_rtx (mode);
1120 convert_move (temp, x, unsignedp);
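/* Illustrative sketch, not part of the original source: what UNSIGNEDP
   controls in convert_move/convert_to_mode, shown with ordinary C types.
   Signed widening replicates the sign bit into the new high bits; unsigned
   widening fills them with zeros, which is the same point the CONST_INT
   comment above makes about the high-order word of a negative constant.  */
#if 0
#include <stdio.h>

int
main ()
{
  short narrow = -11;				/* bit pattern 0xfff5 */
  int widened_signed = narrow;			/* sign-extends to 0xfffffff5 */
  int widened_unsigned = (unsigned short) narrow; /* zero-extends to 0x0000fff5 */

  printf ("%08x\n", (unsigned) widened_signed);
  printf ("%08x\n", (unsigned) widened_unsigned);
  return 0;
}
#endif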
1124 /* Generate several move instructions to copy LEN bytes
1125 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1126 The caller must pass FROM and TO
1127 through protect_from_queue before calling.
1128 ALIGN (in bytes) is maximum alignment we can assume. */
1131 move_by_pieces (to, from, len, align)
1135 struct move_by_pieces data;
1136 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1137 int max_size = MOVE_MAX + 1;
1140 data.to_addr = to_addr;
1141 data.from_addr = from_addr;
1145 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1146 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1148 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1149 || GET_CODE (from_addr) == POST_INC
1150 || GET_CODE (from_addr) == POST_DEC);
1152 data.explicit_inc_from = 0;
1153 data.explicit_inc_to = 0;
1155 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1156 if (data.reverse) data.offset = len;
1159 /* If copying requires more than two move insns,
1160 copy addresses to registers (to make displacements shorter)
1161 and use post-increment if available. */
1162 if (!(data.autinc_from && data.autinc_to)
1163 && move_by_pieces_ninsns (len, align) > 2)
1165 #ifdef HAVE_PRE_DECREMENT
1166 if (data.reverse && ! data.autinc_from)
1168 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1169 data.autinc_from = 1;
1170 data.explicit_inc_from = -1;
1173 #ifdef HAVE_POST_INCREMENT
1174 if (! data.autinc_from)
1176 data.from_addr = copy_addr_to_reg (from_addr);
1177 data.autinc_from = 1;
1178 data.explicit_inc_from = 1;
1181 if (!data.autinc_from && CONSTANT_P (from_addr))
1182 data.from_addr = copy_addr_to_reg (from_addr);
1183 #ifdef HAVE_PRE_DECREMENT
1184 if (data.reverse && ! data.autinc_to)
1186 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1188 data.explicit_inc_to = -1;
1191 #ifdef HAVE_POST_INCREMENT
1192 if (! data.reverse && ! data.autinc_to)
1194 data.to_addr = copy_addr_to_reg (to_addr);
1196 data.explicit_inc_to = 1;
1199 if (!data.autinc_to && CONSTANT_P (to_addr))
1200 data.to_addr = copy_addr_to_reg (to_addr);
1203 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1204 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1207 /* First move what we can in the largest integer mode, then go to
1208 successively smaller modes. */
1210 while (max_size > 1)
1212 enum machine_mode mode = VOIDmode, tmode;
1213 enum insn_code icode;
1215 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1216 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1217 if (GET_MODE_SIZE (tmode) < max_size)
1220 if (mode == VOIDmode)
1223 icode = mov_optab->handlers[(int) mode].insn_code;
1224 if (icode != CODE_FOR_nothing
1225 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1226 GET_MODE_SIZE (mode)))
1227 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1229 max_size = GET_MODE_SIZE (mode);
1232 /* The code above should have handled everything. */
1237 /* Return number of insns required to move L bytes by pieces.
1238 ALIGN (in bytes) is maximum alignment we can assume. */
1241 move_by_pieces_ninsns (l, align)
1245 register int n_insns = 0;
1246 int max_size = MOVE_MAX + 1;
1248 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1249 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1252 while (max_size > 1)
1254 enum machine_mode mode = VOIDmode, tmode;
1255 enum insn_code icode;
1257 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1258 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1259 if (GET_MODE_SIZE (tmode) < max_size)
1262 if (mode == VOIDmode)
1265 icode = mov_optab->handlers[(int) mode].insn_code;
1266 if (icode != CODE_FOR_nothing
1267 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1268 GET_MODE_SIZE (mode)))
1269 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1271 max_size = GET_MODE_SIZE (mode);
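/* Illustrative sketch, not part of the original source: the counting scheme
   of move_by_pieces_ninsns recast as ordinary C.  Starting from the widest
   chunk the alignment permits, each chunk size accounts for as many whole
   pieces as fit and the remainder falls through to the next narrower size.
   The fixed chunk sizes 8/4/2/1 are an assumption of this example; in the
   compiler they come from the available integer machine modes.  */
#if 0
static int
example_ninsns (len, align)
     int len, align;			/* both in bytes */
{
  int n_insns = 0;
  int size;

  for (size = 8; size >= 1; size /= 2)
    if (size <= align)			/* use only sufficiently aligned chunks */
      {
	n_insns += len / size;
	len %= size;
      }
  return n_insns;			/* e.g. example_ninsns (13, 4) == 4 */
}
#endif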
1277 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1278 with move instructions for mode MODE. GENFUN is the gen_... function
1279 to make a move insn for that mode. DATA has all the other info. */
1282 move_by_pieces_1 (genfun, mode, data)
1284 enum machine_mode mode;
1285 struct move_by_pieces *data;
1287 register int size = GET_MODE_SIZE (mode);
1288 register rtx to1, from1;
1290 while (data->len >= size)
1292 if (data->reverse) data->offset -= size;
1294 to1 = (data->autinc_to
1295 ? gen_rtx (MEM, mode, data->to_addr)
1296 : change_address (data->to, mode,
1297 plus_constant (data->to_addr, data->offset)));
1300 ? gen_rtx (MEM, mode, data->from_addr)
1301 : change_address (data->from, mode,
1302 plus_constant (data->from_addr, data->offset)));
1304 #ifdef HAVE_PRE_DECREMENT
1305 if (data->explicit_inc_to < 0)
1306 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1307 if (data->explicit_inc_from < 0)
1308 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1311 emit_insn ((*genfun) (to1, from1));
1312 #ifdef HAVE_POST_INCREMENT
1313 if (data->explicit_inc_to > 0)
1314 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1315 if (data->explicit_inc_from > 0)
1316 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1319 if (! data->reverse) data->offset += size;
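/* Illustrative sketch, not part of the original source: the overall effect
   of move_by_pieces/move_by_pieces_1 recast as a plain C copy that moves
   the largest permitted chunks first and then successively smaller ones.
   memcpy of one chunk stands in for a single move insn of that width; the
   fixed chunk sizes and the name example_copy_by_pieces are assumptions of
   this example only.  */
#if 0
#include <string.h>

static void
example_copy_by_pieces (to, from, len)
     char *to, *from;
     int len;
{
  int size;

  for (size = 8; size >= 1; size /= 2)
    while (len >= size)
      {
	memcpy (to, from, size);	/* one "move insn" of SIZE bytes */
	to += size;
	from += size;
	len -= size;
      }
}
#endif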
1325 /* Emit code to move a block Y to a block X.
1326 This may be done with string-move instructions,
1327 with multiple scalar move instructions, or with a library call.
1329 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1331 SIZE is an rtx that says how long they are.
1332 ALIGN is the maximum alignment we can assume they have,
1333 measured in bytes. */
1336 emit_block_move (x, y, size, align)
1341 if (GET_MODE (x) != BLKmode)
1344 if (GET_MODE (y) != BLKmode)
1347 x = protect_from_queue (x, 1);
1348 y = protect_from_queue (y, 0);
1349 size = protect_from_queue (size, 0);
1351 if (GET_CODE (x) != MEM)
1353 if (GET_CODE (y) != MEM)
1358 if (GET_CODE (size) == CONST_INT
1359 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1360 move_by_pieces (x, y, INTVAL (size), align);
1363 /* Try the most limited insn first, because there's no point
1364 including more than one in the machine description unless
1365 the more limited one has some advantage. */
1367 rtx opalign = GEN_INT (align);
1368 enum machine_mode mode;
1370 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1371 mode = GET_MODE_WIDER_MODE (mode))
1373 enum insn_code code = movstr_optab[(int) mode];
1375 if (code != CODE_FOR_nothing
1376 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1377 here because if SIZE is less than the mode mask, as it is
1378 returned by the macro, it will definitely be less than the
1379 actual mode mask. */
1380 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1381 && (insn_operand_predicate[(int) code][0] == 0
1382 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1383 && (insn_operand_predicate[(int) code][1] == 0
1384 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1385 && (insn_operand_predicate[(int) code][3] == 0
1386 || (*insn_operand_predicate[(int) code][3]) (opalign,
1390 rtx last = get_last_insn ();
1393 op2 = convert_to_mode (mode, size, 1);
1394 if (insn_operand_predicate[(int) code][2] != 0
1395 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1396 op2 = copy_to_mode_reg (mode, op2);
1398 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1405 delete_insns_since (last);
1409 #ifdef TARGET_MEM_FUNCTIONS
1410 emit_library_call (memcpy_libfunc, 0,
1411 VOIDmode, 3, XEXP (x, 0), Pmode,
1413 convert_to_mode (Pmode, size, 1), Pmode);
1415 emit_library_call (bcopy_libfunc, 0,
1416 VOIDmode, 3, XEXP (y, 0), Pmode,
1418 convert_to_mode (Pmode, size, 1), Pmode);
1423 /* Copy all or part of a value X into registers starting at REGNO.
1424 The number of registers to be filled is NREGS. */
1427 move_block_to_reg (regno, x, nregs, mode)
1431 enum machine_mode mode;
1436 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1437 x = validize_mem (force_const_mem (mode, x));
1439 /* See if the machine can do this with a load multiple insn. */
1440 #ifdef HAVE_load_multiple
1441 last = get_last_insn ();
1442 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1450 delete_insns_since (last);
1453 for (i = 0; i < nregs; i++)
1454 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1455 operand_subword_force (x, i, mode));
1458 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1459 The number of registers to be filled is NREGS. */
1462 move_block_from_reg (regno, x, nregs)
1470 /* See if the machine can do this with a store multiple insn. */
1471 #ifdef HAVE_store_multiple
1472 last = get_last_insn ();
1473 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1481 delete_insns_since (last);
1484 for (i = 0; i < nregs; i++)
1486 rtx tem = operand_subword (x, i, 1, BLKmode);
1491 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1495 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1498 use_regs (regno, nregs)
1504 for (i = 0; i < nregs; i++)
1505 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1508 /* Mark the instructions emitted since PREV as a libcall block:
1509 add a REG_LIBCALL note to the first of those insns and a REG_RETVAL note to the most recent insn. */
1518 /* Find the instructions to mark */
1520 insn_first = NEXT_INSN (prev);
1522 insn_first = get_insns ();
1524 insn_last = get_last_insn ();
1526 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1527 REG_NOTES (insn_last));
1529 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1530 REG_NOTES (insn_first));
1533 /* Write zeros through the storage of OBJECT.
1534 If OBJECT has BLKmode, SIZE is its length in bytes. */
1537 clear_storage (object, size)
1541 if (GET_MODE (object) == BLKmode)
1543 #ifdef TARGET_MEM_FUNCTIONS
1544 emit_library_call (memset_libfunc, 0,
1546 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1547 GEN_INT (size), Pmode);
1549 emit_library_call (bzero_libfunc, 0,
1551 XEXP (object, 0), Pmode,
1552 GEN_INT (size), Pmode);
1556 emit_move_insn (object, const0_rtx);
1559 /* Generate code to copy Y into X.
1560 Both Y and X must have the same mode, except that
1561 Y can be a constant with VOIDmode.
1562 This mode cannot be BLKmode; use emit_block_move for that.
1564 Return the last instruction emitted. */
1567 emit_move_insn (x, y)
1570 enum machine_mode mode = GET_MODE (x);
1571 enum machine_mode submode;
1572 enum mode_class class = GET_MODE_CLASS (mode);
1575 x = protect_from_queue (x, 1);
1576 y = protect_from_queue (y, 0);
1578 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1581 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1582 y = force_const_mem (mode, y);
1584 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
1586 if (GET_CODE (x) == MEM
1587 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1588 && ! push_operand (x, GET_MODE (x)))
1590 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1591 x = change_address (x, VOIDmode, XEXP (x, 0));
1593 if (GET_CODE (y) == MEM
1594 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1596 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1597 y = change_address (y, VOIDmode, XEXP (y, 0));
1599 if (mode == BLKmode)
1602 return emit_move_insn_1 (x, y);
1605 /* Low level part of emit_move_insn.
1606 Called just like emit_move_insn, but assumes X and Y
1607 are basically valid. */
1610 emit_move_insn_1 (x, y)
1613 enum machine_mode mode = GET_MODE (x);
1614 enum machine_mode submode;
1615 enum mode_class class = GET_MODE_CLASS (mode);
1618 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1619 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1620 (class == MODE_COMPLEX_INT
1621 ? MODE_INT : MODE_FLOAT),
1624 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1626 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1628 /* Expand complex moves by moving real part and imag part, if possible. */
1629 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1630 && submode != BLKmode
1631 && (mov_optab->handlers[(int) submode].insn_code
1632 != CODE_FOR_nothing))
1634 /* Don't split destination if it is a stack push. */
1635 int stack = push_operand (x, GET_MODE (x));
1636 rtx prev = get_last_insn ();
1638 /* Tell flow that the whole of the destination is being set. */
1639 if (GET_CODE (x) == REG)
1640 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1642 /* If this is a stack, push the highpart first, so it
1643 will be in the argument order.
1645 In that case, change_address is used only to convert
1646 the mode, not to change the address. */
1647 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1648 ((stack ? change_address (x, submode, (rtx) 0)
1649 : gen_highpart (submode, x)),
1650 gen_highpart (submode, y)));
1651 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1652 ((stack ? change_address (x, submode, (rtx) 0)
1653 : gen_lowpart (submode, x)),
1654 gen_lowpart (submode, y)));
1658 return get_last_insn ();
1661 /* This will handle any multi-word mode that lacks a move_insn pattern.
1662 However, you will get better code if you define such patterns,
1663 even if they must turn into multiple assembler instructions. */
1664 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1667 rtx prev_insn = get_last_insn ();
1670 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1673 rtx xpart = operand_subword (x, i, 1, mode);
1674 rtx ypart = operand_subword (y, i, 1, mode);
1676 /* If we can't get a part of Y, put Y into memory if it is a
1677 constant. Otherwise, force it into a register. If we still
1678 can't get a part of Y, abort. */
1679 if (ypart == 0 && CONSTANT_P (y))
1681 y = force_const_mem (mode, y);
1682 ypart = operand_subword (y, i, 1, mode);
1684 else if (ypart == 0)
1685 ypart = operand_subword_force (y, i, mode);
1687 if (xpart == 0 || ypart == 0)
1690 last_insn = emit_move_insn (xpart, ypart);
1692 /* Mark these insns as a libcall block. */
1693 group_insns (prev_insn);
1701 /* Pushing data onto the stack. */
1703 /* Push a block of length SIZE (perhaps variable)
1704 and return an rtx to address the beginning of the block.
1705 Note that it is not possible for the value returned to be a QUEUED.
1706 The value may be virtual_outgoing_args_rtx.
1708 EXTRA is the number of bytes of padding to push in addition to SIZE.
1709 BELOW nonzero means this padding comes at low addresses;
1710 otherwise, the padding comes at high addresses. */
1713 push_block (size, extra, below)
1718 if (CONSTANT_P (size))
1719 anti_adjust_stack (plus_constant (size, extra));
1720 else if (GET_CODE (size) == REG && extra == 0)
1721 anti_adjust_stack (size);
1724 rtx temp = copy_to_mode_reg (Pmode, size);
1726 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1727 temp, 0, OPTAB_LIB_WIDEN);
1728 anti_adjust_stack (temp);
1731 #ifdef STACK_GROWS_DOWNWARD
1732 temp = virtual_outgoing_args_rtx;
1733 if (extra != 0 && below)
1734 temp = plus_constant (temp, extra);
1736 if (GET_CODE (size) == CONST_INT)
1737 temp = plus_constant (virtual_outgoing_args_rtx,
1738 - INTVAL (size) - (below ? 0 : extra));
1739 else if (extra != 0 && !below)
1740 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1741 negate_rtx (Pmode, plus_constant (size, extra)));
1743 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1744 negate_rtx (Pmode, size));
1747 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
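/* Illustrative sketch, not part of the original source: the address
   arithmetic at the end of push_block for a constant SIZE, with the
   virtual outgoing-args pointer replaced by a plain integer BASE.
   GROWS_DOWNWARD selects between the two preprocessor branches above;
   BELOW says whether the EXTRA padding sits at the low end of the block.  */
#if 0
static int
example_push_block_addr (base, size, extra, below, grows_downward)
     int base, size, extra, below, grows_downward;
{
  if (grows_downward)
    return (extra != 0 && below) ? base + extra : base;
  else
    return base - size - (below ? 0 : extra);
}
#endif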
1753 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1756 /* Generate code to push X onto the stack, assuming it has mode MODE and
1758 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
1760 SIZE is an rtx for the size of data to be copied (in bytes),
1761 needed only if X is BLKmode.
1763 ALIGN (in bytes) is maximum alignment we can assume.
1765 If PARTIAL and REG are both nonzero, then copy that many of the first
1766 words of X into registers starting with REG, and push the rest of X.
1767 The amount of space pushed is decreased by PARTIAL words,
1768 rounded *down* to a multiple of PARM_BOUNDARY.
1769 REG must be a hard register in this case.
1770 If REG is zero but PARTIAL is not, take all other actions for an
1771 argument partially in registers, but do not actually load any registers.
1774 EXTRA is the amount in bytes of extra space to leave next to this arg.
1775 This is ignored if an argument block has already been allocated.
1777 On a machine that lacks real push insns, ARGS_ADDR is the address of
1778 the bottom of the argument block for this call. We use indexing off there
1779 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1780 argument block has not been preallocated.
1782 ARGS_SO_FAR is the size of args previously pushed for this call. */
1785 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1786 args_addr, args_so_far)
1788 enum machine_mode mode;
1799 enum direction stack_direction
1800 #ifdef STACK_GROWS_DOWNWARD
1806 /* Decide where to pad the argument: `downward' for below,
1807 `upward' for above, or `none' for don't pad it.
1808 Default is below for small data on big-endian machines; else above. */
1809 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1811 /* Invert direction if stack is post-update. */
1812 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1813 if (where_pad != none)
1814 where_pad = (where_pad == downward ? upward : downward);
1816 xinner = x = protect_from_queue (x, 0);
1818 if (mode == BLKmode)
1820 /* Copy a block into the stack, entirely or partially. */
1823 int used = partial * UNITS_PER_WORD;
1824 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1832 /* USED is now the # of bytes we need not copy to the stack
1833 because registers will take care of them. */
1836 xinner = change_address (xinner, BLKmode,
1837 plus_constant (XEXP (xinner, 0), used));
1839 /* If the partial register-part of the arg counts in its stack size,
1840 skip the part of stack space corresponding to the registers.
1841 Otherwise, start copying to the beginning of the stack space,
1842 by setting SKIP to 0. */
1843 #ifndef REG_PARM_STACK_SPACE
1849 #ifdef PUSH_ROUNDING
1850 /* Do it with several push insns if that doesn't take lots of insns
1851 and if there is no difficulty with push insns that skip bytes
1852 on the stack for alignment purposes. */
1854 && GET_CODE (size) == CONST_INT
1856 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1858 /* Here we avoid the case of a structure whose weak alignment
1859 forces many pushes of a small amount of data,
1860 and such small pushes do rounding that causes trouble. */
1861 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1862 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1863 || PUSH_ROUNDING (align) == align)
1864 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1866 /* Push padding now if padding above and stack grows down,
1867 or if padding below and stack grows up.
1868 But if space already allocated, this has already been done. */
1869 if (extra && args_addr == 0
1870 && where_pad != none && where_pad != stack_direction)
1871 anti_adjust_stack (GEN_INT (extra));
1873 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1874 INTVAL (size) - used, align);
1877 #endif /* PUSH_ROUNDING */
1879 /* Otherwise make space on the stack and copy the data
1880 to the address of that space. */
1882 /* Deduct words put into registers from the size we must copy. */
1885 if (GET_CODE (size) == CONST_INT)
1886 size = GEN_INT (INTVAL (size) - used);
1888 size = expand_binop (GET_MODE (size), sub_optab, size,
1889 GEN_INT (used), NULL_RTX, 0,
1893 /* Get the address of the stack space.
1894 In this case, we do not deal with EXTRA separately.
1895 A single stack adjust will do. */
1898 temp = push_block (size, extra, where_pad == downward);
1901 else if (GET_CODE (args_so_far) == CONST_INT)
1902 temp = memory_address (BLKmode,
1903 plus_constant (args_addr,
1904 skip + INTVAL (args_so_far)));
1906 temp = memory_address (BLKmode,
1907 plus_constant (gen_rtx (PLUS, Pmode,
1908 args_addr, args_so_far),
1911 /* TEMP is the address of the block. Copy the data there. */
1912 if (GET_CODE (size) == CONST_INT
1913 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1916 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1917 INTVAL (size), align);
1920 /* Try the most limited insn first, because there's no point
1921 including more than one in the machine description unless
1922 the more limited one has some advantage. */
1923 #ifdef HAVE_movstrqi
1925 && GET_CODE (size) == CONST_INT
1926 && ((unsigned) INTVAL (size)
1927 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1929 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1930 xinner, size, GEN_INT (align));
1938 #ifdef HAVE_movstrhi
1940 && GET_CODE (size) == CONST_INT
1941 && ((unsigned) INTVAL (size)
1942 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1944 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1945 xinner, size, GEN_INT (align));
1953 #ifdef HAVE_movstrsi
1956 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1957 xinner, size, GEN_INT (align));
1965 #ifdef HAVE_movstrdi
1968 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1969 xinner, size, GEN_INT (align));
1978 #ifndef ACCUMULATE_OUTGOING_ARGS
1979 /* If the source is referenced relative to the stack pointer,
1980 copy it to another register to stabilize it. We do not need
1981 to do this if we know that we won't be changing sp. */
1983 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1984 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1985 temp = copy_to_reg (temp);
1988 /* Make inhibit_defer_pop nonzero around the library call
1989 to force it to pop the bcopy-arguments right away. */
1991 #ifdef TARGET_MEM_FUNCTIONS
1992 emit_library_call (memcpy_libfunc, 0,
1993 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1996 emit_library_call (bcopy_libfunc, 0,
1997 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2003 else if (partial > 0)
2005 /* Scalar partly in registers. */
2007 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2010 /* # words of start of argument
2011 that we must make space for but need not store. */
2012 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2013 int args_offset = INTVAL (args_so_far);
2016 /* Push padding now if padding above and stack grows down,
2017 or if padding below and stack grows up.
2018 But if space already allocated, this has already been done. */
2019 if (extra && args_addr == 0
2020 && where_pad != none && where_pad != stack_direction)
2021 anti_adjust_stack (GEN_INT (extra));
2023 /* If we make space by pushing it, we might as well push
2024 the real data. Otherwise, we can leave OFFSET nonzero
2025 and leave the space uninitialized. */
2029 /* Now NOT_STACK gets the number of words that we don't need to
2030 allocate on the stack. */
2031 not_stack = partial - offset;
2033 /* If the partial register-part of the arg counts in its stack size,
2034 skip the part of stack space corresponding to the registers.
2035 Otherwise, start copying to the beginning of the stack space,
2036 by setting SKIP to 0. */
2037 #ifndef REG_PARM_STACK_SPACE
2043 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2044 x = validize_mem (force_const_mem (mode, x));
2046 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2047 SUBREGs of such registers are not allowed. */
2048 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2049 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2050 x = copy_to_reg (x);
2052 /* Loop over all the words allocated on the stack for this arg. */
2053 /* We can do it by words, because any scalar bigger than a word
2054 has a size a multiple of a word. */
2055 #ifndef PUSH_ARGS_REVERSED
2056 for (i = not_stack; i < size; i++)
2058 for (i = size - 1; i >= not_stack; i--)
2060 if (i >= not_stack + offset)
2061 emit_push_insn (operand_subword_force (x, i, mode),
2062 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2064 GEN_INT (args_offset + ((i - not_stack + skip)
2065 * UNITS_PER_WORD)));
2071 /* Push padding now if padding above and stack grows down,
2072 or if padding below and stack grows up.
2073 But if space already allocated, this has already been done. */
2074 if (extra && args_addr == 0
2075 && where_pad != none && where_pad != stack_direction)
2076 anti_adjust_stack (GEN_INT (extra));
2078 #ifdef PUSH_ROUNDING
2080 addr = gen_push_operand ();
2083 if (GET_CODE (args_so_far) == CONST_INT)
2085 = memory_address (mode,
2086 plus_constant (args_addr, INTVAL (args_so_far)));
2088 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2091 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2095 /* If part should go in registers, copy that part
2096 into the appropriate registers. Do this now, at the end,
2097 since mem-to-mem copies above may do function calls. */
2098 if (partial > 0 && reg != 0)
2099 move_block_to_reg (REGNO (reg), x, partial, mode);
2101 if (extra && args_addr == 0 && where_pad == stack_direction)
2102 anti_adjust_stack (GEN_INT (extra));
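/* Illustrative sketch, not part of the original source: which words of a
   scalar argument reach the stack when PARTIAL of them travel in registers,
   mirroring the "scalar partly in registers" loop above.  The first
   NOT_STACK = PARTIAL - OFFSET words live entirely in registers, the next
   OFFSET words get stack space reserved but are not stored, and the words
   from NOT_STACK + OFFSET up to SIZE - 1 are pushed.  */
#if 0
#include <stdio.h>

static void
example_partial_push (size, partial, offset)
     int size, partial, offset;		/* all counted in words */
{
  int not_stack = partial - offset;
  int i;

  for (i = not_stack; i < size; i++)
    if (i >= not_stack + offset)
      printf ("push word %d\n", i);
    else
      printf ("reserve word %d (space only, value stays in registers)\n", i);
}
#endif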
2105 /* Expand an assignment that stores the value of FROM into TO.
2106 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2107 (This may contain a QUEUED rtx.)
2108 Otherwise, the returned value is not meaningful.
2110 SUGGEST_REG is no longer actually used.
2111 It used to mean, copy the value through a register
2112 and return that register, if that is possible.
2113 But now we do this if WANT_VALUE.
2115 If the value stored is a constant, we return the constant. */
2118 expand_assignment (to, from, want_value, suggest_reg)
2123 register rtx to_rtx = 0;
2126 /* Don't crash if the lhs of the assignment was erroneous. */
2128 if (TREE_CODE (to) == ERROR_MARK)
2129 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2131 /* Assignment of a structure component needs special treatment
2132 if the structure component's rtx is not simply a MEM.
2133 Assignment of an array element at a constant index
2134 has the same problem. */
2136 if (TREE_CODE (to) == COMPONENT_REF
2137 || TREE_CODE (to) == BIT_FIELD_REF
2138 || (TREE_CODE (to) == ARRAY_REF
2139 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2140 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2142 enum machine_mode mode1;
2148 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2149 &mode1, &unsignedp, &volatilep);
2151 /* If we are going to use store_bit_field and extract_bit_field,
2152 make sure to_rtx will be safe for multiple use. */
2154 if (mode1 == VOIDmode && want_value)
2155 tem = stabilize_reference (tem);
2157 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2160 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2162 if (GET_CODE (to_rtx) != MEM)
2164 to_rtx = change_address (to_rtx, VOIDmode,
2165 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2166 force_reg (Pmode, offset_rtx)));
2170 if (GET_CODE (to_rtx) == MEM)
2171 MEM_VOLATILE_P (to_rtx) = 1;
2172 #if 0 /* This was turned off because, when a field is volatile
2173 in an object which is not volatile, the object may be in a register,
2174 and then we would abort over here. */
2180 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2182 /* Spurious cast makes HPUX compiler happy. */
2183 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2186 /* Required alignment of containing datum. */
2187 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2188 int_size_in_bytes (TREE_TYPE (tem)));
2189 preserve_temp_slots (result);
2195 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2196 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2199 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2201 /* Don't move directly into a return register. */
2202 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2204 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2205 emit_move_insn (to_rtx, temp);
2206 preserve_temp_slots (to_rtx);
2211 /* In case we are returning the contents of an object which overlaps
2212 the place the value is being stored, use a safe function when copying
2213 a value through a pointer into a structure value return block. */
2214 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2215 && current_function_returns_struct
2216 && !current_function_returns_pcc_struct)
2218 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2219 rtx size = expr_size (from);
2221 #ifdef TARGET_MEM_FUNCTIONS
2222 emit_library_call (memcpy_libfunc, 0,
2223 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2224 XEXP (from_rtx, 0), Pmode,
2227 emit_library_call (bcopy_libfunc, 0,
2228 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2229 XEXP (to_rtx, 0), Pmode,
2233 preserve_temp_slots (to_rtx);
2238 /* Compute FROM and store the value in the rtx we got. */
2240 result = store_expr (from, to_rtx, want_value);
2241 preserve_temp_slots (result);
2246 /* Generate code for computing expression EXP,
2247 and storing the value into TARGET.
2248 Returns TARGET or an equivalent value.
2249 TARGET may contain a QUEUED rtx.
2251 If SUGGEST_REG is nonzero, copy the value through a register
2252 and return that register, if that is possible.
2254 If the value stored is a constant, we return the constant. */
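/* For instance, for a nested assignment such as Y = (X = 5), the value
   returned for the inner store can simply be the constant 5, which the
   caller may then reuse for the outer assignment without re-reading X.
   (Illustrative case only; what is returned also depends on SUGGEST_REG.)  */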
2257 store_expr (exp, target, suggest_reg)
2259 register rtx target;
2263 int dont_return_target = 0;
2265 if (TREE_CODE (exp) == COMPOUND_EXPR)
2267 /* Perform first part of compound expression, then assign from second part.  */
2269 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2271 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2273 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2275 /* For conditional expression, get safe form of the target. Then
2276 test the condition, doing the appropriate assignment on either
2277 side. This avoids the creation of unnecessary temporaries.
2278 For non-BLKmode, it is more efficient not to do this. */
2280 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2283 target = protect_from_queue (target, 1);
2286 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2287 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2289 emit_jump_insn (gen_jump (lab2));
2292 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2298 else if (suggest_reg && GET_CODE (target) == MEM
2299 && GET_MODE (target) != BLKmode)
2300 /* If target is in memory and caller wants value in a register instead,
2301 arrange that. Pass TARGET as target for expand_expr so that,
2302 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2303 We know expand_expr will not use the target in that case. */
2305 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2306 GET_MODE (target), 0);
2307 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2308 temp = copy_to_reg (temp);
2309 dont_return_target = 1;
2311 else if (queued_subexp_p (target))
2312 /* If target contains a postincrement, it is not safe
2313 to use as the returned value. It would access the wrong
2314 place by the time the queued increment gets output.
2315 So copy the value through a temporary and use that temp instead.  */
2318 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2320 /* Expand EXP into a new pseudo. */
2321 temp = gen_reg_rtx (GET_MODE (target));
2322 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2325 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2326 dont_return_target = 1;
2328 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2329 /* If this is a scalar in a register that is stored in a wider mode
2330 than the declared mode, compute the result into its declared mode
2331 and then convert to the wider mode.  Our value is the computed expression.  */
2334 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2335 convert_move (SUBREG_REG (target), temp,
2336 SUBREG_PROMOTED_UNSIGNED_P (target));
2341 temp = expand_expr (exp, target, GET_MODE (target), 0);
2342 /* DO return TARGET if it's a specified hardware register.
2343 expand_return relies on this. */
2344 if (!(target && GET_CODE (target) == REG
2345 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2346 && CONSTANT_P (temp))
2347 dont_return_target = 1;
2350 /* If value was not generated in the target, store it there.
2351 Convert the value to TARGET's type first if necessary.  */
2353 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2355 target = protect_from_queue (target, 1);
2356 if (GET_MODE (temp) != GET_MODE (target)
2357 && GET_MODE (temp) != VOIDmode)
2359 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2360 if (dont_return_target)
2362 /* In this case, we will return TEMP,
2363 so make sure it has the proper mode.
2364 But don't forget to store the value into TARGET. */
2365 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2366 emit_move_insn (target, temp);
2369 convert_move (target, temp, unsignedp);
2372 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2374 /* Handle copying a string constant into an array.
2375 The string constant may be shorter than the array.
2376 So copy just the string's actual length, and clear the rest. */
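/* For example, for a declaration such as
       char buf[8] = "abc";
   the string constant supplies 4 bytes (counting the terminating null),
   so only those 4 bytes are block-moved into BUF and the remaining
   4 bytes of the array are cleared.  */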
2379 /* Get the size of the data type of the string,
2380 which is actually the size of the target. */
2381 size = expr_size (exp);
2382 if (GET_CODE (size) == CONST_INT
2383 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2384 emit_block_move (target, temp, size,
2385 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2388 /* Compute the size of the data to copy from the string. */
2390 = fold (build (MIN_EXPR, sizetype,
2391 size_binop (CEIL_DIV_EXPR,
2392 TYPE_SIZE (TREE_TYPE (exp)),
2393 size_int (BITS_PER_UNIT)),
2395 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2396 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2400 /* Copy that much. */
2401 emit_block_move (target, temp, copy_size_rtx,
2402 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2404 /* Figure out how much is left in TARGET
2405 that we have to clear. */
2406 if (GET_CODE (copy_size_rtx) == CONST_INT)
2408 temp = plus_constant (XEXP (target, 0),
2409 TREE_STRING_LENGTH (exp));
2410 size = plus_constant (size,
2411 - TREE_STRING_LENGTH (exp));
2415 enum machine_mode size_mode = Pmode;
2417 temp = force_reg (Pmode, XEXP (target, 0));
2418 temp = expand_binop (size_mode, add_optab, temp,
2419 copy_size_rtx, NULL_RTX, 0,
2422 size = expand_binop (size_mode, sub_optab, size,
2423 copy_size_rtx, NULL_RTX, 0,
2426 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2427 GET_MODE (size), 0, 0);
2428 label = gen_label_rtx ();
2429 emit_jump_insn (gen_blt (label));
2432 if (size != const0_rtx)
2434 #ifdef TARGET_MEM_FUNCTIONS
2435 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2436 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2438 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2439 temp, Pmode, size, Pmode);
2446 else if (GET_MODE (temp) == BLKmode)
2447 emit_block_move (target, temp, expr_size (exp),
2448 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2450 emit_move_insn (target, temp);
2452 if (dont_return_target)
2457 /* Store the value of constructor EXP into the rtx TARGET.
2458 TARGET is either a REG or a MEM. */
2461 store_constructor (exp, target)
2465 tree type = TREE_TYPE (exp);
2467 /* We know our target cannot conflict, since safe_from_p has been called. */
2469 /* Don't try copying piece by piece into a hard register
2470 since that is vulnerable to being clobbered by EXP.
2471 Instead, construct in a pseudo register and then copy it all. */
2472 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2474 rtx temp = gen_reg_rtx (GET_MODE (target));
2475 store_constructor (exp, temp);
2476 emit_move_insn (target, temp);
2481 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2485 /* Inform later passes that the whole union value is dead. */
2486 if (TREE_CODE (type) == UNION_TYPE)
2487 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2489 /* If we are building a static constructor into a register,
2490 set the initial value as zero so we can fold the value into
2492 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2493 emit_move_insn (target, const0_rtx);
2495 /* If the constructor has fewer fields than the structure,
2496 clear the whole structure first. */
2497 else if (list_length (CONSTRUCTOR_ELTS (exp))
2498 != list_length (TYPE_FIELDS (type)))
2499 clear_storage (target, int_size_in_bytes (type));
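/* For example, a constructor such as { 1, 2 } for a structure with three
   fields has fewer elements than fields, so the whole structure is cleared
   just above before the two given fields are stored in the loop below.  */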
2501 /* Inform later passes that the old value is dead. */
2502 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2504 /* Store each element of the constructor into
2505 the corresponding field of TARGET. */
2507 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2509 register tree field = TREE_PURPOSE (elt);
2510 register enum machine_mode mode;
2515 /* Just ignore missing fields.
2516 We cleared the whole structure, above,
2517 if any fields are missing. */
2521 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2522 unsignedp = TREE_UNSIGNED (field);
2523 mode = DECL_MODE (field);
2524 if (DECL_BIT_FIELD (field))
2527 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2528 /* ??? This case remains to be written. */
2531 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2533 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2534 /* The alignment of TARGET is
2535 at least what its type requires. */
2537 TYPE_ALIGN (type) / BITS_PER_UNIT,
2538 int_size_in_bytes (type));
2541 else if (TREE_CODE (type) == ARRAY_TYPE)
2545 tree domain = TYPE_DOMAIN (type);
2546 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2547 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2548 tree elttype = TREE_TYPE (type);
2550 /* If the constructor has fewer fields than the structure,
2551 clear the whole structure first.  Similarly if this is a
2552 static constructor of a non-BLKmode object. */
2554 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2555 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2556 clear_storage (target, maxelt - minelt + 1);
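/* Likewise, an initializer such as { 1, 2 } for a five-element array has
   fewer elements than the array, so the clearing just above runs before
   the two given elements are stored.  */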
2558 /* Inform later passes that the old value is dead. */
2559 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2561 /* Store each element of the constructor into
2562 the corresponding element of TARGET, determined
2563 by counting the elements. */
2564 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2566 elt = TREE_CHAIN (elt), i++)
2568 register enum machine_mode mode;
2573 mode = TYPE_MODE (elttype);
2574 bitsize = GET_MODE_BITSIZE (mode);
2575 unsignedp = TREE_UNSIGNED (elttype);
2577 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2579 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2580 /* The alignment of TARGET is
2581 at least what its type requires. */
2583 TYPE_ALIGN (type) / BITS_PER_UNIT,
2584 int_size_in_bytes (type));
2592 /* Store the value of EXP (an expression tree)
2593 into a subfield of TARGET which has mode MODE and occupies
2594 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2595 If MODE is VOIDmode, it means that we are storing into a bit-field.
2597 If VALUE_MODE is VOIDmode, return nothing in particular.
2598 UNSIGNEDP is not used in this case.
2600 Otherwise, return an rtx for the value stored. This rtx
2601 has mode VALUE_MODE if that is convenient to do.
2602 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2604 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2605 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
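/* For instance, assigning to a 3-bit field declared as
       struct { unsigned int f : 3; } s;
   reaches here with BITSIZE == 3, BITPOS giving F's position within S,
   and MODE == VOIDmode since F is a bit-field.  */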
2608 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2609 unsignedp, align, total_size)
2611 int bitsize, bitpos;
2612 enum machine_mode mode;
2614 enum machine_mode value_mode;
2619 HOST_WIDE_INT width_mask = 0;
2621 if (bitsize < HOST_BITS_PER_WIDE_INT)
2622 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2624 /* If we are storing into an unaligned field of an aligned union that is
2625 in a register, we may have the mode of TARGET being an integer mode but
2626 MODE == BLKmode. In that case, get an aligned object whose size and
2627 alignment are the same as TARGET and store TARGET into it (we can avoid
2628 the store if the field being stored is the entire width of TARGET). Then
2629 call ourselves recursively to store the field into a BLKmode version of
2630 that object. Finally, load from the object into TARGET. This is not
2631 very efficient in general, but should only be slightly more expensive
2632 than the otherwise-required unaligned accesses. Perhaps this can be
2633 cleaned up later. */
2636 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2638 rtx object = assign_stack_temp (GET_MODE (target),
2639 GET_MODE_SIZE (GET_MODE (target)), 0);
2640 rtx blk_object = copy_rtx (object);
2642 PUT_MODE (blk_object, BLKmode);
2644 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2645 emit_move_insn (object, target);
2647 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2650 emit_move_insn (target, object);
2655 /* If the structure is in a register or if the component
2656 is a bit field, we cannot use addressing to access it.
2657 Use bit-field techniques or SUBREG to store in it. */
2659 if (mode == VOIDmode
2660 || (mode != BLKmode && ! direct_store[(int) mode])
2661 || GET_CODE (target) == REG
2662 || GET_CODE (target) == SUBREG)
2664 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2665 /* Store the value in the bitfield. */
2666 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2667 if (value_mode != VOIDmode)
2669 /* The caller wants an rtx for the value. */
2670 /* If possible, avoid refetching from the bitfield itself. */
2672 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2675 enum machine_mode tmode;
2678 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2679 tmode = GET_MODE (temp);
2680 if (tmode == VOIDmode)
2682 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2683 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2684 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
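/* For instance, with an 8-bit field held in a 32-bit TEMP, COUNT is 24:
   shifting left by 24 and back right by 24 leaves just the stored 8 bits,
   extended to the full width of TEMP.  (A 32-bit word is assumed here
   purely for illustration.)  */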
2686 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2687 NULL_RTX, value_mode, 0, align,
2694 rtx addr = XEXP (target, 0);
2697 /* If a value is wanted, it must be the lhs;
2698 so make the address stable for multiple use. */
2700 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2701 && ! CONSTANT_ADDRESS_P (addr)
2702 /* A frame-pointer reference is already stable. */
2703 && ! (GET_CODE (addr) == PLUS
2704 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2705 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2706 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2707 addr = copy_to_reg (addr);
2709 /* Now build a reference to just the desired component. */
2711 to_rtx = change_address (target, mode,
2712 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2713 MEM_IN_STRUCT_P (to_rtx) = 1;
2715 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2719 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2720 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2721 ARRAY_REFs and find the ultimate containing object, which we return.
2723 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2724 bit position, and *PUNSIGNEDP to the signedness of the field.
2725 If the position of the field is variable, we store a tree
2726 giving the variable offset (in units) in *POFFSET.
2727 This offset is in addition to the bit position.
2728 If the position is not variable, we store 0 in *POFFSET.
2730 If any of the extraction expressions is volatile,
2731 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2733 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2734 is a mode that can be used to access the field. In that case, *PBITSIZE
2737 If the field describes a variable-sized object, *PMODE is set to
2738 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2739 this case, but the address of the object can be found. */
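/* For instance, for REC.FLD where FLD is an ordinary (non-bit-field) int
   member at byte offset 4, the containing object REC is returned with
   *PBITSIZE == 32, *PBITPOS == 32 and *POFFSET == 0, assuming 32-bit ints
   and 8-bit units.  */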
2742 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2743 punsignedp, pvolatilep)
2748 enum machine_mode *pmode;
2753 enum machine_mode mode = VOIDmode;
2754 tree offset = integer_zero_node;
2756 if (TREE_CODE (exp) == COMPONENT_REF)
2758 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2759 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2760 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2761 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2763 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2765 size_tree = TREE_OPERAND (exp, 1);
2766 *punsignedp = TREE_UNSIGNED (exp);
2770 mode = TYPE_MODE (TREE_TYPE (exp));
2771 *pbitsize = GET_MODE_BITSIZE (mode);
2772 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2777 if (TREE_CODE (size_tree) != INTEGER_CST)
2778 mode = BLKmode, *pbitsize = -1;
2780 *pbitsize = TREE_INT_CST_LOW (size_tree);
2783 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2784 and find the ultimate containing object. */
2790 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2792 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2793 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2794 : TREE_OPERAND (exp, 2));
2796 if (TREE_CODE (pos) == PLUS_EXPR)
2799 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2801 constant = TREE_OPERAND (pos, 0);
2802 var = TREE_OPERAND (pos, 1);
2804 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2806 constant = TREE_OPERAND (pos, 1);
2807 var = TREE_OPERAND (pos, 0);
2812 *pbitpos += TREE_INT_CST_LOW (constant);
2813 offset = size_binop (PLUS_EXPR, offset,
2814 size_binop (FLOOR_DIV_EXPR, var,
2815 size_int (BITS_PER_UNIT)));
2817 else if (TREE_CODE (pos) == INTEGER_CST)
2818 *pbitpos += TREE_INT_CST_LOW (pos);
2821 /* Assume here that the offset is a multiple of a unit.
2822 If not, there should be an explicitly added constant. */
2823 offset = size_binop (PLUS_EXPR, offset,
2824 size_binop (FLOOR_DIV_EXPR, pos,
2825 size_int (BITS_PER_UNIT)));
2829 else if (TREE_CODE (exp) == ARRAY_REF)
2831 /* This code is based on the code in case ARRAY_REF in expand_expr
2832 below. We assume here that the size of an array element is
2833 always an integral multiple of BITS_PER_UNIT. */
2835 tree index = TREE_OPERAND (exp, 1);
2836 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2838 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2839 tree index_type = TREE_TYPE (index);
2841 if (! integer_zerop (low_bound))
2842 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2844 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2846 index = convert (type_for_size (POINTER_SIZE, 0), index);
2847 index_type = TREE_TYPE (index);
2850 index = fold (build (MULT_EXPR, index_type, index,
2851 TYPE_SIZE (TREE_TYPE (exp))));
2853 if (TREE_CODE (index) == INTEGER_CST
2854 && TREE_INT_CST_HIGH (index) == 0)
2855 *pbitpos += TREE_INT_CST_LOW (index);
2857 offset = size_binop (PLUS_EXPR, offset,
2858 size_binop (FLOOR_DIV_EXPR, index,
2859 size_int (BITS_PER_UNIT)));
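/* For instance, assuming 32-bit elements: A[5] folds to the constant bit
   position 5*32 == 160, which is simply added to *PBITPOS; a variable
   index I instead contributes I*32/8 units to the OFFSET tree.  */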
2861 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2862 && ! ((TREE_CODE (exp) == NOP_EXPR
2863 || TREE_CODE (exp) == CONVERT_EXPR)
2864 && (TYPE_MODE (TREE_TYPE (exp))
2865 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2868 /* If any reference in the chain is volatile, the effect is volatile. */
2869 if (TREE_THIS_VOLATILE (exp))
2871 exp = TREE_OPERAND (exp, 0);
2874 /* If this was a bit-field, see if there is a mode that allows direct
2875 access in case EXP is in memory. */
2876 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2878 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2879 if (mode == BLKmode)
2883 if (integer_zerop (offset))
2889 /* We aren't finished fixing the callers to really handle nonzero offset. */
2897 /* Given an rtx VALUE that may contain additions and multiplications,
2898 return an equivalent value that just refers to a register or memory.
2899 This is done by generating instructions to perform the arithmetic
2900 and returning a pseudo-register containing the value.
2902 The returned value may be a REG, SUBREG, MEM or constant. */
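/* For instance, given something like (plus (reg 66) (const_int 4)),
   an add insn is typically emitted and the pseudo register holding the
   sum is returned.  (Register number chosen only for illustration.)  */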
2905 force_operand (value, target)
2908 register optab binoptab = 0;
2909 /* Use a temporary to force order of execution of calls to `force_operand'.  */
2913 /* Use subtarget as the target for operand 0 of a binary operation. */
2914 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2916 if (GET_CODE (value) == PLUS)
2917 binoptab = add_optab;
2918 else if (GET_CODE (value) == MINUS)
2919 binoptab = sub_optab;
2920 else if (GET_CODE (value) == MULT)
2922 op2 = XEXP (value, 1);
2923 if (!CONSTANT_P (op2)
2924 && !(GET_CODE (op2) == REG && op2 != subtarget))
2926 tmp = force_operand (XEXP (value, 0), subtarget);
2927 return expand_mult (GET_MODE (value), tmp,
2928 force_operand (op2, NULL_RTX),
2934 op2 = XEXP (value, 1);
2935 if (!CONSTANT_P (op2)
2936 && !(GET_CODE (op2) == REG && op2 != subtarget))
2938 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2940 binoptab = add_optab;
2941 op2 = negate_rtx (GET_MODE (value), op2);
2944 /* Check for an addition with OP2 a constant integer and our first
2945 operand a PLUS of a virtual register and something else. In that
2946 case, we want to emit the sum of the virtual register and the
2947 constant first and then add the other value. This allows virtual
2948 register instantiation to simply modify the constant rather than
2949 creating another one around this addition. */
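/* For example, given (plus (plus (reg virtual-stack-vars) (reg 66))
   (const_int 8)), the sum of the virtual register and 8 is formed first,
   so instantiation later only has to adjust that one constant.  */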
2950 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2951 && GET_CODE (XEXP (value, 0)) == PLUS
2952 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2953 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2954 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2956 rtx temp = expand_binop (GET_MODE (value), binoptab,
2957 XEXP (XEXP (value, 0), 0), op2,
2958 subtarget, 0, OPTAB_LIB_WIDEN);
2959 return expand_binop (GET_MODE (value), binoptab, temp,
2960 force_operand (XEXP (XEXP (value, 0), 1), 0),
2961 target, 0, OPTAB_LIB_WIDEN);
2964 tmp = force_operand (XEXP (value, 0), subtarget);
2965 return expand_binop (GET_MODE (value), binoptab, tmp,
2966 force_operand (op2, NULL_RTX),
2967 target, 0, OPTAB_LIB_WIDEN);
2968 /* We give UNSIGNEDP = 0 to expand_binop
2969 because the only operations we are expanding here are signed ones. */
2974 /* Subroutine of expand_expr:
2975 save the non-copied parts (LIST) of an expr (LHS), and return a list
2976 which can restore these values to their previous values,
2977 should something modify their storage. */
2980 save_noncopied_parts (lhs, list)
2987 for (tail = list; tail; tail = TREE_CHAIN (tail))
2988 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2989 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2992 tree part = TREE_VALUE (tail);
2993 tree part_type = TREE_TYPE (part);
2994 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2995 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2996 int_size_in_bytes (part_type), 0);
2997 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2998 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2999 parts = tree_cons (to_be_saved,
3000 build (RTL_EXPR, part_type, NULL_TREE,
3003 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3008 /* Subroutine of expand_expr:
3009 record the non-copied parts (LIST) of an expr (LHS), and return a list
3010 which specifies the initial values of these parts. */
3013 init_noncopied_parts (lhs, list)
3020 for (tail = list; tail; tail = TREE_CHAIN (tail))
3021 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3022 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3025 tree part = TREE_VALUE (tail);
3026 tree part_type = TREE_TYPE (part);
3027 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3028 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3033 /* Subroutine of expand_expr: return nonzero iff there is no way that
3034 EXP can reference X, which is being modified. */
3037 safe_from_p (x, exp)
3047 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
3048 find the underlying pseudo. */
3049 if (GET_CODE (x) == SUBREG)
3052 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3056 /* If X is a location in the outgoing argument area, it is always safe. */
3057 if (GET_CODE (x) == MEM
3058 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3059 || (GET_CODE (XEXP (x, 0)) == PLUS
3060 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3063 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3066 exp_rtl = DECL_RTL (exp);
3073 if (TREE_CODE (exp) == TREE_LIST)
3074 return ((TREE_VALUE (exp) == 0
3075 || safe_from_p (x, TREE_VALUE (exp)))
3076 && (TREE_CHAIN (exp) == 0
3077 || safe_from_p (x, TREE_CHAIN (exp))));
3082 return safe_from_p (x, TREE_OPERAND (exp, 0));
3086 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3087 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3091 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3092 the expression. If it is set, we conflict iff we are that rtx or
3093 both are in memory. Otherwise, we check all operands of the
3094 expression recursively. */
3096 switch (TREE_CODE (exp))
3099 return staticp (TREE_OPERAND (exp, 0));
3102 if (GET_CODE (x) == MEM)
3107 exp_rtl = CALL_EXPR_RTL (exp);
3110 /* Assume that the call will clobber all hard registers and all of memory.  */
3112 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3113 || GET_CODE (x) == MEM)
3120 exp_rtl = RTL_EXPR_RTL (exp);
3122 /* We don't know what this can modify. */
3127 case WITH_CLEANUP_EXPR:
3128 exp_rtl = RTL_EXPR_RTL (exp);
3132 exp_rtl = SAVE_EXPR_RTL (exp);
3136 /* The only operand we look at is operand 1. The rest aren't
3137 part of the expression. */
3138 return safe_from_p (x, TREE_OPERAND (exp, 1));
3140 case METHOD_CALL_EXPR:
3141 /* This takes an rtx argument, but shouldn't appear here. */
3145 /* If we have an rtx, we do not need to scan our operands. */
3149 nops = tree_code_length[(int) TREE_CODE (exp)];
3150 for (i = 0; i < nops; i++)
3151 if (TREE_OPERAND (exp, i) != 0
3152 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3156 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
3160 if (GET_CODE (exp_rtl) == SUBREG)
3162 exp_rtl = SUBREG_REG (exp_rtl);
3163 if (GET_CODE (exp_rtl) == REG
3164 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3168 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3169 are memory and EXP is not readonly. */
3170 return ! (rtx_equal_p (x, exp_rtl)
3171 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3172 && ! TREE_READONLY (exp)));
3175 /* If we reach here, it is safe. */
3179 /* Subroutine of expand_expr: return nonzero iff EXP is an
3180 expression whose type is statically determinable. */
3186 if (TREE_CODE (exp) == PARM_DECL
3187 || TREE_CODE (exp) == VAR_DECL
3188 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3189 || TREE_CODE (exp) == COMPONENT_REF
3190 || TREE_CODE (exp) == ARRAY_REF)
3195 /* expand_expr: generate code for computing expression EXP.
3196 An rtx for the computed value is returned. The value is never null.
3197 In the case of a void EXP, const0_rtx is returned.
3199 The value may be stored in TARGET if TARGET is nonzero.
3200 TARGET is just a suggestion; callers must assume that
3201 the rtx returned may not be the same as TARGET.
3203 If TARGET is CONST0_RTX, it means that the value will be ignored.
3205 If TMODE is not VOIDmode, it suggests generating the
3206 result in mode TMODE. But this is done only when convenient.
3207 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3208 TMODE is just a suggestion; callers must assume that
3209 the rtx returned may not have mode TMODE.
3211 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3212 with a constant address even if that address is not normally legitimate.
3213 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3215 If MODIFIER is EXPAND_SUM then when EXP is an addition
3216 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3217 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3218 products as above, or REG or MEM, or constant.
3219 Ordinarily in such cases we would output mul or add instructions
3220 and then return a pseudo reg containing the sum.
3222 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3223 it also marks a label as absolutely required (it can't be dead).
3224 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3225 This is used for outputting expressions used in initializers. */
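/* For instance, under EXPAND_SUM an address computation like &A[I] may
   come back as (plus (reg) (mult (reg) (const_int 4))) for the caller to
   use in an address, rather than being summed into a pseudo here.
   (Element size 4 is assumed only for illustration.)  */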
3228 expand_expr (exp, target, tmode, modifier)
3231 enum machine_mode tmode;
3232 enum expand_modifier modifier;
3234 register rtx op0, op1, temp;
3235 tree type = TREE_TYPE (exp);
3236 int unsignedp = TREE_UNSIGNED (type);
3237 register enum machine_mode mode = TYPE_MODE (type);
3238 register enum tree_code code = TREE_CODE (exp);
3240 /* Use subtarget as the target for operand 0 of a binary operation. */
3241 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3242 rtx original_target = target;
3243 int ignore = target == const0_rtx;
3246 /* Don't use hard regs as subtargets, because the combiner
3247 can only handle pseudo regs. */
3248 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3250 /* Avoid subtargets inside loops,
3251 since they hide some invariant expressions. */
3252 if (preserve_subexpressions_p ())
3255 if (ignore) target = 0, original_target = 0;
3257 /* If we will do cse, generate all results into pseudo registers
3258 since 1) that allows cse to find more things
3259 and 2) otherwise cse could produce an insn the machine cannot support.  */
3262 if (! cse_not_expected && mode != BLKmode && target
3263 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3266 /* Ensure we reference a volatile object even if value is ignored. */
3267 if (ignore && TREE_THIS_VOLATILE (exp)
3268 && TREE_CODE (exp) != FUNCTION_DECL
3269 && mode != VOIDmode && mode != BLKmode)
3271 target = gen_reg_rtx (mode);
3272 temp = expand_expr (exp, target, VOIDmode, modifier);
3274 emit_move_insn (target, temp);
3282 tree function = decl_function_context (exp);
3283 /* Handle using a label in a containing function. */
3284 if (function != current_function_decl && function != 0)
3286 struct function *p = find_function_data (function);
3287 /* Allocate in the memory associated with the function
3288 that the label is in. */
3289 push_obstacks (p->function_obstack,
3290 p->function_maybepermanent_obstack);
3292 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3293 label_rtx (exp), p->forced_labels);
3296 else if (modifier == EXPAND_INITIALIZER)
3297 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3298 label_rtx (exp), forced_labels);
3299 temp = gen_rtx (MEM, FUNCTION_MODE,
3300 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3301 if (function != current_function_decl && function != 0)
3302 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3307 if (DECL_RTL (exp) == 0)
3309 error_with_decl (exp, "prior parameter's size depends on `%s'");
3310 return CONST0_RTX (mode);
3316 if (DECL_RTL (exp) == 0)
3318 /* Ensure the variable is marked as used
3319 even if it doesn't go through a parser. */
3320 TREE_USED (exp) = 1;
3321 /* Handle variables inherited from containing functions. */
3322 context = decl_function_context (exp);
3324 /* We treat inline_function_decl as an alias for the current function
3325 because that is the inline function whose vars, types, etc.
3326 are being merged into the current function.
3327 See expand_inline_function. */
3328 if (context != 0 && context != current_function_decl
3329 && context != inline_function_decl
3330 /* If var is static, we don't need a static chain to access it. */
3331 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3332 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3336 /* Mark as non-local and addressable. */
3337 DECL_NONLOCAL (exp) = 1;
3338 mark_addressable (exp);
3339 if (GET_CODE (DECL_RTL (exp)) != MEM)
3341 addr = XEXP (DECL_RTL (exp), 0);
3342 if (GET_CODE (addr) == MEM)
3343 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3345 addr = fix_lexical_addr (addr, exp);
3346 return change_address (DECL_RTL (exp), mode, addr);
3349 /* This is the case of an array whose size is to be determined
3350 from its initializer, while the initializer is still being parsed.
3352 if (GET_CODE (DECL_RTL (exp)) == MEM
3353 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3354 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3355 XEXP (DECL_RTL (exp), 0));
3356 if (GET_CODE (DECL_RTL (exp)) == MEM
3357 && modifier != EXPAND_CONST_ADDRESS
3358 && modifier != EXPAND_SUM
3359 && modifier != EXPAND_INITIALIZER)
3361 /* DECL_RTL probably contains a constant address.
3362 On RISC machines where a constant address isn't valid,
3363 make some insns to get that address into a register. */
3364 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3366 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3367 return change_address (DECL_RTL (exp), VOIDmode,
3368 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3371 /* If the mode of DECL_RTL does not match that of the decl, it
3372 must be a promoted value. We return a SUBREG of the wanted mode,
3373 but mark it so that we know that it was already extended. */
3375 if (GET_CODE (DECL_RTL (exp)) == REG
3376 && GET_MODE (DECL_RTL (exp)) != mode)
3378 enum machine_mode decl_mode = DECL_MODE (exp);
3380 /* Get the signedness used for this variable. Ensure we get the
3381 same mode we got when the variable was declared. */
3383 PROMOTE_MODE (decl_mode, unsignedp, type);
3385 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3388 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3389 SUBREG_PROMOTED_VAR_P (temp) = 1;
3390 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3394 return DECL_RTL (exp);
3397 return immed_double_const (TREE_INT_CST_LOW (exp),
3398 TREE_INT_CST_HIGH (exp),
3402 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3405 /* If optimized, generate immediate CONST_DOUBLE
3406 which will be turned into memory by reload if necessary.
3408 We used to force a register so that loop.c could see it. But
3409 this does not allow gen_* patterns to perform optimizations with
3410 the constants. It also produces two insns in cases like "x = 1.0;".
3411 On most machines, floating-point constants are not permitted in
3412 many insns, so we'd end up copying it to a register in any case.
3414 Now, we do the copying in expand_binop, if appropriate. */
3415 return immed_real_const (exp);
3419 if (! TREE_CST_RTL (exp))
3420 output_constant_def (exp);
3422 /* TREE_CST_RTL probably contains a constant address.
3423 On RISC machines where a constant address isn't valid,
3424 make some insns to get that address into a register. */
3425 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3426 && modifier != EXPAND_CONST_ADDRESS
3427 && modifier != EXPAND_INITIALIZER
3428 && modifier != EXPAND_SUM
3429 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3430 return change_address (TREE_CST_RTL (exp), VOIDmode,
3431 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3432 return TREE_CST_RTL (exp);
3435 context = decl_function_context (exp);
3436 /* We treat inline_function_decl as an alias for the current function
3437 because that is the inline function whose vars, types, etc.
3438 are being merged into the current function.
3439 See expand_inline_function. */
3440 if (context == current_function_decl || context == inline_function_decl)
3443 /* If this is non-local, handle it. */
3446 temp = SAVE_EXPR_RTL (exp);
3447 if (temp && GET_CODE (temp) == REG)
3449 put_var_into_stack (exp);
3450 temp = SAVE_EXPR_RTL (exp);
3452 if (temp == 0 || GET_CODE (temp) != MEM)
3454 return change_address (temp, mode,
3455 fix_lexical_addr (XEXP (temp, 0), exp));
3457 if (SAVE_EXPR_RTL (exp) == 0)
3459 if (mode == BLKmode)
3461 = assign_stack_temp (mode,
3462 int_size_in_bytes (TREE_TYPE (exp)), 0);
3465 enum machine_mode var_mode = mode;
3467 if (TREE_CODE (type) == INTEGER_TYPE
3468 || TREE_CODE (type) == ENUMERAL_TYPE
3469 || TREE_CODE (type) == BOOLEAN_TYPE
3470 || TREE_CODE (type) == CHAR_TYPE
3471 || TREE_CODE (type) == REAL_TYPE
3472 || TREE_CODE (type) == POINTER_TYPE
3473 || TREE_CODE (type) == OFFSET_TYPE)
3475 PROMOTE_MODE (var_mode, unsignedp, type);
3478 temp = gen_reg_rtx (var_mode);
3481 SAVE_EXPR_RTL (exp) = temp;
3482 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3483 if (!optimize && GET_CODE (temp) == REG)
3484 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3488 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3489 must be a promoted value. We return a SUBREG of the wanted mode,
3490 but mark it so that we know that it was already extended. Note
3491 that `unsignedp' was modified above in this case. */
3493 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3494 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3496 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3497 SUBREG_PROMOTED_VAR_P (temp) = 1;
3498 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3502 return SAVE_EXPR_RTL (exp);
3505 /* Exit the current loop if the body-expression is true. */
3507 rtx label = gen_label_rtx ();
3508 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3509 expand_exit_loop (NULL_PTR);
3515 expand_start_loop (1);
3516 expand_expr_stmt (TREE_OPERAND (exp, 0));
3523 tree vars = TREE_OPERAND (exp, 0);
3524 int vars_need_expansion = 0;
3526 /* Need to open a binding contour here because
3527 if there are any cleanups they must be contained here. */
3528 expand_start_bindings (0);
3530 /* Mark the corresponding BLOCK for output in its proper place. */
3531 if (TREE_OPERAND (exp, 2) != 0
3532 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3533 insert_block (TREE_OPERAND (exp, 2));
3535 /* If VARS have not yet been expanded, expand them now. */
3538 if (DECL_RTL (vars) == 0)
3540 vars_need_expansion = 1;
3543 expand_decl_init (vars);
3544 vars = TREE_CHAIN (vars);
3547 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3549 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3555 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3557 emit_insns (RTL_EXPR_SEQUENCE (exp));
3558 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3559 return RTL_EXPR_RTL (exp);
3562 /* All elts simple constants => refer to a constant in memory. But
3563 if this is a non-BLKmode mode, let it store a field at a time
3564 since that should make a CONST_INT or CONST_DOUBLE when we fold.  */
3566 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3568 rtx constructor = output_constant_def (exp);
3569 if (modifier != EXPAND_CONST_ADDRESS
3570 && modifier != EXPAND_INITIALIZER
3571 && modifier != EXPAND_SUM
3572 && !memory_address_p (GET_MODE (constructor),
3573 XEXP (constructor, 0)))
3574 constructor = change_address (constructor, VOIDmode,
3575 XEXP (constructor, 0));
3582 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3583 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3588 if (target == 0 || ! safe_from_p (target, exp))
3590 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3591 target = gen_reg_rtx (mode);
3594 enum tree_code c = TREE_CODE (type);
3596 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3597 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
3598 MEM_IN_STRUCT_P (target) = 1;
3601 store_constructor (exp, target);
3607 tree exp1 = TREE_OPERAND (exp, 0);
3610 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3611 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3612 This code has the same general effect as simply doing
3613 expand_expr on the save expr, except that the expression PTR
3614 is computed for use as a memory address. This means different
3615 code, suitable for indexing, may be generated. */
3616 if (TREE_CODE (exp1) == SAVE_EXPR
3617 && SAVE_EXPR_RTL (exp1) == 0
3618 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3619 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3620 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3622 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3623 VOIDmode, EXPAND_SUM);
3624 op0 = memory_address (mode, temp);
3625 op0 = copy_all_regs (op0);
3626 SAVE_EXPR_RTL (exp1) = op0;
3630 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3631 op0 = memory_address (mode, op0);
3634 temp = gen_rtx (MEM, mode, op0);
3635 /* If address was computed by addition,
3636 mark this as an element of an aggregate. */
3637 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3638 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3639 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3640 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3641 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3642 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3643 || (TREE_CODE (exp1) == ADDR_EXPR
3644 && (exp2 = TREE_OPERAND (exp1, 0))
3645 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3646 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3647 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3648 MEM_IN_STRUCT_P (temp) = 1;
3649 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3650 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3651 a location is accessed through a pointer to const does not mean
3652 that the value there can never change. */
3653 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3659 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3663 tree array = TREE_OPERAND (exp, 0);
3664 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3665 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3666 tree index = TREE_OPERAND (exp, 1);
3667 tree index_type = TREE_TYPE (index);
3670 /* Optimize the special-case of a zero lower bound. */
3671 if (! integer_zerop (low_bound))
3672 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3674 if (TREE_CODE (index) != INTEGER_CST
3675 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3677 /* Nonconstant array index or nonconstant element size.
3678 Generate the tree for *(&array+index) and expand that,
3679 except do it in a language-independent way
3680 and don't complain about non-lvalue arrays.
3681 `mark_addressable' should already have been called
3682 for any array for which this case will be reached. */
3684 /* Don't forget the const or volatile flag from the array element.  */
3686 tree variant_type = build_type_variant (type,
3687 TREE_READONLY (exp),
3688 TREE_THIS_VOLATILE (exp));
3689 tree array_adr = build1 (ADDR_EXPR,
3690 build_pointer_type (variant_type), array);
3693 /* Convert the integer argument to a type the same size as a
3694 pointer so the multiply won't overflow spuriously. */
3695 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3696 index = convert (type_for_size (POINTER_SIZE, 0), index);
3698 /* Don't think the address has side effects
3699 just because the array does.
3700 (In some cases the address might have side effects,
3701 and we fail to record that fact here. However, it should not
3702 matter, since expand_expr should not care.) */
3703 TREE_SIDE_EFFECTS (array_adr) = 0;
3705 elt = build1 (INDIRECT_REF, type,
3706 fold (build (PLUS_EXPR,
3707 TYPE_POINTER_TO (variant_type),
3709 fold (build (MULT_EXPR,
3710 TYPE_POINTER_TO (variant_type),
3712 size_in_bytes (type))))));
3714 /* Volatility, etc., of new expression is same as old expression.  */
3716 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3717 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3718 TREE_READONLY (elt) = TREE_READONLY (exp);
3720 return expand_expr (elt, target, tmode, modifier);
3723 /* Fold an expression like: "foo"[2].
3724 This is not done in fold so it won't happen inside &. */
3726 if (TREE_CODE (array) == STRING_CST
3727 && TREE_CODE (index) == INTEGER_CST
3728 && !TREE_INT_CST_HIGH (index)
3729 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3731 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3733 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3734 TREE_TYPE (exp) = integer_type_node;
3735 return expand_expr (exp, target, tmode, modifier);
3737 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3739 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3740 TREE_TYPE (exp) = integer_type_node;
3741 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3743 target, tmode, modifier);
3747 /* If this is a constant index into a constant array,
3748 just get the value from the array. Handle both the cases when
3749 we have an explicit constructor and when our operand is a variable
3750 that was declared const. */
3752 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3754 if (TREE_CODE (index) == INTEGER_CST
3755 && TREE_INT_CST_HIGH (index) == 0)
3757 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3759 i = TREE_INT_CST_LOW (index);
3761 elem = TREE_CHAIN (elem);
3763 return expand_expr (fold (TREE_VALUE (elem)), target,
3768 else if (optimize >= 1
3769 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3770 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3771 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3773 if (TREE_CODE (index) == INTEGER_CST
3774 && TREE_INT_CST_HIGH (index) == 0)
3776 tree init = DECL_INITIAL (array);
3778 i = TREE_INT_CST_LOW (index);
3779 if (TREE_CODE (init) == CONSTRUCTOR)
3781 tree elem = CONSTRUCTOR_ELTS (init);
3784 elem = TREE_CHAIN (elem);
3786 return expand_expr (fold (TREE_VALUE (elem)), target,
3789 else if (TREE_CODE (init) == STRING_CST
3790 && i < TREE_STRING_LENGTH (init))
3792 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3793 return convert_to_mode (mode, temp, 0);
3799 /* Treat array-ref with constant index as a component-ref. */
3803 /* If the operand is a CONSTRUCTOR, we can just extract the
3804 appropriate field if it is present. */
3805 if (code != ARRAY_REF
3806 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3810 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3811 elt = TREE_CHAIN (elt))
3812 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3813 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3817 enum machine_mode mode1;
3822 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3823 &mode1, &unsignedp, &volatilep);
3825 /* In some cases, we will be offsetting OP0's address by a constant.
3826 So get it as a sum, if possible. If we will be using it
3827 directly in an insn, we validate it. */
3828 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3830 /* If this is a constant, put it into a register if it is a
3831 legitimate constant and memory if it isn't. */
3832 if (CONSTANT_P (op0))
3834 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3835 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3836 op0 = force_reg (mode, op0);
3838 op0 = validize_mem (force_const_mem (mode, op0));
3843 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3845 if (GET_CODE (op0) != MEM)
3847 op0 = change_address (op0, VOIDmode,
3848 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3849 force_reg (Pmode, offset_rtx)));
3852 /* Don't forget about volatility even if this is a bitfield. */
3853 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3855 op0 = copy_rtx (op0);
3856 MEM_VOLATILE_P (op0) = 1;
3859 if (mode1 == VOIDmode
3860 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3861 && modifier != EXPAND_CONST_ADDRESS
3862 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3863 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3865 /* In cases where an aligned union has an unaligned object
3866 as a field, we might be extracting a BLKmode value from
3867 an integer-mode (e.g., SImode) object. Handle this case
3868 by doing the extract into an object as wide as the field
3869 (which we know to be the width of a basic mode), then
3870 storing into memory, and changing the mode to BLKmode. */
3871 enum machine_mode ext_mode = mode;
3873 if (ext_mode == BLKmode)
3874 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3876 if (ext_mode == BLKmode)
3879 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3880 unsignedp, target, ext_mode, ext_mode,
3881 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3882 int_size_in_bytes (TREE_TYPE (tem)));
3883 if (mode == BLKmode)
3885 rtx new = assign_stack_temp (ext_mode,
3886 bitsize / BITS_PER_UNIT, 0);
3888 emit_move_insn (new, op0);
3889 op0 = copy_rtx (new);
3890 PUT_MODE (op0, BLKmode);
3896 /* Get a reference to just this component. */
3897 if (modifier == EXPAND_CONST_ADDRESS
3898 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3899 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3900 (bitpos / BITS_PER_UNIT)));
3902 op0 = change_address (op0, mode1,
3903 plus_constant (XEXP (op0, 0),
3904 (bitpos / BITS_PER_UNIT)));
3905 MEM_IN_STRUCT_P (op0) = 1;
3906 MEM_VOLATILE_P (op0) |= volatilep;
3907 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3910 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3911 convert_move (target, op0, unsignedp);
3917 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3918 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3919 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3920 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3921 MEM_IN_STRUCT_P (temp) = 1;
3922 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3923 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3924 a location is accessed through a pointer to const does not mean
3925 that the value there can never change. */
3926 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3931 /* Intended for a reference to a buffer of a file-object in Pascal.
3932 But it's not certain that a special tree code will really be
3933 necessary for these. INDIRECT_REF might work for them. */
3937 /* IN_EXPR: Inlined Pascal set IN expression.
3940 rlo = set_low - (set_low%bits_per_word);
3941 the_word = set [ (index - rlo)/bits_per_word ];
3942 bit_index = index % bits_per_word;
3943 bitmask = 1 << bit_index;
3944 return !!(the_word & bitmask); */
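/* For instance, with 8 bits per word, set_low == 0 and index == 10:
   rlo == 0, the_word == set[10/8] == set[1], bit_index == 2 and
   bitmask == 1 << 2, so bit 2 of the second byte of the set is tested.  */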
3946 preexpand_calls (exp);
3948 tree set = TREE_OPERAND (exp, 0);
3949 tree index = TREE_OPERAND (exp, 1);
3950 tree set_type = TREE_TYPE (set);
3952 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3953 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3959 rtx diff, quo, rem, addr, bit, result;
3960 rtx setval, setaddr;
3961 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
3964 target = gen_reg_rtx (mode);
3966 /* If domain is empty, answer is no. */
3967 if (tree_int_cst_lt (set_high_bound, set_low_bound))
3970 index_val = expand_expr (index, 0, VOIDmode, 0);
3971 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
3972 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
3973 setval = expand_expr (set, 0, VOIDmode, 0);
3974 setaddr = XEXP (setval, 0);
3976 /* Compare index against bounds, if they are constant. */
3977 if (GET_CODE (index_val) == CONST_INT
3978 && GET_CODE (lo_r) == CONST_INT
3979 && INTVAL (index_val) < INTVAL (lo_r))
3982 if (GET_CODE (index_val) == CONST_INT
3983 && GET_CODE (hi_r) == CONST_INT
3984 && INTVAL (hi_r) < INTVAL (index_val))
3987 /* If we get here, we have to generate the code for both cases
3988 (in range and out of range). */
3990 op0 = gen_label_rtx ();
3991 op1 = gen_label_rtx ();
3993 if (! (GET_CODE (index_val) == CONST_INT
3994 && GET_CODE (lo_r) == CONST_INT))
3996 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
3997 GET_MODE (index_val), 0, 0);
3998 emit_jump_insn (gen_blt (op1));
4001 if (! (GET_CODE (index_val) == CONST_INT
4002 && GET_CODE (hi_r) == CONST_INT))
4004 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4005 GET_MODE (index_val), 0, 0);
4006 emit_jump_insn (gen_bgt (op1));
4009 /* Calculate the element number of bit zero in the first word of the set.  */
4011 if (GET_CODE (lo_r) == CONST_INT)
4012 rlow = GEN_INT (INTVAL (lo_r)
4013 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4015 rlow = expand_binop (index_mode, and_optab, lo_r,
4016 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4017 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4019 diff = expand_binop (index_mode, sub_optab,
4020 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4022 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4023 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4024 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4025 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4026 addr = memory_address (byte_mode,
4027 expand_binop (index_mode, add_optab,
4028 diff, setaddr, NULL_RTX, 0,
4030 /* Extract the bit we want to examine */
4031 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4032 gen_rtx (MEM, byte_mode, addr),
4033 make_tree (TREE_TYPE (index), rem),
4035 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4036 GET_MODE (target) == byte_mode ? target : 0,
4037 1, OPTAB_LIB_WIDEN);
4039 if (result != target)
4040 convert_move (target, result, 1);
4042 /* Output the code to handle the out-of-range case. */
4045 emit_move_insn (target, const0_rtx);
4050 case WITH_CLEANUP_EXPR:
4051 if (RTL_EXPR_RTL (exp) == 0)
4054 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4056 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4057 /* That's it for this cleanup. */
4058 TREE_OPERAND (exp, 2) = 0;
4060 return RTL_EXPR_RTL (exp);
4063 /* Check for a built-in function. */
4064 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4065 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4066 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4067 return expand_builtin (exp, target, subtarget, tmode, ignore);
4068 /* If this call was expanded already by preexpand_calls,
4069 just return the result we got. */
4070 if (CALL_EXPR_RTL (exp) != 0)
4071 return CALL_EXPR_RTL (exp);
4072 return expand_call (exp, target, ignore);
4074 case NON_LVALUE_EXPR:
4077 case REFERENCE_EXPR:
4078 if (TREE_CODE (type) == VOID_TYPE || ignore)
4080 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4083 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4084 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4085 if (TREE_CODE (type) == UNION_TYPE)
4087 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4090 if (mode == BLKmode)
4092 if (TYPE_SIZE (type) == 0
4093 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4095 target = assign_stack_temp (BLKmode,
4096 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4097 + BITS_PER_UNIT - 1)
4098 / BITS_PER_UNIT, 0);
4101 target = gen_reg_rtx (mode);
4103 if (GET_CODE (target) == MEM)
4104 /* Store data into beginning of memory target. */
4105 store_expr (TREE_OPERAND (exp, 0),
4106 change_address (target, TYPE_MODE (valtype), 0), 0);
4108 else if (GET_CODE (target) == REG)
4109 /* Store this field into a union of the proper type. */
4110 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4111 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4113 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4117 /* Return the entire union. */
4120 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4121 if (GET_MODE (op0) == mode)
4123 /* If arg is a constant integer being extended from a narrower mode,
4124 we must really truncate to get the extended bits right. Otherwise
4125 (unsigned long) (unsigned char) ("\377"[0])
4126 would come out as ffffffff. */
4127 if (GET_MODE (op0) == VOIDmode
4128 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4129 < GET_MODE_BITSIZE (mode)))
4131 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4132 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4134 if (width < HOST_BITS_PER_WIDE_INT)
4136 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4137 : CONST_DOUBLE_LOW (op0));
4138 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4139 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4140 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4142 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4144 op0 = GEN_INT (val);
4148 op0 = (simplify_unary_operation
4149 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4150 ? ZERO_EXTEND : SIGN_EXTEND),
4152 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4157 if (GET_MODE (op0) == VOIDmode)
4159 if (modifier == EXPAND_INITIALIZER)
4160 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4161 if (flag_force_mem && GET_CODE (op0) == MEM)
4162 op0 = copy_to_reg (op0);
4165 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4167 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4171 /* We come here from MINUS_EXPR when the second operand is a constant. */
4173 this_optab = add_optab;
4175 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4176 something else, make sure we add the register to the constant and
4177 then to the other thing. This case can occur during strength
4178 reduction and doing it this way will produce better code if the
4179 frame pointer or argument pointer is eliminated.
4181 fold-const.c will ensure that the constant is always in the inner
4182 PLUS_EXPR, so the only case we need to do anything about is if
4183 sp, ap, or fp is our second argument, in which case we must swap
4184 the innermost first argument and our second argument. */
4186 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4187 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4188 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4189 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4190 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4191 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4193 tree t = TREE_OPERAND (exp, 1);
4195 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4196 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4199 /* If the result is to be Pmode and we are adding an integer to
4200 something, we might be forming a constant. So try to use
4201 plus_constant. If it produces a sum and we can't accept it,
4202 use force_operand. This allows P = &ARR[const] to generate
4203 efficient code on machines where a SYMBOL_REF is not a valid address.
4206 If this is an EXPAND_SUM call, always return the sum. */
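/* As an illustration (with a hypothetical array arr of 4-byte
   elements): for &arr[3], plus_constant can fold the offset into
   (const (plus (symbol_ref arr) (const_int 12))); if that form is not
   a valid address on this machine, force_operand copies it into a
   register.  */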
4207 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4208 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4209 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4212 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4214 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4215 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4216 op1 = force_operand (op1, target);
4220 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4221 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4222 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4225 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4227 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4228 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4229 op0 = force_operand (op0, target);
4233 /* No sense saving up arithmetic to be done
4234 if it's all in the wrong mode to form part of an address.
4235 And force_operand won't know whether to sign-extend or zero-extend. */
4237 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4238 || mode != Pmode) goto binop;
4240 preexpand_calls (exp);
4241 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4244 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4245 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4247 /* Make sure any term that's a sum with a constant comes last. */
4248 if (GET_CODE (op0) == PLUS
4249 && CONSTANT_P (XEXP (op0, 1)))
4255 /* If adding to a sum including a constant,
4256 associate it to put the constant outside. */
4257 if (GET_CODE (op1) == PLUS
4258 && CONSTANT_P (XEXP (op1, 1)))
4260 rtx constant_term = const0_rtx;
4262 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4265 /* Ensure that MULT comes first if there is one. */
4266 else if (GET_CODE (op0) == MULT)
4267 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4269 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4271 /* Let's also eliminate constants from op0 if possible. */
4272 op0 = eliminate_constant_term (op0, &constant_term);
4274 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4275 their sum should be a constant. Form it into OP1, since the
4276 result we want will then be OP0 + OP1. */
4278 temp = simplify_binary_operation (PLUS, mode, constant_term,
4283 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4286 /* Put a constant term last and put a multiplication first. */
4287 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4288 temp = op1, op1 = op0, op0 = temp;
4290 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4291 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4294 /* Handle difference of two symbolic constants,
4295 for the sake of an initializer. */
4296 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4297 && really_constant_p (TREE_OPERAND (exp, 0))
4298 && really_constant_p (TREE_OPERAND (exp, 1)))
4300 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4301 VOIDmode, modifier);
4302 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4303 VOIDmode, modifier);
4304 return gen_rtx (MINUS, mode, op0, op1);
4306 /* Convert A - const to A + (-const). */
4307 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4309 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4310 fold (build1 (NEGATE_EXPR, type,
4311 TREE_OPERAND (exp, 1))));
4314 this_optab = sub_optab;
4318 preexpand_calls (exp);
4319 /* If first operand is constant, swap them.
4320 Thus the following special case checks need only
4321 check the second operand. */
4322 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4324 register tree t1 = TREE_OPERAND (exp, 0);
4325 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4326 TREE_OPERAND (exp, 1) = t1;
4329 /* Attempt to return something suitable for generating an
4330 indexed address, for machines that support that. */
4332 if (modifier == EXPAND_SUM && mode == Pmode
4333 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4334 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4336 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4338 /* Apply distributive law if OP0 is x+c. */
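/* Illustration: with OP0 = (plus x 4) and a constant multiplier of 3,
   the return below is (plus (mult x 3) 12) rather than
   (mult (plus x 4) 3), which is the preferred shape for forming
   addresses.  */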
4339 if (GET_CODE (op0) == PLUS
4340 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4341 return gen_rtx (PLUS, mode,
4342 gen_rtx (MULT, mode, XEXP (op0, 0),
4343 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4344 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4345 * INTVAL (XEXP (op0, 1))));
4347 if (GET_CODE (op0) != REG)
4348 op0 = force_operand (op0, NULL_RTX);
4349 if (GET_CODE (op0) != REG)
4350 op0 = copy_to_mode_reg (mode, op0);
4352 return gen_rtx (MULT, mode, op0,
4353 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4356 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4359 /* Check for multiplying things that have been extended
4360 from a narrower type. If this machine supports multiplying
4361 in that narrower type with a result in the desired type,
4362 do it that way, and avoid the explicit type-conversion. */
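/* Illustration (hypothetical short operands s1 and s2): on a machine
   with a widening HImode-to-SImode multiply pattern such as mulhisi3,
   (int) s1 * (int) s2 can use that instruction directly instead of
   sign-extending both operands to SImode first.  */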
4363 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4364 && TREE_CODE (type) == INTEGER_TYPE
4365 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4366 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4367 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4368 && int_fits_type_p (TREE_OPERAND (exp, 1),
4369 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4370 /* Don't use a widening multiply if a shift will do. */
4371 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4372 > HOST_BITS_PER_WIDE_INT)
4373 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4375 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4376 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4378 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4379 /* If both operands are extended, they must either both
4380 be zero-extended or both be sign-extended. */
4381 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4383 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4385 enum machine_mode innermode
4386 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4387 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4388 ? umul_widen_optab : smul_widen_optab);
4389 if (mode == GET_MODE_WIDER_MODE (innermode)
4390 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4392 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4393 NULL_RTX, VOIDmode, 0);
4394 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4395 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4398 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4399 NULL_RTX, VOIDmode, 0);
4403 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4404 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4405 return expand_mult (mode, op0, op1, target, unsignedp);
4407 case TRUNC_DIV_EXPR:
4408 case FLOOR_DIV_EXPR:
4410 case ROUND_DIV_EXPR:
4411 case EXACT_DIV_EXPR:
4412 preexpand_calls (exp);
4413 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4415 /* Possible optimization: compute the dividend with EXPAND_SUM
4416 then, if the divisor is constant, optimize the case
4417 where some terms of the dividend have coefficients divisible by it. */
4418 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4419 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4420 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4423 this_optab = flodiv_optab;
4426 case TRUNC_MOD_EXPR:
4427 case FLOOR_MOD_EXPR:
4429 case ROUND_MOD_EXPR:
4430 preexpand_calls (exp);
4431 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4433 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4434 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4435 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4437 case FIX_ROUND_EXPR:
4438 case FIX_FLOOR_EXPR:
4440 abort (); /* Not used for C. */
4442 case FIX_TRUNC_EXPR:
4443 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4445 target = gen_reg_rtx (mode);
4446 expand_fix (target, op0, unsignedp);
4450 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4452 target = gen_reg_rtx (mode);
4453 /* expand_float can't figure out what to do if FROM has VOIDmode.
4454 So give it the correct mode. With -O, cse will optimize this. */
4455 if (GET_MODE (op0) == VOIDmode)
4456 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4458 expand_float (target, op0,
4459 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4463 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4464 temp = expand_unop (mode, neg_optab, op0, target, 0);
4470 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4472 /* Handle complex values specially. */
4474 enum machine_mode opmode
4475 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4477 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4478 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4479 return expand_complex_abs (opmode, op0, target, unsignedp);
4482 /* Unsigned abs is simply the operand. Testing here means we don't
4483 risk generating incorrect code below. */
4484 if (TREE_UNSIGNED (type))
4487 /* First try to do it with a special abs instruction. */
4488 temp = expand_unop (mode, abs_optab, op0, target, 0);
4492 /* If this machine has expensive jumps, we can do integer absolute
4493 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4494 where W is the width of MODE. */
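/* Sanity check of the identity with x = -5 in a 32-bit mode:
   (signed) x >> 31 is -1, -1 ^ -5 is 4, and 4 - (-1) is 5.  For
   nonnegative x the shift gives 0, so both operations leave x
   unchanged.  */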
4496 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4498 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4499 size_int (GET_MODE_BITSIZE (mode) - 1),
4502 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4505 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4512 /* If that does not win, use conditional jump and negate. */
4513 target = original_target;
4514 temp = gen_label_rtx ();
4515 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4516 || (GET_CODE (target) == REG
4517 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4518 target = gen_reg_rtx (mode);
4519 emit_move_insn (target, op0);
4520 emit_cmp_insn (target,
4521 expand_expr (convert (type, integer_zero_node),
4522 NULL_RTX, VOIDmode, 0),
4523 GE, NULL_RTX, mode, 0, 0);
4525 emit_jump_insn (gen_bge (temp));
4526 op0 = expand_unop (mode, neg_optab, target, target, 0);
4528 emit_move_insn (target, op0);
4535 target = original_target;
4536 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4537 || (GET_CODE (target) == REG
4538 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4539 target = gen_reg_rtx (mode);
4540 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4541 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4543 /* First try to do it with a special MIN or MAX instruction.
4544 If that does not win, use a conditional jump to select the proper value. */
4546 this_optab = (TREE_UNSIGNED (type)
4547 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4548 : (code == MIN_EXPR ? smin_optab : smax_optab));
4550 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4556 emit_move_insn (target, op0);
4557 op0 = gen_label_rtx ();
4558 /* If this mode is an integer too wide to compare properly,
4559 compare word by word. Rely on cse to optimize constant cases. */
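/* For example, a DImode MIN or MAX on a machine with no DImode compare
   is handled by comparing the operands a word at a time via
   do_jump_by_parts_greater_rtx.  */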
4560 if (GET_MODE_CLASS (mode) == MODE_INT
4561 && !can_compare_p (mode))
4563 if (code == MAX_EXPR)
4564 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL_RTX, op0);
4566 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL_RTX, op0);
4567 emit_move_insn (target, op1);
4571 if (code == MAX_EXPR)
4572 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4573 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4574 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4576 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4577 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4578 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4579 if (temp == const0_rtx)
4580 emit_move_insn (target, op1);
4581 else if (temp != const_true_rtx)
4583 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4584 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4587 emit_move_insn (target, op1);
4593 /* ??? Can optimize when the operand of this is a bitwise operation,
4594 by using a different bitwise operation. */
4596 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4597 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4603 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4604 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4609 /* ??? Can optimize bitwise operations with one arg constant.
4610 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4611 and (a bitwise1 b) bitwise2 b (etc)
4612 but that is probably not worthwhile. */
4614 /* BIT_AND_EXPR is for bitwise anding.
4615 TRUTH_AND_EXPR is for anding two boolean values
4616 when we want in all cases to compute both of them.
4617 In general it is fastest to do TRUTH_AND_EXPR by
4618 computing both operands as actual zero-or-1 values
4619 and then bitwise anding. In cases where there cannot
4620 be any side effects, better code would be made by
4621 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4622 but the question is how to recognize those cases. */
4624 case TRUTH_AND_EXPR:
4626 this_optab = and_optab;
4629 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4632 this_optab = ior_optab;
4635 case TRUTH_XOR_EXPR:
4637 this_optab = xor_optab;
4644 preexpand_calls (exp);
4645 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4647 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4648 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4651 /* Could determine the answer when only additive constants differ.
4652 Also, the addition of one can be handled by changing the condition. */
4659 preexpand_calls (exp);
4660 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4663 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4664 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4666 && GET_CODE (original_target) == REG
4667 && (GET_MODE (original_target)
4668 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4670 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4671 if (temp != original_target)
4672 temp = copy_to_reg (temp);
4673 op1 = gen_label_rtx ();
4674 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4675 GET_MODE (temp), unsignedp, 0);
4676 emit_jump_insn (gen_beq (op1));
4677 emit_move_insn (temp, const1_rtx);
4681 /* If no set-flag instruction, must generate a conditional
4682 store into a temporary variable. Drop through
4683 and handle this like && and ||. */
4685 case TRUTH_ANDIF_EXPR:
4686 case TRUTH_ORIF_EXPR:
4687 if (target == 0 || ! safe_from_p (target, exp)
4688 /* Make sure we don't have a hard reg (such as function's return
4689 value) live across basic blocks, if not optimizing. */
4690 || (!optimize && GET_CODE (target) == REG
4691 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4692 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4693 emit_clr_insn (target);
4694 op1 = gen_label_rtx ();
4695 jumpifnot (exp, op1);
4696 emit_0_to_1_insn (target);
4700 case TRUTH_NOT_EXPR:
4701 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4702 /* The parser is careful to generate TRUTH_NOT_EXPR
4703 only with operands that are always zero or one. */
4704 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4705 target, 1, OPTAB_LIB_WIDEN);
4711 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4713 return expand_expr (TREE_OPERAND (exp, 1),
4714 (ignore ? const0_rtx : target),
4719 /* Note that COND_EXPRs whose type is a structure or union
4720 are required to be constructed to contain assignments of
4721 a temporary variable, so that we can evaluate them here
4722 for side effect only. If type is void, we must do likewise. */
4724 /* If an arm of the branch requires a cleanup,
4725 only that cleanup is performed. */
4728 tree binary_op = 0, unary_op = 0;
4729 tree old_cleanups = cleanups_this_call;
4730 cleanups_this_call = 0;
4732 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4733 convert it to our mode, if necessary. */
4734 if (integer_onep (TREE_OPERAND (exp, 1))
4735 && integer_zerop (TREE_OPERAND (exp, 2))
4736 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4738 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4739 if (GET_MODE (op0) == mode)
4742 target = gen_reg_rtx (mode);
4743 convert_move (target, op0, unsignedp);
4747 /* If we are not to produce a result, we have no target. Otherwise,
4748 if a target was specified use it; it will not be used as an
4749 intermediate target unless it is safe. If no target, use a temporary. */
4752 if (mode == VOIDmode || ignore)
4754 else if (original_target
4755 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4756 temp = original_target;
4757 else if (mode == BLKmode)
4759 if (TYPE_SIZE (type) == 0
4760 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4762 temp = assign_stack_temp (BLKmode,
4763 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4764 + BITS_PER_UNIT - 1)
4765 / BITS_PER_UNIT, 0);
4768 temp = gen_reg_rtx (mode);
4770 /* Check for X ? A + B : A. If we have this, we can copy
4771 A to the output and conditionally add B. Similarly for unary
4772 operations. Don't do this if X has side-effects because
4773 those side effects might affect A or B and the "?" operation is
4774 a sequence point in ANSI. (We test for side effects later.) */
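/* Illustration (hypothetical source): for z = cond ? a + b : a; we can
   store A into the result unconditionally and add B only when COND is
   true, instead of evaluating A separately on both arms.  */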
4776 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4777 && operand_equal_p (TREE_OPERAND (exp, 2),
4778 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4779 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4780 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4781 && operand_equal_p (TREE_OPERAND (exp, 1),
4782 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4783 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4784 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4785 && operand_equal_p (TREE_OPERAND (exp, 2),
4786 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4787 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4788 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4789 && operand_equal_p (TREE_OPERAND (exp, 1),
4790 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4791 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4793 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4794 operation, do this as A + (X != 0). Similarly for other simple
4795 binary operators. */
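/* Illustration (hypothetical source): n = flag ? n + 1 : n; can be
   expanded as n + (flag != 0), where (flag != 0) comes from a
   store-flag instruction instead of a conditional branch.  */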
4796 if (singleton && binary_op
4797 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4798 && (TREE_CODE (binary_op) == PLUS_EXPR
4799 || TREE_CODE (binary_op) == MINUS_EXPR
4800 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4801 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4802 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4803 && integer_onep (TREE_OPERAND (binary_op, 1))
4804 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4807 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4808 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4809 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4810 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4813 /* If we had X ? A : A + 1, do this as A + (X == 0).
4815 We have to invert the truth value here and then put it
4816 back later if do_store_flag fails. We cannot simply copy
4817 TREE_OPERAND (exp, 0) to another variable and modify that
4818 because invert_truthvalue can modify the tree pointed to by its argument. */
4820 if (singleton == TREE_OPERAND (exp, 1))
4821 TREE_OPERAND (exp, 0)
4822 = invert_truthvalue (TREE_OPERAND (exp, 0));
4824 result = do_store_flag (TREE_OPERAND (exp, 0),
4825 (safe_from_p (temp, singleton)
4827 mode, BRANCH_COST <= 1);
4831 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4832 return expand_binop (mode, boptab, op1, result, temp,
4833 unsignedp, OPTAB_LIB_WIDEN);
4835 else if (singleton == TREE_OPERAND (exp, 1))
4836 TREE_OPERAND (exp, 0)
4837 = invert_truthvalue (TREE_OPERAND (exp, 0));
4841 op0 = gen_label_rtx ();
4843 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4847 /* If the target conflicts with the other operand of the
4848 binary op, we can't use it. Also, we can't use the target
4849 if it is a hard register, because evaluating the condition
4850 might clobber it. */
4852 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4853 || (GET_CODE (temp) == REG
4854 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4855 temp = gen_reg_rtx (mode);
4856 store_expr (singleton, temp, 0);
4859 expand_expr (singleton,
4860 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4861 if (cleanups_this_call)
4863 sorry ("aggregate value in COND_EXPR");
4864 cleanups_this_call = 0;
4866 if (singleton == TREE_OPERAND (exp, 1))
4867 jumpif (TREE_OPERAND (exp, 0), op0);
4869 jumpifnot (TREE_OPERAND (exp, 0), op0);
4871 if (binary_op && temp == 0)
4872 /* Just touch the other operand. */
4873 expand_expr (TREE_OPERAND (binary_op, 1),
4874 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4876 store_expr (build (TREE_CODE (binary_op), type,
4877 make_tree (type, temp),
4878 TREE_OPERAND (binary_op, 1)),
4881 store_expr (build1 (TREE_CODE (unary_op), type,
4882 make_tree (type, temp)),
4887 /* This is now done in jump.c and is better done there because it
4888 produces shorter register lifetimes. */
4890 /* Check for both possibilities, either constants or variables
4891 in registers (but not the same as the target!). If so, we can
4892 save branches by assigning one, branching, and assigning the other. */
4894 else if (temp && GET_MODE (temp) != BLKmode
4895 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4896 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4897 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4898 && DECL_RTL (TREE_OPERAND (exp, 1))
4899 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4900 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4901 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4902 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4903 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4904 && DECL_RTL (TREE_OPERAND (exp, 2))
4905 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4906 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4908 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4909 temp = gen_reg_rtx (mode);
4910 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4911 jumpifnot (TREE_OPERAND (exp, 0), op0);
4912 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4916 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4917 comparison operator. If we have one of these cases, set the
4918 output to A, branch on A (cse will merge these two references),
4919 then set the output to FOO. */
4921 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4922 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4923 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4924 TREE_OPERAND (exp, 1), 0)
4925 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4926 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4928 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4929 temp = gen_reg_rtx (mode);
4930 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4931 jumpif (TREE_OPERAND (exp, 0), op0);
4932 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4936 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4937 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4938 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4939 TREE_OPERAND (exp, 2), 0)
4940 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4941 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4943 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4944 temp = gen_reg_rtx (mode);
4945 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4946 jumpifnot (TREE_OPERAND (exp, 0), op0);
4947 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4952 op1 = gen_label_rtx ();
4953 jumpifnot (TREE_OPERAND (exp, 0), op0);
4955 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4957 expand_expr (TREE_OPERAND (exp, 1),
4958 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4959 if (cleanups_this_call)
4961 sorry ("aggregate value in COND_EXPR");
4962 cleanups_this_call = 0;
4966 emit_jump_insn (gen_jump (op1));
4970 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4972 expand_expr (TREE_OPERAND (exp, 2),
4973 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4976 if (cleanups_this_call)
4978 sorry ("aggregate value in COND_EXPR");
4979 cleanups_this_call = 0;
4985 cleanups_this_call = old_cleanups;
4991 /* Something needs to be initialized, but we didn't know
4992 where that thing was when building the tree. For example,
4993 it could be the return value of a function, or a parameter
4994 to a function which is laid down on the stack, or a temporary
4995 variable which must be passed by reference.
4997 We guarantee that the expression will either be constructed
4998 or copied into our original target. */
5000 tree slot = TREE_OPERAND (exp, 0);
5003 if (TREE_CODE (slot) != VAR_DECL)
5008 if (DECL_RTL (slot) != 0)
5010 target = DECL_RTL (slot);
5011 /* If we have already expanded the slot, don't do it again. */
5013 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5018 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5019 /* All temp slots at this level must not conflict. */
5020 preserve_temp_slots (target);
5021 DECL_RTL (slot) = target;
5025 /* I bet this needs to be done, and I bet that it needs to
5026 be above, inside the else clause. The reason is
5027 simple, how else is it going to get cleaned up? (mrs)
5029 The reason this probably did not work before, and was
5030 commented out, is that it was re-expanding already
5031 expanded target_exprs (target == 0 and DECL_RTL (slot)
5032 != 0) and also cleaning them up many times as well. :-( */
5034 /* Since SLOT is not known to the called function
5035 to belong to its stack frame, we must build an explicit
5036 cleanup. This case occurs when we must build up a reference
5037 to pass the reference as an argument. In this case,
5038 it is very likely that such a reference need not be built here. */
5041 if (TREE_OPERAND (exp, 2) == 0)
5042 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5043 if (TREE_OPERAND (exp, 2))
5044 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5045 cleanups_this_call);
5050 /* This case does occur when expanding a parameter which
5051 needs to be constructed on the stack. The target
5052 is the actual stack address that we want to initialize.
5053 The function we call will perform the cleanup in this case. */
5055 DECL_RTL (slot) = target;
5058 exp1 = TREE_OPERAND (exp, 1);
5059 /* Mark it as expanded. */
5060 TREE_OPERAND (exp, 1) = NULL_TREE;
5062 return expand_expr (exp1, target, tmode, modifier);
5067 tree lhs = TREE_OPERAND (exp, 0);
5068 tree rhs = TREE_OPERAND (exp, 1);
5069 tree noncopied_parts = 0;
5070 tree lhs_type = TREE_TYPE (lhs);
5072 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5073 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5074 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5075 TYPE_NONCOPIED_PARTS (lhs_type));
5076 while (noncopied_parts != 0)
5078 expand_assignment (TREE_VALUE (noncopied_parts),
5079 TREE_PURPOSE (noncopied_parts), 0, 0);
5080 noncopied_parts = TREE_CHAIN (noncopied_parts);
5087 /* If lhs is complex, expand calls in rhs before computing it.
5088 That's so we don't compute a pointer and save it over a call.
5089 If lhs is simple, compute it first so we can give it as a
5090 target if the rhs is just a call. This avoids an extra temp and copy
5091 and that prevents a partial-subsumption which makes bad code.
5092 Actually we could treat component_ref's of vars like vars. */
5094 tree lhs = TREE_OPERAND (exp, 0);
5095 tree rhs = TREE_OPERAND (exp, 1);
5096 tree noncopied_parts = 0;
5097 tree lhs_type = TREE_TYPE (lhs);
5101 if (TREE_CODE (lhs) != VAR_DECL
5102 && TREE_CODE (lhs) != RESULT_DECL
5103 && TREE_CODE (lhs) != PARM_DECL)
5104 preexpand_calls (exp);
5106 /* Check for |= or &= of a bitfield of size one into another bitfield
5107 of size 1. In this case, (unless we need the result of the
5108 assignment) we can do this more efficiently with a
5109 test followed by an assignment, if necessary.
5111 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5112 things change so we do, this code should be enhanced to support it. */
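/* Illustration (hypothetical one-bit fields a and b): x.a |= y.b can
   be expanded as if (y.b) x.a = 1; with no masking of an intermediate
   result.  */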
5115 && TREE_CODE (lhs) == COMPONENT_REF
5116 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5117 || TREE_CODE (rhs) == BIT_AND_EXPR)
5118 && TREE_OPERAND (rhs, 0) == lhs
5119 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5120 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5121 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5123 rtx label = gen_label_rtx ();
5125 do_jump (TREE_OPERAND (rhs, 1),
5126 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5127 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5128 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5129 (TREE_CODE (rhs) == BIT_IOR_EXPR
5131 : integer_zero_node)),
5133 do_pending_stack_adjust ();
5138 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5139 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5140 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5141 TYPE_NONCOPIED_PARTS (lhs_type));
5143 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5144 while (noncopied_parts != 0)
5146 expand_assignment (TREE_PURPOSE (noncopied_parts),
5147 TREE_VALUE (noncopied_parts), 0, 0);
5148 noncopied_parts = TREE_CHAIN (noncopied_parts);
5153 case PREINCREMENT_EXPR:
5154 case PREDECREMENT_EXPR:
5155 return expand_increment (exp, 0);
5157 case POSTINCREMENT_EXPR:
5158 case POSTDECREMENT_EXPR:
5159 /* Faster to treat as pre-increment if result is not used. */
5160 return expand_increment (exp, ! ignore);
5163 /* Are we taking the address of a nested function? */
5164 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5165 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5167 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5168 op0 = force_operand (op0, target);
5172 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5173 (modifier == EXPAND_INITIALIZER
5174 ? modifier : EXPAND_CONST_ADDRESS));
5175 if (GET_CODE (op0) != MEM)
5178 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5179 return XEXP (op0, 0);
5180 op0 = force_operand (XEXP (op0, 0), target);
5182 if (flag_force_addr && GET_CODE (op0) != REG)
5183 return force_reg (Pmode, op0);
5186 case ENTRY_VALUE_EXPR:
5189 /* COMPLEX type for Extended Pascal & Fortran */
5192 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5196 /* Get the rtx code of the operands. */
5197 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5198 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5201 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5203 prev = get_last_insn ();
5205 /* Tell flow that the whole of the destination is being set. */
5206 if (GET_CODE (target) == REG)
5207 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5209 /* Move the real (op0) and imaginary (op1) parts to their location. */
5210 emit_move_insn (gen_realpart (mode, target), op0);
5211 emit_move_insn (gen_imagpart (mode, target), op1);
5213 /* Complex construction should appear as a single unit. */
5220 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5221 return gen_realpart (mode, op0);
5224 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5225 return gen_imagpart (mode, op0);
5229 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5233 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5236 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5238 prev = get_last_insn ();
5240 /* Tell flow that the whole of the destination is being set. */
5241 if (GET_CODE (target) == REG)
5242 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5244 /* Store the realpart and the negated imagpart to target. */
5245 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5247 imag_t = gen_imagpart (mode, target);
5248 temp = expand_unop (mode, neg_optab,
5249 gen_imagpart (mode, op0), imag_t, 0);
5251 emit_move_insn (imag_t, temp);
5253 /* Conjugate should appear as a single unit. */
5260 op0 = CONST0_RTX (tmode);
5266 return (*lang_expand_expr) (exp, target, tmode, modifier);
5269 /* Here to do an ordinary binary operator, generating an instruction
5270 from the optab already placed in `this_optab'. */
5272 preexpand_calls (exp);
5273 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5275 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5276 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5278 temp = expand_binop (mode, this_optab, op0, op1, target,
5279 unsignedp, OPTAB_LIB_WIDEN);
5285 /* Return the alignment in bits of EXP, a pointer valued expression.
5286 But don't return more than MAX_ALIGN no matter what.
5287 The alignment returned is, by default, the alignment of the thing that
5288 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5290 Otherwise, look at the expression to see if we can do better, i.e., if the
5291 expression is actually pointing at an object whose alignment is tighter. */
5294 get_pointer_alignment (exp, max_align)
5298 unsigned align, inner;
5300 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5303 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5304 align = MIN (align, max_align);
5308 switch (TREE_CODE (exp))
5312 case NON_LVALUE_EXPR:
5313 exp = TREE_OPERAND (exp, 0);
5314 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5316 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5317 inner = MIN (inner, max_align);
5318 align = MAX (align, inner);
5322 /* If sum of pointer + int, restrict our maximum alignment to that
5323 imposed by the integer. If not, we can't do any better than ALIGN. */
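/* For example, a pointer known to be 64-bit aligned plus a constant
   byte offset of 4 can only be assumed to be 32-bit aligned.  */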
5325 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5328 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5333 exp = TREE_OPERAND (exp, 0);
5337 /* See what we are pointing at and look at its alignment. */
5338 exp = TREE_OPERAND (exp, 0);
5339 if (TREE_CODE (exp) == FUNCTION_DECL)
5340 align = MAX (align, FUNCTION_BOUNDARY);
5341 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5342 align = MAX (align, DECL_ALIGN (exp));
5343 #ifdef CONSTANT_ALIGNMENT
5344 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5345 align = CONSTANT_ALIGNMENT (exp, align);
5347 return MIN (align, max_align);
5355 /* Return the tree node and offset if a given argument corresponds to
5356 a string constant. */
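/* For example, given an argument tree for "hello" + 2, this returns
   the STRING_CST for "hello" and sets *PTR_OFFSET to 2; for the plain
   string the offset returned is zero.  */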
5359 string_constant (arg, ptr_offset)
5365 if (TREE_CODE (arg) == ADDR_EXPR
5366 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5368 *ptr_offset = integer_zero_node;
5369 return TREE_OPERAND (arg, 0);
5371 else if (TREE_CODE (arg) == PLUS_EXPR)
5373 tree arg0 = TREE_OPERAND (arg, 0);
5374 tree arg1 = TREE_OPERAND (arg, 1);
5379 if (TREE_CODE (arg0) == ADDR_EXPR
5380 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5383 return TREE_OPERAND (arg0, 0);
5385 else if (TREE_CODE (arg1) == ADDR_EXPR
5386 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5389 return TREE_OPERAND (arg1, 0);
5396 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5397 way, because it could contain a zero byte in the middle.
5398 TREE_STRING_LENGTH is the size of the character array, not the string.
5400 Unfortunately, string_constant can't access the values of const char
5401 arrays with initializers, so we cannot do so here either. */
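/* For example, for the literal "foo\0bar" the character array holds
   eight bytes (including the trailing null), but the C string length
   wanted here is 3.  */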
5411 src = string_constant (src, &offset_node);
5414 max = TREE_STRING_LENGTH (src);
5415 ptr = TREE_STRING_POINTER (src);
5416 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5418 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5419 compute the offset to the following null if we don't know where to
5420 start searching for it. */
5422 for (i = 0; i < max; i++)
5425 /* We don't know the starting offset, but we do know that the string
5426 has no internal zero bytes. We can assume that the offset falls
5427 within the bounds of the string; otherwise, the programmer deserves
5428 what he gets. Subtract the offset from the length of the string, and return that. */
5430 /* This would perhaps not be valid if we were dealing with named
5431 arrays in addition to literal string constants. */
5432 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5435 /* We have a known offset into the string. Start searching there for
5436 a null character. */
5437 if (offset_node == 0)
5441 /* Did we get a long long offset? If so, punt. */
5442 if (TREE_INT_CST_HIGH (offset_node) != 0)
5444 offset = TREE_INT_CST_LOW (offset_node);
5446 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
5448 if (offset < 0 || offset > max)
5450 warning ("offset outside bounds of constant string");
5453 /* Use strlen to search for the first zero byte. Since any strings
5454 constructed with build_string will have nulls appended, we win even
5455 if we get handed something like (char[4])"abcd".
5457 Since OFFSET is our starting index into the string, no further
5458 calculation is needed. */
5459 return size_int (strlen (ptr + offset));
5462 /* Expand an expression EXP that calls a built-in function,
5463 with result going to TARGET if that's convenient
5464 (and in mode MODE if that's convenient).
5465 SUBTARGET may be used as the target for computing one of EXP's operands.
5466 IGNORE is nonzero if the value is to be ignored. */
5469 expand_builtin (exp, target, subtarget, mode, ignore)
5473 enum machine_mode mode;
5476 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5477 tree arglist = TREE_OPERAND (exp, 1);
5480 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5481 optab builtin_optab;
5483 switch (DECL_FUNCTION_CODE (fndecl))
5488 /* build_function_call changes these into ABS_EXPR. */
5493 case BUILT_IN_FSQRT:
5494 /* If not optimizing, call the library function. */
5499 /* Arg could be wrong type if user redeclared this fcn wrong. */
5500 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5501 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5503 /* Stabilize and compute the argument. */
5504 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5505 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5507 exp = copy_node (exp);
5508 arglist = copy_node (arglist);
5509 TREE_OPERAND (exp, 1) = arglist;
5510 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5512 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5514 /* Make a suitable register to place result in. */
5515 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5520 switch (DECL_FUNCTION_CODE (fndecl))
5523 builtin_optab = sin_optab; break;
5525 builtin_optab = cos_optab; break;
5526 case BUILT_IN_FSQRT:
5527 builtin_optab = sqrt_optab; break;
5532 /* Compute into TARGET.
5533 Set TARGET to wherever the result comes back. */
5534 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5535 builtin_optab, op0, target, 0);
5537 /* If we were unable to expand via the builtin, stop the
5538 sequence (without outputting the insns) and break, causing
5539 a call to the library function. */
5546 /* Check the results by default. But if flag_fast_math is turned on,
5547 then assume sqrt will always be called with valid arguments. */
5549 if (! flag_fast_math)
5551 /* Don't define the builtin FP instructions
5552 if your machine is not IEEE. */
5553 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5556 lab1 = gen_label_rtx ();
5558 /* Test the result; if it is NaN, set errno=EDOM because
5559 the argument was not in the domain. */
5560 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5561 emit_jump_insn (gen_beq (lab1));
5565 #ifdef GEN_ERRNO_RTX
5566 rtx errno_rtx = GEN_ERRNO_RTX;
5569 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5572 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5575 /* We can't set errno=EDOM directly; let the library call do it.
5576 Pop the arguments right away in case the call gets deleted. */
5578 expand_call (exp, target, 0);
5585 /* Output the entire sequence. */
5586 insns = get_insns ();
5592 /* __builtin_apply_args returns a block of memory allocated on
5593 the stack into which is stored the arg pointer, structure
5594 value address, static chain, and all the registers that might
5595 possibly be used in performing a function call. The code is
5596 moved to the start of the function so the incoming values are saved. */
5598 case BUILT_IN_APPLY_ARGS:
5599 /* Don't do __builtin_apply_args more than once in a function.
5600 Save the result of the first call and reuse it. */
5601 if (apply_args_value != 0)
5602 return apply_args_value;
5604 /* When this function is called, it means that registers must be
5605 saved on entry to this function. So we migrate the
5606 call to the first insn of this function. */
5611 temp = expand_builtin_apply_args ();
5615 apply_args_value = temp;
5617 /* Put the sequence after the NOTE that starts the function.
5618 If this is inside a SEQUENCE, make the outer-level insn
5619 chain current, so the code is placed at the start of the function. */
5621 push_topmost_sequence ();
5622 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5623 pop_topmost_sequence ();
5627 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5628 FUNCTION with a copy of the parameters described by
5629 ARGUMENTS, and ARGSIZE. It returns a block of memory
5630 allocated on the stack into which is stored all the registers
5631 that might possibly be used for returning the result of a
5632 function. ARGUMENTS is the value returned by
5633 __builtin_apply_args. ARGSIZE is the number of bytes of
5634 arguments that must be copied. ??? How should this value be
5635 computed? We'll also need a safe worst case value for varargs functions. */
5637 case BUILT_IN_APPLY:
5639 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5640 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5641 || TREE_CHAIN (arglist) == 0
5642 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5643 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5644 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5652 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5653 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5655 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5658 /* __builtin_return (RESULT) causes the function to return the
5659 value described by RESULT. RESULT is address of the block of
5660 memory returned by __builtin_apply. */
5661 case BUILT_IN_RETURN:
5663 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5664 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5665 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5666 NULL_RTX, VOIDmode, 0));
5669 case BUILT_IN_SAVEREGS:
5670 /* Don't do __builtin_saveregs more than once in a function.
5671 Save the result of the first call and reuse it. */
5672 if (saveregs_value != 0)
5673 return saveregs_value;
5675 /* When this function is called, it means that registers must be
5676 saved on entry to this function. So we migrate the
5677 call to the first insn of this function. */
5680 rtx valreg, saved_valreg;
5682 /* Now really call the function. `expand_call' does not call
5683 expand_builtin, so there is no danger of infinite recursion here. */
5686 #ifdef EXPAND_BUILTIN_SAVEREGS
5687 /* Do whatever the machine needs done in this case. */
5688 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5690 /* The register where the function returns its value
5691 is likely to have something else in it, such as an argument.
5692 So preserve that register around the call. */
5693 if (value_mode != VOIDmode)
5695 valreg = hard_libcall_value (value_mode);
5696 saved_valreg = gen_reg_rtx (value_mode);
5697 emit_move_insn (saved_valreg, valreg);
5700 /* Generate the call, putting the value in a pseudo. */
5701 temp = expand_call (exp, target, ignore);
5703 if (value_mode != VOIDmode)
5704 emit_move_insn (valreg, saved_valreg);
5710 saveregs_value = temp;
5712 /* Put the sequence after the NOTE that starts the function.
5713 If this is inside a SEQUENCE, make the outer-level insn
5714 chain current, so the code is placed at the start of the function. */
5716 push_topmost_sequence ();
5717 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5718 pop_topmost_sequence ();
5722 /* __builtin_args_info (N) returns word N of the arg space info
5723 for the current function. The number and meanings of words
5724 are controlled by the definition of CUMULATIVE_ARGS. */
5725 case BUILT_IN_ARGS_INFO:
5727 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5729 int *word_ptr = (int *) &current_function_args_info;
5730 tree type, elts, result;
5732 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5733 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5734 __FILE__, __LINE__);
5738 tree arg = TREE_VALUE (arglist);
5739 if (TREE_CODE (arg) != INTEGER_CST)
5740 error ("argument of `__builtin_args_info' must be constant");
5743 int wordnum = TREE_INT_CST_LOW (arg);
5745 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5746 error ("argument of `__builtin_args_info' out of range");
5748 return GEN_INT (word_ptr[wordnum]);
5752 error ("missing argument in `__builtin_args_info'");
5757 for (i = 0; i < nwords; i++)
5758 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
5760 type = build_array_type (integer_type_node,
5761 build_index_type (build_int_2 (nwords, 0)));
5762 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5763 TREE_CONSTANT (result) = 1;
5764 TREE_STATIC (result) = 1;
5765 result = build (INDIRECT_REF, build_pointer_type (type), result);
5766 TREE_CONSTANT (result) = 1;
5767 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5771 /* Return the address of the first anonymous stack arg. */
5772 case BUILT_IN_NEXT_ARG:
5774 tree fntype = TREE_TYPE (current_function_decl);
5775 if (!(TYPE_ARG_TYPES (fntype) != 0
5776 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5777 != void_type_node)))
5779 error ("`va_start' used in function with fixed args");
5784 return expand_binop (Pmode, add_optab,
5785 current_function_internal_arg_pointer,
5786 current_function_arg_offset_rtx,
5787 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5789 case BUILT_IN_CLASSIFY_TYPE:
5792 tree type = TREE_TYPE (TREE_VALUE (arglist));
5793 enum tree_code code = TREE_CODE (type);
5794 if (code == VOID_TYPE)
5795 return GEN_INT (void_type_class);
5796 if (code == INTEGER_TYPE)
5797 return GEN_INT (integer_type_class);
5798 if (code == CHAR_TYPE)
5799 return GEN_INT (char_type_class);
5800 if (code == ENUMERAL_TYPE)
5801 return GEN_INT (enumeral_type_class);
5802 if (code == BOOLEAN_TYPE)
5803 return GEN_INT (boolean_type_class);
5804 if (code == POINTER_TYPE)
5805 return GEN_INT (pointer_type_class);
5806 if (code == REFERENCE_TYPE)
5807 return GEN_INT (reference_type_class);
5808 if (code == OFFSET_TYPE)
5809 return GEN_INT (offset_type_class);
5810 if (code == REAL_TYPE)
5811 return GEN_INT (real_type_class);
5812 if (code == COMPLEX_TYPE)
5813 return GEN_INT (complex_type_class);
5814 if (code == FUNCTION_TYPE)
5815 return GEN_INT (function_type_class);
5816 if (code == METHOD_TYPE)
5817 return GEN_INT (method_type_class);
5818 if (code == RECORD_TYPE)
5819 return GEN_INT (record_type_class);
5820 if (code == UNION_TYPE)
5821 return GEN_INT (union_type_class);
5822 if (code == ARRAY_TYPE)
5823 return GEN_INT (array_type_class);
5824 if (code == STRING_TYPE)
5825 return GEN_INT (string_type_class);
5826 if (code == SET_TYPE)
5827 return GEN_INT (set_type_class);
5828 if (code == FILE_TYPE)
5829 return GEN_INT (file_type_class);
5830 if (code == LANG_TYPE)
5831 return GEN_INT (lang_type_class);
5833 return GEN_INT (no_type_class);
5835 case BUILT_IN_CONSTANT_P:
5839 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5840 ? const1_rtx : const0_rtx);
5842 case BUILT_IN_FRAME_ADDRESS:
5843 /* The argument must be a nonnegative integer constant.
5844 It counts the number of frames to scan up the stack.
5845 The value is the address of that frame. */
5846 case BUILT_IN_RETURN_ADDRESS:
5847 /* The argument must be a nonnegative integer constant.
5848 It counts the number of frames to scan up the stack.
5849 The value is the return address saved in that frame. */
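/* For example, __builtin_return_address (0) is the return address of
   the current function, and __builtin_frame_address (1) is, as far as
   it can be determined, the frame address of the caller.  */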
5851 /* Warning about missing arg was already issued. */
5853 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5855 error ("invalid arg to `__builtin_return_address'");
5858 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5860 error ("invalid arg to `__builtin_return_address'");
5865 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5866 rtx tem = frame_pointer_rtx;
5869 /* Some machines need special handling before we can access arbitrary
5870 frames. For example, on the sparc, we must first flush all
5871 register windows to the stack. */
5872 #ifdef SETUP_FRAME_ADDRESSES
5873 SETUP_FRAME_ADDRESSES ();
5876 /* On the sparc, the return address is not in the frame, it is
5877 in a register. There is no way to access it off of the current
5878 frame pointer, but it can be accessed off the previous frame
5879 pointer by reading the value from the register window save area. */
5881 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
5882 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
5886 /* Scan back COUNT frames to the specified frame. */
5887 for (i = 0; i < count; i++)
5889 /* Assume the dynamic chain pointer is in the word that
5890 the frame address points to, unless otherwise specified. */
5891 #ifdef DYNAMIC_CHAIN_ADDRESS
5892 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5894 tem = memory_address (Pmode, tem);
5895 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5898 /* For __builtin_frame_address, return what we've got. */
5899 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5902 /* For __builtin_return_address,
5903 get the return address from that frame. */
5904 #ifdef RETURN_ADDR_RTX
5905 return RETURN_ADDR_RTX (count, tem);
5907 tem = memory_address (Pmode,
5908 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5909 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5913 case BUILT_IN_ALLOCA:
5915 /* Arg could be non-integer if user redeclared this fcn wrong. */
5916 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5918 current_function_calls_alloca = 1;
5919 /* Compute the argument. */
5920 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5922 /* Allocate the desired space. */
5923 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5925 /* Record the new stack level for nonlocal gotos. */
5926 if (nonlocal_goto_handler_slot != 0)
5927 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5931 /* If not optimizing, call the library function. */
5936 /* Arg could be non-integer if user redeclared this fcn wrong. */
5937 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5940 /* Compute the argument. */
5941 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5942 /* Compute ffs, into TARGET if possible.
5943 Set TARGET to wherever the result comes back. */
5944 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5945 ffs_optab, op0, target, 1);
5950 case BUILT_IN_STRLEN:
5951 /* If not optimizing, call the library function. */
5956 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5957 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5961 tree src = TREE_VALUE (arglist);
5962 tree len = c_strlen (src);
5965 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5967 rtx result, src_rtx, char_rtx;
5968 enum machine_mode insn_mode = value_mode, char_mode;
5969 enum insn_code icode;
5971 /* If the length is known, just return it. */
5973 return expand_expr (len, target, mode, 0);
5975 /* If SRC is not a pointer type, don't do this operation inline. */
5979 /* Call a function if we can't compute strlen in the right mode. */
5981 while (insn_mode != VOIDmode)
5983 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5984 if (icode != CODE_FOR_nothing)
5987 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5989 if (insn_mode == VOIDmode)
5992 /* Make a place to write the result of the instruction. */
5995 && GET_CODE (result) == REG
5996 && GET_MODE (result) == insn_mode
5997 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5998 result = gen_reg_rtx (insn_mode);
6000 /* Make sure the operands are acceptable to the predicates. */
6002 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6003 result = gen_reg_rtx (insn_mode);
6005 src_rtx = memory_address (BLKmode,
6006 expand_expr (src, NULL_RTX, Pmode,
6008 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6009 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6011 char_rtx = const0_rtx;
6012 char_mode = insn_operand_mode[(int)icode][2];
6013 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6014 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6016 emit_insn (GEN_FCN (icode) (result,
6017 gen_rtx (MEM, BLKmode, src_rtx),
6018 char_rtx, GEN_INT (align)));
6020 /* Return the value in the proper mode for this function. */
6021 if (GET_MODE (result) == value_mode)
6023 else if (target != 0)
6025 convert_move (target, result, 0);
6029 return convert_to_mode (value_mode, result, 0);
6032 case BUILT_IN_STRCPY:
6033 /* If not optimizing, call the library function. */
6038 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6039 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6040 || TREE_CHAIN (arglist) == 0
6041 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6045 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6050 len = size_binop (PLUS_EXPR, len, integer_one_node);
6052 chainon (arglist, build_tree_list (NULL_TREE, len));
6056 case BUILT_IN_MEMCPY:
6057 /* If not optimizing, call the library function. */
6062 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6063 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6064 || TREE_CHAIN (arglist) == 0
6065 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6066 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6067 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6071 tree dest = TREE_VALUE (arglist);
6072 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6073 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6076 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6078 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6079 rtx dest_rtx, dest_mem, src_mem;
6081 /* If either SRC or DEST is not a pointer type, don't do
6082 this operation in-line. */
6083 if (src_align == 0 || dest_align == 0)
6085 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6086 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6090 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6091 dest_mem = gen_rtx (MEM, BLKmode,
6092 memory_address (BLKmode, dest_rtx));
6093 src_mem = gen_rtx (MEM, BLKmode,
6094 memory_address (BLKmode,
6095 expand_expr (src, NULL_RTX,
6099 /* Copy word part most expediently. */
6100 emit_block_move (dest_mem, src_mem,
6101 expand_expr (len, NULL_RTX, VOIDmode, 0),
6102 MIN (src_align, dest_align));
6106 /* These comparison functions need an instruction that returns an actual
6107 index. An ordinary compare that just sets the condition codes is not enough. */
6109 #ifdef HAVE_cmpstrsi
6110 case BUILT_IN_STRCMP:
6111 /* If not optimizing, call the library function. */
6116 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6117 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6118 || TREE_CHAIN (arglist) == 0
6119 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6121 else if (!HAVE_cmpstrsi)
6124 tree arg1 = TREE_VALUE (arglist);
6125 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6129 len = c_strlen (arg1);
6131 len = size_binop (PLUS_EXPR, integer_one_node, len);
6132 len2 = c_strlen (arg2);
6134 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6136 /* If we don't have a constant length for the first, use the length
6137 of the second, if we know it. We don't require a constant for
6138 this case; some cost analysis could be done if both are available
6139 but neither is constant. For now, assume they're equally cheap.
6141 If both strings have constant lengths, use the smaller. This
6142 could arise if optimization results in strcpy being called with
6143 two fixed strings, or if the code was machine-generated. We should
6144 add some code to the `memcmp' handler below to deal with such
6145 situations, someday. */
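/* Illustrative example (not from the original source): for
   strcmp ("abcd", "xy"), len is 5 and len2 is 3, so at most
   min (5, 3) = 3 bytes need to be compared, since the strings must
   differ at or before the shorter string's terminating null.  */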
6146 if (!len || TREE_CODE (len) != INTEGER_CST)
6153 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6155 if (tree_int_cst_lt (len2, len))
6159 chainon (arglist, build_tree_list (NULL_TREE, len));
6163 case BUILT_IN_MEMCMP:
6164 /* If not optimizing, call the library function. */
6169 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6170 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6171 || TREE_CHAIN (arglist) == 0
6172 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6173 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6174 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6176 else if (!HAVE_cmpstrsi)
6179 tree arg1 = TREE_VALUE (arglist);
6180 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6181 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6185 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6187 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6188 enum machine_mode insn_mode
6189 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6191 /* If we don't have POINTER_TYPE, call the function. */
6192 if (arg1_align == 0 || arg2_align == 0)
6194 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6195 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6199 /* Make a place to write the result of the instruction. */
6202 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6203 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6204 result = gen_reg_rtx (insn_mode);
6206 emit_insn (gen_cmpstrsi (result,
6207 gen_rtx (MEM, BLKmode,
6208 expand_expr (arg1, NULL_RTX, Pmode,
6210 gen_rtx (MEM, BLKmode,
6211 expand_expr (arg2, NULL_RTX, Pmode,
6213 expand_expr (len, NULL_RTX, VOIDmode, 0),
6214 GEN_INT (MIN (arg1_align, arg2_align))));
6216 /* Return the value in the proper mode for this function. */
6217 mode = TYPE_MODE (TREE_TYPE (exp));
6218 if (GET_MODE (result) == mode)
6220 else if (target != 0)
6222 convert_move (target, result, 0);
6226 return convert_to_mode (mode, result, 0);
6229 case BUILT_IN_STRCMP:
6230 case BUILT_IN_MEMCMP:
6234 default: /* just do library call, if unknown builtin */
6235 error ("built-in function `%s' not currently supported",
6236 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6239 /* The switch statement above can drop through to cause the function
6240 to be called normally. */
6242 return expand_call (exp, target, ignore);
6245 /* Built-in functions to perform an untyped call and return. */
6247 /* For each register that may be used for calling a function, this
6248 gives a mode used to copy the register's value. VOIDmode indicates
6249 the register is not used for calling a function. If the machine
6250 has register windows, this gives only the outbound registers.
6251 INCOMING_REGNO gives the corresponding inbound register. */
6252 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
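/* Rough layout of the block whose slots these modes describe (a sketch;
   the registers and offsets are target-dependent):

       [incoming arg pointer (Pmode)]
       [structure value address (Pmode), if struct_value_rtx is set]
       [one slot per argument register, each aligned to its mode]

   apply_args_size computes the total size of such a block.  */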
6254 /* For each register that may be used for returning values, this gives
6255 a mode used to copy the register's value. VOIDmode indicates the
6256 register is not used for returning values. If the machine has
6257 register windows, this gives only the outbound registers.
6258 INCOMING_REGNO gives the corresponding inbound register. */
6259 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6261 /* Return the size required for the block returned by __builtin_apply_args,
6262 and initialize apply_args_mode. */
6266 static int size = -1;
6268 enum machine_mode mode;
6270 /* The values computed by this function never change. */
6273 /* The first value is the incoming arg-pointer. */
6274 size = GET_MODE_SIZE (Pmode);
6276 /* The second value is the structure value address unless this is
6277 passed as an "invisible" first argument. */
6278 if (struct_value_rtx)
6279 size += GET_MODE_SIZE (Pmode);
6281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6282 if (FUNCTION_ARG_REGNO_P (regno))
6284 /* Search for the proper mode for copying this register's
6285 value. I'm not sure this is right, but it works so far. */
6286 enum machine_mode best_mode = VOIDmode;
6288 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6290 mode = GET_MODE_WIDER_MODE (mode))
6291 if (HARD_REGNO_MODE_OK (regno, mode)
6292 && HARD_REGNO_NREGS (regno, mode) == 1)
6295 if (best_mode == VOIDmode)
6296 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6298 mode = GET_MODE_WIDER_MODE (mode))
6299 if (HARD_REGNO_MODE_OK (regno, mode)
6300 && (mov_optab->handlers[(int) mode].insn_code
6301 != CODE_FOR_nothing))
6305 if (mode == VOIDmode)
6308 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6309 if (size % align != 0)
6310 size = CEIL (size, align) * align;
6311 size += GET_MODE_SIZE (mode);
6312 apply_args_mode[regno] = mode;
6315 apply_args_mode[regno] = VOIDmode;
6320 /* Return the size required for the block returned by __builtin_apply,
6321 and initialize apply_result_mode. */
6323 apply_result_size ()
6325 static int size = -1;
6327 enum machine_mode mode;
6329 /* The values computed by this function never change. */
6334 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6335 if (FUNCTION_VALUE_REGNO_P (regno))
6337 /* Search for the proper mode for copying this register's
6338 value. I'm not sure this is right, but it works so far. */
6339 enum machine_mode best_mode = VOIDmode;
6341 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6343 mode = GET_MODE_WIDER_MODE (mode))
6344 if (HARD_REGNO_MODE_OK (regno, mode))
6347 if (best_mode == VOIDmode)
6348 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6350 mode = GET_MODE_WIDER_MODE (mode))
6351 if (HARD_REGNO_MODE_OK (regno, mode)
6352 && (mov_optab->handlers[(int) mode].insn_code
6353 != CODE_FOR_nothing))
6357 if (mode == VOIDmode)
6360 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6361 if (size % align != 0)
6362 size = CEIL (size, align) * align;
6363 size += GET_MODE_SIZE (mode);
6364 apply_result_mode[regno] = mode;
6367 apply_result_mode[regno] = VOIDmode;
6369 /* Allow targets that use untyped_call and untyped_return to override
6370 the size so that machine-specific information can be stored here. */
6371 #ifdef APPLY_RESULT_SIZE
6372 size = APPLY_RESULT_SIZE;
6378 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6379 /* Create a vector describing the result block RESULT. If SAVEP is true,
6380 the result block is used to save the values; otherwise it is used to
6381 restore the values. */
6383 result_vector (savep, result)
6387 int regno, size, align, nelts;
6388 enum machine_mode mode;
6390 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6393 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6394 if ((mode = apply_result_mode[regno]) != VOIDmode)
6396 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6397 if (size % align != 0)
6398 size = CEIL (size, align) * align;
6399 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6400 mem = change_address (result, mode,
6401 plus_constant (XEXP (result, 0), size));
6402 savevec[nelts++] = (savep
6403 ? gen_rtx (SET, VOIDmode, mem, reg)
6404 : gen_rtx (SET, VOIDmode, reg, mem));
6405 size += GET_MODE_SIZE (mode);
6407 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6409 #endif /* HAVE_untyped_call or HAVE_untyped_return */
6412 /* Save the state required to perform an untyped call with the same
6413 arguments as were passed to the current function. */
6415 expand_builtin_apply_args ()
6418 int size, align, regno;
6419 enum machine_mode mode;
6421 /* Create a block where the arg-pointer, structure value address,
6422 and argument registers can be saved. */
6423 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6425 /* Walk past the arg-pointer and structure value address. */
6426 size = GET_MODE_SIZE (Pmode);
6427 if (struct_value_rtx)
6428 size += GET_MODE_SIZE (Pmode);
6430 /* Save each register used in calling a function to the block. */
6431 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6432 if ((mode = apply_args_mode[regno]) != VOIDmode)
6434 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6435 if (size % align != 0)
6436 size = CEIL (size, align) * align;
6437 emit_move_insn (change_address (registers, mode,
6438 plus_constant (XEXP (registers, 0),
6440 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6441 size += GET_MODE_SIZE (mode);
6444 /* Save the arg pointer to the block. */
6445 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6446 copy_to_reg (virtual_incoming_args_rtx));
6447 size = GET_MODE_SIZE (Pmode);
6449 /* Save the structure value address unless this is passed as an
6450 "invisible" first argument. */
6451 if (struct_value_incoming_rtx)
6453 emit_move_insn (change_address (registers, Pmode,
6454 plus_constant (XEXP (registers, 0),
6456 copy_to_reg (struct_value_incoming_rtx));
6457 size += GET_MODE_SIZE (Pmode);
6460 /* Return the address of the block. */
6461 return copy_addr_to_reg (XEXP (registers, 0));
6464 /* Perform an untyped call and save the state required to perform an
6465 untyped return of whatever value was returned by the given function. */
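/* For orientation, a typical source-level use of these builtins (an
   illustrative sketch, not code from this file) forwards the current
   function's arguments to another function:

       void *args = __builtin_apply_args ();
       void *res  = __builtin_apply (other_fn, args, 64);
       __builtin_return (res);

   where 64 is a caller-chosen upper bound on the argument block size and
   other_fn is a hypothetical function with a compatible signature.  */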
6467 expand_builtin_apply (function, arguments, argsize)
6468 rtx function, arguments, argsize;
6470 int size, align, regno;
6471 enum machine_mode mode;
6472 rtx incoming_args, result, reg, dest, call_insn;
6473 rtx old_stack_level = 0;
6476 /* Create a block where the return registers can be saved. */
6477 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6479 /* ??? The argsize value should be adjusted here. */
6481 /* Fetch the arg pointer from the ARGUMENTS block. */
6482 incoming_args = gen_reg_rtx (Pmode);
6483 emit_move_insn (incoming_args,
6484 gen_rtx (MEM, Pmode, arguments));
6485 #ifndef STACK_GROWS_DOWNWARD
6486 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6487 incoming_args, 0, OPTAB_LIB_WIDEN);
6490 /* Perform postincrements before actually calling the function. */
6493 /* Push a new argument block and copy the arguments. */
6494 do_pending_stack_adjust ();
6495 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6497 /* Push a block of memory onto the stack to store the memory arguments.
6498 Save the address in a register, and copy the memory arguments. ??? I
6499 haven't figured out how the calling convention macros affect this,
6500 but it's likely that the source and/or destination addresses in
6501 the block copy will need updating in machine-specific ways. */
6502 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6503 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6504 gen_rtx (MEM, BLKmode, incoming_args),
6506 PARM_BOUNDARY / BITS_PER_UNIT);
6508 /* Refer to the argument block. */
6510 arguments = gen_rtx (MEM, BLKmode, arguments);
6512 /* Walk past the arg-pointer and structure value address. */
6513 size = GET_MODE_SIZE (Pmode);
6514 if (struct_value_rtx)
6515 size += GET_MODE_SIZE (Pmode);
6517 /* Restore each of the registers previously saved. Make USE insns
6518 for each of these registers for use in making the call. */
6519 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6520 if ((mode = apply_args_mode[regno]) != VOIDmode)
6522 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6523 if (size % align != 0)
6524 size = CEIL (size, align) * align;
6525 reg = gen_rtx (REG, mode, regno);
6526 emit_move_insn (reg,
6527 change_address (arguments, mode,
6528 plus_constant (XEXP (arguments, 0),
6531 push_to_sequence (use_insns);
6532 emit_insn (gen_rtx (USE, VOIDmode, reg));
6533 use_insns = get_insns ();
6535 size += GET_MODE_SIZE (mode);
6538 /* Restore the structure value address unless this is passed as an
6539 "invisible" first argument. */
6540 size = GET_MODE_SIZE (Pmode);
6541 if (struct_value_rtx)
6543 rtx value = gen_reg_rtx (Pmode);
6544 emit_move_insn (value,
6545 change_address (arguments, Pmode,
6546 plus_constant (XEXP (arguments, 0),
6548 emit_move_insn (struct_value_rtx, value);
6549 if (GET_CODE (struct_value_rtx) == REG)
6551 push_to_sequence (use_insns);
6552 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6553 use_insns = get_insns ();
6556 size += GET_MODE_SIZE (Pmode);
6559 /* All arguments and registers used for the call are set up by now! */
6560 function = prepare_call_address (function, NULL_TREE, &use_insns);
6562 /* Ensure the address is valid. A SYMBOL_REF is already valid, so it needs
6563 no copy, and we don't want to force it into a register as an optimization,
6564 because prepare_call_address already did that if it should be done. */
6565 if (GET_CODE (function) != SYMBOL_REF)
6566 function = memory_address (FUNCTION_MODE, function);
6568 /* Generate the actual call instruction and save the return value. */
6569 #ifdef HAVE_untyped_call
6570 if (HAVE_untyped_call)
6571 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6572 result, result_vector (1, result)));
6575 #ifdef HAVE_call_value
6576 if (HAVE_call_value)
6580 /* Locate the unique return register. It is not possible to
6581 express a call that sets more than one return register using
6582 call_value; use untyped_call for that. In fact, untyped_call
6583 only needs to save the return registers in the given block. */
6584 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6585 if ((mode = apply_result_mode[regno]) != VOIDmode)
6588 abort (); /* HAVE_untyped_call required. */
6589 valreg = gen_rtx (REG, mode, regno);
6592 emit_call_insn (gen_call_value (valreg,
6593 gen_rtx (MEM, FUNCTION_MODE, function),
6594 const0_rtx, NULL_RTX, const0_rtx));
6596 emit_move_insn (change_address (result, GET_MODE (valreg),
6604 /* Find the CALL insn we just emitted and write the USE insns before it. */
6605 for (call_insn = get_last_insn ();
6606 call_insn && GET_CODE (call_insn) != CALL_INSN;
6607 call_insn = PREV_INSN (call_insn))
6613 /* Put the USE insns before the CALL. */
6614 emit_insns_before (use_insns, call_insn);
6616 /* Restore the stack. */
6617 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6619 /* Return the address of the result block. */
6620 return copy_addr_to_reg (XEXP (result, 0));
6623 /* Perform an untyped return. */
6625 expand_builtin_return (result)
6628 int size, align, regno;
6629 enum machine_mode mode;
6633 apply_result_size ();
6634 result = gen_rtx (MEM, BLKmode, result);
6636 #ifdef HAVE_untyped_return
6637 if (HAVE_untyped_return)
6639 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6645 /* Restore the return value and note that each value is used. */
6647 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6648 if ((mode = apply_result_mode[regno]) != VOIDmode)
6650 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6651 if (size % align != 0)
6652 size = CEIL (size, align) * align;
6653 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6654 emit_move_insn (reg,
6655 change_address (result, mode,
6656 plus_constant (XEXP (result, 0),
6659 push_to_sequence (use_insns);
6660 emit_insn (gen_rtx (USE, VOIDmode, reg));
6661 use_insns = get_insns ();
6663 size += GET_MODE_SIZE (mode);
6666 /* Put the USE insns before the return. */
6667 emit_insns (use_insns);
6669 /* Return whatever values were restored by jumping directly to the end of the function. */
6671 expand_null_return ();
6674 /* Expand code for a post- or pre- increment or decrement
6675 and return the RTX for the result.
6676 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
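/* Illustrative example: for `i++' used for its value, POST is 1 and the
   returned rtx holds the old value of `i'; for `++i', POST is 0 and the
   returned rtx holds the incremented value.  */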
6679 expand_increment (exp, post)
6683 register rtx op0, op1;
6684 register rtx temp, value;
6685 register tree incremented = TREE_OPERAND (exp, 0);
6686 optab this_optab = add_optab;
6688 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6689 int op0_is_copy = 0;
6691 /* Stabilize any component ref that might need to be
6692 evaluated more than once below. */
6694 || TREE_CODE (incremented) == BIT_FIELD_REF
6695 || (TREE_CODE (incremented) == COMPONENT_REF
6696 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6697 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6698 incremented = stabilize_reference (incremented);
6700 /* Compute the operands as RTX.
6701 Note whether OP0 is the actual lvalue or a copy of it:
6702 I believe it is a copy iff it is a register or subreg
6703 and insns were generated in computing it. */
6705 temp = get_last_insn ();
6706 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6708 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6709 in place but instead must do sign- or zero-extension during assignment,
6710 so we copy it into a new register and let the code below use it as
6713 Note that we can safely modify this SUBREG since it is known not to be
6714 shared (it was made by the expand_expr call above). */
6716 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6717 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6719 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6720 && temp != get_last_insn ());
6721 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6723 /* Decide whether incrementing or decrementing. */
6724 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6725 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6726 this_optab = sub_optab;
6728 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6729 then we cannot just increment OP0. We must therefore contrive to
6730 increment the original value. Then, for postincrement, we can return
6731 OP0 since it is a copy of the old value. For preincrement, we want
6732 to always expand here, since this generates better or equivalent code. */
6733 if (!post || op0_is_copy)
6735 /* This is the easiest way to increment the value wherever it is.
6736 Problems with multiple evaluation of INCREMENTED are prevented
6737 because either (1) it is a component_ref or preincrement,
6738 in which case it was stabilized above, or (2) it is an array_ref
6739 with constant index in an array in a register, which is
6740 safe to reevaluate. */
6741 tree newexp = build ((this_optab == add_optab
6742 ? PLUS_EXPR : MINUS_EXPR),
6745 TREE_OPERAND (exp, 1));
6746 temp = expand_assignment (incremented, newexp, ! post, 0);
6747 return post ? op0 : temp;
6750 /* Convert decrement by a constant into a negative increment. */
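/* Illustrative example: for `p--' where `p' is an `int *', OP1 is the
   constant 4 (on a 32-bit target), so the subtraction becomes an addition
   of -4, letting a single add insn be queued below.  */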
6751 if (this_optab == sub_optab
6752 && GET_CODE (op1) == CONST_INT)
6754 op1 = GEN_INT (- INTVAL (op1));
6755 this_optab = add_optab;
6760 /* We have a true reference to the value in OP0.
6761 If there is an insn to add or subtract in this mode, queue it. */
6763 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6764 op0 = stabilize (op0);
6767 icode = (int) this_optab->handlers[(int) mode].insn_code;
6768 if (icode != (int) CODE_FOR_nothing
6769 /* Make sure that OP0 is valid for operands 0 and 1
6770 of the insn we want to queue. */
6771 && (*insn_operand_predicate[icode][0]) (op0, mode)
6772 && (*insn_operand_predicate[icode][1]) (op0, mode))
6774 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6775 op1 = force_reg (mode, op1);
6777 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6781 /* Preincrement, or we can't increment with one simple insn. */
6783 /* Save a copy of the value before inc or dec, to return it later. */
6784 temp = value = copy_to_reg (op0);
6786 /* Arrange to return the incremented value. */
6787 /* Copy the rtx because expand_binop will protect from the queue,
6788 and the results of that would be invalid for us to return
6789 if our caller does emit_queue before using our result. */
6790 temp = copy_rtx (value = op0);
6792 /* Increment however we can. */
6793 op1 = expand_binop (mode, this_optab, value, op1, op0,
6794 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6795 /* Make sure the value is stored into OP0. */
6797 emit_move_insn (op0, op1);
6802 /* Expand all function calls contained within EXP, innermost ones first.
6803 But don't look within expressions that have sequence points.
6804 For each CALL_EXPR, record the rtx for its value
6805 in the CALL_EXPR_RTL field. */
6808 preexpand_calls (exp)
6811 register int nops, i;
6812 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6814 if (! do_preexpand_calls)
6817 /* Only expressions and references can contain calls. */
6819 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6822 switch (TREE_CODE (exp))
6825 /* Do nothing if already expanded. */
6826 if (CALL_EXPR_RTL (exp) != 0)
6829 /* Do nothing to built-in functions. */
6830 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6831 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6832 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6833 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6838 case TRUTH_ANDIF_EXPR:
6839 case TRUTH_ORIF_EXPR:
6840 /* If we find one of these, then we can be sure
6841 the adjust will be done for it (since it makes jumps).
6842 Do it now, so that if this is inside an argument
6843 of a function, we don't get the stack adjustment
6844 after some other args have already been pushed. */
6845 do_pending_stack_adjust ();
6850 case WITH_CLEANUP_EXPR:
6854 if (SAVE_EXPR_RTL (exp) != 0)
6858 nops = tree_code_length[(int) TREE_CODE (exp)];
6859 for (i = 0; i < nops; i++)
6860 if (TREE_OPERAND (exp, i) != 0)
6862 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6863 if (type == 'e' || type == '<' || type == '1' || type == '2'
6865 preexpand_calls (TREE_OPERAND (exp, i));
6869 /* At the start of a function, record that we have no previously-pushed
6870 arguments waiting to be popped. */
6873 init_pending_stack_adjust ()
6875 pending_stack_adjust = 0;
6878 /* When exiting from function, if safe, clear out any pending stack adjust
6879 so the adjustment won't get done. */
6882 clear_pending_stack_adjust ()
6884 #ifdef EXIT_IGNORE_STACK
6885 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6886 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6887 && ! flag_inline_functions)
6888 pending_stack_adjust = 0;
6892 /* Pop any previously-pushed arguments that have not been popped yet. */
6895 do_pending_stack_adjust ()
6897 if (inhibit_defer_pop == 0)
6899 if (pending_stack_adjust != 0)
6900 adjust_stack (GEN_INT (pending_stack_adjust));
6901 pending_stack_adjust = 0;
6905 /* Expand all cleanups up to OLD_CLEANUPS.
6906 Needed here, and also for language-dependent calls. */
6909 expand_cleanups_to (old_cleanups)
6912 while (cleanups_this_call != old_cleanups)
6914 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6915 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6919 /* Expand conditional expressions. */
6921 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6922 LABEL is an rtx of code CODE_LABEL, in this function and all the
6926 jumpifnot (exp, label)
6930 do_jump (exp, label, NULL_RTX);
6933 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6940 do_jump (exp, NULL_RTX, label);
6943 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6944 the result is zero, or IF_TRUE_LABEL if the result is one.
6945 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6946 meaning fall through in that case.
6948 do_jump always does any pending stack adjust except when it does not
6949 actually perform a jump. An example where there is no jump
6950 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6952 This function is responsible for optimizing cases such as
6953 &&, || and comparison operators in EXP. */
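/* A sketch of the recursion for `a && b': do_jump (a, if_false_label, 0)
   is emitted first, then do_jump (b, if_false_label, if_true_label), so a
   false `a' short-circuits past the evaluation of `b'.  */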
6956 do_jump (exp, if_false_label, if_true_label)
6958 rtx if_false_label, if_true_label;
6960 register enum tree_code code = TREE_CODE (exp);
6961 /* Some cases need to create a label to jump to
6962 in order to properly fall through.
6963 These cases set DROP_THROUGH_LABEL nonzero. */
6964 rtx drop_through_label = 0;
6978 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6984 /* This is not true with #pragma weak */
6986 /* The address of something can never be zero. */
6988 emit_jump (if_true_label);
6993 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6994 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6995 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6998 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
7000 if ((TYPE_PRECISION (TREE_TYPE (exp))
7001 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7003 case NON_LVALUE_EXPR:
7004 case REFERENCE_EXPR:
7009 /* These cannot change zero->non-zero or vice versa. */
7010 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7014 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
7015 a test, and can take more if the test is eliminated. */
7017 /* Reduce to minus. */
7018 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7019 TREE_OPERAND (exp, 0),
7020 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7021 TREE_OPERAND (exp, 1))));
7022 /* Process as MINUS. */
7026 /* Non-zero iff operands of minus differ. */
7027 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7028 TREE_OPERAND (exp, 0),
7029 TREE_OPERAND (exp, 1)),
7034 /* If we are AND'ing with a small constant, do this comparison in the
7035 smallest type that fits. If the machine doesn't have comparisons
7036 that small, it will be converted back to the wider comparison.
7037 This helps if we are testing the sign bit of a narrower object.
7038 combine can't do this for us because it can't know whether a
7039 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
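/* Illustrative example: a test of `x & 0xff' with `x' an int can be
   performed as a QImode (one-byte) comparison against zero when the target
   has a QImode compare insn.  */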
7041 if (! SLOW_BYTE_ACCESS
7042 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7043 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7044 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7045 && (type = type_for_size (i + 1, 1)) != 0
7046 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7047 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7048 != CODE_FOR_nothing))
7050 do_jump (convert (type, exp), if_false_label, if_true_label);
7055 case TRUTH_NOT_EXPR:
7056 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7059 case TRUTH_ANDIF_EXPR:
7060 if (if_false_label == 0)
7061 if_false_label = drop_through_label = gen_label_rtx ();
7062 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7063 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7066 case TRUTH_ORIF_EXPR:
7067 if (if_true_label == 0)
7068 if_true_label = drop_through_label = gen_label_rtx ();
7069 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7070 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7074 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7077 do_pending_stack_adjust ();
7078 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7085 int bitsize, bitpos, unsignedp;
7086 enum machine_mode mode;
7091 /* Get description of this reference. We don't actually care
7092 about the underlying object here. */
7093 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7094 &mode, &unsignedp, &volatilep);
7096 type = type_for_size (bitsize, unsignedp);
7097 if (! SLOW_BYTE_ACCESS
7098 && type != 0 && bitsize >= 0
7099 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7100 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7101 != CODE_FOR_nothing))
7103 do_jump (convert (type, exp), if_false_label, if_true_label);
7110 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7111 if (integer_onep (TREE_OPERAND (exp, 1))
7112 && integer_zerop (TREE_OPERAND (exp, 2)))
7113 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7115 else if (integer_zerop (TREE_OPERAND (exp, 1))
7116 && integer_onep (TREE_OPERAND (exp, 2)))
7117 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7121 register rtx label1 = gen_label_rtx ();
7122 drop_through_label = gen_label_rtx ();
7123 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7124 /* Now the THEN-expression. */
7125 do_jump (TREE_OPERAND (exp, 1),
7126 if_false_label ? if_false_label : drop_through_label,
7127 if_true_label ? if_true_label : drop_through_label);
7128 /* In case the do_jump just above never jumps. */
7129 do_pending_stack_adjust ();
7130 emit_label (label1);
7131 /* Now the ELSE-expression. */
7132 do_jump (TREE_OPERAND (exp, 2),
7133 if_false_label ? if_false_label : drop_through_label,
7134 if_true_label ? if_true_label : drop_through_label);
7139 if (integer_zerop (TREE_OPERAND (exp, 1)))
7140 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7141 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7144 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7145 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7147 comparison = compare (exp, EQ, EQ);
7151 if (integer_zerop (TREE_OPERAND (exp, 1)))
7152 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7153 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7156 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7157 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7159 comparison = compare (exp, NE, NE);
7163 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7165 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7166 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7168 comparison = compare (exp, LT, LTU);
7172 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7174 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7175 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7177 comparison = compare (exp, LE, LEU);
7181 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7183 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7184 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7186 comparison = compare (exp, GT, GTU);
7190 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7192 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7193 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7195 comparison = compare (exp, GE, GEU);
7200 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7202 /* This is not needed any more and causes poor code since it causes
7203 comparisons and tests from non-SI objects to have different code sequences. */
7205 /* Copy to register to avoid generating bad insns by cse
7206 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7207 if (!cse_not_expected && GET_CODE (temp) == MEM)
7208 temp = copy_to_reg (temp);
7210 do_pending_stack_adjust ();
7211 if (GET_CODE (temp) == CONST_INT)
7212 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7213 else if (GET_CODE (temp) == LABEL_REF)
7214 comparison = const_true_rtx;
7215 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7216 && !can_compare_p (GET_MODE (temp)))
7217 /* Note swapping the labels gives us not-equal. */
7218 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7219 else if (GET_MODE (temp) != VOIDmode)
7220 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7221 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7222 GET_MODE (temp), NULL_RTX, 0);
7227 /* Do any postincrements in the expression that was tested. */
7230 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7231 straight into a conditional jump instruction as the jump condition.
7232 Otherwise, all the work has been done already. */
7234 if (comparison == const_true_rtx)
7237 emit_jump (if_true_label);
7239 else if (comparison == const0_rtx)
7242 emit_jump (if_false_label);
7244 else if (comparison)
7245 do_jump_for_compare (comparison, if_false_label, if_true_label);
7249 if (drop_through_label)
7251 /* If do_jump produces code that might be jumped around,
7252 do any stack adjusts from that code, before the place
7253 where control merges in. */
7254 do_pending_stack_adjust ();
7255 emit_label (drop_through_label);
7259 /* Given a comparison expression EXP for values too wide to be compared
7260 with one insn, test the comparison and jump to the appropriate label.
7261 The code of EXP is ignored; we always test GT if SWAP is 0,
7262 and LT if SWAP is 1. */
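/* Illustrative example: comparing two DImode values on a 32-bit target
   first compares the high-order words (signed or unsigned as requested),
   and only when those are equal falls through to compare the low-order
   words, which are always compared unsigned.  */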
7265 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7268 rtx if_false_label, if_true_label;
7270 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7271 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7272 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7273 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7274 rtx drop_through_label = 0;
7275 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7278 if (! if_true_label || ! if_false_label)
7279 drop_through_label = gen_label_rtx ();
7280 if (! if_true_label)
7281 if_true_label = drop_through_label;
7282 if (! if_false_label)
7283 if_false_label = drop_through_label;
7285 /* Compare a word at a time, high order first. */
7286 for (i = 0; i < nwords; i++)
7289 rtx op0_word, op1_word;
7291 if (WORDS_BIG_ENDIAN)
7293 op0_word = operand_subword_force (op0, i, mode);
7294 op1_word = operand_subword_force (op1, i, mode);
7298 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7299 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7302 /* All but high-order word must be compared as unsigned. */
7303 comp = compare_from_rtx (op0_word, op1_word,
7304 (unsignedp || i > 0) ? GTU : GT,
7305 unsignedp, word_mode, NULL_RTX, 0);
7306 if (comp == const_true_rtx)
7307 emit_jump (if_true_label);
7308 else if (comp != const0_rtx)
7309 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7311 /* Consider lower words only if these are equal. */
7312 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7314 if (comp == const_true_rtx)
7315 emit_jump (if_false_label);
7316 else if (comp != const0_rtx)
7317 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7321 emit_jump (if_false_label);
7322 if (drop_through_label)
7323 emit_label (drop_through_label);
7326 /* Compare OP0 with OP1, word at a time, in mode MODE.
7327 UNSIGNEDP says to do unsigned comparison.
7328 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7331 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7332 enum machine_mode mode;
7335 rtx if_false_label, if_true_label;
7337 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7338 rtx drop_through_label = 0;
7341 if (! if_true_label || ! if_false_label)
7342 drop_through_label = gen_label_rtx ();
7343 if (! if_true_label)
7344 if_true_label = drop_through_label;
7345 if (! if_false_label)
7346 if_false_label = drop_through_label;
7348 /* Compare a word at a time, high order first. */
7349 for (i = 0; i < nwords; i++)
7352 rtx op0_word, op1_word;
7354 if (WORDS_BIG_ENDIAN)
7356 op0_word = operand_subword_force (op0, i, mode);
7357 op1_word = operand_subword_force (op1, i, mode);
7361 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7362 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7365 /* All but high-order word must be compared as unsigned. */
7366 comp = compare_from_rtx (op0_word, op1_word,
7367 (unsignedp || i > 0) ? GTU : GT,
7368 unsignedp, word_mode, NULL_RTX, 0);
7369 if (comp == const_true_rtx)
7370 emit_jump (if_true_label);
7371 else if (comp != const0_rtx)
7372 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7374 /* Consider lower words only if these are equal. */
7375 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7377 if (comp == const_true_rtx)
7378 emit_jump (if_false_label);
7379 else if (comp != const0_rtx)
7380 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7384 emit_jump (if_false_label);
7385 if (drop_through_label)
7386 emit_label (drop_through_label);
7389 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7390 with one insn, test the comparison and jump to the appropriate label. */
7393 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7395 rtx if_false_label, if_true_label;
7397 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7398 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7399 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7400 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7402 rtx drop_through_label = 0;
7404 if (! if_false_label)
7405 drop_through_label = if_false_label = gen_label_rtx ();
7407 for (i = 0; i < nwords; i++)
7409 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7410 operand_subword_force (op1, i, mode),
7411 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7412 word_mode, NULL_RTX, 0);
7413 if (comp == const_true_rtx)
7414 emit_jump (if_false_label);
7415 else if (comp != const0_rtx)
7416 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7420 emit_jump (if_true_label);
7421 if (drop_through_label)
7422 emit_label (drop_through_label);
7425 /* Jump according to whether OP0 is 0.
7426 We assume that OP0 has an integer mode that is too wide
7427 for the available compare insns. */
7430 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7432 rtx if_false_label, if_true_label;
7434 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7436 rtx drop_through_label = 0;
7438 if (! if_false_label)
7439 drop_through_label = if_false_label = gen_label_rtx ();
7441 for (i = 0; i < nwords; i++)
7443 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7445 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7446 if (comp == const_true_rtx)
7447 emit_jump (if_false_label);
7448 else if (comp != const0_rtx)
7449 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7453 emit_jump (if_true_label);
7454 if (drop_through_label)
7455 emit_label (drop_through_label);
7458 /* Given a comparison expression in rtl form, output conditional branches to
7459 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7462 do_jump_for_compare (comparison, if_false_label, if_true_label)
7463 rtx comparison, if_false_label, if_true_label;
7467 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7468 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7473 emit_jump (if_false_label);
7475 else if (if_false_label)
7478 rtx prev = PREV_INSN (get_last_insn ());
7481 /* Output the branch with the opposite condition. Then try to invert
7482 what is generated. If more than one insn is a branch, or if the
7483 branch is not the last insn written, abort. If we can't invert
7484 the branch, make a true label, redirect this jump to that,
7485 emit a jump to the false label and define the true label. */
7487 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7488 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7492 /* Here we get the insn before what was just emitted.
7493 On some machines, emitting the branch can discard
7494 the previous compare insn and emit a replacement. */
7496 /* If there's only one preceding insn... */
7497 insn = get_insns ();
7499 insn = NEXT_INSN (prev);
7501 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7502 if (GET_CODE (insn) == JUMP_INSN)
7509 if (branch != get_last_insn ())
7512 if (! invert_jump (branch, if_false_label))
7514 if_true_label = gen_label_rtx ();
7515 redirect_jump (branch, if_true_label);
7516 emit_jump (if_false_label);
7517 emit_label (if_true_label);
7522 /* Generate code for a comparison expression EXP
7523 (including code to compute the values to be compared)
7524 and set (CC0) according to the result.
7525 SIGNED_CODE should be the rtx operation for this comparison for
7526 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7528 We force a stack adjustment unless there are currently
7529 things pushed on the stack that aren't yet used. */
7532 compare (exp, signed_code, unsigned_code)
7534 enum rtx_code signed_code, unsigned_code;
7537 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7539 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7540 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7541 register enum machine_mode mode = TYPE_MODE (type);
7542 int unsignedp = TREE_UNSIGNED (type);
7543 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7545 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7547 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7548 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7551 /* Like compare but expects the values to compare as two rtx's.
7552 The decision as to signed or unsigned comparison must be made by the caller.
7554 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7557 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7558 size of MODE should be used. */
7561 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7562 register rtx op0, op1;
7565 enum machine_mode mode;
7571 /* If one operand is constant, make it the second one. Only do this
7572 if the other operand is not constant as well. */
7574 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7575 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7580 code = swap_condition (code);
7585 op0 = force_not_mem (op0);
7586 op1 = force_not_mem (op1);
7589 do_pending_stack_adjust ();
7591 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7592 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7596 /* There's no need to do this now that combine.c can eliminate lots of
7597 sign extensions. This can be less efficient in certain cases on other machines. */
7600 /* If this is a signed equality comparison, we can do it as an
7601 unsigned comparison since zero-extension is cheaper than sign
7602 extension and comparisons with zero are done as unsigned. This is
7603 the case even on machines that can do fast sign extension, since
7604 zero-extension is easier to combine with other operations than
7605 sign-extension is. If we are comparing against a constant, we must
7606 convert it to what it would look like unsigned. */
7607 if ((code == EQ || code == NE) && ! unsignedp
7608 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7610 if (GET_CODE (op1) == CONST_INT
7611 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7612 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7617 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7619 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7622 /* Generate code to calculate EXP using a store-flag instruction
7623 and return an rtx for the result. EXP is either a comparison
7624 or a TRUTH_NOT_EXPR whose operand is a comparison.
7626 If TARGET is nonzero, store the result there if convenient.
7628 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
7631 Return zero if there is no suitable set-flag instruction
7632 available on this machine.
7634 Once expand_expr has been called on the arguments of the comparison,
7635 we are committed to doing the store flag, since it is not safe to
7636 re-evaluate the expression. We emit the store-flag insn by calling
7637 emit_store_flag, but only expand the arguments if we have a reason
7638 to believe that emit_store_flag will be successful. If we think that
7639 it will, but it isn't, we have to simulate the store-flag with a
7640 set/jump/set sequence. */
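/* A sketch of the set/jump/set fallback mentioned above:

       target = 1;  compare op0, op1;  branch over the next insn if true;
       target = 0;

   with the constants 0 and 1 exchanged when the result must be inverted.  */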
7643 do_store_flag (exp, target, mode, only_cheap)
7646 enum machine_mode mode;
7650 tree arg0, arg1, type;
7652 enum machine_mode operand_mode;
7656 enum insn_code icode;
7657 rtx subtarget = target;
7658 rtx result, label, pattern, jump_pat;
7660 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7661 result at the end. We can't simply invert the test since it would
7662 have already been inverted if it were valid. This case occurs for
7663 some floating-point comparisons. */
7665 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7666 invert = 1, exp = TREE_OPERAND (exp, 0);
7668 arg0 = TREE_OPERAND (exp, 0);
7669 arg1 = TREE_OPERAND (exp, 1);
7670 type = TREE_TYPE (arg0);
7671 operand_mode = TYPE_MODE (type);
7672 unsignedp = TREE_UNSIGNED (type);
7674 /* We won't bother with BLKmode store-flag operations because it would mean
7675 passing a lot of information to emit_store_flag. */
7676 if (operand_mode == BLKmode)
7682 /* Get the rtx comparison code to use. We know that EXP is a comparison
7683 operation of some type. Some comparisons against 1 and -1 can be
7684 converted to comparisons with zero. Do so here so that the tests
7685 below will be aware that we have a comparison with zero. These
7686 tests will not catch constants in the first operand, but constants
7687 are rarely passed as the first operand. */
7689 switch (TREE_CODE (exp))
7698 if (integer_onep (arg1))
7699 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7701 code = unsignedp ? LTU : LT;
7704 if (integer_all_onesp (arg1))
7705 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7707 code = unsignedp ? LEU : LE;
7710 if (integer_all_onesp (arg1))
7711 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7713 code = unsignedp ? GTU : GT;
7716 if (integer_onep (arg1))
7717 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7719 code = unsignedp ? GEU : GE;
7725 /* Put a constant second. */
7726 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7728 tem = arg0; arg0 = arg1; arg1 = tem;
7729 code = swap_condition (code);
7732 /* If this is an equality or inequality test of a single bit, we can
7733 do this by shifting the bit being tested to the low-order bit and
7734 masking the result with the constant 1. If the condition was EQ,
7735 we xor it with 1. This does not require an scc insn and is faster
7736 than an scc insn even if we have it. */
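/* Illustrative example: `(x & 8) != 0' becomes `(x >> 3) & 1'; for the EQ
   form the result is additionally XORed with 1.  */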
7738 if ((code == NE || code == EQ)
7739 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7740 && integer_pow2p (TREE_OPERAND (arg0, 1))
7741 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7743 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7744 NULL_RTX, VOIDmode, 0)));
7746 if (subtarget == 0 || GET_CODE (subtarget) != REG
7747 || GET_MODE (subtarget) != operand_mode
7748 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7751 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7754 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7755 size_int (bitnum), target, 1);
7757 if (GET_MODE (op0) != mode)
7758 op0 = convert_to_mode (mode, op0, 1);
7760 if (bitnum != TYPE_PRECISION (type) - 1)
7761 op0 = expand_and (op0, const1_rtx, target);
7763 if ((code == EQ && ! invert) || (code == NE && invert))
7764 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7770 /* Now see if we are likely to be able to do this. Return if not. */
7771 if (! can_compare_p (operand_mode))
7773 icode = setcc_gen_code[(int) code];
7774 if (icode == CODE_FOR_nothing
7775 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7777 /* We can only do this if it is one of the special cases that
7778 can be handled without an scc insn. */
7779 if ((code == LT && integer_zerop (arg1))
7780 || (! only_cheap && code == GE && integer_zerop (arg1)))
7782 else if (BRANCH_COST >= 0
7783 && ! only_cheap && (code == NE || code == EQ)
7784 && TREE_CODE (type) != REAL_TYPE
7785 && ((abs_optab->handlers[(int) operand_mode].insn_code
7786 != CODE_FOR_nothing)
7787 || (ffs_optab->handlers[(int) operand_mode].insn_code
7788 != CODE_FOR_nothing)))
7794 preexpand_calls (exp);
7795 if (subtarget == 0 || GET_CODE (subtarget) != REG
7796 || GET_MODE (subtarget) != operand_mode
7797 || ! safe_from_p (subtarget, arg1))
7800 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7801 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7804 target = gen_reg_rtx (mode);
7806 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7807 because, if the emit_store_flag does anything it will succeed and
7808 OP0 and OP1 will not be used subsequently. */
7810 result = emit_store_flag (target, code,
7811 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7812 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7813 operand_mode, unsignedp, 1);
7818 result = expand_binop (mode, xor_optab, result, const1_rtx,
7819 result, 0, OPTAB_LIB_WIDEN);
7823 /* If this failed, we have to do this with set/compare/jump/set code. */
7824 if (target == 0 || GET_CODE (target) != REG
7825 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7826 target = gen_reg_rtx (GET_MODE (target));
7828 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7829 result = compare_from_rtx (op0, op1, code, unsignedp,
7830 operand_mode, NULL_RTX, 0);
7831 if (GET_CODE (result) == CONST_INT)
7832 return (((result == const0_rtx && ! invert)
7833 || (result != const0_rtx && invert))
7834 ? const0_rtx : const1_rtx);
7836 label = gen_label_rtx ();
7837 if (bcc_gen_fctn[(int) code] == 0)
7840 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7841 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7847 /* Generate a tablejump instruction (used for switch statements). */
7849 #ifdef HAVE_tablejump
7851 /* INDEX is the value being switched on, with the lowest value
7852 in the table already subtracted.
7853 MODE is its expected mode (needed if INDEX is constant).
7854 RANGE is the length of the jump table.
7855 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7857 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7858 index value is out of range. */
7861 do_tablejump (index, mode, range, table_label, default_label)
7862 rtx index, range, table_label, default_label;
7863 enum machine_mode mode;
7865 register rtx temp, vector;
7867 /* Do an unsigned comparison (in the proper mode) between the index
7868 expression and the value which represents the length of the range.
7869 Since we just finished subtracting the lower bound of the range
7870 from the index expression, this comparison allows us to simultaneously
7871 check that the original index expression value is both greater than
7872 or equal to the minimum value of the range and less than or equal to
7873 the maximum value of the range. */
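/* In effect (a sketch): with the lower bound already subtracted, the single
   unsigned test `(unsigned) (index - low) > (high - low)' rejects both
   index < low (which wraps around to a large unsigned value) and
   index > high.  */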
7875 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
7876 emit_jump_insn (gen_bltu (default_label));
7878 /* If index is in range, it must fit in Pmode.
7879 Convert to Pmode so we can index with it. */
7881 index = convert_to_mode (Pmode, index, 1);
7883 /* If flag_force_addr were to affect this address
7884 it could interfere with the tricky assumptions made
7885 about addresses that contain label-refs,
7886 which may be valid only very near the tablejump itself. */
7887 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7888 GET_MODE_SIZE, because this indicates how large insns are. The other
7889 uses should all be Pmode, because they are addresses. This code
7890 could fail if addresses and insns are not the same size. */
7891 index = memory_address_noforce
7893 gen_rtx (PLUS, Pmode,
7894 gen_rtx (MULT, Pmode, index,
7895 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7896 gen_rtx (LABEL_REF, Pmode, table_label)));
7897 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7898 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7899 RTX_UNCHANGING_P (vector) = 1;
7900 convert_move (temp, vector, 0);
7902 emit_jump_insn (gen_tablejump (temp, table_label));
7904 #ifndef CASE_VECTOR_PC_RELATIVE
7905 /* If we are generating PIC code or if the table is PC-relative, the
7906 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7912 #endif /* HAVE_tablejump */