1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
/* NOTE(review): This file is a partial, line-numbered listing of GCC's
   expr.c (1992 vintage); many original lines are elided, so several
   #ifdef/#else/#endif pairs below appear unbalanced in this excerpt.
   Do not "fix" the preprocessor structure without the full source. */
/* Round X up to a multiple of Y, then divide by Y (ceiling division). */
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
40 #ifdef STACK_GROWS_DOWNWARD
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
/* Default stack-push addressing mode when the target doesn't override it:
   pre-decrement for downward-growing stacks, pre-increment otherwise. */
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
50 #define STACK_PUSH_CODE PRE_INC
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
87 static rtx saveregs_value;
89 /* This structure is used by move_by_pieces to describe the move to
/* NOTE(review): most fields of struct move_by_pieces are elided here;
   only explicit_inc_from is visible. See move_by_pieces below for the
   fields actually used (to_addr, from_addr, autinc_*, reverse, offset, len). */
101 int explicit_inc_from;
/* Forward declarations for this file's static helpers (K&R-era PROTO macro). */
107 static rtx enqueue_insn PROTO((rtx, rtx));
108 static int queued_subexp_p PROTO((rtx));
109 static void init_queue PROTO((void));
110 static void move_by_pieces PROTO((rtx, rtx, int, int));
111 static int move_by_pieces_ninsns PROTO((unsigned int, int));
112 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
113 struct move_by_pieces *));
114 static void group_insns PROTO((rtx));
115 static void store_constructor PROTO((tree, rtx));
116 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
117 enum machine_mode, int, int, int));
118 static tree save_noncopied_parts PROTO((tree, tree));
119 static tree init_noncopied_parts PROTO((tree, tree));
120 static int safe_from_p PROTO((rtx, tree));
121 static int fixed_type_p PROTO((tree));
122 static int get_pointer_alignment PROTO((tree, unsigned));
123 static tree string_constant PROTO((tree, tree *));
124 static tree c_strlen PROTO((tree));
125 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
126 static rtx expand_increment PROTO((tree, int));
127 static void preexpand_calls PROTO((tree));
128 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
129 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
130 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
131 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
132 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
133 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
135 /* Record for each mode whether we can move a register directly to or
136 from an object of that mode in memory. If we can't, we won't try
137 to use that mode directly when accessing a field of that mode. */
139 static char direct_load[NUM_MACHINE_MODES];
140 static char direct_store[NUM_MACHINE_MODES];
142 /* MOVE_RATIO is the number of move instructions that is better than
146 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
149 /* A value of around 6 would minimize code size; infinity would minimize
151 #define MOVE_RATIO 15
155 /* This array records the insn_code of insns to perform block moves. */
156 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
158 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
160 #ifndef SLOW_UNALIGNED_ACCESS
161 #define SLOW_UNALIGNED_ACCESS 0
164 /* This is run once per compilation to set up which modes can be used
165 directly in memory and to initialize the block move optab. */
/* NOTE(review): the function header line (presumably `init_expr_once ()`)
   and several body lines are elided from this listing.
   Strategy visible below: emit a scratch (set reg mem) insn, then for every
   machine mode probe each hard register with recog() to fill in the
   direct_load[]/direct_store[] tables, and seed movstr_optab[] with
   whichever movstrMM patterns the target provides. */
171 enum machine_mode mode;
172 /* Try indexing by frame ptr and try by stack ptr.
173 It is known that on the Convex the stack ptr isn't a valid index.
174 With luck, one or the other is valid on any machine. */
175 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
176 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
179 insn = emit_insn (gen_rtx (SET, 0, 0));
180 pat = PATTERN (insn);
182 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
183 mode = (enum machine_mode) ((int) mode + 1))
189 direct_load[(int) mode] = direct_store[(int) mode] = 0;
190 PUT_MODE (mem, mode);
191 PUT_MODE (mem1, mode);
193 /* See if there is some register that can be used in this mode and
194 directly loaded or stored from memory. */
196 if (mode != VOIDmode && mode != BLKmode)
197 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
198 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
201 if (! HARD_REGNO_MODE_OK (regno, mode))
204 reg = gen_rtx (REG, mode, regno);
/* Probe load from stack-ptr-based MEM ... (SET_SRC assignment elided). */
207 SET_DEST (pat) = reg;
208 if (recog (pat, insn, &num_clobbers) >= 0)
209 direct_load[(int) mode] = 1;
/* ... and load from frame-ptr-based MEM. */
211 SET_SRC (pat) = mem1;
212 SET_DEST (pat) = reg;
213 if (recog (pat, insn, &num_clobbers) >= 0)
214 direct_load[(int) mode] = 1;
/* Probe stores to both MEMs (SET_SRC = reg assignment elided). */
217 SET_DEST (pat) = mem;
218 if (recog (pat, insn, &num_clobbers) >= 0)
219 direct_store[(int) mode] = 1;
222 SET_DEST (pat) = mem1;
223 if (recog (pat, insn, &num_clobbers) >= 0)
224 direct_store[(int) mode] = 1;
/* Default every mode to "no block-move insn", then override with the
   target's movstr patterns (each guarded by an elided #ifdef HAVE_movstrMM). */
227 movstr_optab[(int) mode] = CODE_FOR_nothing;
234 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
238 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
242 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
246 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
250 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
254 /* This is run at the start of compiling a function. */
/* NOTE(review): function header elided — presumably `init_expr ()`.
   Resets this file's per-function expansion state to a clean slate. */
261 pending_stack_adjust = 0;
262 inhibit_defer_pop = 0;
263 cleanups_this_call = 0;
268 /* Save all variables describing the current status into the structure *P.
269 This is used before starting a nested function. */
/* NOTE(review): function header and parameter declaration elided
   (presumably `save_expr_status (p)` taking a struct function *).
   Copies the per-function globals into *p, then reinitializes them
   so the nested function starts fresh. */
275 /* Instead of saving the postincrement queue, empty it. */
278 p->pending_stack_adjust = pending_stack_adjust;
279 p->inhibit_defer_pop = inhibit_defer_pop;
280 p->cleanups_this_call = cleanups_this_call;
281 p->saveregs_value = saveregs_value;
282 p->forced_labels = forced_labels;
284 pending_stack_adjust = 0;
285 inhibit_defer_pop = 0;
286 cleanups_this_call = 0;
291 /* Restore all variables describing the current status from the structure *P.
292 This is used after a nested function. */
/* Mirror image of save_expr_status: copies the saved fields of *p back
   into the per-function globals. (Parameter declaration and braces are
   elided from this listing.) */
295 restore_expr_status (p)
298 pending_stack_adjust = p->pending_stack_adjust;
299 inhibit_defer_pop = p->inhibit_defer_pop;
300 cleanups_this_call = p->cleanups_this_call;
301 saveregs_value = p->saveregs_value;
302 forced_labels = p->forced_labels;
305 /* Manage the queue of increment instructions to be output
306 for POSTINCREMENT_EXPR expressions, etc. */
/* Head of the singly linked list of QUEUED rtxs; threaded through
   QUEUED_NEXT (see emit_queue / protect_from_queue below). */
308 static rtx pending_chain;
310 /* Queue up to increment (or change) VAR later. BODY says how:
311 BODY should be the same thing you would pass to emit_insn
312 to increment right away. It will go to emit_insn later on.
314 The value is a QUEUED expression to be used in place of VAR
315 where you want to guarantee the pre-incrementation value of VAR. */
/* Prepends a new QUEUED node (var, insn=0, copy=0, body, next) onto
   pending_chain and returns it. (Parameter declarations elided.) */
318 enqueue_insn (var, body)
321 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
322 var, NULL_RTX, NULL_RTX, body, pending_chain);
323 return pending_chain;
326 /* Use protect_from_queue to convert a QUEUED expression
327 into something that you can put immediately into an instruction.
328 If the queued incrementation has not happened yet,
329 protect_from_queue returns the variable itself.
330 If the incrementation has happened, protect_from_queue returns a temp
331 that contains a copy of the old value of the variable.
333 Any time an rtx which might possibly be a QUEUED is to be put
334 into an instruction, it must be passed through protect_from_queue first.
335 QUEUED expressions are not meaningful in instructions.
337 Do not pass a value through protect_from_queue and then hold
338 on to it for a while before putting it in an instruction!
339 If the queue is flushed in between, incorrect code will result. */
/* NOTE(review): several body lines (braces, a return, the non-QUEUED
   dispatch) are elided from this listing. X is the rtx to sanitize;
   MODIFY is nonzero when the caller intends to write X. */
342 protect_from_queue (x, modify)
346 register RTX_CODE code = GET_CODE (x);
348 #if 0 /* A QUEUED can hang around after the queue is forced out. */
349 /* Shortcut for most common case. */
350 if (pending_chain == 0)
356 /* A special hack for read access to (MEM (QUEUED ...))
357 to facilitate use of autoincrement.
358 Make a copy of the contents of the memory location
359 rather than a copy of the address, but not
360 if the value is of mode BLKmode. */
361 if (code == MEM && GET_MODE (x) != BLKmode
362 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
364 register rtx y = XEXP (x, 0);
365 XEXP (x, 0) = QUEUED_VAR (y);
/* If the queued insn already fired, snapshot the memory word into a
   fresh pseudo placed before that insn (insertion point elided). */
368 register rtx temp = gen_reg_rtx (GET_MODE (x));
369 emit_insn_before (gen_move_insn (temp, x),
375 /* Otherwise, recursively protect the subexpressions of all
376 the kinds of rtx's that can contain a QUEUED. */
378 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
379 else if (code == PLUS || code == MULT)
381 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
382 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
/* From here down, X itself is a QUEUED node. */
386 /* If the increment has not happened, use the variable itself. */
387 if (QUEUED_INSN (x) == 0)
388 return QUEUED_VAR (x);
389 /* If the increment has happened and a pre-increment copy exists,
391 if (QUEUED_COPY (x) != 0)
392 return QUEUED_COPY (x);
393 /* The increment has happened but we haven't set up a pre-increment copy.
394 Set one up now, and use it. */
395 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
396 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
398 return QUEUED_COPY (x);
401 /* Return nonzero if X contains a QUEUED expression:
402 if it contains anything that will be altered by a queued increment.
403 We handle only combinations of MEM, PLUS, MINUS and MULT operators
404 since memory addresses generally contain only those. */
/* NOTE(review): the function header, the QUEUED base case, and the
   switch/case labels for each operator are elided; only the recursive
   unary (MEM) and binary (PLUS/MINUS/MULT) legs are visible. */
410 register enum rtx_code code = GET_CODE (x);
416 return queued_subexp_p (XEXP (x, 0));
420 return queued_subexp_p (XEXP (x, 0))
421 || queued_subexp_p (XEXP (x, 1));
426 /* Perform all the pending incrementations. */
/* NOTE(review): header (presumably `emit_queue ()`) and local `p` decl
   elided. Drains pending_chain: emits each queued body and records the
   resulting insn in QUEUED_INSN so protect_from_queue can detect it. */
432 while (p = pending_chain)
434 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
435 pending_chain = QUEUED_NEXT (p);
446 /* Copy data from FROM to TO, where the machine modes are not the same.
447 Both modes may be integer, or both may be floating.
448 UNSIGNEDP should be nonzero if FROM is an unsigned type.
449 This causes zero-extension instead of sign-extension. */
/* NOTE(review): large portions of this function (braces, returns,
   #else/#endif lines, abort() calls, the float-conversion dispatch
   switches) are elided from this listing. The visible structure:
   float<->float named patterns, then libcall fallback, then the
   multiword-integer cases, then single-word truncation/extension. */
452 convert_move (to, from, unsignedp)
453 register rtx to, from;
456 enum machine_mode to_mode = GET_MODE (to);
457 enum machine_mode from_mode = GET_MODE (from);
458 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
459 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
463 /* rtx code for making an equivalent value. */
464 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
466 to = protect_from_queue (to, 1);
467 from = protect_from_queue (from, 0);
469 if (to_real != from_real)
472 /* If FROM is a SUBREG that indicates that we have already done at least
473 the required extension, strip it. We don't handle such SUBREGs as
476 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
477 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
478 >= GET_MODE_SIZE (to_mode))
479 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
480 from = gen_lowpart (to_mode, from), from_mode = to_mode;
482 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
485 if (to_mode == from_mode
486 || (from_mode == VOIDmode && CONSTANT_P (from)))
488 emit_move_insn (to, from);
/* NOTE(review): apparent copy-paste bug below — the guard is
   HAVE_extendqfhf2 but the condition tests HAVE_extendqfsf2 and emits
   CODE_FOR_extendqfsf2 for a QFmode->HFmode conversion. Presumably all
   three should say extendqfhf2 (as in later GCC releases) — verify
   against the full source before changing. */
494 #ifdef HAVE_extendqfhf2
495 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
497 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
501 #ifdef HAVE_extendqfsf2
502 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
504 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
508 #ifdef HAVE_extendqfdf2
509 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
511 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
515 #ifdef HAVE_extendqfxf2
516 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
518 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
522 #ifdef HAVE_extendqftf2
523 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
525 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
530 #ifdef HAVE_extendhfsf2
531 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
533 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
537 #ifdef HAVE_extendhfdf2
538 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
540 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
544 #ifdef HAVE_extendhfxf2
545 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
547 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
551 #ifdef HAVE_extendhftf2
552 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
554 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
559 #ifdef HAVE_extendsfdf2
560 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
562 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
566 #ifdef HAVE_extendsfxf2
567 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
569 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
573 #ifdef HAVE_extendsftf2
574 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
576 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
580 #ifdef HAVE_extenddfxf2
581 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
583 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
587 #ifdef HAVE_extenddftf2
588 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
590 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
/* Float truncations, same pattern as the extensions above. */
595 #ifdef HAVE_trunchfqf2
596 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
598 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
602 #ifdef HAVE_truncsfqf2
603 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
605 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
609 #ifdef HAVE_truncdfqf2
610 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
612 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
616 #ifdef HAVE_truncxfqf2
617 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
619 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
623 #ifdef HAVE_trunctfqf2
624 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
626 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
630 #ifdef HAVE_truncsfhf2
631 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
633 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
637 #ifdef HAVE_truncdfhf2
638 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
640 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
644 #ifdef HAVE_truncxfhf2
645 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
647 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
651 #ifdef HAVE_trunctfhf2
652 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
654 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
658 #ifdef HAVE_truncdfsf2
659 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
661 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
665 #ifdef HAVE_truncxfsf2
666 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
668 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
672 #ifdef HAVE_trunctfsf2
673 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
675 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
679 #ifdef HAVE_truncxfdf2
680 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
682 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
686 #ifdef HAVE_trunctfdf2
687 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
689 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* No named pattern matched: fall back to a library call. The switch
   statements selecting `libcall` per (from_mode, to_mode) are elided;
   only the assignments survive. */
701 libcall = extendsfdf2_libfunc;
705 libcall = extendsfxf2_libfunc;
709 libcall = extendsftf2_libfunc;
718 libcall = truncdfsf2_libfunc;
722 libcall = extenddfxf2_libfunc;
726 libcall = extenddftf2_libfunc;
735 libcall = truncxfsf2_libfunc;
739 libcall = truncxfdf2_libfunc;
748 libcall = trunctfsf2_libfunc;
752 libcall = trunctfdf2_libfunc;
758 if (libcall == (rtx) 0)
759 /* This conversion is not implemented yet. */
762 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
763 emit_move_insn (to, hard_libcall_value (to_mode));
767 /* Now both modes are integers. */
769 /* Handle expanding beyond a word. */
770 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
771 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
778 enum machine_mode lowpart_mode;
779 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
781 /* Try converting directly if the insn is supported. */
782 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
785 /* If FROM is a SUBREG, put it into a register. Do this
786 so that we always generate the same set of insns for
787 better cse'ing; if an intermediate assignment occurred,
788 we won't be doing the operation directly on the SUBREG. */
789 if (optimize > 0 && GET_CODE (from) == SUBREG)
790 from = force_reg (from_mode, from);
791 emit_unop_insn (code, to, from, equiv_code);
794 /* Next, try converting via full word. */
795 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
796 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
797 != CODE_FOR_nothing))
799 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
800 emit_unop_insn (code, to,
801 gen_lowpart (word_mode, to), equiv_code);
805 /* No special multiword conversion insn; do it by hand. */
808 /* Get a copy of FROM widened to a word, if necessary. */
809 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
810 lowpart_mode = word_mode;
812 lowpart_mode = from_mode;
814 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
816 lowpart = gen_lowpart (lowpart_mode, to);
817 emit_move_insn (lowpart, lowfrom);
819 /* Compute the value to put in each remaining word. */
/* Unsigned: upper words are zero. Signed: replicate the sign bit,
   either with the target's slt pattern or by an arithmetic shift. */
821 fill_value = const0_rtx;
826 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
827 && STORE_FLAG_VALUE == -1)
829 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
831 fill_value = gen_reg_rtx (word_mode);
832 emit_insn (gen_slt (fill_value));
838 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
839 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
841 fill_value = convert_to_mode (word_mode, fill_value, 1);
845 /* Fill the remaining words. */
846 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
848 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
849 rtx subword = operand_subword (to, index, 1, to_mode);
854 if (fill_value != subword)
855 emit_move_insn (subword, fill_value);
858 insns = get_insns ();
861 emit_no_conflict_block (insns, to, from, NULL_RTX,
862 gen_rtx (equiv_code, to_mode, from));
866 /* Truncating multi-word to a word or less. */
867 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
868 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
870 convert_move (to, gen_lowpart (word_mode, from), 0);
874 /* Handle pointer conversion */ /* SPEE 900220 */
875 if (to_mode == PSImode)
877 if (from_mode != SImode)
878 from = convert_to_mode (SImode, from, unsignedp);
880 #ifdef HAVE_truncsipsi
883 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
886 #endif /* HAVE_truncsipsi */
890 if (from_mode == PSImode)
892 if (to_mode != SImode)
894 from = convert_to_mode (SImode, from, unsignedp);
899 #ifdef HAVE_extendpsisi
900 if (HAVE_extendpsisi)
902 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
905 #endif /* HAVE_extendpsisi */
910 /* Now follow all the conversions between integers
911 no more than a word long. */
913 /* For truncation, usually we can just refer to FROM in a narrower mode. */
914 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
915 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
916 GET_MODE_BITSIZE (from_mode)))
/* Force FROM into a register unless it is already something we can
   take a lowpart of cheaply (REG, SUBREG, or directly-loadable MEM). */
918 if (!((GET_CODE (from) == MEM
919 && ! MEM_VOLATILE_P (from)
920 && direct_load[(int) to_mode]
921 && ! mode_dependent_address_p (XEXP (from, 0)))
922 || GET_CODE (from) == REG
923 || GET_CODE (from) == SUBREG))
924 from = force_reg (from_mode, from);
925 emit_move_insn (to, gen_lowpart (to_mode, from));
929 /* Handle extension. */
930 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
932 /* Convert directly if that works. */
933 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
936 /* If FROM is a SUBREG, put it into a register. Do this
937 so that we always generate the same set of insns for
938 better cse'ing; if an intermediate assignment occurred,
939 we won't be doing the operation directly on the SUBREG. */
940 if (optimize > 0 && GET_CODE (from) == SUBREG)
941 from = force_reg (from_mode, from);
942 emit_unop_insn (code, to, from, equiv_code);
945 enum machine_mode intermediate;
949 /* Search for a mode to convert via. */
950 for (intermediate = from_mode; intermediate != VOIDmode;
951 intermediate = GET_MODE_WIDER_MODE (intermediate))
952 if ((can_extend_p (to_mode, intermediate, unsignedp)
954 && (can_extend_p (intermediate, from_mode, unsignedp)
955 != CODE_FOR_nothing))
957 convert_move (to, convert_to_mode (intermediate, from,
958 unsignedp), unsignedp);
962 /* No suitable intermediate mode. */
967 /* Support special truncate insns for certain modes. */
969 if (from_mode == DImode && to_mode == SImode)
971 #ifdef HAVE_truncdisi2
974 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
/* #else branch (elided): no named pattern, retry with FROM in a reg. */
978 convert_move (to, force_reg (from_mode, from), unsignedp);
982 if (from_mode == DImode && to_mode == HImode)
984 #ifdef HAVE_truncdihi2
987 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
991 convert_move (to, force_reg (from_mode, from), unsignedp);
995 if (from_mode == DImode && to_mode == QImode)
997 #ifdef HAVE_truncdiqi2
1000 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1004 convert_move (to, force_reg (from_mode, from), unsignedp);
1008 if (from_mode == SImode && to_mode == HImode)
1010 #ifdef HAVE_truncsihi2
1011 if (HAVE_truncsihi2)
1013 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1017 convert_move (to, force_reg (from_mode, from), unsignedp);
1021 if (from_mode == SImode && to_mode == QImode)
1023 #ifdef HAVE_truncsiqi2
1024 if (HAVE_truncsiqi2)
1026 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1030 convert_move (to, force_reg (from_mode, from), unsignedp);
1034 if (from_mode == HImode && to_mode == QImode)
1036 #ifdef HAVE_trunchiqi2
1037 if (HAVE_trunchiqi2)
1039 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1043 convert_move (to, force_reg (from_mode, from), unsignedp);
1047 /* Handle truncation of volatile memrefs, and so on;
1048 the things that couldn't be truncated directly,
1049 and for which there was no special instruction. */
1050 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1052 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1053 emit_move_insn (to, temp);
1057 /* Mode combination is not recognized. */
1061 /* Return an rtx for a value that would result
1062 from converting X to mode MODE.
1063 Both X and MODE may be floating, or both integer.
1064 UNSIGNEDP is nonzero if X is an unsigned value.
1065 This can be done by referring to a part of X in place
1066 or by copying to a new temporary with conversion.
1068 This function *must not* call protect_from_queue
1069 except when putting X into an insn (in which case convert_move does it). */
/* NOTE(review): the `rtx x` / `int unsignedp` declarations, braces, an
   early `return x;`, and the final `return temp;` are elided here. */
1072 convert_to_mode (mode, x, unsignedp)
1073 enum machine_mode mode;
1079 /* If FROM is a SUBREG that indicates that we have already done at least
1080 the required extension, strip it. */
1082 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1083 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1084 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1085 x = gen_lowpart (mode, x);
1087 if (mode == GET_MODE (x))
1090 /* There is one case that we must handle specially: If we are converting
1091 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1092 we are to interpret the constant as unsigned, gen_lowpart will do
1093 the wrong if the constant appears negative. What we want to do is
1094 make the high-order word of the constant zero, not all ones. */
1096 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1097 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1098 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1099 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1101 /* We can do this with a gen_lowpart if both desired and current modes
1102 are integer, and this is either a constant integer, a register, or a
1103 non-volatile MEM. Except for the constant case, we must be narrowing
1106 if (GET_CODE (x) == CONST_INT
1107 || (GET_MODE_CLASS (mode) == MODE_INT
1108 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1109 && (GET_CODE (x) == CONST_DOUBLE
1110 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1111 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1112 && direct_load[(int) mode]
1113 || GET_CODE (x) == REG)))))
1114 return gen_lowpart (mode, x);
/* General case: fresh pseudo plus an explicit conversion. */
1116 temp = gen_reg_rtx (mode);
1117 convert_move (temp, x, unsignedp);
1121 /* Generate several move instructions to copy LEN bytes
1122 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1123 The caller must pass FROM and TO
1124 through protect_from_queue before calling.
1125 ALIGN (in bytes) is maximum alignment we can assume. */
/* NOTE(review): parameter declarations, braces, several `data.*`
   field assignments (len, offset, to, from, autinc flags' left-hand
   sides), #endif lines, and the final abort() are elided. */
1128 move_by_pieces (to, from, len, align)
1132 struct move_by_pieces data;
1133 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1134 int max_size = MOVE_MAX + 1;
1137 data.to_addr = to_addr;
1138 data.from_addr = from_addr;
/* autinc_to: destination address is already auto-inc/dec. */
1142 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1143 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
/* autinc_from: likewise for the source address. */
1145 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1146 || GET_CODE (from_addr) == POST_INC
1147 || GET_CODE (from_addr) == POST_DEC);
1149 data.explicit_inc_from = 0;
1150 data.explicit_inc_to = 0;
/* reverse: copy high-to-low when the destination decrements. */
1152 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1153 if (data.reverse) data.offset = len;
1156 /* If copying requires more than two move insns,
1157 copy addresses to registers (to make displacements shorter)
1158 and use post-increment if available. */
1159 if (!(data.autinc_from && data.autinc_to)
1160 && move_by_pieces_ninsns (len, align) > 2)
1162 #ifdef HAVE_PRE_DECREMENT
1163 if (data.reverse && ! data.autinc_from)
1165 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1166 data.autinc_from = 1;
1167 data.explicit_inc_from = -1;
1170 #ifdef HAVE_POST_INCREMENT
1171 if (! data.autinc_from)
1173 data.from_addr = copy_addr_to_reg (from_addr);
1174 data.autinc_from = 1;
1175 data.explicit_inc_from = 1;
1178 if (!data.autinc_from && CONSTANT_P (from_addr))
1179 data.from_addr = copy_addr_to_reg (from_addr);
1180 #ifdef HAVE_PRE_DECREMENT
1181 if (data.reverse && ! data.autinc_to)
1183 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1185 data.explicit_inc_to = -1;
1188 #ifdef HAVE_POST_INCREMENT
1189 if (! data.reverse && ! data.autinc_to)
1191 data.to_addr = copy_addr_to_reg (to_addr);
1193 data.explicit_inc_to = 1;
1196 if (!data.autinc_to && CONSTANT_P (to_addr))
1197 data.to_addr = copy_addr_to_reg (to_addr);
/* If unaligned access is cheap (or alignment is maximal), ignore
   align when capping the piece size (the assignment target is elided). */
1200 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1201 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1204 /* First move what we can in the largest integer mode, then go to
1205 successively smaller modes. */
1207 while (max_size > 1)
1209 enum machine_mode mode = VOIDmode, tmode;
1210 enum insn_code icode;
/* Pick the widest integer mode strictly narrower than max_size. */
1212 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1213 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1214 if (GET_MODE_SIZE (tmode) < max_size)
1217 if (mode == VOIDmode)
1220 icode = mov_optab->handlers[(int) mode].insn_code;
1221 if (icode != CODE_FOR_nothing
1222 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1223 GET_MODE_SIZE (mode)))
1224 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1226 max_size = GET_MODE_SIZE (mode);
1229 /* The code above should have handled everything. */
1234 /* Return number of insns required to move L bytes by pieces.
1235 ALIGN (in bytes) is maximum alignment we can assume. */
/* NOTE(review): cost-model twin of move_by_pieces — must mirror its
   mode-selection loop exactly. Parameter decls, braces, the align
   override assignment, and the final `return n_insns;` are elided. */
1238 move_by_pieces_ninsns (l, align)
1242 register int n_insns = 0;
1243 int max_size = MOVE_MAX + 1;
1245 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1246 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1249 while (max_size > 1)
1251 enum machine_mode mode = VOIDmode, tmode;
1252 enum insn_code icode;
1254 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1255 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1256 if (GET_MODE_SIZE (tmode) < max_size)
1259 if (mode == VOIDmode)
1262 icode = mov_optab->handlers[(int) mode].insn_code;
1263 if (icode != CODE_FOR_nothing
1264 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1265 GET_MODE_SIZE (mode)))
1266 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1268 max_size = GET_MODE_SIZE (mode);
1274 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1275 with move instructions for mode MODE. GENFUN is the gen_... function
1276 to make a move insn for that mode. DATA has all the other info. */
/* NOTE(review): the `rtx (*genfun) ()` declaration, braces, the
   `from1 = (data->autinc_from` line, #endif lines, and the
   `data->len -= size;` update are elided from this listing. */
1279 move_by_pieces_1 (genfun, mode, data)
1281 enum machine_mode mode;
1282 struct move_by_pieces *data;
1284 register int size = GET_MODE_SIZE (mode);
1285 register rtx to1, from1;
1287 while (data->len >= size)
1289 if (data->reverse) data->offset -= size;
/* Build the per-iteration operand MEMs: raw auto-inc address, or a
   constant-offset address off the base block. */
1291 to1 = (data->autinc_to
1292 ? gen_rtx (MEM, mode, data->to_addr)
1293 : change_address (data->to, mode,
1294 plus_constant (data->to_addr, data->offset)));
1297 ? gen_rtx (MEM, mode, data->from_addr)
1298 : change_address (data->from, mode,
1299 plus_constant (data->from_addr, data->offset)));
/* Explicit pre-decrement of the address registers when simulating
   PRE_DEC (explicit_inc_* == -1, set up by move_by_pieces). */
1301 #ifdef HAVE_PRE_DECREMENT
1302 if (data->explicit_inc_to < 0)
1303 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1304 if (data->explicit_inc_from < 0)
1305 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1308 emit_insn ((*genfun) (to1, from1));
/* Explicit post-increment when simulating POST_INC. */
1309 #ifdef HAVE_POST_INCREMENT
1310 if (data->explicit_inc_to > 0)
1311 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1312 if (data->explicit_inc_from > 0)
1313 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1316 if (! data->reverse) data->offset += size;
1322 /* Emit code to move a block Y to a block X.
1323 This may be done with string-move instructions,
1324 with multiple scalar move instructions, or with a library call.
1326 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1328 SIZE is an rtx that says how long they are.
1329 ALIGN is the maximum alignment we can assume they have,
1330 measured in bytes. */
/* NOTE(review): listing is elided -- abort() arms, braces, the movstr
   SUCCESS path and some argument lines of the library calls are not
   visible.  Strategy visible here: (1) constant small SIZE -> inline
   move_by_pieces; (2) a target movstrM pattern; (3) memcpy/bcopy.  */
1333 emit_block_move (x, y, size, align)
1338 if (GET_MODE (x) != BLKmode)
1341 if (GET_MODE (y) != BLKmode)
1344 x = protect_from_queue (x, 1);
1345 y = protect_from_queue (y, 0);
1346 size = protect_from_queue (size, 0);
1348 if (GET_CODE (x) != MEM)
1350 if (GET_CODE (y) != MEM)
1355 if (GET_CODE (size) == CONST_INT
1356 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1357 move_by_pieces (x, y, INTVAL (size), align);
1360 /* Try the most limited insn first, because there's no point
1361 including more than one in the machine description unless
1362 the more limited one has some advantage. */
1364 rtx opalign = GEN_INT (align);
1365 enum machine_mode mode;
1367 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1368 mode = GET_MODE_WIDER_MODE (mode))
1370 enum insn_code code = movstr_optab[(int) mode];
1372 if (code != CODE_FOR_nothing
1373 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1374 here because if SIZE is less than the mode mask, as it is
1375 returned by the macro, it will definitely be less than the
1376 actual mode mask. */
1377 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
/* Operand predicates may be absent (0); then any operand is OK.  */
1378 && (insn_operand_predicate[(int) code][0] == 0
1379 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1380 && (insn_operand_predicate[(int) code][1] == 0
1381 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1382 && (insn_operand_predicate[(int) code][3] == 0
1383 || (*insn_operand_predicate[(int) code][3]) (opalign,
1387 rtx last = get_last_insn ();
1390 op2 = convert_to_mode (mode, size, 1);
1391 if (insn_operand_predicate[(int) code][2] != 0
1392 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1393 op2 = copy_to_mode_reg (mode, op2);
1395 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* If the pattern failed to expand, discard any insns it emitted.  */
1402 delete_insns_since (last);
/* Fallback: library call.  memcpy takes (dst, src, n); bcopy takes
   (src, dst, n) -- hence the swapped XEXP arguments below.  */
1406 #ifdef TARGET_MEM_FUNCTIONS
1407 emit_library_call (memcpy_libfunc, 0,
1408 VOIDmode, 3, XEXP (x, 0), Pmode,
1410 convert_to_mode (Pmode, size, 1), Pmode);
1412 emit_library_call (bcopy_libfunc, 0,
1413 VOIDmode, 3, XEXP (y, 0), Pmode,
1415 convert_to_mode (Pmode, size, 1), Pmode);
1420 /* Copy all or part of a value X into registers starting at REGNO.
1421 The number of registers to be filled is NREGS. */
/* NOTE(review): listing is elided -- declarations of LAST/PAT/I, the
   load_multiple success check, and #endif lines are not visible.  */
1424 move_block_to_reg (regno, x, nregs, mode)
1428 enum machine_mode mode;
/* A constant the target cannot move directly is forced into memory.  */
1433 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1434 x = validize_mem (force_const_mem (mode, x));
1436 /* See if the machine can do this with a load multiple insn. */
1437 #ifdef HAVE_load_multiple
1438 last = get_last_insn ();
1439 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1447 delete_insns_since (last);
/* Fallback: one word-mode move per register.  */
1450 for (i = 0; i < nregs; i++)
1451 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1452 operand_subword_force (x, i, mode));
1455 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1456 The number of registers to be filled is NREGS. */
/* NOTE(review): listing is elided -- local declarations, the
   store_multiple success check, and the TEM == 0 handling between
   lines 1483 and 1488 are not visible.  */
1459 move_block_from_reg (regno, x, nregs)
1467 /* See if the machine can do this with a store multiple insn. */
1468 #ifdef HAVE_store_multiple
1469 last = get_last_insn ();
1470 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1478 delete_insns_since (last);
/* Fallback: store each register into the corresponding word of X.  */
1481 for (i = 0; i < nregs; i++)
1483 rtx tem = operand_subword (x, i, 1, BLKmode);
1488 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1492 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
/* Emits a USE rtx per register so flow analysis keeps them live up to
   the following call.  NOTE(review): parameter declarations, braces
   and the declaration of I are elided from this listing.  */
1495 use_regs (regno, nregs)
1501 for (i = 0; i < nregs; i++)
1502 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1505 /* Mark the instructions since PREV as a libcall block.
1506 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
/* NOTE(review): the function header is elided from this listing;
   presumably this is group_insns (prev), as called at expr.c:1690 --
   confirm against the full source.  The two notes cross-link the first
   and last insns of the block so optimizers treat it atomically.  */
1515 /* Find the instructions to mark */
1517 insn_first = NEXT_INSN (prev);
/* If PREV is 0 (elided condition), the block starts at the very
   first insn of the sequence.  */
1519 insn_first = get_insns ();
1521 insn_last = get_last_insn ();
1523 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1524 REG_NOTES (insn_last));
1526 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1527 REG_NOTES (insn_first));
1530 /* Write zeros through the storage of OBJECT.
1531 If OBJECT has BLKmode, SIZE is its length in bytes. */
/* BLKmode objects are cleared via memset(addr, 0, size) or
   bzero(addr, size); scalar objects get a single move of const0_rtx.
   NOTE(review): parameter declarations, braces, and the #else/#endif
   around the two library calls are elided from this listing.  */
1534 clear_storage (object, size)
1538 if (GET_MODE (object) == BLKmode)
1540 #ifdef TARGET_MEM_FUNCTIONS
1541 emit_library_call (memset_libfunc, 0,
1543 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1544 GEN_INT (size), Pmode);
1546 emit_library_call (bzero_libfunc, 0,
1548 XEXP (object, 0), Pmode,
1549 GEN_INT (size), Pmode);
1553 emit_move_insn (object, const0_rtx);
1556 /* Generate code to copy Y into X.
1557 Both Y and X must have the same mode, except that
1558 Y can be a constant with VOIDmode.
1559 This mode cannot be BLKmode; use emit_block_move for that.
1561 Return the last instruction emitted. */
/* Validates/canonicalizes the operands, then delegates the actual
   emission to emit_move_insn_1.  NOTE(review): abort() arms, the
   flag_force_addr conditions and some parentheses are elided here.  */
1564 emit_move_insn (x, y)
1567 enum machine_mode mode = GET_MODE (x);
1568 enum machine_mode submode;
1569 enum mode_class class = GET_MODE_CLASS (mode);
1572 x = protect_from_queue (x, 1);
1573 y = protect_from_queue (y, 0);
1575 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
/* Constants the target cannot move directly go into the constant pool.  */
1578 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1579 y = force_const_mem (mode, y);
1581 /* If X or Y are memory references, verify that their addresses are valid
1583 if (GET_CODE (x) == MEM
1584 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1585 && ! push_operand (x, GET_MODE (x)))
1587 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1588 x = change_address (x, VOIDmode, XEXP (x, 0));
1590 if (GET_CODE (y) == MEM
1591 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1593 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1594 y = change_address (y, VOIDmode, XEXP (y, 0));
1596 if (mode == BLKmode)
1599 return emit_move_insn_1 (x, y);
1602 /* Low level part of emit_move_insn.
1603 Called just like emit_move_insn, but assumes X and Y
1604 are basically valid. */
/* Three strategies, in order: (1) a target mov pattern for MODE;
   (2) complex modes split into real/imaginary submode moves;
   (3) multi-word modes moved word by word.  NOTE(review): listing is
   elided -- braces, return statements and loop headers are partly
   missing; read against the full source before editing.  */
1607 emit_move_insn_1 (x, y)
1610 enum machine_mode mode = GET_MODE (x);
1611 enum machine_mode submode;
1612 enum mode_class class = GET_MODE_CLASS (mode);
/* For a complex mode, SUBMODE is the scalar mode of one component.  */
1615 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1616 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1617 (class == MODE_COMPLEX_INT
1618 ? MODE_INT : MODE_FLOAT),
1621 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1623 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1625 /* Expand complex moves by moving real part and imag part, if possible. */
1626 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1627 && submode != BLKmode
1628 && (mov_optab->handlers[(int) submode].insn_code
1629 != CODE_FOR_nothing))
1631 /* Don't split destination if it is a stack push. */
1632 int stack = push_operand (x, GET_MODE (x));
1633 rtx prev = get_last_insn ();
1635 /* Tell flow that the whole of the destination is being set. */
1636 if (GET_CODE (x) == REG)
1637 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1639 /* If this is a stack, push the highpart first, so it
1640 will be in the argument order.
1642 In that case, change_address is used only to convert
1643 the mode, not to change the address. */
1644 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1645 ((stack ? change_address (x, submode, (rtx) 0)
1646 : gen_highpart (submode, x)),
1647 gen_highpart (submode, y)));
1648 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1649 ((stack ? change_address (x, submode, (rtx) 0)
1650 : gen_lowpart (submode, x)),
1651 gen_lowpart (submode, y)));
1655 return get_last_insn ();
1658 /* This will handle any multi-word mode that lacks a move_insn pattern.
1659 However, you will get better code if you define such patterns,
1660 even if they must turn into multiple assembler instructions. */
1661 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1664 rtx prev_insn = get_last_insn ();
/* Loop over the words of MODE, rounding the byte size up.  */
1667 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1670 rtx xpart = operand_subword (x, i, 1, mode);
1671 rtx ypart = operand_subword (y, i, 1, mode);
1673 /* If we can't get a part of Y, put Y into memory if it is a
1674 constant. Otherwise, force it into a register. If we still
1675 can't get a part of Y, abort. */
1676 if (ypart == 0 && CONSTANT_P (y))
1678 y = force_const_mem (mode, y);
1679 ypart = operand_subword (y, i, 1, mode);
1681 else if (ypart == 0)
1682 ypart = operand_subword_force (y, i, mode);
1684 if (xpart == 0 || ypart == 0)
1687 last_insn = emit_move_insn (xpart, ypart);
1689 /* Mark these insns as a libcall block. */
1690 group_insns (prev_insn);
1698 /* Pushing data onto the stack. */
1700 /* Push a block of length SIZE (perhaps variable)
1701 and return an rtx to address the beginning of the block.
1702 Note that it is not possible for the value returned to be a QUEUED.
1703 The value may be virtual_outgoing_args_rtx.
1705 EXTRA is the number of bytes of padding to push in addition to SIZE.
1706 BELOW nonzero means this padding comes at low addresses;
1707 otherwise, the padding comes at high addresses. */
/* NOTE(review): listing is elided -- braces, the #else of
   STACK_GROWS_DOWNWARD, and the condition guarding the EXTRA addition
   around line 1723 are not visible.  */
1710 push_block (size, extra, below)
/* First grow the stack by SIZE (+EXTRA), by the cheapest route.  */
1715 if (CONSTANT_P (size))
1716 anti_adjust_stack (plus_constant (size, extra));
1717 else if (GET_CODE (size) == REG && extra == 0)
1718 anti_adjust_stack (size);
1721 rtx temp = copy_to_mode_reg (Pmode, size);
1723 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1724 temp, 0, OPTAB_LIB_WIDEN);
1725 anti_adjust_stack (temp);
/* Then compute the address of the new block's low end.  When the
   stack grows downward the block starts at the (new) args pointer.  */
1728 #ifdef STACK_GROWS_DOWNWARD
1729 temp = virtual_outgoing_args_rtx;
1730 if (extra != 0 && below)
1731 temp = plus_constant (temp, extra);
1733 if (GET_CODE (size) == CONST_INT
1734 temp = plus_constant (virtual_outgoing_args_rtx,
1735 - INTVAL (size) - (below ? 0 : extra));
1736 else if (extra != 0 && !below)
1737 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1738 negate_rtx (Pmode, plus_constant (size, extra)));
1740 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1741 negate_rtx (Pmode, size));
1744 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
/* NOTE(review): the following line belongs to a separate function
   (presumably gen_push_operand) whose header is elided from this
   listing: it builds the target's push addressing-mode rtx.  */
1750 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1753 /* Generate code to push X onto the stack, assuming it has mode MODE and
1755 MODE is redundant except when X is a CONST_INT (since they don't
1757 SIZE is an rtx for the size of data to be copied (in bytes),
1758 needed only if X is BLKmode.
1760 ALIGN (in bytes) is maximum alignment we can assume.
1762 If PARTIAL and REG are both nonzero, then copy that many of the first
1763 words of X into registers starting with REG, and push the rest of X.
1764 The amount of space pushed is decreased by PARTIAL words,
1765 rounded *down* to a multiple of PARM_BOUNDARY.
1766 REG must be a hard register in this case.
1767 If REG is zero but PARTIAL is not, take all the other actions for an
1768 argument partially in registers, but do not actually load any
1771 EXTRA is the amount in bytes of extra space to leave next to this arg.
1772 This is ignored if an argument block has already been allocated.
1774 On a machine that lacks real push insns, ARGS_ADDR is the address of
1775 the bottom of the argument block for this call. We use indexing off there
1776 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1777 argument block has not been preallocated.
1779 ARGS_SO_FAR is the size of args previously pushed for this call. */
/* Generate code to push X (mode MODE, tree TYPE) onto the stack.
   See the long comment above (expr.c:1753-1779) for the parameter
   contract: SIZE/ALIGN describe BLKmode data, PARTIAL/REG describe the
   part passed in registers, EXTRA is padding, ARGS_ADDR/ARGS_SO_FAR
   locate preallocated argument space.  Three major cases: BLKmode
   block, scalar partly in registers, plain scalar push.
   NOTE(review): this listing is heavily elided -- many declarations,
   braces, #else/#endif lines and whole statements are missing; do not
   edit from this excerpt alone.  */
1782 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1783 args_addr, args_so_far)
1785 enum machine_mode mode;
1796 enum direction stack_direction
1797 #ifdef STACK_GROWS_DOWNWARD
1803 /* Decide where to pad the argument: `downward' for below,
1804 `upward' for above, or `none' for don't pad it.
1805 Default is below for small data on big-endian machines; else above. */
1806 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1808 /* Invert direction if stack is post-update. */
1809 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1810 if (where_pad != none)
1811 where_pad = (where_pad == downward ? upward : downward);
1813 xinner = x = protect_from_queue (x, 0);
1815 if (mode == BLKmode)
1817 /* Copy a block into the stack, entirely or partially. */
1820 int used = partial * UNITS_PER_WORD;
1821 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1829 /* USED is now the # of bytes we need not copy to the stack
1830 because registers will take care of them. */
1833 xinner = change_address (xinner, BLKmode,
1834 plus_constant (XEXP (xinner, 0), used));
1836 /* If the partial register-part of the arg counts in its stack size,
1837 skip the part of stack space corresponding to the registers.
1838 Otherwise, start copying to the beginning of the stack space,
1839 by setting SKIP to 0. */
1840 #ifndef REG_PARM_STACK_SPACE
1846 #ifdef PUSH_ROUNDING
1847 /* Do it with several push insns if that doesn't take lots of insns
1848 and if there is no difficulty with push insns that skip bytes
1849 on the stack for alignment purposes. */
1851 && GET_CODE (size) == CONST_INT
1853 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1855 /* Here we avoid the case of a structure whose weak alignment
1856 forces many pushes of a small amount of data,
1857 and such small pushes do rounding that causes trouble. */
1858 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1859 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1860 || PUSH_ROUNDING (align) == align)
1861 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1863 /* Push padding now if padding above and stack grows down,
1864 or if padding below and stack grows up.
1865 But if space already allocated, this has already been done. */
1866 if (extra && args_addr == 0
1867 && where_pad != none && where_pad != stack_direction)
1868 anti_adjust_stack (GEN_INT (extra));
1870 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1871 INTVAL (size) - used, align);
1874 #endif /* PUSH_ROUNDING */
1876 /* Otherwise make space on the stack and copy the data
1877 to the address of that space. */
1879 /* Deduct words put into registers from the size we must copy. */
1882 if (GET_CODE (size) == CONST_INT)
1883 size = GEN_INT (INTVAL (size) - used);
1885 size = expand_binop (GET_MODE (size), sub_optab, size,
1886 GEN_INT (used), NULL_RTX, 0,
1890 /* Get the address of the stack space.
1891 In this case, we do not deal with EXTRA separately.
1892 A single stack adjust will do. */
1895 temp = push_block (size, extra, where_pad == downward);
1898 else if (GET_CODE (args_so_far) == CONST_INT)
1899 temp = memory_address (BLKmode,
1900 plus_constant (args_addr,
1901 skip + INTVAL (args_so_far)));
1903 temp = memory_address (BLKmode,
1904 plus_constant (gen_rtx (PLUS, Pmode,
1905 args_addr, args_so_far),
1908 /* TEMP is the address of the block. Copy the data there. */
1909 if (GET_CODE (size) == CONST_INT
1910 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1913 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1914 INTVAL (size), align);
1917 /* Try the most limited insn first, because there's no point
1918 including more than one in the machine description unless
1919 the more limited one has some advantage. */
/* Each movstrM arm guards SIZE against that mode's range; elided
   lines emit the pattern and goto the shared success label.  */
1920 #ifdef HAVE_movstrqi
1922 && GET_CODE (size) == CONST_INT
1923 && ((unsigned) INTVAL (size)
1924 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1926 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1927 xinner, size, GEN_INT (align));
1935 #ifdef HAVE_movstrhi
1937 && GET_CODE (size) == CONST_INT
1938 && ((unsigned) INTVAL (size)
1939 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1941 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1942 xinner, size, GEN_INT (align));
1950 #ifdef HAVE_movstrsi
1953 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1954 xinner, size, GEN_INT (align));
1962 #ifdef HAVE_movstrdi
1965 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1966 xinner, size, GEN_INT (align));
1975 #ifndef ACCUMULATE_OUTGOING_ARGS
1976 /* If the source is referenced relative to the stack pointer,
1977 copy it to another register to stabilize it. We do not need
1978 to do this if we know that we won't be changing sp. */
1980 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1981 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1982 temp = copy_to_reg (temp);
1985 /* Make inhibit_defer_pop nonzero around the library call
1986 to force it to pop the bcopy-arguments right away. */
1988 #ifdef TARGET_MEM_FUNCTIONS
1989 emit_library_call (memcpy_libfunc, 0,
1990 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1993 emit_library_call (bcopy_libfunc, 0,
1994 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2000 else if (partial > 0)
2002 /* Scalar partly in registers. */
2004 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2007 /* # words of start of argument
2008 that we must make space for but need not store. */
2009 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2010 int args_offset = INTVAL (args_so_far);
2013 /* Push padding now if padding above and stack grows down,
2014 or if padding below and stack grows up.
2015 But if space already allocated, this has already been done. */
2016 if (extra && args_addr == 0
2017 && where_pad != none && where_pad != stack_direction)
2018 anti_adjust_stack (GEN_INT (extra));
2020 /* If we make space by pushing it, we might as well push
2021 the real data. Otherwise, we can leave OFFSET nonzero
2022 and leave the space uninitialized. */
2026 /* Now NOT_STACK gets the number of words that we don't need to
2027 allocate on the stack. */
2028 not_stack = partial - offset;
2030 /* If the partial register-part of the arg counts in its stack size,
2031 skip the part of stack space corresponding to the registers.
2032 Otherwise, start copying to the beginning of the stack space,
2033 by setting SKIP to 0. */
2034 #ifndef REG_PARM_STACK_SPACE
2040 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2041 x = validize_mem (force_const_mem (mode, x));
2043 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2044 SUBREGs of such registers are not allowed. */
2045 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2046 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2047 x = copy_to_reg (x);
2049 /* Loop over all the words allocated on the stack for this arg. */
2050 /* We can do it by words, because any scalar bigger than a word
2051 has a size a multiple of a word. */
2052 #ifndef PUSH_ARGS_REVERSED
2053 for (i = not_stack; i < size; i++)
2055 for (i = size - 1; i >= not_stack; i--)
2057 if (i >= not_stack + offset)
/* Recurse: push each stack-bound word of X individually.  */
2058 emit_push_insn (operand_subword_force (x, i, mode),
2059 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2061 GEN_INT (args_offset + ((i - not_stack + skip)
2062 * UNITS_PER_WORD)));
2068 /* Push padding now if padding above and stack grows down,
2069 or if padding below and stack grows up.
2070 But if space already allocated, this has already been done. */
2071 if (extra && args_addr == 0
2072 && where_pad != none && where_pad != stack_direction)
2073 anti_adjust_stack (GEN_INT (extra));
2075 #ifdef PUSH_ROUNDING
2077 addr = gen_push_operand ();
2080 if (GET_CODE (args_so_far) == CONST_INT)
2082 = memory_address (mode,
2083 plus_constant (args_addr, INTVAL (args_so_far)));
2085 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2088 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2092 /* If part should go in registers, copy that part
2093 into the appropriate registers. Do this now, at the end,
2094 since mem-to-mem copies above may do function calls. */
2095 if (partial > 0 && reg != 0)
2096 move_block_to_reg (REGNO (reg), x, partial, mode);
2098 if (extra && args_addr == 0 && where_pad == stack_direction)
2099 anti_adjust_stack (GEN_INT (extra));
2102 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2103 (emitting the queue unless NO_QUEUE is nonzero),
2104 for a value of mode OUTMODE,
2105 with NARGS different arguments, passed as alternating rtx values
2106 and machine_modes to convert them to.
2107 The rtx values should have been passed through protect_from_queue already.
2109 NO_QUEUE will be true if and only if the library call is a `const' call
2110 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2111 to the variable is_const in expand_call.
2113 NO_QUEUE must be true for const calls, because if it isn't, then
2114 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2115 and will be lost if the libcall sequence is optimized away.
2117 NO_QUEUE must be false for non-const calls, because if it isn't, the
2118 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2119 optimized. For instance, the instruction scheduler may incorrectly
2120 move memory references across the non-const call. */
/* Output a call to library function FUN -- see the long comment above
   (expr.c:2102-2120) for the varargs contract: FUN, NO_QUEUE, OUTMODE,
   NARGS, then NARGS (rtx value, machine_mode) pairs.
   Phases: collect args into ARGVEC and lay them out; round the stack
   size; push stack args; load register args; emit USEs; call.
   NOTE(review): this listing is elided -- declarations of P, COUNT,
   FUN, ARGVEC, ARGBLOCK etc., abort() arms, braces and several
   #else/#endif lines are not visible.
   FIX: in the push loop the stack offset was read from
   argvec[count] while every other field uses argvec[argnum]; under
   PUSH_ARGS_REVERSED argnum runs nargs-1..0 while count runs 0..nargs-1,
   so arguments were pushed at the wrong offsets.  Later GCC releases
   index the offset with argnum as well; fixed below.  */
2123 emit_library_call (va_alist)
2127 /* Total size in bytes of all the stack-parms scanned so far. */
2128 struct args_size args_size;
2129 /* Size of arguments before any adjustments (such as rounding). */
2130 struct args_size original_args_size;
2131 register int argnum;
2132 enum machine_mode outmode;
2139 CUMULATIVE_ARGS args_so_far;
2140 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2141 struct args_size offset; struct args_size size; };
2143 int old_inhibit_defer_pop = inhibit_defer_pop;
2148 orgfun = fun = va_arg (p, rtx);
2149 no_queue = va_arg (p, int);
2150 outmode = va_arg (p, enum machine_mode);
2151 nargs = va_arg (p, int);
2153 /* Copy all the libcall-arguments out of the varargs data
2154 and into a vector ARGVEC.
2156 Compute how to pass each argument. We only support a very small subset
2157 of the full argument passing conventions to limit complexity here since
2158 library functions shouldn't have many args. */
2160 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2162 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2164 args_size.constant = 0;
2167 for (count = 0; count < nargs; count++)
2169 rtx val = va_arg (p, rtx);
2170 enum machine_mode mode = va_arg (p, enum machine_mode);
2172 /* We cannot convert the arg value to the mode the library wants here;
2173 must do it earlier where we know the signedness of the arg. */
2175 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2178 /* On some machines, there's no way to pass a float to a library fcn.
2179 Pass it as a double instead. */
2180 #ifdef LIBGCC_NEEDS_DOUBLE
2181 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2182 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2185 /* There's no need to call protect_from_queue, because
2186 either emit_move_insn or emit_push_insn will do that. */
2188 /* Make sure it is a reasonable operand for a move or push insn. */
2189 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2190 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2191 val = force_operand (val, NULL_RTX);
2193 argvec[count].value = val;
2194 argvec[count].mode = mode;
2196 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2197 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2201 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2202 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2204 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2205 argvec[count].partial
2206 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2208 argvec[count].partial = 0;
2211 locate_and_pad_parm (mode, NULL_TREE,
2212 argvec[count].reg && argvec[count].partial == 0,
2213 NULL_TREE, &args_size, &argvec[count].offset,
2214 &argvec[count].size);
2216 if (argvec[count].size.var)
2219 #ifndef REG_PARM_STACK_SPACE
2220 if (argvec[count].partial)
2221 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2224 if (argvec[count].reg == 0 || argvec[count].partial != 0
2225 #ifdef REG_PARM_STACK_SPACE
2229 args_size.constant += argvec[count].size.constant;
2231 #ifdef ACCUMULATE_OUTGOING_ARGS
2232 /* If this arg is actually passed on the stack, it might be
2233 clobbering something we already put there (this library call might
2234 be inside the evaluation of an argument to a function whose call
2235 requires the stack). This will only occur when the library call
2236 has sufficient args to run out of argument registers. Abort in
2237 this case; if this ever occurs, code must be added to save and
2238 restore the arg slot. */
2240 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2244 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2248 /* If this machine requires an external definition for library
2249 functions, write one out. */
2250 assemble_external_libcall (fun);
2252 original_args_size = args_size;
2253 #ifdef STACK_BOUNDARY
2254 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2255 / STACK_BYTES) * STACK_BYTES);
2258 #ifdef REG_PARM_STACK_SPACE
2259 args_size.constant = MAX (args_size.constant,
2260 REG_PARM_STACK_SPACE (NULL_TREE));
2261 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2262 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2266 #ifdef ACCUMULATE_OUTGOING_ARGS
2267 if (args_size.constant > current_function_outgoing_args_size)
2268 current_function_outgoing_args_size = args_size.constant;
2269 args_size.constant = 0;
2272 #ifndef PUSH_ROUNDING
2273 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2276 #ifdef PUSH_ARGS_REVERSED
2277 #ifdef STACK_BOUNDARY
2278 /* If we push args individually in reverse order, perform stack alignment
2279 before the first push (the last arg). */
2281 anti_adjust_stack (GEN_INT (args_size.constant
2282 - original_args_size.constant));
2286 #ifdef PUSH_ARGS_REVERSED
2294 /* Push the args that need to be pushed. */
2296 for (count = 0; count < nargs; count++, argnum += inc)
2298 register enum machine_mode mode = argvec[argnum].mode;
2299 register rtx val = argvec[argnum].value;
2300 rtx reg = argvec[argnum].reg;
2301 int partial = argvec[argnum].partial;
2303 if (! (reg != 0 && partial == 0))
2304 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
/* Was argvec[count]: must use ARGNUM, the index of the arg being
   pushed, so the offset matches the other fields read above.  */
2305 argblock, GEN_INT (argvec[argnum].offset.constant));
2309 #ifndef PUSH_ARGS_REVERSED
2310 #ifdef STACK_BOUNDARY
2311 /* If we pushed args in forward order, perform stack alignment
2312 after pushing the last arg. */
2314 anti_adjust_stack (GEN_INT (args_size.constant
2315 - original_args_size.constant));
2319 #ifdef PUSH_ARGS_REVERSED
2325 /* Now load any reg parms into their regs. */
2327 for (count = 0; count < nargs; count++, argnum += inc)
2329 register enum machine_mode mode = argvec[argnum].mode;
2330 register rtx val = argvec[argnum].value;
2331 rtx reg = argvec[argnum].reg;
2332 int partial = argvec[argnum].partial;
2334 if (reg != 0 && partial == 0)
2335 emit_move_insn (reg, val);
2339 /* For version 1.37, try deleting this entirely. */
2343 /* Any regs containing parms remain in use through the call. */
2345 for (count = 0; count < nargs; count++)
2346 if (argvec[count].reg != 0)
2347 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2349 use_insns = get_insns ();
2352 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2354 /* Don't allow popping to be deferred, since then
2355 cse'ing of library calls could delete a call and leave the pop. */
2358 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2359 will set inhibit_defer_pop to that value. */
2361 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2362 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2363 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2364 old_inhibit_defer_pop + 1, use_insns, no_queue);
2366 /* Now restore inhibit_defer_pop to its actual original value. */
2366 /* Now restore inhibit_defer_pop to its actual original value. */
2370 /* Like emit_library_call except that an extra argument, VALUE,
2371 comes second and says where to store the result.
2372 (If VALUE is zero, the result comes in the function value register.) */
2375 emit_library_call_value (va_alist)
2379 /* Total size in bytes of all the stack-parms scanned so far. */
2380 struct args_size args_size;
2381 /* Size of arguments before any adjustments (such as rounding). */
2382 struct args_size original_args_size;
2383 register int argnum;
2384 enum machine_mode outmode;
2391 CUMULATIVE_ARGS args_so_far;
2392 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2393 struct args_size offset; struct args_size size; };
2395 int old_inhibit_defer_pop = inhibit_defer_pop;
2402 orgfun = fun = va_arg (p, rtx);
2403 value = va_arg (p, rtx);
2404 no_queue = va_arg (p, int);
2405 outmode = va_arg (p, enum machine_mode);
2406 nargs = va_arg (p, int);
2408 /* If this kind of value comes back in memory,
2409 decide where in memory it should come back. */
2410 if (RETURN_IN_MEMORY (type_for_mode (outmode, 0)))
2412 if (GET_CODE (value) == MEM)
2415 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2418 /* ??? Unfinished: must pass the memory address as an argument. */
2420 /* Copy all the libcall-arguments out of the varargs data
2421 and into a vector ARGVEC.
2423 Compute how to pass each argument. We only support a very small subset
2424 of the full argument passing conventions to limit complexity here since
2425 library functions shouldn't have many args. */
2427 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2429 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2431 args_size.constant = 0;
2436 /* If there's a structure value address to be passed,
2437 either pass it in the special place, or pass it as an extra argument. */
2440 rtx addr = XEXP (mem_value, 0);
2442 if (! struct_value_rtx)
2446 /* Make sure it is a reasonable operand for a move or push insn. */
2447 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2448 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2449 addr = force_operand (addr, NULL_RTX);
2451 argvec[count].value = addr;
2452 argvec[count].mode = outmode;
2453 argvec[count].partial = 0;
2455 argvec[count].reg = FUNCTION_ARG (args_so_far, outmode, NULL_TREE, 1);
2456 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2457 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, outmode, NULL_TREE, 1))
2461 locate_and_pad_parm (outmode, NULL_TREE,
2462 argvec[count].reg && argvec[count].partial == 0,
2463 NULL_TREE, &args_size, &argvec[count].offset,
2464 &argvec[count].size);
2467 if (argvec[count].reg == 0 || argvec[count].partial != 0
2468 #ifdef REG_PARM_STACK_SPACE
2472 args_size.constant += argvec[count].size.constant;
2474 FUNCTION_ARG_ADVANCE (args_so_far, outmode, (tree)0, 1);
2478 for (; count < nargs; count++)
2480 rtx val = va_arg (p, rtx);
2481 enum machine_mode mode = va_arg (p, enum machine_mode);
2483 /* We cannot convert the arg value to the mode the library wants here;
2484 must do it earlier where we know the signedness of the arg. */
2486 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2489 /* On some machines, there's no way to pass a float to a library fcn.
2490 Pass it as a double instead. */
2491 #ifdef LIBGCC_NEEDS_DOUBLE
2492 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2493 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2496 /* There's no need to call protect_from_queue, because
2497 either emit_move_insn or emit_push_insn will do that. */
2499 /* Make sure it is a reasonable operand for a move or push insn. */
2500 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2501 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2502 val = force_operand (val, NULL_RTX);
2504 argvec[count].value = val;
2505 argvec[count].mode = mode;
2507 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2508 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2512 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2513 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2515 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2516 argvec[count].partial
2517 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2519 argvec[count].partial = 0;
2522 locate_and_pad_parm (mode, NULL_TREE,
2523 argvec[count].reg && argvec[count].partial == 0,
2524 NULL_TREE, &args_size, &argvec[count].offset,
2525 &argvec[count].size);
2527 if (argvec[count].size.var)
2530 #ifndef REG_PARM_STACK_SPACE
2531 if (argvec[count].partial)
2532 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2535 if (argvec[count].reg == 0 || argvec[count].partial != 0
2536 #ifdef REG_PARM_STACK_SPACE
2540 args_size.constant += argvec[count].size.constant;
2542 #ifdef ACCUMULATE_OUTGOING_ARGS
2543 /* If this arg is actually passed on the stack, it might be
2544 clobbering something we already put there (this library call might
2545 be inside the evaluation of an argument to a function whose call
2546 requires the stack). This will only occur when the library call
2547 has sufficient args to run out of argument registers. Abort in
2548 this case; if this ever occurs, code must be added to save and
2549 restore the arg slot. */
2551 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2555 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2559 /* If this machine requires an external definition for library
2560 functions, write one out. */
2561 assemble_external_libcall (fun);
2563 original_args_size = args_size;
2564 #ifdef STACK_BOUNDARY
2565 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2566 / STACK_BYTES) * STACK_BYTES);
2569 #ifdef REG_PARM_STACK_SPACE
2570 args_size.constant = MAX (args_size.constant,
2571 REG_PARM_STACK_SPACE (NULL_TREE));
2572 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2573 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2577 #ifdef ACCUMULATE_OUTGOING_ARGS
2578 if (args_size.constant > current_function_outgoing_args_size)
2579 current_function_outgoing_args_size = args_size.constant;
2580 args_size.constant = 0;
2583 #ifndef PUSH_ROUNDING
2584 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2587 #ifdef PUSH_ARGS_REVERSED
2588 #ifdef STACK_BOUNDARY
2589 /* If we push args individually in reverse order, perform stack alignment
2590 before the first push (the last arg). */
2592 anti_adjust_stack (GEN_INT (args_size.constant
2593 - original_args_size.constant));
2597 #ifdef PUSH_ARGS_REVERSED
2605 /* Push the args that need to be pushed. */
2607 for (count = 0; count < nargs; count++, argnum += inc)
2609 register enum machine_mode mode = argvec[argnum].mode;
2610 register rtx val = argvec[argnum].value;
2611 rtx reg = argvec[argnum].reg;
2612 int partial = argvec[argnum].partial;
2614 if (! (reg != 0 && partial == 0))
2615 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2616 argblock, GEN_INT (argvec[count].offset.constant));
2620 #ifndef PUSH_ARGS_REVERSED
2621 #ifdef STACK_BOUNDARY
2622 /* If we pushed args in forward order, perform stack alignment
2623 after pushing the last arg. */
2625 anti_adjust_stack (GEN_INT (args_size.constant
2626 - original_args_size.constant));
2630 #ifdef PUSH_ARGS_REVERSED
2636 /* Now load any reg parms into their regs. */
2638 if (mem_value != 0 && struct_value_rtx != 0)
2639 emit_move_insn (struct_value_rtx, XEXP (mem_value, 0));
2641 for (count = 0; count < nargs; count++, argnum += inc)
2643 register enum machine_mode mode = argvec[argnum].mode;
2644 register rtx val = argvec[argnum].value;
2645 rtx reg = argvec[argnum].reg;
2646 int partial = argvec[argnum].partial;
2648 if (reg != 0 && partial == 0)
2649 emit_move_insn (reg, val);
2654 /* For version 1.37, try deleting this entirely. */
2659 /* Any regs containing parms remain in use through the call. */
2661 for (count = 0; count < nargs; count++)
2662 if (argvec[count].reg != 0)
2663 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2665 use_insns = get_insns ();
2668 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2670 /* Don't allow popping to be deferred, since then
2671 cse'ing of library calls could delete a call and leave the pop. */
2674 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2675 will set inhibit_defer_pop to that value. */
2677 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2678 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2679 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2680 old_inhibit_defer_pop + 1, use_insns, no_queue);
2682 /* Now restore inhibit_defer_pop to its actual original value. */
2685 /* Copy the value to the right place. */
2686 if (outmode != VOIDmode)
2691 value = hard_libcall_value (outmode);
2692 if (value != mem_value)
2693 emit_move_insn (value, mem_value);
2695 else if (value != 0)
2696 emit_move_insn (value, hard_libcall_value (outmode));
/* NOTE(review): this is a line-numbered source listing; the embedded line
   numbers are discontinuous, so declarations, closing braces and some
   #else/#endif lines are missing from this fragment.  Code is left
   byte-identical; only comments were added.  */
2700 /* Expand an assignment that stores the value of FROM into TO.
2701 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2702 (This may contain a QUEUED rtx.)
2703 Otherwise, the returned value is not meaningful.
2705 SUGGEST_REG is no longer actually used.
2706 It used to mean, copy the value through a register
2707 and return that register, if that is possible.
2708 But now we do this if WANT_VALUE.
2710 If the value stored is a constant, we return the constant. */
/* K&R-style definition; the parameter declarations fall in an elided span
   (original lines 2714-2717) — presumably `tree to, from; int want_value,
   suggest_reg;` per the comment above, but confirm against the full file.  */
2713 expand_assignment (to, from, want_value, suggest_reg)
2718 register rtx to_rtx = 0;
2721 /* Don't crash if the lhs of the assignment was erroneous. */
2723 if (TREE_CODE (to) == ERROR_MARK)
2724 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2726 /* Assignment of a structure component needs special treatment
2727 if the structure component's rtx is not simply a MEM.
2728 Assignment of an array element at a constant index
2729 has the same problem. */
2731 if (TREE_CODE (to) == COMPONENT_REF
2732 || TREE_CODE (to) == BIT_FIELD_REF
2733 || (TREE_CODE (to) == ARRAY_REF
2734 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2735 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2737 enum machine_mode mode1;
/* Decompose the lhs into its innermost containing object plus
   bit position/size; out-parameter declarations are in an elided span.  */
2743 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2744 &mode1, &unsignedp, &volatilep);
2746 /* If we are going to use store_bit_field and extract_bit_field,
2747 make sure to_rtx will be safe for multiple use. */
2749 if (mode1 == VOIDmode && want_value)
2750 tem = stabilize_reference (tem);
2752 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
/* Variable-offset case: add the expanded offset to the address.
   The guard testing `offset` is in an elided span.  */
2755 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2757 if (GET_CODE (to_rtx) != MEM)
2759 to_rtx = change_address (to_rtx, VOIDmode,
2760 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2761 force_reg (Pmode, offset_rtx)));
2765 if (GET_CODE (to_rtx) == MEM)
2766 MEM_VOLATILE_P (to_rtx) = 1;
2767 #if 0 /* This was turned off because, when a field is volatile
2768 in an object which is not volatile, the object may be in a register,
2769 and then we would abort over here. */
2775 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2777 /* Spurious cast makes HPUX compiler happy. */
2778 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2781 /* Required alignment of containing datum. */
2782 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2783 int_size_in_bytes (TREE_TYPE (tem)));
2784 preserve_temp_slots (result);
2790 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2791 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2794 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2796 /* Don't move directly into a return register. */
2797 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2799 rtx temp = expand_expr (from, 0, VOIDmode, 0);
2800 emit_move_insn (to_rtx, temp);
2801 preserve_temp_slots (to_rtx);
2806 /* In case we are returning the contents of an object which overlaps
2807 the place the value is being stored, use a safe function when copying
2808 a value through a pointer into a structure value return block. */
2809 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2810 && current_function_returns_struct
2811 && !current_function_returns_pcc_struct)
2813 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2814 rtx size = expr_size (from);
/* Argument order differs between memcpy (dst first) and bcopy (src
   first), hence the swapped XEXP operands in the two branches.  */
2816 #ifdef TARGET_MEM_FUNCTIONS
2817 emit_library_call (memcpy_libfunc, 0,
2818 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2819 XEXP (from_rtx, 0), Pmode,
2822 emit_library_call (bcopy_libfunc, 0,
2823 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2824 XEXP (to_rtx, 0), Pmode,
2828 preserve_temp_slots (to_rtx);
2833 /* Compute FROM and store the value in the rtx we got. */
2835 result = store_expr (from, to_rtx, want_value);
2836 preserve_temp_slots (result);
/* NOTE(review): numbered-listing fragment; the embedded line numbers are
   discontinuous (declarations of `temp', `size', `copy_size', `label' and
   several braces fall in elided spans).  Code left byte-identical.  */
2841 /* Generate code for computing expression EXP,
2842 and storing the value into TARGET.
2843 Returns TARGET or an equivalent value.
2844 TARGET may contain a QUEUED rtx.
2846 If SUGGEST_REG is nonzero, copy the value through a register
2847 and return that register, if that is possible.
2849 If the value stored is a constant, we return the constant. */
2852 store_expr (exp, target, suggest_reg)
2854 register rtx target;
2858 int dont_return_target = 0;
2860 if (TREE_CODE (exp) == COMPOUND_EXPR)
2862 /* Perform first part of compound expression, then assign from second
2864 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2866 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2868 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2870 /* For conditional expression, get safe form of the target. Then
2871 test the condition, doing the appropriate assignment on either
2872 side. This avoids the creation of unnecessary temporaries.
2873 For non-BLKmode, it is more efficient not to do this. */
2875 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2878 target = protect_from_queue (target, 1);
2881 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2882 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2884 emit_jump_insn (gen_jump (lab2));
2887 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2893 else if (suggest_reg && GET_CODE (target) == MEM
2894 && GET_MODE (target) != BLKmode)
2895 /* If target is in memory and caller wants value in a register instead,
2896 arrange that. Pass TARGET as target for expand_expr so that,
2897 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2898 We know expand_expr will not use the target in that case. */
2900 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2901 GET_MODE (target), 0);
2902 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2903 temp = copy_to_reg (temp);
2904 dont_return_target = 1;
2906 else if (queued_subexp_p (target))
2907 /* If target contains a postincrement, it is not safe
2908 to use as the returned value. It would access the wrong
2909 place by the time the queued increment gets output.
2910 So copy the value through a temporary and use that temp
2913 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2915 /* Expand EXP into a new pseudo. */
2916 temp = gen_reg_rtx (GET_MODE (target));
2917 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2920 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2921 dont_return_target = 1;
2923 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2924 /* If this is a scalar in a register that is stored in a wider mode
2925 than the declared mode, compute the result into its declared mode
2926 and then convert to the wider mode. Our value is the computed
2929 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2930 convert_move (SUBREG_REG (target), temp,
2931 SUBREG_PROMOTED_UNSIGNED_P (target));
2936 temp = expand_expr (exp, target, GET_MODE (target), 0);
2937 /* DO return TARGET if it's a specified hardware register.
2938 expand_return relies on this. */
2939 if (!(target && GET_CODE (target) == REG
2940 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2941 && CONSTANT_P (temp))
2942 dont_return_target = 1;
2945 /* If value was not generated in the target, store it there.
2946 Convert the value to TARGET's type first if necessary. */
2948 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2950 target = protect_from_queue (target, 1);
2951 if (GET_MODE (temp) != GET_MODE (target)
2952 && GET_MODE (temp) != VOIDmode)
2954 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2955 if (dont_return_target)
2957 /* In this case, we will return TEMP,
2958 so make sure it has the proper mode.
2959 But don't forget to store the value into TARGET. */
2960 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2961 emit_move_insn (target, temp);
2964 convert_move (target, temp, unsignedp);
2967 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2969 /* Handle copying a string constant into an array.
2970 The string constant may be shorter than the array.
2971 So copy just the string's actual length, and clear the rest. */
2974 /* Get the size of the data type of the string,
2975 which is actually the size of the target. */
2976 size = expr_size (exp);
2977 if (GET_CODE (size) == CONST_INT
2978 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2979 emit_block_move (target, temp, size,
2980 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2983 /* Compute the size of the data to copy from the string. */
2985 = fold (build (MIN_EXPR, sizetype,
2986 size_binop (CEIL_DIV_EXPR,
2987 TYPE_SIZE (TREE_TYPE (exp)),
2988 size_int (BITS_PER_UNIT)),
2990 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2991 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2995 /* Copy that much. */
2996 emit_block_move (target, temp, copy_size_rtx,
2997 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2999 /* Figure out how much is left in TARGET
3000 that we have to clear. */
3001 if (GET_CODE (copy_size_rtx) == CONST_INT)
3003 temp = plus_constant (XEXP (target, 0),
3004 TREE_STRING_LENGTH (exp));
3005 size = plus_constant (size,
3006 - TREE_STRING_LENGTH (exp));
3010 enum machine_mode size_mode = Pmode;
3012 temp = force_reg (Pmode, XEXP (target, 0));
3013 temp = expand_binop (size_mode, add_optab, temp,
3014 copy_size_rtx, NULL_RTX, 0,
3017 size = expand_binop (size_mode, sub_optab, size,
3018 copy_size_rtx, NULL_RTX, 0,
/* Skip the clearing step entirely when nothing remains (SIZE < 0
   at run time); the branch target label is emitted in an elided span.  */
3021 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3022 GET_MODE (size), 0, 0);
3023 label = gen_label_rtx ();
3024 emit_jump_insn (gen_blt (label));
3027 if (size != const0_rtx)
3029 #ifdef TARGET_MEM_FUNCTIONS
3030 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3031 temp, Pmode, const0_rtx, Pmode, size, Pmode);
3033 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3034 temp, Pmode, size, Pmode);
3041 else if (GET_MODE (temp) == BLKmode)
3042 emit_block_move (target, temp, expr_size (exp),
3043 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3045 emit_move_insn (target, temp);
3047 if (dont_return_target)
/* NOTE(review): numbered-listing fragment with elided lines (declarations
   of `elt', `bitsize', `bitpos', `unsignedp', loop-bound test, braces).
   Code left byte-identical; comments only.  */
3052 /* Store the value of constructor EXP into the rtx TARGET.
3053 TARGET is either a REG or a MEM. */
3056 store_constructor (exp, target)
3060 tree type = TREE_TYPE (exp);
3062 /* We know our target cannot conflict, since safe_from_p has been called. */
3064 /* Don't try copying piece by piece into a hard register
3065 since that is vulnerable to being clobbered by EXP.
3066 Instead, construct in a pseudo register and then copy it all. */
3067 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3069 rtx temp = gen_reg_rtx (GET_MODE (target));
3070 store_constructor (exp, temp);
3071 emit_move_insn (target, temp);
3076 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
3080 /* Inform later passes that the whole union value is dead. */
3081 if (TREE_CODE (type) == UNION_TYPE)
3082 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3084 /* If we are building a static constructor into a register,
3085 set the initial value as zero so we can fold the value into
3087 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
3088 emit_move_insn (target, const0_rtx);
3090 /* If the constructor has fewer fields than the structure,
3091 clear the whole structure first. */
3092 else if (list_length (CONSTRUCTOR_ELTS (exp))
3093 != list_length (TYPE_FIELDS (type)))
3094 clear_storage (target, int_size_in_bytes (type));
3096 /* Inform later passes that the old value is dead. */
3097 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3099 /* Store each element of the constructor into
3100 the corresponding field of TARGET. */
3102 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3104 register tree field = TREE_PURPOSE (elt);
3105 register enum machine_mode mode;
3110 /* Just ignore missing fields.
3111 We cleared the whole structure, above,
3112 if any fields are missing. */
3116 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3117 unsignedp = TREE_UNSIGNED (field);
3118 mode = DECL_MODE (field);
3119 if (DECL_BIT_FIELD (field))
3122 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
3123 /* ??? This case remains to be written. */
3126 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
3128 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3129 /* The alignment of TARGET is
3130 at least what its type requires. */
3132 TYPE_ALIGN (type) / BITS_PER_UNIT,
3133 int_size_in_bytes (type));
3136 else if (TREE_CODE (type) == ARRAY_TYPE)
3140 tree domain = TYPE_DOMAIN (type);
3141 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3142 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3143 tree elttype = TREE_TYPE (type);
3145 /* If the constructor has fewer fields than the structure,
3146 clear the whole structure first. Similarly if this is a
3147 static constructor of a non-BLKmode object. */
/* NOTE(review): clear_storage is passed an element COUNT here
   (maxelt - minelt + 1), not a byte size as in the record case above —
   looks suspicious, but the elided context may compensate; verify
   against the full file before changing.  */
3149 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3150 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3151 clear_storage (target, maxelt - minelt + 1);
3153 /* Inform later passes that the old value is dead. */
3154 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3156 /* Store each element of the constructor into
3157 the corresponding element of TARGET, determined
3158 by counting the elements. */
3159 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3161 elt = TREE_CHAIN (elt), i++)
3163 register enum machine_mode mode;
3168 mode = TYPE_MODE (elttype);
3169 bitsize = GET_MODE_BITSIZE (mode);
3170 unsignedp = TREE_UNSIGNED (elttype);
3172 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3174 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3175 /* The alignment of TARGET is
3176 at least what its type requires. */
3178 TYPE_ALIGN (type) / BITS_PER_UNIT,
3179 int_size_in_bytes (type));
/* NOTE(review): numbered-listing fragment; elided lines include the
   BLKmode-in-register guard condition (original ~3230), some argument
   lines of the recursive call, and closing braces.  Code byte-identical.  */
3187 /* Store the value of EXP (an expression tree)
3188 into a subfield of TARGET which has mode MODE and occupies
3189 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3190 If MODE is VOIDmode, it means that we are storing into a bit-field.
3192 If VALUE_MODE is VOIDmode, return nothing in particular.
3193 UNSIGNEDP is not used in this case.
3195 Otherwise, return an rtx for the value stored. This rtx
3196 has mode VALUE_MODE if that is convenient to do.
3197 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3199 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3200 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3203 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3204 unsignedp, align, total_size)
3206 int bitsize, bitpos;
3207 enum machine_mode mode;
3209 enum machine_mode value_mode;
3214 HOST_WIDE_INT width_mask = 0;
/* Guard: a left shift by >= HOST_BITS_PER_WIDE_INT would be undefined,
   so the mask is only built for narrower bit-fields.  */
3216 if (bitsize < HOST_BITS_PER_WIDE_INT)
3217 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3219 /* If we are storing into an unaligned field of an aligned union that is
3220 in a register, we may have the mode of TARGET being an integer mode but
3221 MODE == BLKmode. In that case, get an aligned object whose size and
3222 alignment are the same as TARGET and store TARGET into it (we can avoid
3223 the store if the field being stored is the entire width of TARGET). Then
3224 call ourselves recursively to store the field into a BLKmode version of
3225 that object. Finally, load from the object into TARGET. This is not
3226 very efficient in general, but should only be slightly more expensive
3227 than the otherwise-required unaligned accesses. Perhaps this can be
3228 cleaned up later. */
3231 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3233 rtx object = assign_stack_temp (GET_MODE (target),
3234 GET_MODE_SIZE (GET_MODE (target)), 0);
3235 rtx blk_object = copy_rtx (object);
3237 PUT_MODE (blk_object, BLKmode);
3239 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3240 emit_move_insn (object, target);
3242 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3245 emit_move_insn (target, object);
3250 /* If the structure is in a register or if the component
3251 is a bit field, we cannot use addressing to access it.
3252 Use bit-field techniques or SUBREG to store in it. */
3254 if (mode == VOIDmode
3255 || (mode != BLKmode && ! direct_store[(int) mode])
3256 || GET_CODE (target) == REG
3257 || GET_CODE (target) == SUBREG)
3259 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3260 /* Store the value in the bitfield. */
3261 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3262 if (value_mode != VOIDmode)
3264 /* The caller wants an rtx for the value. */
3265 /* If possible, avoid refetching from the bitfield itself. */
3267 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3270 enum machine_mode tmode;
/* Mask TEMP down to BITSIZE bits (unsigned case is the AND;
   the signed case below sign-extends with a shift pair).  */
3273 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3274 tmode = GET_MODE (temp);
3275 if (tmode == VOIDmode)
3277 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3278 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3279 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3281 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3282 NULL_RTX, value_mode, 0, align,
3289 rtx addr = XEXP (target, 0);
3292 /* If a value is wanted, it must be the lhs;
3293 so make the address stable for multiple use. */
3295 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3296 && ! CONSTANT_ADDRESS_P (addr)
3297 /* A frame-pointer reference is already stable. */
3298 && ! (GET_CODE (addr) == PLUS
3299 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3300 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3301 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3302 addr = copy_to_reg (addr);
3304 /* Now build a reference to just the desired component. */
3306 to_rtx = change_address (target, mode,
3307 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3308 MEM_IN_STRUCT_P (to_rtx) = 1;
3310 return store_expr (exp, to_rtx, value_mode != VOIDmode);
/* NOTE(review): numbered-listing fragment; elided lines include several
   K&R parameter declarations, initialization of `offset'/`size_tree',
   the enclosing `while' header (~3384), and the function's return.  */
3314 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3315 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3316 ARRAY_REFs at constant positions and find the ultimate containing object,
3319 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3320 bit position, and *PUNSIGNEDP to the signedness of the field.
3321 If the position of the field is variable, we store a tree
3322 giving the variable offset (in units) in *POFFSET.
3323 This offset is in addition to the bit position.
3324 If the position is not variable, we store 0 in *POFFSET.
3326 If any of the extraction expressions is volatile,
3327 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3329 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3330 is a mode that can be used to access the field. In that case, *PBITSIZE
3333 If the field describes a variable-sized object, *PMODE is set to
3334 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3335 this case, but the address of the object can be found. */
3338 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3339 punsignedp, pvolatilep)
3344 enum machine_mode *pmode;
3349 enum machine_mode mode = VOIDmode;
/* First, determine the size/signedness of the outermost reference.  */
3352 if (TREE_CODE (exp) == COMPONENT_REF)
3354 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3355 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3356 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3357 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3359 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3361 size_tree = TREE_OPERAND (exp, 1);
3362 *punsignedp = TREE_UNSIGNED (exp);
3366 mode = TYPE_MODE (TREE_TYPE (exp));
3367 *pbitsize = GET_MODE_BITSIZE (mode);
3368 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3373 if (TREE_CODE (size_tree) != INTEGER_CST)
3374 mode = BLKmode, *pbitsize = -1;
3376 *pbitsize = TREE_INT_CST_LOW (size_tree);
3379 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3380 and find the ultimate containing object. */
3386 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3388 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3389 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3390 : TREE_OPERAND (exp, 2));
/* Split a PLUS_EXPR position into its constant part (added to
   *PBITPOS) and its variable part (accumulated into `offset').  */
3392 if (TREE_CODE (pos) == PLUS_EXPR)
3395 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3397 constant = TREE_OPERAND (pos, 0);
3398 var = TREE_OPERAND (pos, 1);
3400 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3402 constant = TREE_OPERAND (pos, 1);
3403 var = TREE_OPERAND (pos, 0);
3407 *pbitpos += TREE_INT_CST_LOW (constant);
3409 offset = size_binop (PLUS_EXPR, offset,
3410 size_binop (FLOOR_DIV_EXPR, var,
3411 size_int (BITS_PER_UNIT)));
3413 offset = size_binop (FLOOR_DIV_EXPR, var,
3414 size_int (BITS_PER_UNIT));
3416 else if (TREE_CODE (pos) == INTEGER_CST)
3417 *pbitpos += TREE_INT_CST_LOW (pos);
3420 /* Assume here that the offset is a multiple of a unit.
3421 If not, there should be an explicitly added constant. */
3423 offset = size_binop (PLUS_EXPR, offset,
3424 size_binop (FLOOR_DIV_EXPR, pos,
3425 size_int (BITS_PER_UNIT)));
3427 offset = size_binop (FLOOR_DIV_EXPR, pos,
3428 size_int (BITS_PER_UNIT));
3432 else if (TREE_CODE (exp) == ARRAY_REF
3433 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3434 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
3436 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3437 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
/* Stop descending unless EXP is a no-op wrapper (NON_LVALUE_EXPR, or a
   conversion that does not change the machine mode).  */
3439 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3440 && ! ((TREE_CODE (exp) == NOP_EXPR
3441 || TREE_CODE (exp) == CONVERT_EXPR)
3442 && (TYPE_MODE (TREE_TYPE (exp))
3443 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3446 /* If any reference in the chain is volatile, the effect is volatile. */
3447 if (TREE_THIS_VOLATILE (exp))
3449 exp = TREE_OPERAND (exp, 0);
3452 /* If this was a bit-field, see if there is a mode that allows direct
3453 access in case EXP is in memory. */
3454 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
3456 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3457 if (mode == BLKmode)
3464 /* We aren't finished fixing the callers to really handle nonzero offset. */
/* NOTE(review): numbered-listing fragment; elided lines include the
   declarations of `tmp'/`op2', the non-binary-op fallthrough path, and
   the MULT branch's trailing arguments.  Code byte-identical.  */
3472 /* Given an rtx VALUE that may contain additions and multiplications,
3473 return an equivalent value that just refers to a register or memory.
3474 This is done by generating instructions to perform the arithmetic
3475 and returning a pseudo-register containing the value.
3477 The returned value may be a REG, SUBREG, MEM or constant. */
3480 force_operand (value, target)
3483 register optab binoptab = 0;
3484 /* Use a temporary to force order of execution of calls to
3488 /* Use subtarget as the target for operand 0 of a binary operation. */
3489 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3491 if (GET_CODE (value) == PLUS)
3492 binoptab = add_optab;
3493 else if (GET_CODE (value) == MINUS)
3494 binoptab = sub_optab;
3495 else if (GET_CODE (value) == MULT)
3497 op2 = XEXP (value, 1);
3498 if (!CONSTANT_P (op2)
3499 && !(GET_CODE (op2) == REG && op2 != subtarget))
3501 tmp = force_operand (XEXP (value, 0), subtarget);
3502 return expand_mult (GET_MODE (value), tmp,
3503 force_operand (op2, NULL_RTX),
3509 op2 = XEXP (value, 1);
3510 if (!CONSTANT_P (op2)
3511 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Canonicalize `x - const' into `x + (-const)' so the virtual-register
   special case below can also apply to subtractions.  */
3513 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3515 binoptab = add_optab;
3516 op2 = negate_rtx (GET_MODE (value), op2);
3519 /* Check for an addition with OP2 a constant integer and our first
3520 operand a PLUS of a virtual register and something else. In that
3521 case, we want to emit the sum of the virtual register and the
3522 constant first and then add the other value. This allows virtual
3523 register instantiation to simply modify the constant rather than
3524 creating another one around this addition. */
3525 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3526 && GET_CODE (XEXP (value, 0)) == PLUS
3527 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3528 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3529 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3531 rtx temp = expand_binop (GET_MODE (value), binoptab,
3532 XEXP (XEXP (value, 0), 0), op2,
3533 subtarget, 0, OPTAB_LIB_WIDEN);
3534 return expand_binop (GET_MODE (value), binoptab, temp,
3535 force_operand (XEXP (XEXP (value, 0), 1), 0),
3536 target, 0, OPTAB_LIB_WIDEN);
3539 tmp = force_operand (XEXP (value, 0), subtarget);
3540 return expand_binop (GET_MODE (value), binoptab, tmp,
3541 force_operand (op2, NULL_RTX),
3542 target, 0, OPTAB_LIB_WIDEN);
3543 /* We give UNSIGNEDP = 0 to expand_binop
3544 because the only operations we are expanding here are signed ones. */
/* NOTE(review): numbered-listing fragment; the K&R parameter declarations,
   the `parts'/`tail' declarations, the `else' introducing the leaf case,
   and the final `return parts;' fall in elided spans.  */
3549 /* Subroutine of expand_expr:
3550 save the non-copied parts (LIST) of an expr (LHS), and return a list
3551 which can restore these values to their previous values,
3552 should something modify their storage. */
3555 save_noncopied_parts (lhs, list)
/* Recurse into nested TREE_LISTs; leaves are saved into stack temps.  */
3562 for (tail = list; tail; tail = TREE_CHAIN (tail))
3563 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3564 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3567 tree part = TREE_VALUE (tail);
3568 tree part_type = TREE_TYPE (part);
3569 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3570 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3571 int_size_in_bytes (part_type), 0);
/* If the fresh stack slot's address is not directly usable in this
   mode, legitimize it before storing into it.  */
3572 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3573 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3574 parts = tree_cons (to_be_saved,
3575 build (RTL_EXPR, part_type, NULL_TREE,
3578 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
/* NOTE(review): numbered-listing fragment; parameter declarations, local
   declarations, the `else' for the leaf case, and the `return parts;'
   are in elided spans.  Mirrors save_noncopied_parts above, but records
   initial values instead of saving current ones.  */
3583 /* Subroutine of expand_expr:
3584 record the non-copied parts (LIST) of an expr (LHS), and return a list
3585 which specifies the initial values of these parts. */
3588 init_noncopied_parts (lhs, list)
3595 for (tail = list; tail; tail = TREE_CHAIN (tail))
3596 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3597 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3600 tree part = TREE_VALUE (tail);
3601 tree part_type = TREE_TYPE (part);
3602 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3603 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
/* NOTE(review): numbered-listing fragment; elided lines include the
   declarations of `exp_rtl'/`nops'/`i', several `case' labels and
   `return' statements inside both switches, and closing braces — the
   visible switch bodies are therefore incomplete.  Code byte-identical.  */
3608 /* Subroutine of expand_expr: return nonzero iff there is no way that
3609 EXP can reference X, which is being modified. */
3612 safe_from_p (x, exp)
3622 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3623 find the underlying pseudo. */
3624 if (GET_CODE (x) == SUBREG)
3627 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3631 /* If X is a location in the outgoing argument area, it is always safe. */
3632 if (GET_CODE (x) == MEM
3633 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3634 || (GET_CODE (XEXP (x, 0)) == PLUS
3635 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
/* Dispatch on the broad class of tree code first (declaration,
   constant, unary, binary, ...); case labels are in elided spans.  */
3638 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3641 exp_rtl = DECL_RTL (exp);
3648 if (TREE_CODE (exp) == TREE_LIST)
3649 return ((TREE_VALUE (exp) == 0
3650 || safe_from_p (x, TREE_VALUE (exp)))
3651 && (TREE_CHAIN (exp) == 0
3652 || safe_from_p (x, TREE_CHAIN (exp))))
3657 return safe_from_p (x, TREE_OPERAND (exp, 0));
3661 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3662 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3666 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3667 the expression. If it is set, we conflict iff we are that rtx or
3668 both are in memory. Otherwise, we check all operands of the
3669 expression recursively. */
3671 switch (TREE_CODE (exp))
3674 return staticp (TREE_OPERAND (exp, 0));
3677 if (GET_CODE (x) == MEM)
3682 exp_rtl = CALL_EXPR_RTL (exp);
3685 /* Assume that the call will clobber all hard registers and
3687 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3688 || GET_CODE (x) == MEM)
3695 exp_rtl = RTL_EXPR_RTL (exp);
3697 /* We don't know what this can modify. */
3702 case WITH_CLEANUP_EXPR:
3703 exp_rtl = RTL_EXPR_RTL (exp);
3707 exp_rtl = SAVE_EXPR_RTL (exp);
3711 /* The only operand we look at is operand 1. The rest aren't
3712 part of the expression. */
3713 return safe_from_p (x, TREE_OPERAND (exp, 1));
3715 case METHOD_CALL_EXPR:
3716 /* This takes an rtx argument, but shouldn't appear here. */
3720 /* If we have an rtx, we do not need to scan our operands. */
3724 nops = tree_code_length[(int) TREE_CODE (exp)];
3725 for (i = 0; i < nops; i++)
3726 if (TREE_OPERAND (exp, i) != 0
3727 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3731 /* If we have an rtl, find any enclosed object. Then see if we conflict
3735 if (GET_CODE (exp_rtl) == SUBREG)
3737 exp_rtl = SUBREG_REG (exp_rtl);
3738 if (GET_CODE (exp_rtl) == REG
3739 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3743 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3744 are memory and EXP is not readonly. */
3745 return ! (rtx_equal_p (x, exp_rtl)
3746 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3747 && ! TREE_READONLY (exp)));
3750 /* If we reach here, it is safe. */
3754 /* Subroutine of expand_expr: return nonzero iff EXP is an
3755 expression whose type is statically determinable. */
/* NOTE(review): the function header and return statements fell out of
   this extract; only the discriminating condition is visible.  The tree
   codes listed are those whose objects have a fixed, known type.  */
3761 if (TREE_CODE (exp) == PARM_DECL
3762 || TREE_CODE (exp) == VAR_DECL
3763 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3764 || TREE_CODE (exp) == COMPONENT_REF
3765 || TREE_CODE (exp) == ARRAY_REF)
3770 /* expand_expr: generate code for computing expression EXP.
3771 An rtx for the computed value is returned. The value is never null.
3772 In the case of a void EXP, const0_rtx is returned.
3774 The value may be stored in TARGET if TARGET is nonzero.
3775 TARGET is just a suggestion; callers must assume that
3776 the rtx returned may not be the same as TARGET.
3778 If TARGET is CONST0_RTX, it means that the value will be ignored.
3780 If TMODE is not VOIDmode, it suggests generating the
3781 result in mode TMODE. But this is done only when convenient.
3782 Otherwise, TMODE is ignored and the value generated in its natural mode.
3783 TMODE is just a suggestion; callers must assume that
3784 the rtx returned may not have mode TMODE.
3786 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3787 with a constant address even if that address is not normally legitimate.
3788 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3790 If MODIFIER is EXPAND_SUM then when EXP is an addition
3791 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3792 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3793 products as above, or REG or MEM, or constant.
3794 Ordinarily in such cases we would output mul or add instructions
3795 and then return a pseudo reg containing the sum.
3797 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3798 it also marks a label as absolutely required (it can't be dead).
3799 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3800 This is used for outputting expressions used in initializers. */
3803 expand_expr (exp, target, tmode, modifier)
3806 enum machine_mode tmode;
3807 enum expand_modifier modifier;
3809 register rtx op0, op1, temp;
3810 tree type = TREE_TYPE (exp);
3811 int unsignedp = TREE_UNSIGNED (type);
3812 register enum machine_mode mode = TYPE_MODE (type);
3813 register enum tree_code code = TREE_CODE (exp);
3815 /* Use subtarget as the target for operand 0 of a binary operation. */
3816 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3817 rtx original_target = target;
3818 int ignore = target == const0_rtx;
3821 /* Don't use hard regs as subtargets, because the combiner
3822 can only handle pseudo regs. */
3823 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3825 /* Avoid subtargets inside loops,
3826 since they hide some invariant expressions. */
3827 if (preserve_subexpressions_p ())
3830 if (ignore) target = 0, original_target = 0;
3832 /* If will do cse, generate all results into pseudo registers
3833 since 1) that allows cse to find more things
3834 and 2) otherwise cse could produce an insn the machine
3837 if (! cse_not_expected && mode != BLKmode && target
3838 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3841 /* Ensure we reference a volatile object even if value is ignored. */
3842 if (ignore && TREE_THIS_VOLATILE (exp)
3843 && mode != VOIDmode && mode != BLKmode)
3845 target = gen_reg_rtx (mode);
3846 temp = expand_expr (exp, target, VOIDmode, modifier);
3848 emit_move_insn (target, temp);
3856 tree function = decl_function_context (exp);
3857 /* Handle using a label in a containing function. */
3858 if (function != current_function_decl && function != 0)
3860 struct function *p = find_function_data (function);
3861 /* Allocate in the memory associated with the function
3862 that the label is in. */
3863 push_obstacks (p->function_obstack,
3864 p->function_maybepermanent_obstack);
3866 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3867 label_rtx (exp), p->forced_labels);
3870 else if (modifier == EXPAND_INITIALIZER)
3871 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3872 label_rtx (exp), forced_labels);
3873 temp = gen_rtx (MEM, FUNCTION_MODE,
3874 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3875 if (function != current_function_decl && function != 0)
3876 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3881 if (DECL_RTL (exp) == 0)
3883 error_with_decl (exp, "prior parameter's size depends on `%s'");
3884 return CONST0_RTX (mode);
3890 if (DECL_RTL (exp) == 0)
3892 /* Ensure variable marked as used
3893 even if it doesn't go through a parser. */
3894 TREE_USED (exp) = 1;
3895 /* Handle variables inherited from containing functions. */
3896 context = decl_function_context (exp);
3898 /* We treat inline_function_decl as an alias for the current function
3899 because that is the inline function whose vars, types, etc.
3900 are being merged into the current function.
3901 See expand_inline_function. */
3902 if (context != 0 && context != current_function_decl
3903 && context != inline_function_decl
3904 /* If var is static, we don't need a static chain to access it. */
3905 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3906 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3910 /* Mark as non-local and addressable. */
3911 DECL_NONLOCAL (exp) = 1;
3912 mark_addressable (exp);
3913 if (GET_CODE (DECL_RTL (exp)) != MEM)
3915 addr = XEXP (DECL_RTL (exp), 0);
3916 if (GET_CODE (addr) == MEM)
3917 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3919 addr = fix_lexical_addr (addr, exp);
3920 return change_address (DECL_RTL (exp), mode, addr);
3923 /* This is the case of an array whose size is to be determined
3924 from its initializer, while the initializer is still being parsed.
3926 if (GET_CODE (DECL_RTL (exp)) == MEM
3927 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3928 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3929 XEXP (DECL_RTL (exp), 0));
3930 if (GET_CODE (DECL_RTL (exp)) == MEM
3931 && modifier != EXPAND_CONST_ADDRESS
3932 && modifier != EXPAND_SUM
3933 && modifier != EXPAND_INITIALIZER)
3935 /* DECL_RTL probably contains a constant address.
3936 On RISC machines where a constant address isn't valid,
3937 make some insns to get that address into a register. */
3938 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3940 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3941 return change_address (DECL_RTL (exp), VOIDmode,
3942 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3945 /* If the mode of DECL_RTL does not match that of the decl, it
3946 must be a promoted value. We return a SUBREG of the wanted mode,
3947 but mark it so that we know that it was already extended. */
3949 if (GET_CODE (DECL_RTL (exp)) == REG
3950 && GET_MODE (DECL_RTL (exp)) != mode)
3952 enum machine_mode decl_mode = DECL_MODE (exp);
3954 /* Get the signedness used for this variable. Ensure we get the
3955 same mode we got when the variable was declared. */
3957 PROMOTE_MODE (decl_mode, unsignedp, type);
3959 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3962 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3963 SUBREG_PROMOTED_VAR_P (temp) = 1;
3964 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3968 return DECL_RTL (exp);
3971 return immed_double_const (TREE_INT_CST_LOW (exp),
3972 TREE_INT_CST_HIGH (exp),
3976 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3979 /* If optimized, generate immediate CONST_DOUBLE
3980 which will be turned into memory by reload if necessary.
3982 We used to force a register so that loop.c could see it. But
3983 this does not allow gen_* patterns to perform optimizations with
3984 the constants. It also produces two insns in cases like "x = 1.0;".
3985 On most machines, floating-point constants are not permitted in
3986 many insns, so we'd end up copying it to a register in any case.
3988 Now, we do the copying in expand_binop, if appropriate. */
3989 return immed_real_const (exp);
3993 if (! TREE_CST_RTL (exp))
3994 output_constant_def (exp);
3996 /* TREE_CST_RTL probably contains a constant address.
3997 On RISC machines where a constant address isn't valid,
3998 make some insns to get that address into a register. */
3999 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4000 && modifier != EXPAND_CONST_ADDRESS
4001 && modifier != EXPAND_INITIALIZER
4002 && modifier != EXPAND_SUM
4003 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
4004 return change_address (TREE_CST_RTL (exp), VOIDmode,
4005 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4006 return TREE_CST_RTL (exp);
4009 context = decl_function_context (exp);
4010 /* We treat inline_function_decl as an alias for the current function
4011 because that is the inline function whose vars, types, etc.
4012 are being merged into the current function.
4013 See expand_inline_function. */
4014 if (context == current_function_decl || context == inline_function_decl)
4017 /* If this is non-local, handle it. */
4020 temp = SAVE_EXPR_RTL (exp);
4021 if (temp && GET_CODE (temp) == REG)
4023 put_var_into_stack (exp);
4024 temp = SAVE_EXPR_RTL (exp);
4026 if (temp == 0 || GET_CODE (temp) != MEM)
4028 return change_address (temp, mode,
4029 fix_lexical_addr (XEXP (temp, 0), exp));
4031 if (SAVE_EXPR_RTL (exp) == 0)
4033 if (mode == BLKmode)
4035 = assign_stack_temp (mode,
4036 int_size_in_bytes (TREE_TYPE (exp)), 0);
4039 enum machine_mode var_mode = mode;
4041 if (TREE_CODE (type) == INTEGER_TYPE
4042 || TREE_CODE (type) == ENUMERAL_TYPE
4043 || TREE_CODE (type) == BOOLEAN_TYPE
4044 || TREE_CODE (type) == CHAR_TYPE
4045 || TREE_CODE (type) == REAL_TYPE
4046 || TREE_CODE (type) == POINTER_TYPE
4047 || TREE_CODE (type) == OFFSET_TYPE)
4049 PROMOTE_MODE (var_mode, unsignedp, type);
4052 temp = gen_reg_rtx (var_mode);
4055 SAVE_EXPR_RTL (exp) = temp;
4056 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4057 if (!optimize && GET_CODE (temp) == REG)
4058 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4062 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4063 must be a promoted value. We return a SUBREG of the wanted mode,
4064 but mark it so that we know that it was already extended. Note
4065 that `unsignedp' was modified above in this case. */
4067 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4068 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4070 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4071 SUBREG_PROMOTED_VAR_P (temp) = 1;
4072 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4076 return SAVE_EXPR_RTL (exp);
4079 /* Exit the current loop if the body-expression is true. */
4081 rtx label = gen_label_rtx ();
4082 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
4083 expand_exit_loop (NULL_PTR);
4089 expand_start_loop (1);
4090 expand_expr_stmt (TREE_OPERAND (exp, 0));
4097 tree vars = TREE_OPERAND (exp, 0);
4098 int vars_need_expansion = 0;
4100 /* Need to open a binding contour here because
4101 if there are any cleanups they most be contained here. */
4102 expand_start_bindings (0);
4104 /* Mark the corresponding BLOCK for output in its proper place. */
4105 if (TREE_OPERAND (exp, 2) != 0
4106 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4107 insert_block (TREE_OPERAND (exp, 2));
4109 /* If VARS have not yet been expanded, expand them now. */
4112 if (DECL_RTL (vars) == 0)
4114 vars_need_expansion = 1;
4117 expand_decl_init (vars);
4118 vars = TREE_CHAIN (vars);
4121 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4123 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4129 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4131 emit_insns (RTL_EXPR_SEQUENCE (exp));
4132 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4133 return RTL_EXPR_RTL (exp);
4136 /* All elts simple constants => refer to a constant in memory. But
4137 if this is a non-BLKmode mode, let it store a field at a time
4138 since that should make a CONST_INT or CONST_DOUBLE when we
4140 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
4142 rtx constructor = output_constant_def (exp);
4143 if (modifier != EXPAND_CONST_ADDRESS
4144 && modifier != EXPAND_INITIALIZER
4145 && modifier != EXPAND_SUM
4146 && !memory_address_p (GET_MODE (constructor),
4147 XEXP (constructor, 0)))
4148 constructor = change_address (constructor, VOIDmode,
4149 XEXP (constructor, 0));
4156 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4157 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4162 if (target == 0 || ! safe_from_p (target, exp))
4164 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4165 target = gen_reg_rtx (mode);
4168 enum tree_code c = TREE_CODE (type);
4170 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4171 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
4172 MEM_IN_STRUCT_P (target) = 1;
4175 store_constructor (exp, target);
4181 tree exp1 = TREE_OPERAND (exp, 0);
4184 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4185 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4186 This code has the same general effect as simply doing
4187 expand_expr on the save expr, except that the expression PTR
4188 is computed for use as a memory address. This means different
4189 code, suitable for indexing, may be generated. */
4190 if (TREE_CODE (exp1) == SAVE_EXPR
4191 && SAVE_EXPR_RTL (exp1) == 0
4192 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4193 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4194 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4196 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4197 VOIDmode, EXPAND_SUM);
4198 op0 = memory_address (mode, temp);
4199 op0 = copy_all_regs (op0);
4200 SAVE_EXPR_RTL (exp1) = op0;
4204 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4205 op0 = memory_address (mode, op0);
4208 temp = gen_rtx (MEM, mode, op0);
4209 /* If address was computed by addition,
4210 mark this as an element of an aggregate. */
4211 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4212 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4213 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4214 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4215 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4216 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4217 || (TREE_CODE (exp1) == ADDR_EXPR
4218 && (exp2 = TREE_OPERAND (exp1, 0))
4219 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4220 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4221 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
4222 MEM_IN_STRUCT_P (temp) = 1;
4223 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4224 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4225 a location is accessed through a pointer to const does not mean
4226 that the value there can never change. */
4227 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4233 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
4234 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4236 /* Nonconstant array index or nonconstant element size.
4237 Generate the tree for *(&array+index) and expand that,
4238 except do it in a language-independent way
4239 and don't complain about non-lvalue arrays.
4240 `mark_addressable' should already have been called
4241 for any array for which this case will be reached. */
4243 /* Don't forget the const or volatile flag from the array element. */
4244 tree variant_type = build_type_variant (type,
4245 TREE_READONLY (exp),
4246 TREE_THIS_VOLATILE (exp));
4247 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
4248 TREE_OPERAND (exp, 0));
4249 tree index = TREE_OPERAND (exp, 1);
4252 /* Convert the integer argument to a type the same size as a pointer
4253 so the multiply won't overflow spuriously. */
4254 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
4255 index = convert (type_for_size (POINTER_SIZE, 0), index);
4257 /* Don't think the address has side effects
4258 just because the array does.
4259 (In some cases the address might have side effects,
4260 and we fail to record that fact here. However, it should not
4261 matter, since expand_expr should not care.) */
4262 TREE_SIDE_EFFECTS (array_adr) = 0;
4264 elt = build1 (INDIRECT_REF, type,
4265 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
4267 fold (build (MULT_EXPR,
4268 TYPE_POINTER_TO (variant_type),
4269 index, size_in_bytes (type))))));
4271 /* Volatility, etc., of new expression is same as old expression. */
4272 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4273 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4274 TREE_READONLY (elt) = TREE_READONLY (exp);
4276 return expand_expr (elt, target, tmode, modifier);
4279 /* Fold an expression like: "foo"[2].
4280 This is not done in fold so it won't happen inside &. */
4283 tree arg0 = TREE_OPERAND (exp, 0);
4284 tree arg1 = TREE_OPERAND (exp, 1);
4286 if (TREE_CODE (arg0) == STRING_CST
4287 && TREE_CODE (arg1) == INTEGER_CST
4288 && !TREE_INT_CST_HIGH (arg1)
4289 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4291 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
4293 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
4294 TREE_TYPE (exp) = integer_type_node;
4295 return expand_expr (exp, target, tmode, modifier);
4297 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
4299 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4300 TREE_TYPE (exp) = integer_type_node;
4301 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
4306 /* If this is a constant index into a constant array,
4307 just get the value from the array. Handle both the cases when
4308 we have an explicit constructor and when our operand is a variable
4309 that was declared const. */
4311 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4312 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4314 tree index = fold (TREE_OPERAND (exp, 1));
4315 if (TREE_CODE (index) == INTEGER_CST
4316 && TREE_INT_CST_HIGH (index) == 0)
4318 int i = TREE_INT_CST_LOW (index);
4319 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4322 elem = TREE_CHAIN (elem);
4324 return expand_expr (fold (TREE_VALUE (elem)), target,
4329 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
4330 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4331 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
4332 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4333 && DECL_INITIAL (TREE_OPERAND (exp, 0))
4335 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
4338 tree index = fold (TREE_OPERAND (exp, 1));
4339 if (TREE_CODE (index) == INTEGER_CST
4340 && TREE_INT_CST_HIGH (index) == 0)
4342 int i = TREE_INT_CST_LOW (index);
4343 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
4345 if (TREE_CODE (init) == CONSTRUCTOR)
4347 tree elem = CONSTRUCTOR_ELTS (init);
4350 elem = TREE_CHAIN (elem);
4352 return expand_expr (fold (TREE_VALUE (elem)), target,
4355 else if (TREE_CODE (init) == STRING_CST
4356 && i < TREE_STRING_LENGTH (init))
4358 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4359 return convert_to_mode (mode, temp, 0);
4363 /* Treat array-ref with constant index as a component-ref. */
4367 /* If the operand is a CONSTRUCTOR, we can just extract the
4368 appropriate field if it is present. */
4369 if (code != ARRAY_REF
4370 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4374 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4375 elt = TREE_CHAIN (elt))
4376 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4377 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4381 enum machine_mode mode1;
4386 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4387 &mode1, &unsignedp, &volatilep);
4389 /* In some cases, we will be offsetting OP0's address by a constant.
4390 So get it as a sum, if possible. If we will be using it
4391 directly in an insn, we validate it. */
4392 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4394 /* If this is a constant, put it into a register if it is a
4395 legitimate constant and memory if it isn't. */
4396 if (CONSTANT_P (op0))
4398 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4399 if (LEGITIMATE_CONSTANT_P (op0))
4400 op0 = force_reg (mode, op0);
4402 op0 = validize_mem (force_const_mem (mode, op0));
4407 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4409 if (GET_CODE (op0) != MEM)
4411 op0 = change_address (op0, VOIDmode,
4412 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4413 force_reg (Pmode, offset_rtx)));
4416 /* Don't forget about volatility even if this is a bitfield. */
4417 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4419 op0 = copy_rtx (op0);
4420 MEM_VOLATILE_P (op0) = 1;
4423 if (mode1 == VOIDmode
4424 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4425 && modifier != EXPAND_CONST_ADDRESS
4426 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4427 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
4429 /* In cases where an aligned union has an unaligned object
4430 as a field, we might be extracting a BLKmode value from
4431 an integer-mode (e.g., SImode) object. Handle this case
4432 by doing the extract into an object as wide as the field
4433 (which we know to be the width of a basic mode), then
4434 storing into memory, and changing the mode to BLKmode. */
4435 enum machine_mode ext_mode = mode;
4437 if (ext_mode == BLKmode)
4438 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4440 if (ext_mode == BLKmode)
4443 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4444 unsignedp, target, ext_mode, ext_mode,
4445 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4446 int_size_in_bytes (TREE_TYPE (tem)));
4447 if (mode == BLKmode)
4449 rtx new = assign_stack_temp (ext_mode,
4450 bitsize / BITS_PER_UNIT, 0);
4452 emit_move_insn (new, op0);
4453 op0 = copy_rtx (new);
4454 PUT_MODE (op0, BLKmode);
4460 /* Get a reference to just this component. */
4461 if (modifier == EXPAND_CONST_ADDRESS
4462 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4463 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4464 (bitpos / BITS_PER_UNIT)));
4466 op0 = change_address (op0, mode1,
4467 plus_constant (XEXP (op0, 0),
4468 (bitpos / BITS_PER_UNIT)));
4469 MEM_IN_STRUCT_P (op0) = 1;
4470 MEM_VOLATILE_P (op0) |= volatilep;
4471 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4474 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4475 convert_move (target, op0, unsignedp);
4481 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
4482 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4483 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4484 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4485 MEM_IN_STRUCT_P (temp) = 1;
4486 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4487 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4488 a location is accessed through a pointer to const does not mean
4489 that the value there can never change. */
4490 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4495 /* Intended for a reference to a buffer of a file-object in Pascal.
4496 But it's not certain that a special tree code will really be
4497 necessary for these. INDIRECT_REF might work for them. */
4501 /* IN_EXPR: Inlined pascal set IN expression.
4504 rlo = set_low - (set_low%bits_per_word);
4505 the_word = set [ (index - rlo)/bits_per_word ];
4506 bit_index = index % bits_per_word;
4507 bitmask = 1 << bit_index;
4508 return !!(the_word & bitmask); */
4510 preexpand_calls (exp);
4512 tree set = TREE_OPERAND (exp, 0);
4513 tree index = TREE_OPERAND (exp, 1);
4514 tree set_type = TREE_TYPE (set);
4516 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4517 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4523 rtx diff, quo, rem, addr, bit, result;
4524 rtx setval, setaddr;
4525 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4528 target = gen_reg_rtx (mode);
4530 /* If domain is empty, answer is no. */
4531 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4534 index_val = expand_expr (index, 0, VOIDmode, 0);
4535 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4536 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4537 setval = expand_expr (set, 0, VOIDmode, 0);
4538 setaddr = XEXP (setval, 0);
4540 /* Compare index against bounds, if they are constant. */
4541 if (GET_CODE (index_val) == CONST_INT
4542 && GET_CODE (lo_r) == CONST_INT
4543 && INTVAL (index_val) < INTVAL (lo_r))
4546 if (GET_CODE (index_val) == CONST_INT
4547 && GET_CODE (hi_r) == CONST_INT
4548 && INTVAL (hi_r) < INTVAL (index_val))
4551 /* If we get here, we have to generate the code for both cases
4552 (in range and out of range). */
4554 op0 = gen_label_rtx ();
4555 op1 = gen_label_rtx ();
4557 if (! (GET_CODE (index_val) == CONST_INT
4558 && GET_CODE (lo_r) == CONST_INT))
4560 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4561 GET_MODE (index_val), 0, 0);
4562 emit_jump_insn (gen_blt (op1));
4565 if (! (GET_CODE (index_val) == CONST_INT
4566 && GET_CODE (hi_r) == CONST_INT))
4568 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4569 GET_MODE (index_val), 0, 0);
4570 emit_jump_insn (gen_bgt (op1));
4573 /* Calculate the element number of bit zero in the first word
4575 if (GET_CODE (lo_r) == CONST_INT)
4576 rlow = GEN_INT (INTVAL (lo_r)
4577 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4579 rlow = expand_binop (index_mode, and_optab, lo_r,
4580 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4581 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4583 diff = expand_binop (index_mode, sub_optab,
4584 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4586 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4587 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4588 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4589 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4590 addr = memory_address (byte_mode,
4591 expand_binop (index_mode, add_optab,
4592 diff, setaddr, NULL_RTX, 0,
4594 /* Extract the bit we want to examine */
4595 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4596 gen_rtx (MEM, byte_mode, addr),
4597 make_tree (TREE_TYPE (index), rem),
4599 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4600 GET_MODE (target) == byte_mode ? target : 0,
4601 1, OPTAB_LIB_WIDEN);
4603 if (result != target)
4604 convert_move (target, result, 1);
4606 /* Output the code to handle the out-of-range case. */
4609 emit_move_insn (target, const0_rtx);
4614 case WITH_CLEANUP_EXPR:
4615 if (RTL_EXPR_RTL (exp) == 0)
4618 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4620 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4621 /* That's it for this cleanup. */
4622 TREE_OPERAND (exp, 2) = 0;
4624 return RTL_EXPR_RTL (exp);
4627 /* Check for a built-in function. */
4628 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4629 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4630 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4631 return expand_builtin (exp, target, subtarget, tmode, ignore);
4632 /* If this call was expanded already by preexpand_calls,
4633 just return the result we got. */
4634 if (CALL_EXPR_RTL (exp) != 0)
4635 return CALL_EXPR_RTL (exp);
4636 return expand_call (exp, target, ignore);
4638 case NON_LVALUE_EXPR:
4641 case REFERENCE_EXPR:
4642 if (TREE_CODE (type) == VOID_TYPE || ignore)
4644 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4647 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4648 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4649 if (TREE_CODE (type) == UNION_TYPE)
4651 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4654 if (mode == BLKmode)
4656 if (TYPE_SIZE (type) == 0
4657 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4659 target = assign_stack_temp (BLKmode,
4660 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4661 + BITS_PER_UNIT - 1)
4662 / BITS_PER_UNIT, 0);
4665 target = gen_reg_rtx (mode);
4667 if (GET_CODE (target) == MEM)
4668 /* Store data into beginning of memory target. */
4669 store_expr (TREE_OPERAND (exp, 0),
4670 change_address (target, TYPE_MODE (valtype), 0), 0);
4672 else if (GET_CODE (target) == REG)
4673 /* Store this field into a union of the proper type. */
4674 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4675 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4677 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4681 /* Return the entire union. */
4684 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4685 if (GET_MODE (op0) == mode)
4687 /* If arg is a constant integer being extended from a narrower mode,
4688 we must really truncate to get the extended bits right. Otherwise
4689 (unsigned long) (unsigned char) ("\377"[0])
4690 would come out as ffffffff. */
4691 if (GET_MODE (op0) == VOIDmode
4692 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4693 < GET_MODE_BITSIZE (mode)))
4695 /* MODE must be narrower than HOST_BITS_PER_INT. */
4696 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4698 if (width < HOST_BITS_PER_WIDE_INT)
4700 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4701 : CONST_DOUBLE_LOW (op0));
4702 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4703 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4704 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4706 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4708 op0 = GEN_INT (val);
4712 op0 = (simplify_unary_operation
4713 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4714 ? ZERO_EXTEND : SIGN_EXTEND),
4716 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4721 if (GET_MODE (op0) == VOIDmode)
4723 if (modifier == EXPAND_INITIALIZER)
4724 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4725 if (flag_force_mem && GET_CODE (op0) == MEM)
4726 op0 = copy_to_reg (op0);
4729 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4731 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4735 /* We come here from MINUS_EXPR when the second operand is a constant. */
4737 this_optab = add_optab;
4739 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4740 something else, make sure we add the register to the constant and
4741 then to the other thing. This case can occur during strength
4742 reduction and doing it this way will produce better code if the
4743 frame pointer or argument pointer is eliminated.
4745 fold-const.c will ensure that the constant is always in the inner
4746 PLUS_EXPR, so the only case we need to do anything about is if
4747 sp, ap, or fp is our second argument, in which case we must swap
4748 the innermost first argument and our second argument. */
4750 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4751 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4752 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4753 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4754 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4755 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4757 tree t = TREE_OPERAND (exp, 1);
4759 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4760 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4763 /* If the result is to be Pmode and we are adding an integer to
4764 something, we might be forming a constant. So try to use
4765 plus_constant. If it produces a sum and we can't accept it,
4766 use force_operand. This allows P = &ARR[const] to generate
4767 efficient code on machines where a SYMBOL_REF is not a valid
4770 If this is an EXPAND_SUM call, always return the sum. */
4771 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4772 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4773 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4776 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4778 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4779 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4780 op1 = force_operand (op1, target);
4784 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4785 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4786 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4789 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4791 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4792 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4793 op0 = force_operand (op0, target);
4797 /* No sense saving up arithmetic to be done
4798 if it's all in the wrong mode to form part of an address.
4799 And force_operand won't know whether to sign-extend or
4801 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4802 || mode != Pmode) goto binop;
4804 preexpand_calls (exp);
4805 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4808 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4809 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4811 /* Make sure any term that's a sum with a constant comes last. */
4812 if (GET_CODE (op0) == PLUS
4813 && CONSTANT_P (XEXP (op0, 1)))
4819 /* If adding to a sum including a constant,
4820 associate it to put the constant outside. */
4821 if (GET_CODE (op1) == PLUS
4822 && CONSTANT_P (XEXP (op1, 1)))
4824 rtx constant_term = const0_rtx;
4826 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4829 /* Ensure that MULT comes first if there is one. */
4830 else if (GET_CODE (op0) == MULT)
4831 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4833 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4835 /* Let's also eliminate constants from op0 if possible. */
4836 op0 = eliminate_constant_term (op0, &constant_term);
4838 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4839 their sum should be a constant. Form it into OP1, since the
4840 result we want will then be OP0 + OP1. */
4842 temp = simplify_binary_operation (PLUS, mode, constant_term,
4847 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4850 /* Put a constant term last and put a multiplication first. */
4851 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4852 temp = op1, op1 = op0, op0 = temp;
4854 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4855 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4858 /* Handle difference of two symbolic constants,
4859 for the sake of an initializer. */
4860 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4861 && really_constant_p (TREE_OPERAND (exp, 0))
4862 && really_constant_p (TREE_OPERAND (exp, 1)))
4864 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4865 VOIDmode, modifier);
4866 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4867 VOIDmode, modifier);
4868 return gen_rtx (MINUS, mode, op0, op1);
4870 /* Convert A - const to A + (-const). */
4871 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4873 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4874 fold (build1 (NEGATE_EXPR, type,
4875 TREE_OPERAND (exp, 1))));
4878 this_optab = sub_optab;
4882 preexpand_calls (exp);
4883 /* If first operand is constant, swap them.
4884 Thus the following special case checks need only
4885 check the second operand. */
4886 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4888 register tree t1 = TREE_OPERAND (exp, 0);
4889 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4890 TREE_OPERAND (exp, 1) = t1;
4893 /* Attempt to return something suitable for generating an
4894 indexed address, for machines that support that. */
4896 if (modifier == EXPAND_SUM && mode == Pmode
4897 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4898 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4900 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4902 /* Apply distributive law if OP0 is x+c. */
4903 if (GET_CODE (op0) == PLUS
4904 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4905 return gen_rtx (PLUS, mode,
4906 gen_rtx (MULT, mode, XEXP (op0, 0),
4907 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4908 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4909 * INTVAL (XEXP (op0, 1))));
4911 if (GET_CODE (op0) != REG)
4912 op0 = force_operand (op0, NULL_RTX);
4913 if (GET_CODE (op0) != REG)
4914 op0 = copy_to_mode_reg (mode, op0);
4916 return gen_rtx (MULT, mode, op0,
4917 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4920 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4923 /* Check for multiplying things that have been extended
4924 from a narrower type. If this machine supports multiplying
4925 in that narrower type with a result in the desired type,
4926 do it that way, and avoid the explicit type-conversion. */
4927 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4928 && TREE_CODE (type) == INTEGER_TYPE
4929 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4930 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4931 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4932 && int_fits_type_p (TREE_OPERAND (exp, 1),
4933 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4934 /* Don't use a widening multiply if a shift will do. */
4935 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4936 > HOST_BITS_PER_WIDE_INT)
4937 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4939 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4940 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4942 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4943 /* If both operands are extended, they must either both
4944 be zero-extended or both be sign-extended. */
4945 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4947 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4949 enum machine_mode innermode
4950 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4951 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4952 ? umul_widen_optab : smul_widen_optab);
4953 if (mode == GET_MODE_WIDER_MODE (innermode)
4954 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4956 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4957 NULL_RTX, VOIDmode, 0);
4958 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4959 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4962 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4963 NULL_RTX, VOIDmode, 0);
4967 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4968 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4969 return expand_mult (mode, op0, op1, target, unsignedp);
4971 case TRUNC_DIV_EXPR:
4972 case FLOOR_DIV_EXPR:
4974 case ROUND_DIV_EXPR:
4975 case EXACT_DIV_EXPR:
4976 preexpand_calls (exp);
4977 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4979 /* Possible optimization: compute the dividend with EXPAND_SUM
4980 then if the divisor is constant can optimize the case
4981 where some terms of the dividend have coeffs divisible by it. */
4982 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4983 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4984 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4987 this_optab = flodiv_optab;
4990 case TRUNC_MOD_EXPR:
4991 case FLOOR_MOD_EXPR:
4993 case ROUND_MOD_EXPR:
4994 preexpand_calls (exp);
4995 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4997 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4998 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4999 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5001 case FIX_ROUND_EXPR:
5002 case FIX_FLOOR_EXPR:
5004 abort (); /* Not used for C. */
5006 case FIX_TRUNC_EXPR:
5007 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5009 target = gen_reg_rtx (mode);
5010 expand_fix (target, op0, unsignedp);
5014 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5016 target = gen_reg_rtx (mode);
5017 /* expand_float can't figure out what to do if FROM has VOIDmode.
5018 So give it the correct mode. With -O, cse will optimize this. */
5019 if (GET_MODE (op0) == VOIDmode)
5020 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5022 expand_float (target, op0,
5023 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5027 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5028 temp = expand_unop (mode, neg_optab, op0, target, 0);
5034 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5036 /* Handle complex values specially. */
5038 enum machine_mode opmode
5039 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5041 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5042 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5043 return expand_complex_abs (opmode, op0, target, unsignedp);
5046 /* Unsigned abs is simply the operand. Testing here means we don't
5047 risk generating incorrect code below. */
5048 if (TREE_UNSIGNED (type))
5051 /* First try to do it with a special abs instruction. */
5052 temp = expand_unop (mode, abs_optab, op0, target, 0);
5056 /* If this machine has expensive jumps, we can do integer absolute
5057 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5058 where W is the width of MODE. */
5060 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5062 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5063 size_int (GET_MODE_BITSIZE (mode) - 1),
5066 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5069 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5076 /* If that does not win, use conditional jump and negate. */
5077 target = original_target;
5078 temp = gen_label_rtx ();
5079 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5080 || (GET_CODE (target) == REG
5081 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5082 target = gen_reg_rtx (mode);
5083 emit_move_insn (target, op0);
5084 emit_cmp_insn (target,
5085 expand_expr (convert (type, integer_zero_node),
5086 NULL_RTX, VOIDmode, 0),
5087 GE, NULL_RTX, mode, 0, 0);
5089 emit_jump_insn (gen_bge (temp));
5090 op0 = expand_unop (mode, neg_optab, target, target, 0);
5092 emit_move_insn (target, op0);
5099 target = original_target;
5100 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5101 || (GET_CODE (target) == REG
5102 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5103 target = gen_reg_rtx (mode);
5104 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5105 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5107 /* First try to do it with a special MIN or MAX instruction.
5108 If that does not win, use a conditional jump to select the proper
5110 this_optab = (TREE_UNSIGNED (type)
5111 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5112 : (code == MIN_EXPR ? smin_optab : smax_optab));
5114 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5120 emit_move_insn (target, op0);
5121 op0 = gen_label_rtx ();
5122 if (code == MAX_EXPR)
5123 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5124 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5125 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5127 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5128 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5129 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5130 if (temp == const0_rtx)
5131 emit_move_insn (target, op1);
5132 else if (temp != const_true_rtx)
5134 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5135 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5138 emit_move_insn (target, op1);
5143 /* ??? Can optimize when the operand of this is a bitwise operation,
5144 by using a different bitwise operation. */
5146 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5147 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5153 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5154 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5159 /* ??? Can optimize bitwise operations with one arg constant.
5160 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5161 and (a bitwise1 b) bitwise2 b (etc)
5162 but that is probably not worth while. */
5164 /* BIT_AND_EXPR is for bitwise anding.
5165 TRUTH_AND_EXPR is for anding two boolean values
5166 when we want in all cases to compute both of them.
5167 In general it is fastest to do TRUTH_AND_EXPR by
5168 computing both operands as actual zero-or-1 values
5169 and then bitwise anding. In cases where there cannot
5170 be any side effects, better code would be made by
5171 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5172 but the question is how to recognize those cases. */
5174 case TRUTH_AND_EXPR:
5176 this_optab = and_optab;
5179 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5182 this_optab = ior_optab;
5185 case TRUTH_XOR_EXPR:
5187 this_optab = xor_optab;
5194 preexpand_calls (exp);
5195 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5197 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5198 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5201 /* Could determine the answer when only additive constants differ.
5202 Also, the addition of one can be handled by changing the condition. */
5209 preexpand_calls (exp);
5210 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5213 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5214 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5216 && GET_CODE (original_target) == REG
5217 && (GET_MODE (original_target)
5218 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5220 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5221 if (temp != original_target)
5222 temp = copy_to_reg (temp);
5223 op1 = gen_label_rtx ();
5224 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5225 GET_MODE (temp), unsignedp, 0);
5226 emit_jump_insn (gen_beq (op1));
5227 emit_move_insn (temp, const1_rtx);
5231 /* If no set-flag instruction, must generate a conditional
5232 store into a temporary variable. Drop through
5233 and handle this like && and ||. */
5235 case TRUTH_ANDIF_EXPR:
5236 case TRUTH_ORIF_EXPR:
5237 if (target == 0 || ! safe_from_p (target, exp)
5238 /* Make sure we don't have a hard reg (such as function's return
5239 value) live across basic blocks, if not optimizing. */
5240 || (!optimize && GET_CODE (target) == REG
5241 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5242 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5243 emit_clr_insn (target);
5244 op1 = gen_label_rtx ();
5245 jumpifnot (exp, op1);
5246 emit_0_to_1_insn (target);
5250 case TRUTH_NOT_EXPR:
5251 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5252 /* The parser is careful to generate TRUTH_NOT_EXPR
5253 only with operands that are always zero or one. */
5254 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5255 target, 1, OPTAB_LIB_WIDEN);
5261 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5263 return expand_expr (TREE_OPERAND (exp, 1),
5264 (ignore ? const0_rtx : target),
5269 /* Note that COND_EXPRs whose type is a structure or union
5270 are required to be constructed to contain assignments of
5271 a temporary variable, so that we can evaluate them here
5272 for side effect only. If type is void, we must do likewise. */
5274 /* If an arm of the branch requires a cleanup,
5275 only that cleanup is performed. */
5278 tree binary_op = 0, unary_op = 0;
5279 tree old_cleanups = cleanups_this_call;
5280 cleanups_this_call = 0;
5282 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5283 convert it to our mode, if necessary. */
5284 if (integer_onep (TREE_OPERAND (exp, 1))
5285 && integer_zerop (TREE_OPERAND (exp, 2))
5286 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5288 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5289 if (GET_MODE (op0) == mode)
5292 target = gen_reg_rtx (mode);
5293 convert_move (target, op0, unsignedp);
5297 /* If we are not to produce a result, we have no target. Otherwise,
5298 if a target was specified use it; it will not be used as an
5299 intermediate target unless it is safe. If no target, use a
5302 if (mode == VOIDmode || ignore)
5304 else if (original_target
5305 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5306 temp = original_target;
5307 else if (mode == BLKmode)
5309 if (TYPE_SIZE (type) == 0
5310 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5312 temp = assign_stack_temp (BLKmode,
5313 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5314 + BITS_PER_UNIT - 1)
5315 / BITS_PER_UNIT, 0);
5318 temp = gen_reg_rtx (mode);
5320 /* Check for X ? A + B : A. If we have this, we can copy
5321 A to the output and conditionally add B. Similarly for unary
5322 operations. Don't do this if X has side-effects because
5323 those side effects might affect A or B and the "?" operation is
5324 a sequence point in ANSI. (We test for side effects later.) */
5326 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5327 && operand_equal_p (TREE_OPERAND (exp, 2),
5328 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5329 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5330 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5331 && operand_equal_p (TREE_OPERAND (exp, 1),
5332 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5333 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5334 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5335 && operand_equal_p (TREE_OPERAND (exp, 2),
5336 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5337 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5338 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5339 && operand_equal_p (TREE_OPERAND (exp, 1),
5340 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5341 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5343 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5344 operation, do this as A + (X != 0). Similarly for other simple
5345 binary operators. */
5346 if (singleton && binary_op
5347 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5348 && (TREE_CODE (binary_op) == PLUS_EXPR
5349 || TREE_CODE (binary_op) == MINUS_EXPR
5350 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5351 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5352 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5353 && integer_onep (TREE_OPERAND (binary_op, 1))
5354 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5357 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5358 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5359 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5360 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5363 /* If we had X ? A : A + 1, do this as A + (X == 0).
5365 We have to invert the truth value here and then put it
5366 back later if do_store_flag fails. We cannot simply copy
5367 TREE_OPERAND (exp, 0) to another variable and modify that
5368 because invert_truthvalue can modify the tree pointed to
5370 if (singleton == TREE_OPERAND (exp, 1))
5371 TREE_OPERAND (exp, 0)
5372 = invert_truthvalue (TREE_OPERAND (exp, 0));
5374 result = do_store_flag (TREE_OPERAND (exp, 0),
5375 (safe_from_p (temp, singleton)
5377 mode, BRANCH_COST <= 1);
5381 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5382 return expand_binop (mode, boptab, op1, result, temp,
5383 unsignedp, OPTAB_LIB_WIDEN);
5385 else if (singleton == TREE_OPERAND (exp, 1))
5386 TREE_OPERAND (exp, 0)
5387 = invert_truthvalue (TREE_OPERAND (exp, 0));
5391 op0 = gen_label_rtx ();
5393 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5397 /* If the target conflicts with the other operand of the
5398 binary op, we can't use it. Also, we can't use the target
5399 if it is a hard register, because evaluating the condition
5400 might clobber it. */
5402 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5403 || (GET_CODE (temp) == REG
5404 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5405 temp = gen_reg_rtx (mode);
5406 store_expr (singleton, temp, 0);
5409 expand_expr (singleton,
5410 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
5411 if (cleanups_this_call)
5413 sorry ("aggregate value in COND_EXPR");
5414 cleanups_this_call = 0;
5416 if (singleton == TREE_OPERAND (exp, 1))
5417 jumpif (TREE_OPERAND (exp, 0), op0);
5419 jumpifnot (TREE_OPERAND (exp, 0), op0);
5421 if (binary_op && temp == 0)
5422 /* Just touch the other operand. */
5423 expand_expr (TREE_OPERAND (binary_op, 1),
5424 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5426 store_expr (build (TREE_CODE (binary_op), type,
5427 make_tree (type, temp),
5428 TREE_OPERAND (binary_op, 1)),
5431 store_expr (build1 (TREE_CODE (unary_op), type,
5432 make_tree (type, temp)),
5437 /* This is now done in jump.c and is better done there because it
5438 produces shorter register lifetimes. */
5440 /* Check for both possibilities either constants or variables
5441 in registers (but not the same as the target!). If so, can
5442 save branches by assigning one, branching, and assigning the
5444 else if (temp && GET_MODE (temp) != BLKmode
5445 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5446 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5447 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5448 && DECL_RTL (TREE_OPERAND (exp, 1))
5449 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5450 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5451 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5452 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5453 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5454 && DECL_RTL (TREE_OPERAND (exp, 2))
5455 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5456 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5458 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5459 temp = gen_reg_rtx (mode);
5460 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5461 jumpifnot (TREE_OPERAND (exp, 0), op0);
5462 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5466 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5467 comparison operator. If we have one of these cases, set the
5468 output to A, branch on A (cse will merge these two references),
5469 then set the output to FOO. */
5471 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5472 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5473 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5474 TREE_OPERAND (exp, 1), 0)
5475 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5476 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5478 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5479 temp = gen_reg_rtx (mode);
5480 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5481 jumpif (TREE_OPERAND (exp, 0), op0);
5482 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5486 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5487 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5488 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5489 TREE_OPERAND (exp, 2), 0)
5490 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5491 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5493 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5494 temp = gen_reg_rtx (mode);
5495 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5496 jumpifnot (TREE_OPERAND (exp, 0), op0);
5497 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5502 op1 = gen_label_rtx ();
5503 jumpifnot (TREE_OPERAND (exp, 0), op0);
5505 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5507 expand_expr (TREE_OPERAND (exp, 1),
5508 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5509 if (cleanups_this_call)
5511 sorry ("aggregate value in COND_EXPR");
5512 cleanups_this_call = 0;
5516 emit_jump_insn (gen_jump (op1));
5520 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5522 expand_expr (TREE_OPERAND (exp, 2),
5523 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5526 if (cleanups_this_call)
5528 sorry ("aggregate value in COND_EXPR");
5529 cleanups_this_call = 0;
5535 cleanups_this_call = old_cleanups;
5541 /* Something needs to be initialized, but we didn't know
5542 where that thing was when building the tree. For example,
5543 it could be the return value of a function, or a parameter
5544 to a function which lays down in the stack, or a temporary
5545 variable which must be passed by reference.
5547 We guarantee that the expression will either be constructed
5548 or copied into our original target. */
5550 tree slot = TREE_OPERAND (exp, 0);
5553 if (TREE_CODE (slot) != VAR_DECL)
5558 if (DECL_RTL (slot) != 0)
5560 target = DECL_RTL (slot);
5561 /* If we have already expanded the slot, so don't do
5563 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5568 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5569 /* All temp slots at this level must not conflict. */
5570 preserve_temp_slots (target);
5571 DECL_RTL (slot) = target;
5575 /* I bet this needs to be done, and I bet that it needs to
5576 be above, inside the else clause. The reason is
5577 simple, how else is it going to get cleaned up? (mrs)
5579 The reason is probably did not work before, and was
5580 commented out is because this was re-expanding already
5581 expanded target_exprs (target == 0 and DECL_RTL (slot)
5582 != 0) also cleaning them up many times as well. :-( */
5584 /* Since SLOT is not known to the called function
5585 to belong to its stack frame, we must build an explicit
5586 cleanup. This case occurs when we must build up a reference
5587 to pass the reference as an argument. In this case,
5588 it is very likely that such a reference need not be
5591 if (TREE_OPERAND (exp, 2) == 0)
5592 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5593 if (TREE_OPERAND (exp, 2))
5594 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5595 cleanups_this_call);
5600 /* This case does occur, when expanding a parameter which
5601 needs to be constructed on the stack. The target
5602 is the actual stack address that we want to initialize.
5603 The function we call will perform the cleanup in this case. */
5605 DECL_RTL (slot) = target;
5608 exp1 = TREE_OPERAND (exp, 1);
5609 /* Mark it as expanded. */
5610 TREE_OPERAND (exp, 1) = NULL_TREE;
5612 return expand_expr (exp1, target, tmode, modifier);
5617 tree lhs = TREE_OPERAND (exp, 0);
5618 tree rhs = TREE_OPERAND (exp, 1);
5619 tree noncopied_parts = 0;
5620 tree lhs_type = TREE_TYPE (lhs);
5622 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5623 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5624 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5625 TYPE_NONCOPIED_PARTS (lhs_type));
5626 while (noncopied_parts != 0)
5628 expand_assignment (TREE_VALUE (noncopied_parts),
5629 TREE_PURPOSE (noncopied_parts), 0, 0);
5630 noncopied_parts = TREE_CHAIN (noncopied_parts);
5637 /* If lhs is complex, expand calls in rhs before computing it.
5638 That's so we don't compute a pointer and save it over a call.
5639 If lhs is simple, compute it first so we can give it as a
5640 target if the rhs is just a call. This avoids an extra temp and copy
5641 and that prevents a partial-subsumption which makes bad code.
5642 Actually we could treat component_ref's of vars like vars. */
5644 tree lhs = TREE_OPERAND (exp, 0);
5645 tree rhs = TREE_OPERAND (exp, 1);
5646 tree noncopied_parts = 0;
5647 tree lhs_type = TREE_TYPE (lhs);
5651 if (TREE_CODE (lhs) != VAR_DECL
5652 && TREE_CODE (lhs) != RESULT_DECL
5653 && TREE_CODE (lhs) != PARM_DECL)
5654 preexpand_calls (exp);
5656 /* Check for |= or &= of a bitfield of size one into another bitfield
5657 of size 1. In this case, (unless we need the result of the
5658 assignment) we can do this more efficiently with a
5659 test followed by an assignment, if necessary.
5661 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5662 things change so we do, this code should be enhanced to
5665 && TREE_CODE (lhs) == COMPONENT_REF
5666 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5667 || TREE_CODE (rhs) == BIT_AND_EXPR)
5668 && TREE_OPERAND (rhs, 0) == lhs
5669 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5670 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5671 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5673 rtx label = gen_label_rtx ();
5675 do_jump (TREE_OPERAND (rhs, 1),
5676 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5677 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5678 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5679 (TREE_CODE (rhs) == BIT_IOR_EXPR
5681 : integer_zero_node)),
5683 do_pending_stack_adjust ();
5688 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5689 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5690 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5691 TYPE_NONCOPIED_PARTS (lhs_type));
5693 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5694 while (noncopied_parts != 0)
5696 expand_assignment (TREE_PURPOSE (noncopied_parts),
5697 TREE_VALUE (noncopied_parts), 0, 0);
5698 noncopied_parts = TREE_CHAIN (noncopied_parts);
5703 case PREINCREMENT_EXPR:
5704 case PREDECREMENT_EXPR:
5705 return expand_increment (exp, 0);
5707 case POSTINCREMENT_EXPR:
5708 case POSTDECREMENT_EXPR:
5709 /* Faster to treat as pre-increment if result is not used. */
5710 return expand_increment (exp, ! ignore);
5713 /* Are we taking the address of a nested function? */
5714 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5715 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5717 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5718 op0 = force_operand (op0, target);
5722 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5723 (modifier == EXPAND_INITIALIZER
5724 ? modifier : EXPAND_CONST_ADDRESS));
5725 if (GET_CODE (op0) != MEM)
5728 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5729 return XEXP (op0, 0);
5730 op0 = force_operand (XEXP (op0, 0), target);
5732 if (flag_force_addr && GET_CODE (op0) != REG)
5733 return force_reg (Pmode, op0);
5736 case ENTRY_VALUE_EXPR:
5739 /* COMPLEX type for Extended Pascal & Fortran */
5742 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5746 /* Get the rtx code of the operands. */
5747 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5748 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5751 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5753 prev = get_last_insn ();
5755 /* Tell flow that the whole of the destination is being set. */
5756 if (GET_CODE (target) == REG)
5757 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5759 /* Move the real (op0) and imaginary (op1) parts to their location. */
5760 emit_move_insn (gen_realpart (mode, target), op0);
5761 emit_move_insn (gen_imagpart (mode, target), op1);
5763 /* Complex construction should appear as a single unit. */
5770 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5771 return gen_realpart (mode, op0);
5774 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5775 return gen_imagpart (mode, op0);
5779 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5783 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5786 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5788 prev = get_last_insn ();
5790 /* Tell flow that the whole of the destination is being set. */
5791 if (GET_CODE (target) == REG)
5792 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5794 /* Store the realpart and the negated imagpart to target. */
5795 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5797 imag_t = gen_imagpart (mode, target);
5798 temp = expand_unop (mode, neg_optab,
5799 gen_imagpart (mode, op0), imag_t, 0);
5801 emit_move_insn (imag_t, temp);
5803 /* Conjugate should appear as a single unit */
5813 return (*lang_expand_expr) (exp, target, tmode, modifier);
5816 /* Here to do an ordinary binary operator, generating an instruction
5817 from the optab already placed in `this_optab'. */
5819 preexpand_calls (exp);
5820 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5822 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5823 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5825 temp = expand_binop (mode, this_optab, op0, op1, target,
5826 unsignedp, OPTAB_LIB_WIDEN);
5832 /* Return the alignment in bits of EXP, a pointer valued expression.
5833 But don't return more than MAX_ALIGN no matter what.
5834 The alignment returned is, by default, the alignment of the thing that
5835 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5837 Otherwise, look at the expression to see if we can do better, i.e., if the
5838 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): this file is a line-sampled extraction; gaps in the embedded
   line numbers mean declarations, braces and some case labels are missing
   from view.  Only comments are added here; code bytes are untouched.  */
5841 get_pointer_alignment (exp, max_align)
5845 unsigned align, inner;
/* A non-pointer expression carries no pointer alignment information
   (presumably the missing line returns 0 here -- TODO confirm).  */
5847 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Start from the alignment of the pointed-to type, capped at MAX_ALIGN.  */
5850 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5851 align = MIN (align, max_align);
/* Walk down through conversions and pointer arithmetic, tightening
   ALIGN whenever a stricter bound can be proved.  */
5855 switch (TREE_CODE (exp))
5859 case NON_LVALUE_EXPR:
/* Strip the wrapper; if the inner pointer's target type is more
   strictly aligned, adopt that alignment.  */
5860 exp = TREE_OPERAND (exp, 0);
5861 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5863 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5864 inner = MIN (inner, max_align);
5865 align = MAX (align, inner);
5869 /* If sum of pointer + int, restrict our maximum alignment to that
5870 imposed by the integer. If not, we can't do any better than
5872 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* Halve MAX_ALIGN until it divides the byte offset, so the sum's
   provable alignment never exceeds what the constant addend allows.  */
5875 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5880 exp = TREE_OPERAND (exp, 0);
5884 /* See what we are pointing at and look at its alignment. */
5885 exp = TREE_OPERAND (exp, 0);
5886 if (TREE_CODE (exp) == FUNCTION_DECL)
5887 align = MAX (align, FUNCTION_BOUNDARY);
5888 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5889 align = MAX (align, DECL_ALIGN (exp));
5890 #ifdef CONSTANT_ALIGNMENT
5891 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5892 align = CONSTANT_ALIGNMENT (exp, align);
5894 return MIN (align, max_align);
5902 /* Return the tree node and offset if a given argument corresponds to
5903 a string constant. */
/* NOTE(review): line-sampled extraction -- parameter declarations, braces,
   and (presumably) the *PTR_OFFSET stores in the PLUS_EXPR arms are in the
   missing lines; TODO confirm against the full source.  Only comments added.  */
5906 string_constant (arg, ptr_offset)
/* Case 1: ARG is `&"string"' directly; the offset is zero.  */
5912 if (TREE_CODE (arg) == ADDR_EXPR
5913 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5915 *ptr_offset = integer_zero_node;
5916 return TREE_OPERAND (arg, 0);
/* Case 2: ARG is a sum; either operand may be the string's address,
   the other being the offset.  */
5918 else if (TREE_CODE (arg) == PLUS_EXPR)
5920 tree arg0 = TREE_OPERAND (arg, 0);
5921 tree arg1 = TREE_OPERAND (arg, 1);
5926 if (TREE_CODE (arg0) == ADDR_EXPR
5927 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5930 return TREE_OPERAND (arg0, 0);
5932 else if (TREE_CODE (arg1) == ADDR_EXPR
5933 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5936 return TREE_OPERAND (arg1, 0);
5943 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5944 way, because it could contain a zero byte in the middle.
5945 TREE_STRING_LENGTH is the size of the character array, not the string.
5947 Unfortunately, string_constant can't access the values of const char
5948 arrays with initializers, so neither can we do so here. */
/* NOTE(review): the function header (presumably `c_strlen (src)') and its
   local declarations fall in lines missing from this extraction; only
   comments are added below.  Returns a size tree, or (in missing lines,
   presumably) 0 when the length cannot be determined -- TODO confirm.  */
/* Reduce SRC to a STRING_CST plus offset, if possible.  */
5958 src = string_constant (src, &offset_node);
/* MAX is the declared array size; PTR points at the literal's bytes.  */
5961 max = TREE_STRING_LENGTH (src);
5962 ptr = TREE_STRING_POINTER (src);
5963 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5965 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5966 compute the offset to the following null if we don't know where to
5967 start searching for it. */
/* Scan the whole array for an embedded NUL.  */
5969 for (i = 0; i < max; i++)
5972 /* We don't know the starting offset, but we do know that the string
5973 has no internal zero bytes. We can assume that the offset falls
5974 within the bounds of the string; otherwise, the programmer deserves
5975 what he gets. Subtract the offset from the length of the string,
5977 /* This would perhaps not be valid if we were dealing with named
5978 arrays in addition to literal string constants. */
5979 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5982 /* We have a known offset into the string. Start searching there for
5983 a null character. */
5984 if (offset_node == 0)
5988 /* Did we get a long long offset? If so, punt. */
5989 if (TREE_INT_CST_HIGH (offset_node) != 0)
5991 offset = TREE_INT_CST_LOW (offset_node);
5993 /* If the offset is known to be out of bounds, warn, and call strlen at
5995 if (offset < 0 || offset > max)
5997 warning ("offset outside bounds of constant string");
6000 /* Use strlen to search for the first zero byte. Since any strings
6001 constructed with build_string will have nulls appended, we win even
6002 if we get handed something like (char[4])"abcd".
6004 Since OFFSET is our starting index into the string, no further
6005 calculation is needed. */
6006 return size_int (strlen (ptr + offset));
6009 /* Expand an expression EXP that calls a built-in function,
6010 with result going to TARGET if that's convenient
6011 (and in mode MODE if that's convenient).
6012 SUBTARGET may be used as the target for computing one of EXP's operands.
6013 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): this whole function comes from a line-sampled extraction;
   many interior lines (braces, `if (!optimize ...)' guards, break/return
   statements) are missing wherever the embedded line numbers jump.  Only
   comments are added or corrected here; code bytes are untouched.  */
6016 expand_builtin (exp, target, subtarget, mode, ignore)
6020 enum machine_mode mode;
/* FNDECL is the FUNCTION_DECL of the builtin; ARGLIST its argument list.  */
6023 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6024 tree arglist = TREE_OPERAND (exp, 1);
6027 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6028 optab builtin_optab;
/* Dispatch on which builtin this call names.  Cases that cannot be
   expanded inline fall through to the expand_call at the end.  */
6030 switch (DECL_FUNCTION_CODE (fndecl))
6035 /* build_function_call changes these into ABS_EXPR. */
6040 case BUILT_IN_FSQRT:
6041 /* If not optimizing, call the library function. */
6046 /* Arg could be wrong type if user redeclared this fcn wrong. */
6047 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6048 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
6050 /* Stabilize and compute the argument. */
6051 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6052 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
/* Wrap the argument in a SAVE_EXPR on copied nodes so a possible
   fallback library call re-uses the same computed value.  */
6054 exp = copy_node (exp);
6055 arglist = copy_node (arglist);
6056 TREE_OPERAND (exp, 1) = arglist;
6057 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6059 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6061 /* Make a suitable register to place result in. */
6062 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
/* Pick the optab matching this math builtin.  */
6067 switch (DECL_FUNCTION_CODE (fndecl))
6070 builtin_optab = sin_optab; break;
6072 builtin_optab = cos_optab; break;
6073 case BUILT_IN_FSQRT:
6074 builtin_optab = sqrt_optab; break;
6079 /* Compute into TARGET.
6080 Set TARGET to wherever the result comes back. */
6081 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6082 builtin_optab, op0, target, 0);
6084 /* If we were unable to expand via the builtin, stop the
6085 sequence (without outputting the insns) and break, causing
6086 a call to the library function. */
6093 /* Check the results by default. But if flag_fast_math is turned on,
6094 then assume sqrt will always be called with valid arguments. */
6096 if (! flag_fast_math)
6098 /* Don't define the builtin FP instructions
6099 if your machine is not IEEE. */
6100 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6103 lab1 = gen_label_rtx ();
6105 /* Test the result; if it is NaN, set errno=EDOM because
6106 the argument was not in the domain. */
/* Comparing TARGET with itself: only a NaN compares unequal to itself,
   so the branch below skips the errno code for normal results.  */
6107 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6108 emit_jump_insn (gen_beq (lab1));
6112 #ifdef GEN_ERRNO_RTX
6113 rtx errno_rtx = GEN_ERRNO_RTX;
6116 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6119 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6122 /* We can't set errno=EDOM directly; let the library call do it.
6123 Pop the arguments right away in case the call gets deleted. */
6125 expand_call (exp, target, 0);
6132 /* Output the entire sequence. */
6133 insns = get_insns ();
6139 case BUILT_IN_SAVEREGS:
6140 /* Don't do __builtin_saveregs more than once in a function.
6141 Save the result of the first call and reuse it. */
6142 if (saveregs_value != 0)
6143 return saveregs_value;
6145 /* When this function is called, it means that registers must be
6146 saved on entry to this function. So we migrate the
6147 call to the first insn of this function. */
6150 rtx valreg, saved_valreg;
6152 /* Now really call the function. `expand_call' does not call
6153 expand_builtin, so there is no danger of infinite recursion here. */
6156 #ifdef EXPAND_BUILTIN_SAVEREGS
6157 /* Do whatever the machine needs done in this case. */
6158 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6160 /* The register where the function returns its value
6161 is likely to have something else in it, such as an argument.
6162 So preserve that register around the call. */
6163 if (value_mode != VOIDmode)
6165 valreg = hard_libcall_value (value_mode);
6166 saved_valreg = gen_reg_rtx (value_mode);
6167 emit_move_insn (saved_valreg, valreg);
6170 /* Generate the call, putting the value in a pseudo. */
6171 temp = expand_call (exp, target, ignore);
6173 if (value_mode != VOIDmode)
6174 emit_move_insn (valreg, saved_valreg);
/* Cache the result so later __builtin_saveregs calls reuse it.  */
6180 saveregs_value = temp;
6182 /* This won't work inside a SEQUENCE--it really has to be
6183 at the start of the function. */
6184 if (in_sequence_p ())
6186 /* Better to do this than to crash. */
6187 error ("`va_start' used within `({...})'");
6191 /* Put the sequence after the NOTE that starts the function. */
6192 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6196 /* __builtin_args_info (N) returns word N of the arg space info
6197 for the current function. The number and meanings of words
6198 is controlled by the definition of CUMULATIVE_ARGS. */
6199 case BUILT_IN_ARGS_INFO:
6201 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
/* NOTE(review): "¤t_function_args_info" below is mojibake -- in the
   original source this line reads
   "(int *) &current_function_args_info;" ("&curr" was mangled by a
   character-encoding error in extraction).  Left byte-identical here
   because this edit changes comments only; the code must be repaired
   against the pristine source.  */
6203 int *word_ptr = (int *) ¤t_function_args_info;
6204 tree type, elts, result;
6206 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6207 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6208 __FILE__, __LINE__);
6212 tree arg = TREE_VALUE (arglist);
6213 if (TREE_CODE (arg) != INTEGER_CST)
6214 error ("argument of `__builtin_args_info' must be constant");
6217 int wordnum = TREE_INT_CST_LOW (arg);
6219 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6220 error ("argument of `__builtin_args_info' out of range");
6222 return GEN_INT (word_ptr[wordnum]);
6226 error ("missing argument in `__builtin_args_info'");
/* Dead/unreached variant (inside #if 0 in the full source -- TODO
   confirm): build a constant array of all the info words.  */
6231 for (i = 0; i < nwords; i++)
6232 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6234 type = build_array_type (integer_type_node,
6235 build_index_type (build_int_2 (nwords, 0)));
6236 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6237 TREE_CONSTANT (result) = 1;
6238 TREE_STATIC (result) = 1;
6239 result = build (INDIRECT_REF, build_pointer_type (type), result);
6240 TREE_CONSTANT (result) = 1;
6241 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6245 /* Return the address of the first anonymous stack arg. */
6246 case BUILT_IN_NEXT_ARG:
6248 tree fntype = TREE_TYPE (current_function_decl);
/* va_start is only meaningful in a varargs/stdarg function: the last
   declared argument type must not be void.  */
6249 if (!(TYPE_ARG_TYPES (fntype) != 0
6250 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6251 != void_type_node)))
6253 error ("`va_start' used in function with fixed args");
6258 return expand_binop (Pmode, add_optab,
6259 current_function_internal_arg_pointer,
6260 current_function_arg_offset_rtx,
6261 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6263 case BUILT_IN_CLASSIFY_TYPE:
/* Map the argument's tree type code onto the type classes declared
   in typeclass.h; falls back to no_type_class.  */
6266 tree type = TREE_TYPE (TREE_VALUE (arglist));
6267 enum tree_code code = TREE_CODE (type);
6268 if (code == VOID_TYPE)
6269 return GEN_INT (void_type_class);
6270 if (code == INTEGER_TYPE)
6271 return GEN_INT (integer_type_class);
6272 if (code == CHAR_TYPE)
6273 return GEN_INT (char_type_class);
6274 if (code == ENUMERAL_TYPE)
6275 return GEN_INT (enumeral_type_class);
6276 if (code == BOOLEAN_TYPE)
6277 return GEN_INT (boolean_type_class);
6278 if (code == POINTER_TYPE)
6279 return GEN_INT (pointer_type_class);
6280 if (code == REFERENCE_TYPE)
6281 return GEN_INT (reference_type_class);
6282 if (code == OFFSET_TYPE)
6283 return GEN_INT (offset_type_class);
6284 if (code == REAL_TYPE)
6285 return GEN_INT (real_type_class);
6286 if (code == COMPLEX_TYPE)
6287 return GEN_INT (complex_type_class);
6288 if (code == FUNCTION_TYPE)
6289 return GEN_INT (function_type_class);
6290 if (code == METHOD_TYPE)
6291 return GEN_INT (method_type_class);
6292 if (code == RECORD_TYPE)
6293 return GEN_INT (record_type_class);
6294 if (code == UNION_TYPE)
6295 return GEN_INT (union_type_class);
6296 if (code == ARRAY_TYPE)
6297 return GEN_INT (array_type_class);
6298 if (code == STRING_TYPE)
6299 return GEN_INT (string_type_class);
6300 if (code == SET_TYPE)
6301 return GEN_INT (set_type_class);
6302 if (code == FILE_TYPE)
6303 return GEN_INT (file_type_class);
6304 if (code == LANG_TYPE)
6305 return GEN_INT (lang_type_class);
6307 return GEN_INT (no_type_class);
6309 case BUILT_IN_CONSTANT_P:
/* True iff the argument is a constant node ('c' class).  */
6313 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6314 ? const1_rtx : const0_rtx);
6316 case BUILT_IN_FRAME_ADDRESS:
6317 /* The argument must be a nonnegative integer constant.
6318 It counts the number of frames to scan up the stack.
6319 The value is the address of that frame. */
6320 case BUILT_IN_RETURN_ADDRESS:
6321 /* The argument must be a nonnegative integer constant.
6322 It counts the number of frames to scan up the stack.
6323 The value is the return address saved in that frame. */
6325 /* Warning about missing arg was already issued. */
6327 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6329 error ("invalid arg to `__builtin_return_address'");
6332 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6334 error ("invalid arg to `__builtin_return_address'");
6339 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6340 rtx tem = frame_pointer_rtx;
6343 /* Some machines need special handling before we can access arbitrary
6344 frames. For example, on the sparc, we must first flush all
6345 register windows to the stack. */
6346 #ifdef SETUP_FRAME_ADDRESSES
6347 SETUP_FRAME_ADDRESSES ();
6350 /* On the sparc, the return address is not in the frame, it is
6351 in a register. There is no way to access it off of the current
6352 frame pointer, but it can be accessed off the previous frame
6353 pointer by reading the value from the register window save
6355 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6356 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6360 /* Scan back COUNT frames to the specified frame. */
6361 for (i = 0; i < count; i++)
6363 /* Assume the dynamic chain pointer is in the word that
6364 the frame address points to, unless otherwise specified. */
6365 #ifdef DYNAMIC_CHAIN_ADDRESS
6366 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6368 tem = memory_address (Pmode, tem);
6369 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6372 /* For __builtin_frame_address, return what we've got. */
6373 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6376 /* For __builtin_return_address,
6377 Get the return address from that frame. */
6378 #ifdef RETURN_ADDR_RTX
6379 return RETURN_ADDR_RTX (count, tem);
6381 tem = memory_address (Pmode,
6382 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6383 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6387 case BUILT_IN_ALLOCA:
6389 /* Arg could be non-integer if user redeclared this fcn wrong. */
6390 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6392 current_function_calls_alloca = 1;
6393 /* Compute the argument. */
6394 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6396 /* Allocate the desired space. */
6397 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6399 /* Record the new stack level for nonlocal gotos. */
6400 if (nonlocal_goto_handler_slot != 0)
6401 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
/* BUILT_IN_FFS (case label in a missing line -- TODO confirm).  */
6405 /* If not optimizing, call the library function. */
6410 /* Arg could be non-integer if user redeclared this fcn wrong. */
6411 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6414 /* Compute the argument. */
6415 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6416 /* Compute ffs, into TARGET if possible.
6417 Set TARGET to wherever the result comes back. */
6418 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6419 ffs_optab, op0, target, 1);
6424 case BUILT_IN_STRLEN:
6425 /* If not optimizing, call the library function. */
6430 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6431 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6435 tree src = TREE_VALUE (arglist);
6436 tree len = c_strlen (src);
6439 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6441 rtx result, src_rtx, char_rtx;
6442 enum machine_mode insn_mode = value_mode, char_mode;
6443 enum insn_code icode;
6445 /* If the length is known, just return it. */
6447 return expand_expr (len, target, mode, 0);
6449 /* If SRC is not a pointer type, don't do this operation inline. */
6453 /* Call a function if we can't compute strlen in the right mode. */
/* Search VALUE_MODE and wider modes for a strlen pattern.  */
6455 while (insn_mode != VOIDmode)
6457 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6458 if (icode != CODE_FOR_nothing)
6461 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6463 if (insn_mode == VOIDmode)
6466 /* Make a place to write the result of the instruction. */
6469 && GET_CODE (result) == REG
6470 && GET_MODE (result) == insn_mode
6471 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6472 result = gen_reg_rtx (insn_mode);
6474 /* Make sure the operands are acceptable to the predicates. */
6476 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6477 result = gen_reg_rtx (insn_mode);
6479 src_rtx = memory_address (BLKmode,
6480 expand_expr (src, NULL_RTX, Pmode,
6482 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6483 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
/* Operand 2 of the strlen pattern is the (zero) terminator byte.  */
6485 char_rtx = const0_rtx;
6486 char_mode = insn_operand_mode[(int)icode][2];
6487 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6488 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6490 emit_insn (GEN_FCN (icode) (result,
6491 gen_rtx (MEM, BLKmode, src_rtx),
6492 char_rtx, GEN_INT (align)));
6494 /* Return the value in the proper mode for this function. */
6495 if (GET_MODE (result) == value_mode)
6497 else if (target != 0)
6499 convert_move (target, result, 0);
6503 return convert_to_mode (value_mode, result, 0);
6506 case BUILT_IN_STRCPY:
6507 /* If not optimizing, call the library function. */
6512 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6513 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6514 || TREE_CHAIN (arglist) == 0
6515 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
/* If the source length is known, append len+1 as a third argument
   and fall through to the memcpy expansion below.  */
6519 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6524 len = size_binop (PLUS_EXPR, len, integer_one_node);
6526 chainon (arglist, build_tree_list (NULL_TREE, len));
6530 case BUILT_IN_MEMCPY:
6531 /* If not optimizing, call the library function. */
6536 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6537 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6538 || TREE_CHAIN (arglist) == 0
6539 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6540 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6541 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6545 tree dest = TREE_VALUE (arglist);
6546 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6547 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6550 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6552 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6553 rtx dest_rtx, dest_mem, src_mem;
6555 /* If either SRC or DEST is not a pointer type, don't do
6556 this operation in-line. */
6557 if (src_align == 0 || dest_align == 0)
/* Undo the extra length argument added for the strcpy case before
   falling back to a real library call.  */
6559 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6560 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6564 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6565 dest_mem = gen_rtx (MEM, BLKmode,
6566 memory_address (BLKmode, dest_rtx));
6567 src_mem = gen_rtx (MEM, BLKmode,
6568 memory_address (BLKmode,
6569 expand_expr (src, NULL_RTX,
6573 /* Copy word part most expediently. */
6574 emit_block_move (dest_mem, src_mem,
6575 expand_expr (len, NULL_RTX, VOIDmode, 0),
6576 MIN (src_align, dest_align));
6580 /* These comparison functions need an instruction that returns an actual
6581 index. An ordinary compare that just sets the condition codes
6583 #ifdef HAVE_cmpstrsi
6584 case BUILT_IN_STRCMP:
6585 /* If not optimizing, call the library function. */
6590 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6591 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6592 || TREE_CHAIN (arglist) == 0
6593 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6595 else if (!HAVE_cmpstrsi)
6598 tree arg1 = TREE_VALUE (arglist);
6599 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
/* Lengths are the string length plus one for the terminating NUL.  */
6603 len = c_strlen (arg1);
6605 len = size_binop (PLUS_EXPR, integer_one_node, len);
6606 len2 = c_strlen (arg2);
6608 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6610 /* If we don't have a constant length for the first, use the length
6611 of the second, if we know it. We don't require a constant for
6612 this case; some cost analysis could be done if both are available
6613 but neither is constant. For now, assume they're equally cheap.
6615 If both strings have constant lengths, use the smaller. This
6616 could arise if optimization results in strcpy being called with
6617 two fixed strings, or if the code was machine-generated. We should
6618 add some code to the `memcmp' handler below to deal with such
6619 situations, someday. */
6620 if (!len || TREE_CODE (len) != INTEGER_CST)
6627 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6629 if (tree_int_cst_lt (len2, len))
/* Append the chosen length and fall through to the memcmp case.  */
6633 chainon (arglist, build_tree_list (NULL_TREE, len));
6637 case BUILT_IN_MEMCMP:
6638 /* If not optimizing, call the library function. */
6643 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6644 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6645 || TREE_CHAIN (arglist) == 0
6646 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6647 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6648 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6650 else if (!HAVE_cmpstrsi)
6653 tree arg1 = TREE_VALUE (arglist);
6654 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6655 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6659 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6661 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6662 enum machine_mode insn_mode
6663 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6665 /* If we don't have POINTER_TYPE, call the function. */
6666 if (arg1_align == 0 || arg2_align == 0)
6668 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6669 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6673 /* Make a place to write the result of the instruction. */
6676 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6677 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6678 result = gen_reg_rtx (insn_mode);
6680 emit_insn (gen_cmpstrsi (result,
6681 gen_rtx (MEM, BLKmode,
6682 expand_expr (arg1, NULL_RTX, Pmode,
6684 gen_rtx (MEM, BLKmode,
6685 expand_expr (arg2, NULL_RTX, Pmode,
6687 expand_expr (len, NULL_RTX, VOIDmode, 0),
6688 GEN_INT (MIN (arg1_align, arg2_align))));
6690 /* Return the value in the proper mode for this function. */
6691 mode = TYPE_MODE (TREE_TYPE (exp));
6692 if (GET_MODE (result) == mode)
6694 else if (target != 0)
6696 convert_move (target, result, 0);
6700 return convert_to_mode (mode, result, 0);
/* Without HAVE_cmpstrsi these builtins just become library calls.  */
6703 case BUILT_IN_STRCMP:
6704 case BUILT_IN_MEMCMP:
6708 default: /* just do library call, if unknown builtin */
6709 error ("built-in function `%s' not currently supported",
6710 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6713 /* The switch statement above can drop through to cause the function
6714 to be called normally. */
6716 return expand_call (exp, target, ignore);
6719 /* Expand code for a post- or pre- increment or decrement
6720 and return the RTX for the result.
6721 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
/* NOTE(review): line-sampled extraction -- the function's braces, some
   conditions and return statements fall in missing lines.  Only comments
   are added/corrected (two typos fixed in existing comments).  */
6724 expand_increment (exp, post)
6728 register rtx op0, op1;
6729 register rtx temp, value;
6730 register tree incremented = TREE_OPERAND (exp, 0);
6731 optab this_optab = add_optab;
6733 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6734 int op0_is_copy = 0;
6736 /* Stabilize any component ref that might need to be
6737 evaluated more than once below. */
6739 || TREE_CODE (incremented) == BIT_FIELD_REF
6740 || (TREE_CODE (incremented) == COMPONENT_REF
6741 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6742 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6743 incremented = stabilize_reference (incremented);
6745 /* Compute the operands as RTX.
6746 Note whether OP0 is the actual lvalue or a copy of it:
6747 I believe it is a copy iff it is a register or subreg
6748 and insns were generated in computing it. */
6750 temp = get_last_insn ();
6751 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6753 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6754 in place but instead must do sign- or zero-extension during assignment,
6755 so we copy it into a new register and let the code below use it as
6758 Note that we can safely modify this SUBREG since it is known not to be
6759 shared (it was made by the expand_expr call above). */
6761 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6762 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
/* OP0 is a copy iff new insns were emitted while expanding it.  */
6764 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6765 && temp != get_last_insn ());
6766 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6768 /* Decide whether incrementing or decrementing. */
6769 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6770 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6771 this_optab = sub_optab;
6773 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6774 then we cannot just increment OP0. We must therefore contrive to
6775 increment the original value. Then, for postincrement, we can return
6776 OP0 since it is a copy of the old value. For preincrement, we want
6777 to always expand here, since this generates better or equivalent code. */
6778 if (!post || op0_is_copy)
6780 /* This is the easiest way to increment the value wherever it is.
6781 Problems with multiple evaluation of INCREMENTED are prevented
6782 because either (1) it is a component_ref or preincrement,
6783 in which case it was stabilized above, or (2) it is an array_ref
6784 with constant index in an array in a register, which is
6785 safe to reevaluate. */
6786 tree newexp = build ((this_optab == add_optab
6787 ? PLUS_EXPR : MINUS_EXPR),
6790 TREE_OPERAND (exp, 1));
6791 temp = expand_assignment (incremented, newexp, ! post, 0);
6792 return post ? op0 : temp;
6795 /* Convert decrement by a constant into a negative increment. */
6796 if (this_optab == sub_optab
6797 && GET_CODE (op1) == CONST_INT)
6799 op1 = GEN_INT (- INTVAL (op1));
6800 this_optab = add_optab;
6805 /* We have a true reference to the value in OP0.
6806 If there is an insn to add or subtract in this mode, queue it. */
6808 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6809 op0 = stabilize (op0);
/* Postincrement with a matching add/sub pattern: emit it through the
   increment queue so it happens at the right time.  */
6812 icode = (int) this_optab->handlers[(int) mode].insn_code;
6813 if (icode != (int) CODE_FOR_nothing
6814 /* Make sure that OP0 is valid for operands 0 and 1
6815 of the insn we want to queue. */
6816 && (*insn_operand_predicate[icode][0]) (op0, mode)
6817 && (*insn_operand_predicate[icode][1]) (op0, mode))
6819 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6820 op1 = force_reg (mode, op1);
6822 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6826 /* Preincrement, or we can't increment with one simple insn. */
6828 /* Save a copy of the value before inc or dec, to return it later. */
6829 temp = value = copy_to_reg (op0);
6831 /* Arrange to return the incremented value. */
6832 /* Copy the rtx because expand_binop will protect from the queue,
6833 and the results of that would be invalid for us to return
6834 if our caller does emit_queue before using our result. */
6835 temp = copy_rtx (value = op0);
6837 /* Increment however we can. */
6838 op1 = expand_binop (mode, this_optab, value, op1, op0,
6839 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6840 /* Make sure the value is stored into OP0. */
6842 emit_move_insn (op0, op1);
6847 /* Expand all function calls contained within EXP, innermost ones first.
6848 But don't look within expressions that have sequence points.
6849 For each CALL_EXPR, record the rtx for its value
6850 in the CALL_EXPR_RTL field. */
/* NOTE(review): line-sampled extraction -- braces, `return' statements
   and some case labels are in missing lines; only comments are added.  */
6853 preexpand_calls (exp)
6856 register int nops, i;
6857 int type = TREE_CODE_CLASS (TREE_CODE (exp));
/* Globally disabled?  (do_preexpand_calls is declared nonzero nowadays.)  */
6859 if (! do_preexpand_calls)
6862 /* Only expressions and references can contain calls. */
6864 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6867 switch (TREE_CODE (exp))
6870 /* Do nothing if already expanded. */
6871 if (CALL_EXPR_RTL (exp) != 0)
6874 /* Do nothing to built-in functions. */
6875 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6876 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6877 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6878 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6883 case TRUTH_ANDIF_EXPR:
6884 case TRUTH_ORIF_EXPR:
6885 /* If we find one of these, then we can be sure
6886 the adjust will be done for it (since it makes jumps).
6887 Do it now, so that if this is inside an argument
6888 of a function, we don't get the stack adjustment
6889 after some other args have already been pushed. */
6890 do_pending_stack_adjust ();
6895 case WITH_CLEANUP_EXPR:
/* A SAVE_EXPR already expanded needs no further scanning.  */
6899 if (SAVE_EXPR_RTL (exp) != 0)
/* Default: recurse into every expression-class operand.  */
6903 nops = tree_code_length[(int) TREE_CODE (exp)];
6904 for (i = 0; i < nops; i++)
6905 if (TREE_OPERAND (exp, i) != 0)
6907 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6908 if (type == 'e' || type == '<' || type == '1' || type == '2'
6910 preexpand_calls (TREE_OPERAND (exp, i));
6914 /* At the start of a function, record that we have no previously-pushed
6915 arguments waiting to be popped. */
/* Resets the file-scope counter `pending_stack_adjust` (see file head)
   to zero at the start of each function's RTL generation.
   NOTE(review): return type and braces appear elided in this extract.  */
6918 init_pending_stack_adjust ()
6920 pending_stack_adjust = 0;
6923 /* When exiting from function, if safe, clear out any pending stack adjust
6924 so the adjustment won't get done. */
6927 clear_pending_stack_adjust ()
6929 #ifdef EXIT_IGNORE_STACK
/* Discarding the adjustment is only safe when the target's epilogue
   ignores the stack pointer AND this function will not be inlined —
   inlining would splice this body into a caller that does need the
   stack restored.  Presumably a no-op when EXIT_IGNORE_STACK is
   undefined — TODO confirm (closing #endif is elided here).  */
6930 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6931 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6932 && ! flag_inline_functions)
6933 pending_stack_adjust = 0;
6937 /* Pop any previously-pushed arguments that have not been popped yet. */
6940 do_pending_stack_adjust ()
/* inhibit_defer_pop nonzero means we must not emit the pop here;
   the adjustment stays recorded for a later call.  */
6942 if (inhibit_defer_pop == 0)
6944 if (pending_stack_adjust != 0)
/* Emit the actual stack-pointer adjustment insn, then reset the count.  */
6945 adjust_stack (GEN_INT (pending_stack_adjust));
6946 pending_stack_adjust = 0;
6950 /* Expand all cleanups up to OLD_CLEANUPS.
6951 Needed here, and also for language-dependent calls. */
6954 expand_cleanups_to (old_cleanups)
/* Walk the global `cleanups_this_call` list, expanding each cleanup
   expression for its side effects, until the list tail OLD_CLEANUPS
   is reached.  The list is consumed (head pointer advances).  */
6957 while (cleanups_this_call != old_cleanups)
6959 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6960 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6964 /* Expand conditional expressions. */
6966 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6967 LABEL is an rtx of code CODE_LABEL, in this function and all the
/* Thin wrapper around do_jump: LABEL is the false-label, and passing
   NULL_RTX as the true-label means "fall through when EXP is nonzero".  */
6971 jumpifnot (exp, label)
6975 do_jump (exp, label, NULL_RTX);
6978 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* Counterpart of jumpifnot: NULL_RTX false-label means fall through on zero.
   NOTE(review): the function header line `jumpif (exp, label)` appears to
   be elided from this extract.  */
6985 do_jump (exp, NULL_RTX, label);
6988 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6989 the result is zero, or IF_TRUE_LABEL if the result is one.
6990 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6991 meaning fall through in that case.
6993 do_jump always does any pending stack adjust except when it does not
6994 actually perform a jump. An example where there is no jump
6995 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6997 This function is responsible for optimizing cases such as
6998 &&, || and comparison operators in EXP. */
/* NOTE(review): this extract is heavily sampled — the `case` labels of the
   big switch, most `break`s, braces, and several declarations (temp,
   comparison, offset, volatilep, i) are elided.  Comments below annotate
   only the visible logic; verify against the original before editing.  */
7001 do_jump (exp, if_false_label, if_true_label)
7003 rtx if_false_label, if_true_label;
7005 register enum tree_code code = TREE_CODE (exp);
7006 /* Some cases need to create a label to jump to
7007 in order to properly fall through.
7008 These cases set DROP_THROUGH_LABEL nonzero. */
7009 rtx drop_through_label = 0;
/* Constant EXP: the branch target is known at compile time.  */
7023 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7029 /* This is not true with #pragma weak */
7031 /* The address of something can never be zero. */
7033 emit_jump (if_true_label);
/* Conversions of component/bit-field/array references fall through to
   the bit-field extraction case (presumably — intervening code elided).  */
7038 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7039 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7040 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7043 /* If we are narrowing the operand, we have to do the compare in the
7045 if ((TYPE_PRECISION (TREE_TYPE (exp))
7046 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7048 case NON_LVALUE_EXPR:
7049 case REFERENCE_EXPR:
7054 /* These cannot change zero->non-zero or vice versa. */
7055 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7059 /* This is never less insns than evaluating the PLUS_EXPR followed by
7060 a test and can be longer if the test is eliminated. */
7062 /* Reduce to minus. */
7063 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7064 TREE_OPERAND (exp, 0),
7065 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7066 TREE_OPERAND (exp, 1))));
7067 /* Process as MINUS. */
7071 /* Non-zero iff operands of minus differ. */
7072 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7073 TREE_OPERAND (exp, 0),
7074 TREE_OPERAND (exp, 1)),
7079 /* If we are AND'ing with a small constant, do this comparison in the
7080 smallest type that fits. If the machine doesn't have comparisons
7081 that small, it will be converted back to the wider comparison.
7082 This helps if we are testing the sign bit of a narrower object.
7083 combine can't do this for us because it can't know whether a
7084 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7086 if (! SLOW_BYTE_ACCESS
7087 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7088 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7089 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7090 && (type = type_for_size (i + 1, 1)) != 0
7091 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7092 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7093 != CODE_FOR_nothing))
7095 do_jump (convert (type, exp), if_false_label, if_true_label);
/* Logical NOT: simply swap the two labels.  */
7100 case TRUTH_NOT_EXPR:
7101 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* && short-circuits: first operand false jumps straight to false-label.  */
7104 case TRUTH_ANDIF_EXPR:
7105 if (if_false_label == 0)
7106 if_false_label = drop_through_label = gen_label_rtx ();
7107 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7108 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* || short-circuits: first operand true jumps straight to true-label.  */
7111 case TRUTH_ORIF_EXPR:
7112 if (if_true_label == 0)
7113 if_true_label = drop_through_label = gen_label_rtx ();
7114 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7115 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* Comma expression: evaluate the first operand for effect only,
   then jump on the second.  */
7119 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7122 do_pending_stack_adjust ();
7123 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* Bit-field reference: if the field fits a narrower machine mode with a
   usable compare insn, jump on the narrowed value instead.  */
7130 int bitsize, bitpos, unsignedp;
7131 enum machine_mode mode;
7136 /* Get description of this reference. We don't actually care
7137 about the underlying object here. */
7138 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7139 &mode, &unsignedp, &volatilep);
7141 type = type_for_size (bitsize, unsignedp);
7142 if (! SLOW_BYTE_ACCESS
7143 && type != 0 && bitsize >= 0
7144 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7145 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7146 != CODE_FOR_nothing))
7148 do_jump (convert (type, exp), if_false_label, if_true_label);
7155 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7156 if (integer_onep (TREE_OPERAND (exp, 1))
7157 && integer_zerop (TREE_OPERAND (exp, 2)))
7158 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7160 else if (integer_zerop (TREE_OPERAND (exp, 1))
7161 && integer_onep (TREE_OPERAND (exp, 2)))
7162 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* General ?: — branch on the condition, then jump on each arm,
   using drop_through_label for any null caller label.  */
7166 register rtx label1 = gen_label_rtx ();
7167 drop_through_label = gen_label_rtx ();
7168 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7169 /* Now the THEN-expression. */
7170 do_jump (TREE_OPERAND (exp, 1),
7171 if_false_label ? if_false_label : drop_through_label,
7172 if_true_label ? if_true_label : drop_through_label);
7173 /* In case the do_jump just above never jumps. */
7174 do_pending_stack_adjust ();
7175 emit_label (label1);
7176 /* Now the ELSE-expression. */
7177 do_jump (TREE_OPERAND (exp, 2),
7178 if_false_label ? if_false_label : drop_through_label,
7179 if_true_label ? if_true_label : drop_through_label);
/* EQ: comparison against 0 reduces to jumping on the operand with
   labels swapped; multiword integers go word-by-word.  */
7184 if (integer_zerop (TREE_OPERAND (exp, 1)))
7185 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7186 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7189 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7190 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7192 comparison = compare (exp, EQ, EQ);
/* NE: symmetric to EQ above.  */
7196 if (integer_zerop (TREE_OPERAND (exp, 1)))
7197 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7198 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7201 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7202 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7204 comparison = compare (exp, NE, NE);
/* LT/LE/GT/GE: when the operand mode is too wide to compare in one
   insn, delegate to the word-by-word routine; otherwise set
   COMPARISON for the common jump code at the bottom.  */
7208 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7210 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7211 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7213 comparison = compare (exp, LT, LTU);
7217 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7219 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7220 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7222 comparison = compare (exp, LE, LEU);
7226 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7228 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7229 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7231 comparison = compare (exp, GT, GTU);
7235 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7237 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7238 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7240 comparison = compare (exp, GE, GEU);
/* Default case: compute the value and compare it against zero.  */
7245 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7247 /* This is not needed any more and causes poor code since it causes
7248 comparisons and tests from non-SI objects to have different code
7250 /* Copy to register to avoid generating bad insns by cse
7251 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7252 if (!cse_not_expected && GET_CODE (temp) == MEM)
7253 temp = copy_to_reg (temp);
7255 do_pending_stack_adjust ();
7256 if (GET_CODE (temp) == CONST_INT)
7257 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7258 else if (GET_CODE (temp) == LABEL_REF)
7259 comparison = const_true_rtx;
7260 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7261 && !can_compare_p (GET_MODE (temp)))
7262 /* Note swapping the labels gives us not-equal. */
7263 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7264 else if (GET_MODE (temp) != VOIDmode)
7265 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7266 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7267 GET_MODE (temp), NULL_RTX, 0);
7272 /* Do any postincrements in the expression that was tested. */
7275 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7276 straight into a conditional jump instruction as the jump condition.
7277 Otherwise, all the work has been done already. */
7279 if (comparison == const_true_rtx)
7282 emit_jump (if_true_label);
7284 else if (comparison == const0_rtx)
7287 emit_jump (if_false_label);
7289 else if (comparison)
7290 do_jump_for_compare (comparison, if_false_label, if_true_label);
7294 if (drop_through_label)
7296 /* If do_jump produces code that might be jumped around,
7297 do any stack adjusts from that code, before the place
7298 where control merges in. */
7299 do_pending_stack_adjust ();
7300 emit_label (drop_through_label);
7304 /* Given a comparison expression EXP for values too wide to be compared
7305 with one insn, test the comparison and jump to the appropriate label.
7306 The code of EXP is ignored; we always test GT if SWAP is 0,
7307 and LT if SWAP is 1. */
/* NOTE(review): return type, parameter declarations for EXP/SWAP, braces
   and the loop-index declaration appear elided in this extract.  */
7310 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7313 rtx if_false_label, if_true_label;
/* SWAP chooses which operand is expanded first, effectively turning a
   GT-style test into an LT-style one.  */
7315 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7316 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7317 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7318 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7319 rtx drop_through_label = 0;
7320 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* A null caller label means fall-through; synthesize one local label
   for that purpose.  */
7323 if (! if_true_label || ! if_false_label)
7324 drop_through_label = gen_label_rtx ();
7325 if (! if_true_label)
7326 if_true_label = drop_through_label;
7327 if (! if_false_label)
7328 if_false_label = drop_through_label;
7330 /* Compare a word at a time, high order first. */
7331 for (i = 0; i < nwords; i++)
7334 rtx op0_word, op1_word;
/* Word numbering depends on target endianness; index so that the
   most significant word is compared first either way.  */
7336 if (WORDS_BIG_ENDIAN)
7338 op0_word = operand_subword_force (op0, i, mode);
7339 op1_word = operand_subword_force (op1, i, mode);
7343 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7344 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7347 /* All but high-order word must be compared as unsigned. */
7348 comp = compare_from_rtx (op0_word, op1_word,
7349 (unsignedp || i > 0) ? GTU : GT,
7350 unsignedp, word_mode, NULL_RTX, 0);
7351 if (comp == const_true_rtx)
7352 emit_jump (if_true_label);
7353 else if (comp != const0_rtx)
7354 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7356 /* Consider lower words only if these are equal. */
7357 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7359 if (comp == const_true_rtx)
7360 emit_jump (if_false_label);
7361 else if (comp != const0_rtx)
7362 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words compared equal: the GT/LT test is false.  */
7366 emit_jump (if_false_label);
7367 if (drop_through_label)
7368 emit_label (drop_through_label);
7371 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7372 with one insn, test the comparison and jump to the appropriate label. */
/* NOTE(review): return type, the `tree exp;` declaration, braces and the
   loop-index declaration appear elided in this extract.  */
7375 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7377 rtx if_false_label, if_true_label;
7379 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7380 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7381 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7382 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7384 rtx drop_through_label = 0;
/* With no false-label, synthesize one so a failed word test can
   fall through past the final "all equal" jump.  */
7386 if (! if_false_label)
7387 drop_through_label = if_false_label = gen_label_rtx ();
/* Any unequal word pair decides the whole comparison: jump false.  */
7389 for (i = 0; i < nwords; i++)
7391 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7392 operand_subword_force (op1, i, mode),
7393 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7394 word_mode, NULL_RTX, 0);
7395 if (comp == const_true_rtx)
7396 emit_jump (if_false_label);
7397 else if (comp != const0_rtx)
7398 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Every word matched: the values are equal.  */
7402 emit_jump (if_true_label);
7403 if (drop_through_label)
7404 emit_label (drop_through_label);
7407 /* Jump according to whether OP0 is 0.
7408 We assume that OP0 has an integer mode that is too wide
7409 for the available compare insns. */
/* NOTE(review): return type, the `rtx op0;` declaration, braces and the
   loop-index declaration appear elided in this extract.  */
7412 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7414 rtx if_false_label, if_true_label;
7416 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7418 rtx drop_through_label = 0;
7420 if (! if_false_label)
7421 drop_through_label = if_false_label = gen_label_rtx ();
/* Compare each word against zero (unsigned EQ); any nonzero word
   means OP0 != 0, so jump to the false label.  */
7423 for (i = 0; i < nwords; i++)
7425 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7427 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7428 if (comp == const_true_rtx)
7429 emit_jump (if_false_label);
7430 else if (comp != const0_rtx)
7431 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* All words zero: OP0 == 0.  */
7435 emit_jump (if_true_label);
7436 if (drop_through_label)
7437 emit_label (drop_through_label);
7440 /* Given a comparison expression in rtl form, output conditional branches to
7441 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
/* NOTE(review): braces, the `if (if_true_label)` guards and the `branch`
   variable declaration appear elided in this extract.  */
7444 do_jump_for_compare (comparison, if_false_label, if_true_label)
7445 rtx comparison, if_false_label, if_true_label;
/* Easy case: a true-label exists, so branch directly on COMPARISON
   via the target's branch generator table.  */
7449 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7450 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7455 emit_jump (if_false_label);
7457 else if (if_false_label)
7460 rtx prev = PREV_INSN (get_last_insn ());
7463 /* Output the branch with the opposite condition. Then try to invert
7464 what is generated. If more than one insn is a branch, or if the
7465 branch is not the last insn written, abort. If we can't invert
7466 the branch, emit make a true label, redirect this jump to that,
7467 emit a jump to the false label and define the true label. */
7469 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7470 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7474 /* Here we get the insn before what was just emitted.
7475 On some machines, emitting the branch can discard
7476 the previous compare insn and emit a replacement. */
7478 /* If there's only one preceding insn... */
7479 insn = get_insns ();
7481 insn = NEXT_INSN (prev);
/* Scan forward for the JUMP_INSN that was just emitted.  */
7483 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7484 if (GET_CODE (insn) == JUMP_INSN)
7491 if (branch != get_last_insn ())
/* Inversion failed: fall back to a redirect-plus-unconditional-jump
   sequence that achieves the same control flow.  */
7494 if (! invert_jump (branch, if_false_label))
7496 if_true_label = gen_label_rtx ();
7497 redirect_jump (branch, if_true_label);
7498 emit_jump (if_false_label);
7499 emit_label (if_true_label);
7504 /* Generate code for a comparison expression EXP
7505 (including code to compute the values to be compared)
7506 and set (CC0) according to the result.
7507 SIGNED_CODE should be the rtx operation for this comparison for
7508 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7510 We force a stack adjustment unless there are currently
7511 things pushed on the stack that aren't yet used. */
/* NOTE(review): return type, the `tree exp;` declaration, braces and the
   op0/op1 declarator lines appear elided in this extract.  */
7514 compare (exp, signed_code, unsigned_code)
7516 enum rtx_code signed_code, unsigned_code;
7519 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7521 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7522 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7523 register enum machine_mode mode = TYPE_MODE (type);
7524 int unsignedp = TREE_UNSIGNED (type);
/* Pick the signed or unsigned rtx comparison code from the operand type.  */
7525 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
/* Delegate to compare_from_rtx; a size expression is passed (presumably
   only for BLKmode operands — the guarding condition is elided here).  */
7527 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7529 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7530 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7533 /* Like compare but expects the values to compare as two rtx's.
7534 The decision as to signed or unsigned comparison must be made by the caller.
7536 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7539 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7540 size of MODE should be used. */
/* NOTE(review): return type, some parameter declarations, braces, the
   `tem` declaration and the operand-swap statements appear elided.  */
7543 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7544 register rtx op0, op1;
7547 enum machine_mode mode;
7553 /* If one operand is constant, make it the second one. Only do this
7554 if the other operand is not constant as well. */
7556 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7557 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
/* Swapping operands requires reversing the comparison direction.  */
7562 code = swap_condition (code);
/* Presumably guarded by flag_force_mem in the original — confirm.  */
7567 op0 = force_not_mem (op0);
7568 op1 = force_not_mem (op1);
7571 do_pending_stack_adjust ();
/* Constant-fold a compare of two CONST_INTs when possible.  */
7573 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7574 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7578 /* There's no need to do this now that combine.c can eliminate lots of
7579 sign extensions. This can be less efficient in certain cases on other
7582 /* If this is a signed equality comparison, we can do it as an
7583 unsigned comparison since zero-extension is cheaper than sign
7584 extension and comparisons with zero are done as unsigned. This is
7585 the case even on machines that can do fast sign extension, since
7586 zero-extension is easier to combine with other operations than
7587 sign-extension is. If we are comparing against a constant, we must
7588 convert it to what it would look like unsigned. */
7589 if ((code == EQ || code == NE) && ! unsignedp
7590 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7592 if (GET_CODE (op1) == CONST_INT
7593 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7594 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
/* Emit the actual compare insn, then return a cc0-based condition rtx
   for the caller to feed into a conditional branch.  */
7599 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7601 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7604 /* Generate code to calculate EXP using a store-flag instruction
7605 and return an rtx for the result. EXP is either a comparison
7606 or a TRUTH_NOT_EXPR whose operand is a comparison.
7608 If TARGET is nonzero, store the result there if convenient.
7610 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7613 Return zero if there is no suitable set-flag instruction
7614 available on this machine.
7616 Once expand_expr has been called on the arguments of the comparison,
7617 we are committed to doing the store flag, since it is not safe to
7618 re-evaluate the expression. We emit the store-flag insn by calling
7619 emit_store_flag, but only expand the arguments if we have a reason
7620 to believe that emit_store_flag will be successful. If we think that
7621 it will, but it isn't, we have to simulate the store-flag with a
7622 set/jump/set sequence. */
/* NOTE(review): heavily sampled — parameter declarations for EXP/TARGET/
   ONLY_CHEAP, several local declarations (code, invert, op0, op1, tem),
   switch case labels, braces and `return` statements are elided.  */
7625 do_store_flag (exp, target, mode, only_cheap)
7628 enum machine_mode mode;
7632 tree arg0, arg1, type;
7634 enum machine_mode operand_mode;
7638 enum insn_code icode;
7639 rtx subtarget = target;
7640 rtx result, label, pattern, jump_pat;
7642 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7643 result at the end. We can't simply invert the test since it would
7644 have already been inverted if it were valid. This case occurs for
7645 some floating-point comparisons. */
7647 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7648 invert = 1, exp = TREE_OPERAND (exp, 0);
7650 arg0 = TREE_OPERAND (exp, 0);
7651 arg1 = TREE_OPERAND (exp, 1);
7652 type = TREE_TYPE (arg0);
7653 operand_mode = TYPE_MODE (type);
7654 unsignedp = TREE_UNSIGNED (type);
7656 /* We won't bother with BLKmode store-flag operations because it would mean
7657 passing a lot of information to emit_store_flag. */
7658 if (operand_mode == BLKmode)
7664 /* Get the rtx comparison code to use. We know that EXP is a comparison
7665 operation of some type. Some comparisons against 1 and -1 can be
7666 converted to comparisons with zero. Do so here so that the tests
7667 below will be aware that we have a comparison with zero. These
7668 tests will not catch constants in the first operand, but constants
7669 are rarely passed as the first operand. */
7671 switch (TREE_CODE (exp))
/* x < 1  becomes  x <= 0 (and likewise for the other boundary cases).  */
7680 if (integer_onep (arg1))
7681 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7683 code = unsignedp ? LTU : LT;
7686 if (integer_all_onesp (arg1))
7687 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7689 code = unsignedp ? LEU : LE;
7692 if (integer_all_onesp (arg1))
7693 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7695 code = unsignedp ? GTU : GT;
7698 if (integer_onep (arg1))
7699 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7701 code = unsignedp ? GEU : GE;
7707 /* Put a constant second. */
7708 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7710 tem = arg0; arg0 = arg1; arg1 = tem;
7711 code = swap_condition (code);
7714 /* If this is an equality or inequality test of a single bit, we can
7715 do this by shifting the bit being tested to the low-order bit and
7716 masking the result with the constant 1. If the condition was EQ,
7717 we xor it with 1. This does not require an scc insn and is faster
7718 than an scc insn even if we have it. */
7720 if ((code == NE || code == EQ)
7721 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7722 && integer_pow2p (TREE_OPERAND (arg0, 1))
7723 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7725 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7726 NULL_RTX, VOIDmode, 0)));
/* Only reuse SUBTARGET when it is a register of the right mode that
   the operand expansion cannot clobber.  */
7728 if (subtarget == 0 || GET_CODE (subtarget) != REG
7729 || GET_MODE (subtarget) != operand_mode
7730 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7733 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0...  */
7736 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7737 size_int (bitnum), target, 1);
7739 if (GET_MODE (op0) != mode)
7740 op0 = convert_to_mode (mode, op0, 1);
/* ...and mask to one bit, unless the shift already cleared the rest
   (tested bit was the sign bit).  */
7742 if (bitnum != TYPE_PRECISION (type) - 1)
7743 op0 = expand_and (op0, const1_rtx, target);
/* EQ needs the opposite polarity of the extracted bit: XOR with 1.  */
7745 if ((code == EQ && ! invert) || (code == NE && invert))
7746 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7752 /* Now see if we are likely to be able to do this. Return if not. */
7753 if (! can_compare_p (operand_mode))
7755 icode = setcc_gen_code[(int) code];
7756 if (icode == CODE_FOR_nothing
7757 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7759 /* We can only do this if it is one of the special cases that
7760 can be handled without an scc insn. */
7761 if ((code == LT && integer_zerop (arg1))
7762 || (! only_cheap && code == GE && integer_zerop (arg1)))
7764 else if (BRANCH_COST >= 0
7765 && ! only_cheap && (code == NE || code == EQ)
7766 && TREE_CODE (type) != REAL_TYPE
7767 && ((abs_optab->handlers[(int) operand_mode].insn_code
7768 != CODE_FOR_nothing)
7769 || (ffs_optab->handlers[(int) operand_mode].insn_code
7770 != CODE_FOR_nothing)))
/* Commit point: expand the operands (see header comment — after this
   we must not re-evaluate EXP).  */
7776 preexpand_calls (exp);
7777 if (subtarget == 0 || GET_CODE (subtarget) != REG
7778 || GET_MODE (subtarget) != operand_mode
7779 || ! safe_from_p (subtarget, arg1))
7782 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7783 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7786 target = gen_reg_rtx (mode);
7788 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7789 because, if the emit_store_flag does anything it will succeed and
7790 OP0 and OP1 will not be used subsequently. */
7792 result = emit_store_flag (target, code,
7793 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7794 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7795 operand_mode, unsignedp, 1);
/* Presumably only executed when INVERT is set — guard elided here.  */
7800 result = expand_binop (mode, xor_optab, result, const1_rtx,
7801 result, 0, OPTAB_LIB_WIDEN);
7805 /* If this failed, we have to do this with set/compare/jump/set code. */
7806 if (target == 0 || GET_CODE (target) != REG
7807 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7808 target = gen_reg_rtx (GET_MODE (target));
/* Preload the "true" value, branch over the store of the "false"
   value when the comparison holds.  */
7810 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7811 result = compare_from_rtx (op0, op1, code, unsignedp,
7812 operand_mode, NULL_RTX, 0);
7813 if (GET_CODE (result) == CONST_INT)
7814 return (((result == const0_rtx && ! invert)
7815 || (result != const0_rtx && invert))
7816 ? const0_rtx : const1_rtx);
7818 label = gen_label_rtx ();
7819 if (bcc_gen_fctn[(int) code] == 0)
7822 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7823 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7829 /* Generate a tablejump instruction (used for switch statements). */
7831 #ifdef HAVE_tablejump
7833 /* INDEX is the value being switched on, with the lowest value
7834 in the table already subtracted.
7835 MODE is its expected mode (needed if INDEX is constant).
7836 RANGE is the length of the jump table.
7837 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7839 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7840 index value is out of range. */
/* NOTE(review): return type, braces and the PIC/PC-relative barrier
   emission at the end appear elided in this extract.  */
7843 do_tablejump (index, mode, range, table_label, default_label)
7844 rtx index, range, table_label, default_label;
7845 enum machine_mode mode;
7847 register rtx temp, vector;
7849 /* Do an unsigned comparison (in the proper mode) between the index
7850 expression and the value which represents the length of the range.
7851 Since we just finished subtracting the lower bound of the range
7852 from the index expression, this comparison allows us to simultaneously
7853 check that the original index expression value is both greater than
7854 or equal to the minimum value of the range and less than or equal to
7855 the maximum value of the range. */
7857 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0)
7858 emit_jump_insn (gen_bltu (default_label));
7860 /* If index is in range, it must fit in Pmode.
7861 Convert to Pmode so we can index with it. */
7863 index = convert_to_mode (Pmode, index, 1);
7865 /* If flag_force_addr were to affect this address
7866 it could interfere with the tricky assumptions made
7867 about addresses that contain label-refs,
7868 which may be valid only very near the tablejump itself. */
7869 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7870 GET_MODE_SIZE, because this indicates how large insns are. The other
7871 uses should all be Pmode, because they are addresses. This code
7872 could fail if addresses and insns are not the same size. */
/* Compute table_label + index * entry_size as the address of the
   jump-table entry to fetch.  */
7873 index = memory_address_noforce
7875 gen_rtx (PLUS, Pmode,
7876 gen_rtx (MULT, Pmode, index,
7877 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7878 gen_rtx (LABEL_REF, Pmode, table_label)));
7879 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7880 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
/* The jump table is read-only; mark the MEM unchanging.  */
7881 RTX_UNCHANGING_P (vector) = 1;
7882 convert_move (temp, vector, 0);
7884 emit_jump_insn (gen_tablejump (temp, table_label));
7886 #ifndef CASE_VECTOR_PC_RELATIVE
7887 /* If we are generating PIC code or if the table is PC-relative, the
7888 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7894 #endif /* HAVE_tablejump */