1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
32 #include "typeclass.h"
34 #define CEIL(x,y) (((x) + (y) - 1) / (y))
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
50 #ifndef STACK_PUSH_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_PUSH_CODE PRE_DEC
54 #define STACK_PUSH_CODE PRE_INC
58 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
59 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
61 /* If this is nonzero, we do not bother generating VOLATILE
62 around volatile memory references, and we are willing to
63 output indirect addresses. If cse is to follow, we reject
64 indirect addresses so a useful potential cse is generated;
65 if it is used only once, instruction combination will produce
66 the same indirect address eventually. */
69 /* Nonzero to generate code for all the subroutines within an
70 expression before generating the upper levels of the expression.
71 Nowadays this is never zero. */
72 int do_preexpand_calls = 1;
74 /* Number of units that we should eventually pop off the stack.
75 These are the arguments to function calls that have already returned. */
76 int pending_stack_adjust;
78 /* Nonzero means stack pops must not be deferred, and deferred stack
79 pops must not be output. It is nonzero inside a function call,
80 inside a conditional expression, inside a statement expression,
81 and in other cases as well. */
82 int inhibit_defer_pop;
84 /* A list of all cleanups which belong to the arguments of
85 function calls being expanded by expand_call. */
86 tree cleanups_this_call;
88 /* Nonzero means __builtin_saveregs has already been done in this function.
89 The value is the pseudoreg containing the value __builtin_saveregs
91 static rtx saveregs_value;
93 /* Similarly for __builtin_apply_args. */
94 static rtx apply_args_value;
96 /* This structure is used by move_by_pieces to describe the move to
108 int explicit_inc_from;
114 static rtx enqueue_insn PROTO((rtx, rtx));
115 static int queued_subexp_p PROTO((rtx));
116 static void init_queue PROTO((void));
117 static void move_by_pieces PROTO((rtx, rtx, int, int));
118 static int move_by_pieces_ninsns PROTO((unsigned int, int));
119 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
120 struct move_by_pieces *));
121 static void group_insns PROTO((rtx));
122 static void store_constructor PROTO((tree, rtx));
123 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
124 enum machine_mode, int, int, int));
125 static tree save_noncopied_parts PROTO((tree, tree));
126 static tree init_noncopied_parts PROTO((tree, tree));
127 static int safe_from_p PROTO((rtx, tree));
128 static int fixed_type_p PROTO((tree));
129 static int get_pointer_alignment PROTO((tree, unsigned));
130 static tree string_constant PROTO((tree, tree *));
131 static tree c_strlen PROTO((tree));
132 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
133 static int apply_args_size PROTO((void));
134 static int apply_result_size PROTO((void));
135 static rtx result_vector PROTO((int, rtx));
136 static rtx expand_builtin_apply_args PROTO((void));
137 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
138 static void expand_builtin_return PROTO((rtx));
139 static rtx expand_increment PROTO((tree, int));
140 static void preexpand_calls PROTO((tree));
141 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
142 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
143 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
144 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
145 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
146 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
148 /* Record for each mode whether we can move a register directly to or
149 from an object of that mode in memory. If we can't, we won't try
150 to use that mode directly when accessing a field of that mode. */
152 static char direct_load[NUM_MACHINE_MODES];
153 static char direct_store[NUM_MACHINE_MODES];
155 /* MOVE_RATIO is the number of move instructions that is better than
159 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
162 /* A value of around 6 would minimize code size; infinity would minimize
164 #define MOVE_RATIO 15
168 /* This array records the insn_code of insns to perform block moves. */
169 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
171 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
173 #ifndef SLOW_UNALIGNED_ACCESS
174 #define SLOW_UNALIGNED_ACCESS 0
177 /* Register mappings for target machines without register windows. */
178 #ifndef INCOMING_REGNO
179 #define INCOMING_REGNO(OUT) (OUT)
181 #ifndef OUTGOING_REGNO
182 #define OUTGOING_REGNO(IN) (IN)
185 /* This is run once per compilation to set up which modes can be used
186 directly in memory and to initialize the block move optab. */
192 enum machine_mode mode;
193 /* Try indexing by frame ptr and try by stack ptr.
194 It is known that on the Convex the stack ptr isn't a valid index.
195 With luck, one or the other is valid on any machine. */
196 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
197 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
200 insn = emit_insn (gen_rtx (SET, 0, 0));
201 pat = PATTERN (insn);
203 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
204 mode = (enum machine_mode) ((int) mode + 1))
210 direct_load[(int) mode] = direct_store[(int) mode] = 0;
211 PUT_MODE (mem, mode);
212 PUT_MODE (mem1, mode);
214 /* See if there is some register that can be used in this mode and
215 directly loaded or stored from memory. */
217 if (mode != VOIDmode && mode != BLKmode)
218 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
219 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
222 if (! HARD_REGNO_MODE_OK (regno, mode))
225 reg = gen_rtx (REG, mode, regno);
228 SET_DEST (pat) = reg;
229 if (recog (pat, insn, &num_clobbers) >= 0)
230 direct_load[(int) mode] = 1;
232 SET_SRC (pat) = mem1;
233 SET_DEST (pat) = reg;
234 if (recog (pat, insn, &num_clobbers) >= 0)
235 direct_load[(int) mode] = 1;
238 SET_DEST (pat) = mem;
239 if (recog (pat, insn, &num_clobbers) >= 0)
240 direct_store[(int) mode] = 1;
243 SET_DEST (pat) = mem1;
244 if (recog (pat, insn, &num_clobbers) >= 0)
245 direct_store[(int) mode] = 1;
248 movstr_optab[(int) mode] = CODE_FOR_nothing;
255 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
259 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
263 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
267 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
271 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
275 /* This is run at the start of compiling a function. */
282 pending_stack_adjust = 0;
283 inhibit_defer_pop = 0;
284 cleanups_this_call = 0;
286 apply_args_value = 0;
290 /* Save all variables describing the current status into the structure *P.
291 This is used before starting a nested function. */
297 /* Instead of saving the postincrement queue, empty it. */
300 p->pending_stack_adjust = pending_stack_adjust;
301 p->inhibit_defer_pop = inhibit_defer_pop;
302 p->cleanups_this_call = cleanups_this_call;
303 p->saveregs_value = saveregs_value;
304 p->apply_args_value = apply_args_value;
305 p->forced_labels = forced_labels;
307 pending_stack_adjust = 0;
308 inhibit_defer_pop = 0;
309 cleanups_this_call = 0;
311 apply_args_value = 0;
315 /* Restore all variables describing the current status from the structure *P.
316 This is used after a nested function. */
319 restore_expr_status (p)
322 pending_stack_adjust = p->pending_stack_adjust;
323 inhibit_defer_pop = p->inhibit_defer_pop;
324 cleanups_this_call = p->cleanups_this_call;
325 saveregs_value = p->saveregs_value;
326 apply_args_value = p->apply_args_value;
327 forced_labels = p->forced_labels;
330 /* Manage the queue of increment instructions to be output
331 for POSTINCREMENT_EXPR expressions, etc. */
333 static rtx pending_chain;
335 /* Queue up to increment (or change) VAR later. BODY says how:
336 BODY should be the same thing you would pass to emit_insn
337 to increment right away. It will go to emit_insn later on.
339 The value is a QUEUED expression to be used in place of VAR
340 where you want to guarantee the pre-incrementation value of VAR. */
343 enqueue_insn (var, body)
346 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
347 var, NULL_RTX, NULL_RTX, body, pending_chain);
348 return pending_chain;
351 /* Use protect_from_queue to convert a QUEUED expression
352 into something that you can put immediately into an instruction.
353 If the queued incrementation has not happened yet,
354 protect_from_queue returns the variable itself.
355 If the incrementation has happened, protect_from_queue returns a temp
356 that contains a copy of the old value of the variable.
358 Any time an rtx which might possibly be a QUEUED is to be put
359 into an instruction, it must be passed through protect_from_queue first.
360 QUEUED expressions are not meaningful in instructions.
362 Do not pass a value through protect_from_queue and then hold
363 on to it for a while before putting it in an instruction!
364 If the queue is flushed in between, incorrect code will result. */
367 protect_from_queue (x, modify)
371 register RTX_CODE code = GET_CODE (x);
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain == 0)
381 /* A special hack for read access to (MEM (QUEUED ...))
382 to facilitate use of autoincrement.
383 Make a copy of the contents of the memory location
384 rather than a copy of the address, but not
385 if the value is of mode BLKmode. */
386 if (code == MEM && GET_MODE (x) != BLKmode
387 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
389 register rtx y = XEXP (x, 0);
390 XEXP (x, 0) = QUEUED_VAR (y);
393 register rtx temp = gen_reg_rtx (GET_MODE (x));
394 emit_insn_before (gen_move_insn (temp, x),
400 /* Otherwise, recursively protect the subexpressions of all
401 the kinds of rtx's that can contain a QUEUED. */
403 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
404 else if (code == PLUS || code == MULT)
406 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
407 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
411 /* If the increment has not happened, use the variable itself. */
412 if (QUEUED_INSN (x) == 0)
413 return QUEUED_VAR (x);
414 /* If the increment has happened and a pre-increment copy exists,
416 if (QUEUED_COPY (x) != 0)
417 return QUEUED_COPY (x);
418 /* The increment has happened but we haven't set up a pre-increment copy.
419 Set one up now, and use it. */
420 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
421 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
423 return QUEUED_COPY (x);
426 /* Return nonzero if X contains a QUEUED expression:
427 if it contains anything that will be altered by a queued increment.
428 We handle only combinations of MEM, PLUS, MINUS and MULT operators
429 since memory addresses generally contain only those. */
435 register enum rtx_code code = GET_CODE (x);
441 return queued_subexp_p (XEXP (x, 0));
445 return queued_subexp_p (XEXP (x, 0))
446 || queued_subexp_p (XEXP (x, 1));
451 /* Perform all the pending incrementations. */
457 while (p = pending_chain)
459 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
460 pending_chain = QUEUED_NEXT (p);
471 /* Copy data from FROM to TO, where the machine modes are not the same.
472 Both modes may be integer, or both may be floating.
473 UNSIGNEDP should be nonzero if FROM is an unsigned type.
474 This causes zero-extension instead of sign-extension. */
477 convert_move (to, from, unsignedp)
478 register rtx to, from;
481 enum machine_mode to_mode = GET_MODE (to);
482 enum machine_mode from_mode = GET_MODE (from);
483 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
484 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
488 /* rtx code for making an equivalent value. */
489 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
491 to = protect_from_queue (to, 1);
492 from = protect_from_queue (from, 0);
494 if (to_real != from_real)
497 /* If FROM is a SUBREG that indicates that we have already done at least
498 the required extension, strip it. We don't handle such SUBREGs as
501 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
502 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
503 >= GET_MODE_SIZE (to_mode))
504 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
505 from = gen_lowpart (to_mode, from), from_mode = to_mode;
507 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
510 if (to_mode == from_mode
511 || (from_mode == VOIDmode && CONSTANT_P (from)))
513 emit_move_insn (to, from);
519 #ifdef HAVE_extendqfhf2
520 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
522 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
526 #ifdef HAVE_extendqfsf2
527 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
529 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
533 #ifdef HAVE_extendqfdf2
534 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
536 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
540 #ifdef HAVE_extendqfxf2
541 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
543 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
547 #ifdef HAVE_extendqftf2
548 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
550 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
555 #ifdef HAVE_extendhfsf2
556 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
558 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
562 #ifdef HAVE_extendhfdf2
563 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
565 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
569 #ifdef HAVE_extendhfxf2
570 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
572 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
576 #ifdef HAVE_extendhftf2
577 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
579 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
584 #ifdef HAVE_extendsfdf2
585 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
587 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
591 #ifdef HAVE_extendsfxf2
592 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
594 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
598 #ifdef HAVE_extendsftf2
599 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
601 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
605 #ifdef HAVE_extenddfxf2
606 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
608 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
612 #ifdef HAVE_extenddftf2
613 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
615 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
620 #ifdef HAVE_trunchfqf2
621 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
623 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
627 #ifdef HAVE_truncsfqf2
628 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
630 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
634 #ifdef HAVE_truncdfqf2
635 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
637 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
641 #ifdef HAVE_truncxfqf2
642 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
644 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
648 #ifdef HAVE_trunctfqf2
649 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
651 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
655 #ifdef HAVE_truncsfhf2
656 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
658 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
662 #ifdef HAVE_truncdfhf2
663 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
665 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
669 #ifdef HAVE_truncxfhf2
670 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
672 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
676 #ifdef HAVE_trunctfhf2
677 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
679 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
683 #ifdef HAVE_truncdfsf2
684 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
686 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
690 #ifdef HAVE_truncxfsf2
691 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
693 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
697 #ifdef HAVE_trunctfsf2
698 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
700 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
704 #ifdef HAVE_truncxfdf2
705 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
707 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
711 #ifdef HAVE_trunctfdf2
712 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
714 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
726 libcall = extendsfdf2_libfunc;
730 libcall = extendsfxf2_libfunc;
734 libcall = extendsftf2_libfunc;
743 libcall = truncdfsf2_libfunc;
747 libcall = extenddfxf2_libfunc;
751 libcall = extenddftf2_libfunc;
760 libcall = truncxfsf2_libfunc;
764 libcall = truncxfdf2_libfunc;
773 libcall = trunctfsf2_libfunc;
777 libcall = trunctfdf2_libfunc;
783 if (libcall == (rtx) 0)
784 /* This conversion is not implemented yet. */
787 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
788 emit_move_insn (to, hard_libcall_value (to_mode));
792 /* Now both modes are integers. */
794 /* Handle expanding beyond a word. */
795 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
796 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
803 enum machine_mode lowpart_mode;
804 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
806 /* Try converting directly if the insn is supported. */
807 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
810 /* If FROM is a SUBREG, put it into a register. Do this
811 so that we always generate the same set of insns for
812 better cse'ing; if an intermediate assignment occurred,
813 we won't be doing the operation directly on the SUBREG. */
814 if (optimize > 0 && GET_CODE (from) == SUBREG)
815 from = force_reg (from_mode, from);
816 emit_unop_insn (code, to, from, equiv_code);
819 /* Next, try converting via full word. */
820 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
821 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
822 != CODE_FOR_nothing))
824 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
825 emit_unop_insn (code, to,
826 gen_lowpart (word_mode, to), equiv_code);
830 /* No special multiword conversion insn; do it by hand. */
833 /* Get a copy of FROM widened to a word, if necessary. */
834 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
835 lowpart_mode = word_mode;
837 lowpart_mode = from_mode;
839 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
841 lowpart = gen_lowpart (lowpart_mode, to);
842 emit_move_insn (lowpart, lowfrom);
844 /* Compute the value to put in each remaining word. */
846 fill_value = const0_rtx;
851 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
852 && STORE_FLAG_VALUE == -1)
854 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
856 fill_value = gen_reg_rtx (word_mode);
857 emit_insn (gen_slt (fill_value));
863 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
864 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
866 fill_value = convert_to_mode (word_mode, fill_value, 1);
870 /* Fill the remaining words. */
871 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
873 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
874 rtx subword = operand_subword (to, index, 1, to_mode);
879 if (fill_value != subword)
880 emit_move_insn (subword, fill_value);
883 insns = get_insns ();
886 emit_no_conflict_block (insns, to, from, NULL_RTX,
887 gen_rtx (equiv_code, to_mode, from));
891 /* Truncating multi-word to a word or less. */
892 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
893 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
895 convert_move (to, gen_lowpart (word_mode, from), 0);
899 /* Handle pointer conversion */ /* SPEE 900220 */
900 if (to_mode == PSImode)
902 if (from_mode != SImode)
903 from = convert_to_mode (SImode, from, unsignedp);
905 #ifdef HAVE_truncsipsi
908 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
911 #endif /* HAVE_truncsipsi */
915 if (from_mode == PSImode)
917 if (to_mode != SImode)
919 from = convert_to_mode (SImode, from, unsignedp);
924 #ifdef HAVE_extendpsisi
925 if (HAVE_extendpsisi)
927 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
930 #endif /* HAVE_extendpsisi */
935 /* Now follow all the conversions between integers
936 no more than a word long. */
938 /* For truncation, usually we can just refer to FROM in a narrower mode. */
939 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
940 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
941 GET_MODE_BITSIZE (from_mode)))
943 if (!((GET_CODE (from) == MEM
944 && ! MEM_VOLATILE_P (from)
945 && direct_load[(int) to_mode]
946 && ! mode_dependent_address_p (XEXP (from, 0)))
947 || GET_CODE (from) == REG
948 || GET_CODE (from) == SUBREG))
949 from = force_reg (from_mode, from);
950 emit_move_insn (to, gen_lowpart (to_mode, from));
954 /* Handle extension. */
955 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
957 /* Convert directly if that works. */
958 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
961 /* If FROM is a SUBREG, put it into a register. Do this
962 so that we always generate the same set of insns for
963 better cse'ing; if an intermediate assignment occurred,
964 we won't be doing the operation directly on the SUBREG. */
965 if (optimize > 0 && GET_CODE (from) == SUBREG)
966 from = force_reg (from_mode, from);
967 emit_unop_insn (code, to, from, equiv_code);
972 enum machine_mode intermediate;
974 /* Search for a mode to convert via. */
975 for (intermediate = from_mode; intermediate != VOIDmode;
976 intermediate = GET_MODE_WIDER_MODE (intermediate))
977 if ((can_extend_p (to_mode, intermediate, unsignedp)
979 && (can_extend_p (intermediate, from_mode, unsignedp)
980 != CODE_FOR_nothing))
982 convert_move (to, convert_to_mode (intermediate, from,
983 unsignedp), unsignedp);
987 /* No suitable intermediate mode. */
992 /* Support special truncate insns for certain modes. */
994 if (from_mode == DImode && to_mode == SImode)
996 #ifdef HAVE_truncdisi2
999 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1003 convert_move (to, force_reg (from_mode, from), unsignedp);
1007 if (from_mode == DImode && to_mode == HImode)
1009 #ifdef HAVE_truncdihi2
1010 if (HAVE_truncdihi2)
1012 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1016 convert_move (to, force_reg (from_mode, from), unsignedp);
1020 if (from_mode == DImode && to_mode == QImode)
1022 #ifdef HAVE_truncdiqi2
1023 if (HAVE_truncdiqi2)
1025 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1029 convert_move (to, force_reg (from_mode, from), unsignedp);
1033 if (from_mode == SImode && to_mode == HImode)
1035 #ifdef HAVE_truncsihi2
1036 if (HAVE_truncsihi2)
1038 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1042 convert_move (to, force_reg (from_mode, from), unsignedp);
1046 if (from_mode == SImode && to_mode == QImode)
1048 #ifdef HAVE_truncsiqi2
1049 if (HAVE_truncsiqi2)
1051 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1055 convert_move (to, force_reg (from_mode, from), unsignedp);
1059 if (from_mode == HImode && to_mode == QImode)
1061 #ifdef HAVE_trunchiqi2
1062 if (HAVE_trunchiqi2)
1064 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1068 convert_move (to, force_reg (from_mode, from), unsignedp);
1072 /* Handle truncation of volatile memrefs, and so on;
1073 the things that couldn't be truncated directly,
1074 and for which there was no special instruction. */
1075 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1077 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1078 emit_move_insn (to, temp);
1082 /* Mode combination is not recognized. */
1086 /* Return an rtx for a value that would result
1087 from converting X to mode MODE.
1088 Both X and MODE may be floating, or both integer.
1089 UNSIGNEDP is nonzero if X is an unsigned value.
1090 This can be done by referring to a part of X in place
1091 or by copying to a new temporary with conversion.
1093 This function *must not* call protect_from_queue
1094 except when putting X into an insn (in which case convert_move does it). */
1097 convert_to_mode (mode, x, unsignedp)
1098 enum machine_mode mode;
1104 /* If FROM is a SUBREG that indicates that we have already done at least
1105 the required extension, strip it. */
1107 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1108 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1109 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1110 x = gen_lowpart (mode, x);
1112 if (mode == GET_MODE (x))
1115 /* There is one case that we must handle specially: If we are converting
1116 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1117 we are to interpret the constant as unsigned, gen_lowpart will do
1118 the wrong if the constant appears negative. What we want to do is
1119 make the high-order word of the constant zero, not all ones. */
1121 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1122 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1123 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1124 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1126 /* We can do this with a gen_lowpart if both desired and current modes
1127 are integer, and this is either a constant integer, a register, or a
1128 non-volatile MEM. Except for the constant case, we must be narrowing
1131 if (GET_CODE (x) == CONST_INT
1132 || (GET_MODE_CLASS (mode) == MODE_INT
1133 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1134 && (GET_CODE (x) == CONST_DOUBLE
1135 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1136 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1137 && direct_load[(int) mode]
1138 || GET_CODE (x) == REG)))))
1139 return gen_lowpart (mode, x);
1141 temp = gen_reg_rtx (mode);
1142 convert_move (temp, x, unsignedp);
1146 /* Generate several move instructions to copy LEN bytes
1147 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1148 The caller must pass FROM and TO
1149 through protect_from_queue before calling.
1150 ALIGN (in bytes) is maximum alignment we can assume. */
1153 move_by_pieces (to, from, len, align)
1157 struct move_by_pieces data;
1158 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1159 int max_size = MOVE_MAX + 1;
1162 data.to_addr = to_addr;
1163 data.from_addr = from_addr;
1167 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1168 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1170 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1171 || GET_CODE (from_addr) == POST_INC
1172 || GET_CODE (from_addr) == POST_DEC);
1174 data.explicit_inc_from = 0;
1175 data.explicit_inc_to = 0;
1177 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1178 if (data.reverse) data.offset = len;
1181 /* If copying requires more than two move insns,
1182 copy addresses to registers (to make displacements shorter)
1183 and use post-increment if available. */
1184 if (!(data.autinc_from && data.autinc_to)
1185 && move_by_pieces_ninsns (len, align) > 2)
1187 #ifdef HAVE_PRE_DECREMENT
1188 if (data.reverse && ! data.autinc_from)
1190 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1191 data.autinc_from = 1;
1192 data.explicit_inc_from = -1;
1195 #ifdef HAVE_POST_INCREMENT
1196 if (! data.autinc_from)
1198 data.from_addr = copy_addr_to_reg (from_addr);
1199 data.autinc_from = 1;
1200 data.explicit_inc_from = 1;
1203 if (!data.autinc_from && CONSTANT_P (from_addr))
1204 data.from_addr = copy_addr_to_reg (from_addr);
1205 #ifdef HAVE_PRE_DECREMENT
1206 if (data.reverse && ! data.autinc_to)
1208 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1210 data.explicit_inc_to = -1;
1213 #ifdef HAVE_POST_INCREMENT
1214 if (! data.reverse && ! data.autinc_to)
1216 data.to_addr = copy_addr_to_reg (to_addr);
1218 data.explicit_inc_to = 1;
1221 if (!data.autinc_to && CONSTANT_P (to_addr))
1222 data.to_addr = copy_addr_to_reg (to_addr);
1225 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1226 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1229 /* First move what we can in the largest integer mode, then go to
1230 successively smaller modes. */
1232 while (max_size > 1)
1234 enum machine_mode mode = VOIDmode, tmode;
1235 enum insn_code icode;
1237 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1238 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1239 if (GET_MODE_SIZE (tmode) < max_size)
1242 if (mode == VOIDmode)
1245 icode = mov_optab->handlers[(int) mode].insn_code;
1246 if (icode != CODE_FOR_nothing
1247 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1248 GET_MODE_SIZE (mode)))
1249 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1251 max_size = GET_MODE_SIZE (mode);
1254 /* The code above should have handled everything. */
1259 /* Return number of insns required to move L bytes by pieces.
1260 ALIGN (in bytes) is maximum alignment we can assume. */
1263 move_by_pieces_ninsns (l, align)
1267 register int n_insns = 0;
1268 int max_size = MOVE_MAX + 1;
1270 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1271 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1274 while (max_size > 1)
1276 enum machine_mode mode = VOIDmode, tmode;
1277 enum insn_code icode;
1279 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1280 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1281 if (GET_MODE_SIZE (tmode) < max_size)
1284 if (mode == VOIDmode)
1287 icode = mov_optab->handlers[(int) mode].insn_code;
1288 if (icode != CODE_FOR_nothing
1289 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1290 GET_MODE_SIZE (mode)))
1291 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1293 max_size = GET_MODE_SIZE (mode);
1299 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1300 with move instructions for mode MODE. GENFUN is the gen_... function
1301 to make a move insn for that mode. DATA has all the other info. */
1304 move_by_pieces_1 (genfun, mode, data)
1306 enum machine_mode mode;
1307 struct move_by_pieces *data;
1309 register int size = GET_MODE_SIZE (mode);
1310 register rtx to1, from1;
1312 while (data->len >= size)
1314 if (data->reverse) data->offset -= size;
1316 to1 = (data->autinc_to
1317 ? gen_rtx (MEM, mode, data->to_addr)
1318 : change_address (data->to, mode,
1319 plus_constant (data->to_addr, data->offset)));
1322 ? gen_rtx (MEM, mode, data->from_addr)
1323 : change_address (data->from, mode,
1324 plus_constant (data->from_addr, data->offset)));
1326 #ifdef HAVE_PRE_DECREMENT
1327 if (data->explicit_inc_to < 0)
1328 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1329 if (data->explicit_inc_from < 0)
1330 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1333 emit_insn ((*genfun) (to1, from1));
1334 #ifdef HAVE_POST_INCREMENT
1335 if (data->explicit_inc_to > 0)
1336 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1337 if (data->explicit_inc_from > 0)
1338 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1341 if (! data->reverse) data->offset += size;
1347 /* Emit code to move a block Y to a block X.
1348 This may be done with string-move instructions,
1349 with multiple scalar move instructions, or with a library call.
1351 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1353 SIZE is an rtx that says how long they are.
1354 ALIGN is the maximum alignment we can assume they have,
1355 measured in bytes. */
1358 emit_block_move (x, y, size, align)
1363 if (GET_MODE (x) != BLKmode)
1366 if (GET_MODE (y) != BLKmode)
1369 x = protect_from_queue (x, 1);
1370 y = protect_from_queue (y, 0);
1371 size = protect_from_queue (size, 0);
1373 if (GET_CODE (x) != MEM)
1375 if (GET_CODE (y) != MEM)
1380 if (GET_CODE (size) == CONST_INT
1381 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1382 move_by_pieces (x, y, INTVAL (size), align);
1385 /* Try the most limited insn first, because there's no point
1386 including more than one in the machine description unless
1387 the more limited one has some advantage. */
1389 rtx opalign = GEN_INT (align);
1390 enum machine_mode mode;
1392 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1393 mode = GET_MODE_WIDER_MODE (mode))
1395 enum insn_code code = movstr_optab[(int) mode];
1397 if (code != CODE_FOR_nothing
1398 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1399 here because if SIZE is less than the mode mask, as it is
1400 returned by the macro, it will definitely be less than the
1401 actual mode mask. */
1402 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1403 && (insn_operand_predicate[(int) code][0] == 0
1404 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1405 && (insn_operand_predicate[(int) code][1] == 0
1406 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1407 && (insn_operand_predicate[(int) code][3] == 0
1408 || (*insn_operand_predicate[(int) code][3]) (opalign,
1412 rtx last = get_last_insn ();
1415 op2 = convert_to_mode (mode, size, 1);
1416 if (insn_operand_predicate[(int) code][2] != 0
1417 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1418 op2 = copy_to_mode_reg (mode, op2);
1420 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1427 delete_insns_since (last);
1431 #ifdef TARGET_MEM_FUNCTIONS
1432 emit_library_call (memcpy_libfunc, 0,
1433 VOIDmode, 3, XEXP (x, 0), Pmode,
1435 convert_to_mode (Pmode, size, 1), Pmode);
1437 emit_library_call (bcopy_libfunc, 0,
1438 VOIDmode, 3, XEXP (y, 0), Pmode,
1440 convert_to_mode (Pmode, size, 1), Pmode);
1445 /* Copy all or part of a value X into registers starting at REGNO.
1446 The number of registers to be filled is NREGS. */
1449 move_block_to_reg (regno, x, nregs, mode)
1453 enum machine_mode mode;
1458 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1459 x = validize_mem (force_const_mem (mode, x));
1461 /* See if the machine can do this with a load multiple insn. */
1462 #ifdef HAVE_load_multiple
1463 last = get_last_insn ();
1464 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1472 delete_insns_since (last);
1475 for (i = 0; i < nregs; i++)
1476 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1477 operand_subword_force (x, i, mode));
1480 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1481 The number of registers to be filled is NREGS. */
1484 move_block_from_reg (regno, x, nregs)
1492 /* See if the machine can do this with a store multiple insn. */
1493 #ifdef HAVE_store_multiple
1494 last = get_last_insn ();
1495 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1503 delete_insns_since (last);
1506 for (i = 0; i < nregs; i++)
1508 rtx tem = operand_subword (x, i, 1, BLKmode);
1513 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1517 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1520 use_regs (regno, nregs)
1526 for (i = 0; i < nregs; i++)
1527 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1530 /* Mark the instructions since PREV as a libcall block.
1531 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1540 /* Find the instructions to mark */
1542 insn_first = NEXT_INSN (prev);
1544 insn_first = get_insns ();
1546 insn_last = get_last_insn ();
1548 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1549 REG_NOTES (insn_last));
1551 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1552 REG_NOTES (insn_first));
1555 /* Write zeros through the storage of OBJECT.
1556 If OBJECT has BLKmode, SIZE is its length in bytes. */
1559 clear_storage (object, size)
1563 if (GET_MODE (object) == BLKmode)
1565 #ifdef TARGET_MEM_FUNCTIONS
1566 emit_library_call (memset_libfunc, 0,
1568 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1569 GEN_INT (size), Pmode);
1571 emit_library_call (bzero_libfunc, 0,
1573 XEXP (object, 0), Pmode,
1574 GEN_INT (size), Pmode);
1578 emit_move_insn (object, const0_rtx);
1581 /* Generate code to copy Y into X.
1582 Both Y and X must have the same mode, except that
1583 Y can be a constant with VOIDmode.
1584 This mode cannot be BLKmode; use emit_block_move for that.
1586 Return the last instruction emitted. */
1589 emit_move_insn (x, y)
1592 enum machine_mode mode = GET_MODE (x);
1593 enum machine_mode submode;
1594 enum mode_class class = GET_MODE_CLASS (mode);
1597 x = protect_from_queue (x, 1);
1598 y = protect_from_queue (y, 0);
1600 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1603 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1604 y = force_const_mem (mode, y);
1606 /* If X or Y are memory references, verify that their addresses are valid
1608 if (GET_CODE (x) == MEM
1609 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1610 && ! push_operand (x, GET_MODE (x)))
1612 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1613 x = change_address (x, VOIDmode, XEXP (x, 0));
1615 if (GET_CODE (y) == MEM
1616 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1618 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1619 y = change_address (y, VOIDmode, XEXP (y, 0));
1621 if (mode == BLKmode)
1624 return emit_move_insn_1 (x, y);
1627 /* Low level part of emit_move_insn.
1628 Called just like emit_move_insn, but assumes X and Y
1629 are basically valid. */
1632 emit_move_insn_1 (x, y)
1635 enum machine_mode mode = GET_MODE (x);
1636 enum machine_mode submode;
1637 enum mode_class class = GET_MODE_CLASS (mode);
1640 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1641 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1642 (class == MODE_COMPLEX_INT
1643 ? MODE_INT : MODE_FLOAT),
1646 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1648 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1650 /* Expand complex moves by moving real part and imag part, if possible. */
1651 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1652 && submode != BLKmode
1653 && (mov_optab->handlers[(int) submode].insn_code
1654 != CODE_FOR_nothing))
1656 /* Don't split destination if it is a stack push. */
1657 int stack = push_operand (x, GET_MODE (x));
1658 rtx prev = get_last_insn ();
1660 /* Tell flow that the whole of the destination is being set. */
1661 if (GET_CODE (x) == REG)
1662 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1664 /* If this is a stack, push the highpart first, so it
1665 will be in the argument order.
1667 In that case, change_address is used only to convert
1668 the mode, not to change the address. */
1669 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1670 ((stack ? change_address (x, submode, (rtx) 0)
1671 : gen_highpart (submode, x)),
1672 gen_highpart (submode, y)));
1673 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1674 ((stack ? change_address (x, submode, (rtx) 0)
1675 : gen_lowpart (submode, x)),
1676 gen_lowpart (submode, y)));
1680 return get_last_insn ();
1683 /* This will handle any multi-word mode that lacks a move_insn pattern.
1684 However, you will get better code if you define such patterns,
1685 even if they must turn into multiple assembler instructions. */
1686 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1689 rtx prev_insn = get_last_insn ();
1692 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1695 rtx xpart = operand_subword (x, i, 1, mode);
1696 rtx ypart = operand_subword (y, i, 1, mode);
1698 /* If we can't get a part of Y, put Y into memory if it is a
1699 constant. Otherwise, force it into a register. If we still
1700 can't get a part of Y, abort. */
1701 if (ypart == 0 && CONSTANT_P (y))
1703 y = force_const_mem (mode, y);
1704 ypart = operand_subword (y, i, 1, mode);
1706 else if (ypart == 0)
1707 ypart = operand_subword_force (y, i, mode);
1709 if (xpart == 0 || ypart == 0)
1712 last_insn = emit_move_insn (xpart, ypart);
1714 /* Mark these insns as a libcall block. */
1715 group_insns (prev_insn);
1723 /* Pushing data onto the stack. */
1725 /* Push a block of length SIZE (perhaps variable)
1726 and return an rtx to address the beginning of the block.
1727 Note that it is not possible for the value returned to be a QUEUED.
1728 The value may be virtual_outgoing_args_rtx.
1730 EXTRA is the number of bytes of padding to push in addition to SIZE.
1731 BELOW nonzero means this padding comes at low addresses;
1732 otherwise, the padding comes at high addresses. */
1735 push_block (size, extra, below)
1740 if (CONSTANT_P (size))
1741 anti_adjust_stack (plus_constant (size, extra));
1742 else if (GET_CODE (size) == REG && extra == 0)
1743 anti_adjust_stack (size);
1746 rtx temp = copy_to_mode_reg (Pmode, size);
1748 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1749 temp, 0, OPTAB_LIB_WIDEN);
1750 anti_adjust_stack (temp);
1753 #ifdef STACK_GROWS_DOWNWARD
1754 temp = virtual_outgoing_args_rtx;
1755 if (extra != 0 && below)
1756 temp = plus_constant (temp, extra);
1758 if (GET_CODE (size) == CONST_INT)
1759 temp = plus_constant (virtual_outgoing_args_rtx,
1760 - INTVAL (size) - (below ? 0 : extra));
1761 else if (extra != 0 && !below)
1762 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1763 negate_rtx (Pmode, plus_constant (size, extra)));
1765 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1766 negate_rtx (Pmode, size));
1769 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1775 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1778 /* Generate code to push X onto the stack, assuming it has mode MODE and
1780 MODE is redundant except when X is a CONST_INT (since they don't
1782 SIZE is an rtx for the size of data to be copied (in bytes),
1783 needed only if X is BLKmode.
1785 ALIGN (in bytes) is maximum alignment we can assume.
1787 If PARTIAL and REG are both nonzero, then copy that many of the first
1788 words of X into registers starting with REG, and push the rest of X.
1789 The amount of space pushed is decreased by PARTIAL words,
1790 rounded *down* to a multiple of PARM_BOUNDARY.
1791 REG must be a hard register in this case.
1792 If REG is zero but PARTIAL is not, take any all others actions for an
1793 argument partially in registers, but do not actually load any
1796 EXTRA is the amount in bytes of extra space to leave next to this arg.
1797 This is ignored if an argument block has already been allocated.
1799 On a machine that lacks real push insns, ARGS_ADDR is the address of
1800 the bottom of the argument block for this call. We use indexing off there
1801 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1802 argument block has not been preallocated.
1804 ARGS_SO_FAR is the size of args previously pushed for this call. */
1807 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1808 args_addr, args_so_far)
1810 enum machine_mode mode;
1821 enum direction stack_direction
1822 #ifdef STACK_GROWS_DOWNWARD
1828 /* Decide where to pad the argument: `downward' for below,
1829 `upward' for above, or `none' for don't pad it.
1830 Default is below for small data on big-endian machines; else above. */
1831 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1833 /* Invert direction if stack is post-update. */
1834 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1835 if (where_pad != none)
1836 where_pad = (where_pad == downward ? upward : downward);
1838 xinner = x = protect_from_queue (x, 0);
1840 if (mode == BLKmode)
1842 /* Copy a block into the stack, entirely or partially. */
1845 int used = partial * UNITS_PER_WORD;
1846 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1854 /* USED is now the # of bytes we need not copy to the stack
1855 because registers will take care of them. */
1858 xinner = change_address (xinner, BLKmode,
1859 plus_constant (XEXP (xinner, 0), used));
1861 /* If the partial register-part of the arg counts in its stack size,
1862 skip the part of stack space corresponding to the registers.
1863 Otherwise, start copying to the beginning of the stack space,
1864 by setting SKIP to 0. */
1865 #ifndef REG_PARM_STACK_SPACE
1871 #ifdef PUSH_ROUNDING
1872 /* Do it with several push insns if that doesn't take lots of insns
1873 and if there is no difficulty with push insns that skip bytes
1874 on the stack for alignment purposes. */
1876 && GET_CODE (size) == CONST_INT
1878 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1880 /* Here we avoid the case of a structure whose weak alignment
1881 forces many pushes of a small amount of data,
1882 and such small pushes do rounding that causes trouble. */
1883 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1884 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1885 || PUSH_ROUNDING (align) == align)
1886 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1888 /* Push padding now if padding above and stack grows down,
1889 or if padding below and stack grows up.
1890 But if space already allocated, this has already been done. */
1891 if (extra && args_addr == 0
1892 && where_pad != none && where_pad != stack_direction)
1893 anti_adjust_stack (GEN_INT (extra));
1895 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1896 INTVAL (size) - used, align);
1899 #endif /* PUSH_ROUNDING */
1901 /* Otherwise make space on the stack and copy the data
1902 to the address of that space. */
1904 /* Deduct words put into registers from the size we must copy. */
1907 if (GET_CODE (size) == CONST_INT)
1908 size = GEN_INT (INTVAL (size) - used);
1910 size = expand_binop (GET_MODE (size), sub_optab, size,
1911 GEN_INT (used), NULL_RTX, 0,
1915 /* Get the address of the stack space.
1916 In this case, we do not deal with EXTRA separately.
1917 A single stack adjust will do. */
1920 temp = push_block (size, extra, where_pad == downward);
1923 else if (GET_CODE (args_so_far) == CONST_INT)
1924 temp = memory_address (BLKmode,
1925 plus_constant (args_addr,
1926 skip + INTVAL (args_so_far)));
1928 temp = memory_address (BLKmode,
1929 plus_constant (gen_rtx (PLUS, Pmode,
1930 args_addr, args_so_far),
1933 /* TEMP is the address of the block. Copy the data there. */
1934 if (GET_CODE (size) == CONST_INT
1935 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1938 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1939 INTVAL (size), align);
1942 /* Try the most limited insn first, because there's no point
1943 including more than one in the machine description unless
1944 the more limited one has some advantage. */
1945 #ifdef HAVE_movstrqi
1947 && GET_CODE (size) == CONST_INT
1948 && ((unsigned) INTVAL (size)
1949 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1951 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1952 xinner, size, GEN_INT (align));
1960 #ifdef HAVE_movstrhi
1962 && GET_CODE (size) == CONST_INT
1963 && ((unsigned) INTVAL (size)
1964 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1966 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1967 xinner, size, GEN_INT (align));
1975 #ifdef HAVE_movstrsi
1978 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1979 xinner, size, GEN_INT (align));
1987 #ifdef HAVE_movstrdi
1990 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1991 xinner, size, GEN_INT (align));
2000 #ifndef ACCUMULATE_OUTGOING_ARGS
2001 /* If the source is referenced relative to the stack pointer,
2002 copy it to another register to stabilize it. We do not need
2003 to do this if we know that we won't be changing sp. */
2005 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2006 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2007 temp = copy_to_reg (temp);
2010 /* Make inhibit_defer_pop nonzero around the library call
2011 to force it to pop the bcopy-arguments right away. */
2013 #ifdef TARGET_MEM_FUNCTIONS
2014 emit_library_call (memcpy_libfunc, 0,
2015 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2018 emit_library_call (bcopy_libfunc, 0,
2019 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2025 else if (partial > 0)
2027 /* Scalar partly in registers. */
2029 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2032 /* # words of start of argument
2033 that we must make space for but need not store. */
2034 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2035 int args_offset = INTVAL (args_so_far);
2038 /* Push padding now if padding above and stack grows down,
2039 or if padding below and stack grows up.
2040 But if space already allocated, this has already been done. */
2041 if (extra && args_addr == 0
2042 && where_pad != none && where_pad != stack_direction)
2043 anti_adjust_stack (GEN_INT (extra));
2045 /* If we make space by pushing it, we might as well push
2046 the real data. Otherwise, we can leave OFFSET nonzero
2047 and leave the space uninitialized. */
2051 /* Now NOT_STACK gets the number of words that we don't need to
2052 allocate on the stack. */
2053 not_stack = partial - offset;
2055 /* If the partial register-part of the arg counts in its stack size,
2056 skip the part of stack space corresponding to the registers.
2057 Otherwise, start copying to the beginning of the stack space,
2058 by setting SKIP to 0. */
2059 #ifndef REG_PARM_STACK_SPACE
2065 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2066 x = validize_mem (force_const_mem (mode, x));
2068 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2069 SUBREGs of such registers are not allowed. */
2070 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2071 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2072 x = copy_to_reg (x);
2074 /* Loop over all the words allocated on the stack for this arg. */
2075 /* We can do it by words, because any scalar bigger than a word
2076 has a size a multiple of a word. */
2077 #ifndef PUSH_ARGS_REVERSED
2078 for (i = not_stack; i < size; i++)
2080 for (i = size - 1; i >= not_stack; i--)
2082 if (i >= not_stack + offset)
2083 emit_push_insn (operand_subword_force (x, i, mode),
2084 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2086 GEN_INT (args_offset + ((i - not_stack + skip)
2087 * UNITS_PER_WORD)));
2093 /* Push padding now if padding above and stack grows down,
2094 or if padding below and stack grows up.
2095 But if space already allocated, this has already been done. */
2096 if (extra && args_addr == 0
2097 && where_pad != none && where_pad != stack_direction)
2098 anti_adjust_stack (GEN_INT (extra));
2100 #ifdef PUSH_ROUNDING
2102 addr = gen_push_operand ();
2105 if (GET_CODE (args_so_far) == CONST_INT)
2107 = memory_address (mode,
2108 plus_constant (args_addr, INTVAL (args_so_far)));
2110 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2113 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2117 /* If part should go in registers, copy that part
2118 into the appropriate registers. Do this now, at the end,
2119 since mem-to-mem copies above may do function calls. */
2120 if (partial > 0 && reg != 0)
2121 move_block_to_reg (REGNO (reg), x, partial, mode);
2123 if (extra && args_addr == 0 && where_pad == stack_direction)
2124 anti_adjust_stack (GEN_INT (extra));
2127 /* Expand an assignment that stores the value of FROM into TO.
2128 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2129 (This may contain a QUEUED rtx.)
2130 Otherwise, the returned value is not meaningful.
2132 SUGGEST_REG is no longer actually used.
2133 It used to mean, copy the value through a register
2134 and return that register, if that is possible.
2135 But now we do this if WANT_VALUE.
2137 If the value stored is a constant, we return the constant. */
2140 expand_assignment (to, from, want_value, suggest_reg)
2145 register rtx to_rtx = 0;
2148 /* Don't crash if the lhs of the assignment was erroneous. */
2150 if (TREE_CODE (to) == ERROR_MARK)
2151 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2153 /* Assignment of a structure component needs special treatment
2154 if the structure component's rtx is not simply a MEM.
2155 Assignment of an array element at a constant index
2156 has the same problem. */
2158 if (TREE_CODE (to) == COMPONENT_REF
2159 || TREE_CODE (to) == BIT_FIELD_REF
2160 || (TREE_CODE (to) == ARRAY_REF
2161 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2162 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2164 enum machine_mode mode1;
2170 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2171 &mode1, &unsignedp, &volatilep);
2173 /* If we are going to use store_bit_field and extract_bit_field,
2174 make sure to_rtx will be safe for multiple use. */
2176 if (mode1 == VOIDmode && want_value)
2177 tem = stabilize_reference (tem);
2179 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2182 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2184 if (GET_CODE (to_rtx) != MEM)
2186 to_rtx = change_address (to_rtx, VOIDmode,
2187 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2188 force_reg (Pmode, offset_rtx)));
2192 if (GET_CODE (to_rtx) == MEM)
2193 MEM_VOLATILE_P (to_rtx) = 1;
2194 #if 0 /* This was turned off because, when a field is volatile
2195 in an object which is not volatile, the object may be in a register,
2196 and then we would abort over here. */
2202 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2204 /* Spurious cast makes HPUX compiler happy. */
2205 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2208 /* Required alignment of containing datum. */
2209 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2210 int_size_in_bytes (TREE_TYPE (tem)));
2211 preserve_temp_slots (result);
2217 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2218 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2221 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2223 /* Don't move directly into a return register. */
2224 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2226 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2227 emit_move_insn (to_rtx, temp);
2228 preserve_temp_slots (to_rtx);
2233 /* In case we are returning the contents of an object which overlaps
2234 the place the value is being stored, use a safe function when copying
2235 a value through a pointer into a structure value return block. */
2236 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2237 && current_function_returns_struct
2238 && !current_function_returns_pcc_struct)
2240 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2241 rtx size = expr_size (from);
2243 #ifdef TARGET_MEM_FUNCTIONS
2244 emit_library_call (memcpy_libfunc, 0,
2245 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2246 XEXP (from_rtx, 0), Pmode,
2249 emit_library_call (bcopy_libfunc, 0,
2250 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2251 XEXP (to_rtx, 0), Pmode,
2255 preserve_temp_slots (to_rtx);
2260 /* Compute FROM and store the value in the rtx we got. */
2262 result = store_expr (from, to_rtx, want_value);
2263 preserve_temp_slots (result);
2268 /* Generate code for computing expression EXP,
2269 and storing the value into TARGET.
2270 Returns TARGET or an equivalent value.
2271 TARGET may contain a QUEUED rtx.
2273 If SUGGEST_REG is nonzero, copy the value through a register
2274 and return that register, if that is possible.
2276 If the value stored is a constant, we return the constant. */
2279 store_expr (exp, target, suggest_reg)
2281 register rtx target;
2285 int dont_return_target = 0;
2287 if (TREE_CODE (exp) == COMPOUND_EXPR)
2289 /* Perform first part of compound expression, then assign from second
2291 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2293 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2295 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2297 /* For conditional expression, get safe form of the target. Then
2298 test the condition, doing the appropriate assignment on either
2299 side. This avoids the creation of unnecessary temporaries.
2300 For non-BLKmode, it is more efficient not to do this. */
2302 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2305 target = protect_from_queue (target, 1);
2308 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2309 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2311 emit_jump_insn (gen_jump (lab2));
2314 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2320 else if (suggest_reg && GET_CODE (target) == MEM
2321 && GET_MODE (target) != BLKmode)
2322 /* If target is in memory and caller wants value in a register instead,
2323 arrange that. Pass TARGET as target for expand_expr so that,
2324 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2325 We know expand_expr will not use the target in that case. */
2327 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2328 GET_MODE (target), 0);
2329 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2330 temp = copy_to_reg (temp);
2331 dont_return_target = 1;
2333 else if (queued_subexp_p (target))
2334 /* If target contains a postincrement, it is not safe
2335 to use as the returned value. It would access the wrong
2336 place by the time the queued increment gets output.
2337 So copy the value through a temporary and use that temp
2340 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2342 /* Expand EXP into a new pseudo. */
2343 temp = gen_reg_rtx (GET_MODE (target));
2344 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2347 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2348 dont_return_target = 1;
2350 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2351 /* If this is an scalar in a register that is stored in a wider mode
2352 than the declared mode, compute the result into its declared mode
2353 and then convert to the wider mode. Our value is the computed
2356 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2357 convert_move (SUBREG_REG (target), temp,
2358 SUBREG_PROMOTED_UNSIGNED_P (target));
2363 temp = expand_expr (exp, target, GET_MODE (target), 0);
2364 /* DO return TARGET if it's a specified hardware register.
2365 expand_return relies on this. */
2366 if (!(target && GET_CODE (target) == REG
2367 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2368 && CONSTANT_P (temp))
2369 dont_return_target = 1;
2372 /* If value was not generated in the target, store it there.
2373 Convert the value to TARGET's type first if nec. */
2375 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2377 target = protect_from_queue (target, 1);
2378 if (GET_MODE (temp) != GET_MODE (target)
2379 && GET_MODE (temp) != VOIDmode)
2381 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2382 if (dont_return_target)
2384 /* In this case, we will return TEMP,
2385 so make sure it has the proper mode.
2386 But don't forget to store the value into TARGET. */
2387 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2388 emit_move_insn (target, temp);
2391 convert_move (target, temp, unsignedp);
2394 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2396 /* Handle copying a string constant into an array.
2397 The string constant may be shorter than the array.
2398 So copy just the string's actual length, and clear the rest. */
2401 /* Get the size of the data type of the string,
2402 which is actually the size of the target. */
2403 size = expr_size (exp);
2404 if (GET_CODE (size) == CONST_INT
2405 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2406 emit_block_move (target, temp, size,
2407 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2410 /* Compute the size of the data to copy from the string. */
2412 = fold (build (MIN_EXPR, sizetype,
2413 size_binop (CEIL_DIV_EXPR,
2414 TYPE_SIZE (TREE_TYPE (exp)),
2415 size_int (BITS_PER_UNIT)),
2417 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2418 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2422 /* Copy that much. */
2423 emit_block_move (target, temp, copy_size_rtx,
2424 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2426 /* Figure out how much is left in TARGET
2427 that we have to clear. */
2428 if (GET_CODE (copy_size_rtx) == CONST_INT)
2430 temp = plus_constant (XEXP (target, 0),
2431 TREE_STRING_LENGTH (exp));
2432 size = plus_constant (size,
2433 - TREE_STRING_LENGTH (exp));
2437 enum machine_mode size_mode = Pmode;
2439 temp = force_reg (Pmode, XEXP (target, 0));
2440 temp = expand_binop (size_mode, add_optab, temp,
2441 copy_size_rtx, NULL_RTX, 0,
2444 size = expand_binop (size_mode, sub_optab, size,
2445 copy_size_rtx, NULL_RTX, 0,
2448 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2449 GET_MODE (size), 0, 0);
2450 label = gen_label_rtx ();
2451 emit_jump_insn (gen_blt (label));
2454 if (size != const0_rtx)
2456 #ifdef TARGET_MEM_FUNCTIONS
2457 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2458 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2460 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2461 temp, Pmode, size, Pmode);
2468 else if (GET_MODE (temp) == BLKmode)
2469 emit_block_move (target, temp, expr_size (exp),
2470 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2472 emit_move_insn (target, temp);
2474 if (dont_return_target)
2479 /* Store the value of constructor EXP into the rtx TARGET.
2480 TARGET is either a REG or a MEM. */
2483 store_constructor (exp, target)
2487 tree type = TREE_TYPE (exp);
2489 /* We know our target cannot conflict, since safe_from_p has been called. */
2491 /* Don't try copying piece by piece into a hard register
2492 since that is vulnerable to being clobbered by EXP.
2493 Instead, construct in a pseudo register and then copy it all. */
2494 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2496 rtx temp = gen_reg_rtx (GET_MODE (target));
2497 store_constructor (exp, temp);
2498 emit_move_insn (target, temp);
2503 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2507 /* Inform later passes that the whole union value is dead. */
2508 if (TREE_CODE (type) == UNION_TYPE)
2509 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2511 /* If we are building a static constructor into a register,
2512 set the initial value as zero so we can fold the value into
2514 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2515 emit_move_insn (target, const0_rtx);
2517 /* If the constructor has fewer fields than the structure,
2518 clear the whole structure first. */
2519 else if (list_length (CONSTRUCTOR_ELTS (exp))
2520 != list_length (TYPE_FIELDS (type)))
2521 clear_storage (target, int_size_in_bytes (type));
2523 /* Inform later passes that the old value is dead. */
2524 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2526 /* Store each element of the constructor into
2527 the corresponding field of TARGET. */
2529 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2531 register tree field = TREE_PURPOSE (elt);
2532 register enum machine_mode mode;
2537 /* Just ignore missing fields.
2538 We cleared the whole structure, above,
2539 if any fields are missing. */
2543 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2544 unsignedp = TREE_UNSIGNED (field);
2545 mode = DECL_MODE (field);
2546 if (DECL_BIT_FIELD (field))
2549 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2550 /* ??? This case remains to be written. */
2553 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2555 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2556 /* The alignment of TARGET is
2557 at least what its type requires. */
2559 TYPE_ALIGN (type) / BITS_PER_UNIT,
2560 int_size_in_bytes (type));
2563 else if (TREE_CODE (type) == ARRAY_TYPE)
2567 tree domain = TYPE_DOMAIN (type);
2568 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2569 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2570 tree elttype = TREE_TYPE (type);
2572 /* If the constructor has fewer fields than the structure,
2573 clear the whole structure first. Similarly if this this is
2574 static constructor of a non-BLKmode object. */
2576 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2577 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2578 clear_storage (target, maxelt - minelt + 1);
2580 /* Inform later passes that the old value is dead. */
2581 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2583 /* Store each element of the constructor into
2584 the corresponding element of TARGET, determined
2585 by counting the elements. */
2586 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2588 elt = TREE_CHAIN (elt), i++)
2590 register enum machine_mode mode;
2595 mode = TYPE_MODE (elttype);
2596 bitsize = GET_MODE_BITSIZE (mode);
2597 unsignedp = TREE_UNSIGNED (elttype);
2599 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2601 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2602 /* The alignment of TARGET is
2603 at least what its type requires. */
2605 TYPE_ALIGN (type) / BITS_PER_UNIT,
2606 int_size_in_bytes (type));
2614 /* Store the value of EXP (an expression tree)
2615 into a subfield of TARGET which has mode MODE and occupies
2616 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2617 If MODE is VOIDmode, it means that we are storing into a bit-field.
2619 If VALUE_MODE is VOIDmode, return nothing in particular.
2620 UNSIGNEDP is not used in this case.
2622 Otherwise, return an rtx for the value stored. This rtx
2623 has mode VALUE_MODE if that is convenient to do.
2624 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2626 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2627 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2630 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2631 unsignedp, align, total_size)
2633 int bitsize, bitpos;
2634 enum machine_mode mode;
2636 enum machine_mode value_mode;
2641 HOST_WIDE_INT width_mask = 0;
2643 if (bitsize < HOST_BITS_PER_WIDE_INT)
2644 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2646 /* If we are storing into an unaligned field of an aligned union that is
2647 in a register, we may have the mode of TARGET being an integer mode but
2648 MODE == BLKmode. In that case, get an aligned object whose size and
2649 alignment are the same as TARGET and store TARGET into it (we can avoid
2650 the store if the field being stored is the entire width of TARGET). Then
2651 call ourselves recursively to store the field into a BLKmode version of
2652 that object. Finally, load from the object into TARGET. This is not
2653 very efficient in general, but should only be slightly more expensive
2654 than the otherwise-required unaligned accesses. Perhaps this can be
2655 cleaned up later. */
2658 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2660 rtx object = assign_stack_temp (GET_MODE (target),
2661 GET_MODE_SIZE (GET_MODE (target)), 0);
2662 rtx blk_object = copy_rtx (object);
2664 PUT_MODE (blk_object, BLKmode);
2666 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2667 emit_move_insn (object, target);
2669 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2672 emit_move_insn (target, object);
2677 /* If the structure is in a register or if the component
2678 is a bit field, we cannot use addressing to access it.
2679 Use bit-field techniques or SUBREG to store in it. */
2681 if (mode == VOIDmode
2682 || (mode != BLKmode && ! direct_store[(int) mode])
2683 || GET_CODE (target) == REG
2684 || GET_CODE (target) == SUBREG)
2686 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2687 /* Store the value in the bitfield. */
2688 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2689 if (value_mode != VOIDmode)
2691 /* The caller wants an rtx for the value. */
2692 /* If possible, avoid refetching from the bitfield itself. */
2694 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2697 enum machine_mode tmode;
2700 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2701 tmode = GET_MODE (temp);
2702 if (tmode == VOIDmode)
2704 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2705 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2706 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2708 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2709 NULL_RTX, value_mode, 0, align,
2716 rtx addr = XEXP (target, 0);
2719 /* If a value is wanted, it must be the lhs;
2720 so make the address stable for multiple use. */
2722 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2723 && ! CONSTANT_ADDRESS_P (addr)
2724 /* A frame-pointer reference is already stable. */
2725 && ! (GET_CODE (addr) == PLUS
2726 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2727 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2728 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2729 addr = copy_to_reg (addr);
2731 /* Now build a reference to just the desired component. */
2733 to_rtx = change_address (target, mode,
2734 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2735 MEM_IN_STRUCT_P (to_rtx) = 1;
2737 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2741 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2742 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2743 ARRAY_REFs and find the ultimate containing object, which we return.
2745 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2746 bit position, and *PUNSIGNEDP to the signedness of the field.
2747 If the position of the field is variable, we store a tree
2748 giving the variable offset (in units) in *POFFSET.
2749 This offset is in addition to the bit position.
2750 If the position is not variable, we store 0 in *POFFSET.
2752 If any of the extraction expressions is volatile,
2753 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2755 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2756 is a mode that can be used to access the field. In that case, *PBITSIZE
2759 If the field describes a variable-sized object, *PMODE is set to
2760 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2761 this case, but the address of the object can be found. */
2764 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2765 punsignedp, pvolatilep)
2770 enum machine_mode *pmode;
2775 enum machine_mode mode = VOIDmode;
2776 tree offset = integer_zero_node;
2778 if (TREE_CODE (exp) == COMPONENT_REF)
2780 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2781 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2782 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2783 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2785 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2787 size_tree = TREE_OPERAND (exp, 1);
2788 *punsignedp = TREE_UNSIGNED (exp);
2792 mode = TYPE_MODE (TREE_TYPE (exp));
2793 *pbitsize = GET_MODE_BITSIZE (mode);
2794 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2799 if (TREE_CODE (size_tree) != INTEGER_CST)
2800 mode = BLKmode, *pbitsize = -1;
2802 *pbitsize = TREE_INT_CST_LOW (size_tree);
2805 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2806 and find the ultimate containing object. */
2812 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2814 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2815 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2816 : TREE_OPERAND (exp, 2));
2818 if (TREE_CODE (pos) == PLUS_EXPR)
2821 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2823 constant = TREE_OPERAND (pos, 0);
2824 var = TREE_OPERAND (pos, 1);
2826 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2828 constant = TREE_OPERAND (pos, 1);
2829 var = TREE_OPERAND (pos, 0);
2834 *pbitpos += TREE_INT_CST_LOW (constant);
2835 offset = size_binop (PLUS_EXPR, offset,
2836 size_binop (FLOOR_DIV_EXPR, var,
2837 size_int (BITS_PER_UNIT)));
2839 else if (TREE_CODE (pos) == INTEGER_CST)
2840 *pbitpos += TREE_INT_CST_LOW (pos);
2843 /* Assume here that the offset is a multiple of a unit.
2844 If not, there should be an explicitly added constant. */
2845 offset = size_binop (PLUS_EXPR, offset,
2846 size_binop (FLOOR_DIV_EXPR, pos,
2847 size_int (BITS_PER_UNIT)));
2851 else if (TREE_CODE (exp) == ARRAY_REF)
2853 /* This code is based on the code in case ARRAY_REF in expand_expr
2854 below. We assume here that the size of an array element is
2855 always an integral multiple of BITS_PER_UNIT. */
2857 tree index = TREE_OPERAND (exp, 1);
2858 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2860 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2861 tree index_type = TREE_TYPE (index);
2863 if (! integer_zerop (low_bound))
2864 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2866 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2868 index = convert (type_for_size (POINTER_SIZE, 0), index);
2869 index_type = TREE_TYPE (index);
2872 index = fold (build (MULT_EXPR, index_type, index,
2873 TYPE_SIZE (TREE_TYPE (exp))));
2875 if (TREE_CODE (index) == INTEGER_CST
2876 && TREE_INT_CST_HIGH (index) == 0)
2877 *pbitpos += TREE_INT_CST_LOW (index);
2879 offset = size_binop (PLUS_EXPR, offset,
2880 size_binop (FLOOR_DIV_EXPR, index,
2881 size_int (BITS_PER_UNIT)));
2883 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2884 && ! ((TREE_CODE (exp) == NOP_EXPR
2885 || TREE_CODE (exp) == CONVERT_EXPR)
2886 && (TYPE_MODE (TREE_TYPE (exp))
2887 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2890 /* If any reference in the chain is volatile, the effect is volatile. */
2891 if (TREE_THIS_VOLATILE (exp))
2893 exp = TREE_OPERAND (exp, 0);
2896 /* If this was a bit-field, see if there is a mode that allows direct
2897 access in case EXP is in memory. */
2898 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2900 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2901 if (mode == BLKmode)
2905 if (integer_zerop (offset))
2911 /* We aren't finished fixing the callers to really handle nonzero offset. */
2919 /* Given an rtx VALUE that may contain additions and multiplications,
2920 return an equivalent value that just refers to a register or memory.
2921 This is done by generating instructions to perform the arithmetic
2922 and returning a pseudo-register containing the value.
2924 The returned value may be a REG, SUBREG, MEM or constant. */
2927 force_operand (value, target)
2930 register optab binoptab = 0;
2931 /* Use a temporary to force order of execution of calls to
2935 /* Use subtarget as the target for operand 0 of a binary operation. */
2936 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2938 if (GET_CODE (value) == PLUS)
2939 binoptab = add_optab;
2940 else if (GET_CODE (value) == MINUS)
2941 binoptab = sub_optab;
2942 else if (GET_CODE (value) == MULT)
2944 op2 = XEXP (value, 1);
2945 if (!CONSTANT_P (op2)
2946 && !(GET_CODE (op2) == REG && op2 != subtarget))
2948 tmp = force_operand (XEXP (value, 0), subtarget);
2949 return expand_mult (GET_MODE (value), tmp,
2950 force_operand (op2, NULL_RTX),
2956 op2 = XEXP (value, 1);
2957 if (!CONSTANT_P (op2)
2958 && !(GET_CODE (op2) == REG && op2 != subtarget))
2960 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2962 binoptab = add_optab;
2963 op2 = negate_rtx (GET_MODE (value), op2);
2966 /* Check for an addition with OP2 a constant integer and our first
2967 operand a PLUS of a virtual register and something else. In that
2968 case, we want to emit the sum of the virtual register and the
2969 constant first and then add the other value. This allows virtual
2970 register instantiation to simply modify the constant rather than
2971 creating another one around this addition. */
2972 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2973 && GET_CODE (XEXP (value, 0)) == PLUS
2974 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2975 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2976 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2978 rtx temp = expand_binop (GET_MODE (value), binoptab,
2979 XEXP (XEXP (value, 0), 0), op2,
2980 subtarget, 0, OPTAB_LIB_WIDEN);
2981 return expand_binop (GET_MODE (value), binoptab, temp,
2982 force_operand (XEXP (XEXP (value, 0), 1), 0),
2983 target, 0, OPTAB_LIB_WIDEN);
2986 tmp = force_operand (XEXP (value, 0), subtarget);
2987 return expand_binop (GET_MODE (value), binoptab, tmp,
2988 force_operand (op2, NULL_RTX),
2989 target, 0, OPTAB_LIB_WIDEN);
2990 /* We give UNSIGNEDP = 0 to expand_binop
2991 because the only operations we are expanding here are signed ones. */
2996 /* Subroutine of expand_expr:
2997 save the non-copied parts (LIST) of an expr (LHS), and return a list
2998 which can restore these values to their previous values,
2999 should something modify their storage. */
3002 save_noncopied_parts (lhs, list)
3009 for (tail = list; tail; tail = TREE_CHAIN (tail))
3010 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3011 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3014 tree part = TREE_VALUE (tail);
3015 tree part_type = TREE_TYPE (part);
3016 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3017 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3018 int_size_in_bytes (part_type), 0);
3019 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3020 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3021 parts = tree_cons (to_be_saved,
3022 build (RTL_EXPR, part_type, NULL_TREE,
3025 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3030 /* Subroutine of expand_expr:
3031 record the non-copied parts (LIST) of an expr (LHS), and return a list
3032 which specifies the initial values of these parts. */
3035 init_noncopied_parts (lhs, list)
3042 for (tail = list; tail; tail = TREE_CHAIN (tail))
3043 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3044 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3047 tree part = TREE_VALUE (tail);
3048 tree part_type = TREE_TYPE (part);
3049 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3050 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3055 /* Subroutine of expand_expr: return nonzero iff there is no way that
3056 EXP can reference X, which is being modified. */
3059 safe_from_p (x, exp)
3069 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3070 find the underlying pseudo. */
3071 if (GET_CODE (x) == SUBREG)
3074 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3078 /* If X is a location in the outgoing argument area, it is always safe. */
3079 if (GET_CODE (x) == MEM
3080 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3081 || (GET_CODE (XEXP (x, 0)) == PLUS
3082 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3085 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3088 exp_rtl = DECL_RTL (exp);
3095 if (TREE_CODE (exp) == TREE_LIST)
3096 return ((TREE_VALUE (exp) == 0
3097 || safe_from_p (x, TREE_VALUE (exp)))
3098 && (TREE_CHAIN (exp) == 0
3099 || safe_from_p (x, TREE_CHAIN (exp))));
3104 return safe_from_p (x, TREE_OPERAND (exp, 0));
3108 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3109 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3113 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3114 the expression. If it is set, we conflict iff we are that rtx or
3115 both are in memory. Otherwise, we check all operands of the
3116 expression recursively. */
3118 switch (TREE_CODE (exp))
3121 return staticp (TREE_OPERAND (exp, 0));
3124 if (GET_CODE (x) == MEM)
3129 exp_rtl = CALL_EXPR_RTL (exp);
3132 /* Assume that the call will clobber all hard registers and
3134 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3135 || GET_CODE (x) == MEM)
3142 exp_rtl = RTL_EXPR_RTL (exp);
3144 /* We don't know what this can modify. */
3149 case WITH_CLEANUP_EXPR:
3150 exp_rtl = RTL_EXPR_RTL (exp);
3154 exp_rtl = SAVE_EXPR_RTL (exp);
3158 /* The only operand we look at is operand 1. The rest aren't
3159 part of the expression. */
3160 return safe_from_p (x, TREE_OPERAND (exp, 1));
3162 case METHOD_CALL_EXPR:
3163 /* This takes a rtx argument, but shouldn't appear here. */
3167 /* If we have an rtx, we do not need to scan our operands. */
3171 nops = tree_code_length[(int) TREE_CODE (exp)];
3172 for (i = 0; i < nops; i++)
3173 if (TREE_OPERAND (exp, i) != 0
3174 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3178 /* If we have an rtl, find any enclosed object. Then see if we conflict
3182 if (GET_CODE (exp_rtl) == SUBREG)
3184 exp_rtl = SUBREG_REG (exp_rtl);
3185 if (GET_CODE (exp_rtl) == REG
3186 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3190 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3191 are memory and EXP is not readonly. */
3192 return ! (rtx_equal_p (x, exp_rtl)
3193 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3194 && ! TREE_READONLY (exp)));
3197 /* If we reach here, it is safe. */
3201 /* Subroutine of expand_expr: return nonzero iff EXP is an
3202 expression whose type is statically determinable. */
3208 if (TREE_CODE (exp) == PARM_DECL
3209 || TREE_CODE (exp) == VAR_DECL
3210 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3211 || TREE_CODE (exp) == COMPONENT_REF
3212 || TREE_CODE (exp) == ARRAY_REF)
3217 /* expand_expr: generate code for computing expression EXP.
3218 An rtx for the computed value is returned. The value is never null.
3219 In the case of a void EXP, const0_rtx is returned.
3221 The value may be stored in TARGET if TARGET is nonzero.
3222 TARGET is just a suggestion; callers must assume that
3223 the rtx returned may not be the same as TARGET.
3225 If TARGET is CONST0_RTX, it means that the value will be ignored.
3227 If TMODE is not VOIDmode, it suggests generating the
3228 result in mode TMODE. But this is done only when convenient.
3229 Otherwise, TMODE is ignored and the value generated in its natural mode.
3230 TMODE is just a suggestion; callers must assume that
3231 the rtx returned may not have mode TMODE.
3233 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3234 with a constant address even if that address is not normally legitimate.
3235 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3237 If MODIFIER is EXPAND_SUM then when EXP is an addition
3238 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3239 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3240 products as above, or REG or MEM, or constant.
3241 Ordinarily in such cases we would output mul or add instructions
3242 and then return a pseudo reg containing the sum.
3244 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3245 it also marks a label as absolutely required (it can't be dead).
3246 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3247 This is used for outputting expressions used in initializers. */
3250 expand_expr (exp, target, tmode, modifier)
3253 enum machine_mode tmode;
3254 enum expand_modifier modifier;
3256 register rtx op0, op1, temp;
3257 tree type = TREE_TYPE (exp);
3258 int unsignedp = TREE_UNSIGNED (type);
3259 register enum machine_mode mode = TYPE_MODE (type);
3260 register enum tree_code code = TREE_CODE (exp);
3262 /* Use subtarget as the target for operand 0 of a binary operation. */
3263 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3264 rtx original_target = target;
3265 int ignore = target == const0_rtx;
3268 /* Don't use hard regs as subtargets, because the combiner
3269 can only handle pseudo regs. */
3270 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3272 /* Avoid subtargets inside loops,
3273 since they hide some invariant expressions. */
3274 if (preserve_subexpressions_p ())
3277 if (ignore) target = 0, original_target = 0;
3279 /* If will do cse, generate all results into pseudo registers
3280 since 1) that allows cse to find more things
3281 and 2) otherwise cse could produce an insn the machine
3284 if (! cse_not_expected && mode != BLKmode && target
3285 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3288 /* Ensure we reference a volatile object even if value is ignored. */
3289 if (ignore && TREE_THIS_VOLATILE (exp)
3290 && TREE_CODE (exp) != FUNCTION_DECL
3291 && mode != VOIDmode && mode != BLKmode)
3293 target = gen_reg_rtx (mode);
3294 temp = expand_expr (exp, target, VOIDmode, modifier);
3296 emit_move_insn (target, temp);
3304 tree function = decl_function_context (exp);
3305 /* Handle using a label in a containing function. */
3306 if (function != current_function_decl && function != 0)
3308 struct function *p = find_function_data (function);
3309 /* Allocate in the memory associated with the function
3310 that the label is in. */
3311 push_obstacks (p->function_obstack,
3312 p->function_maybepermanent_obstack);
3314 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3315 label_rtx (exp), p->forced_labels);
3318 else if (modifier == EXPAND_INITIALIZER)
3319 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3320 label_rtx (exp), forced_labels);
3321 temp = gen_rtx (MEM, FUNCTION_MODE,
3322 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3323 if (function != current_function_decl && function != 0)
3324 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3329 if (DECL_RTL (exp) == 0)
3331 error_with_decl (exp, "prior parameter's size depends on `%s'");
3332 return CONST0_RTX (mode);
3338 if (DECL_RTL (exp) == 0)
3340 /* Ensure variable marked as used
3341 even if it doesn't go through a parser. */
3342 TREE_USED (exp) = 1;
3343 /* Handle variables inherited from containing functions. */
3344 context = decl_function_context (exp);
3346 /* We treat inline_function_decl as an alias for the current function
3347 because that is the inline function whose vars, types, etc.
3348 are being merged into the current function.
3349 See expand_inline_function. */
3350 if (context != 0 && context != current_function_decl
3351 && context != inline_function_decl
3352 /* If var is static, we don't need a static chain to access it. */
3353 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3354 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3358 /* Mark as non-local and addressable. */
3359 DECL_NONLOCAL (exp) = 1;
3360 mark_addressable (exp);
3361 if (GET_CODE (DECL_RTL (exp)) != MEM)
3363 addr = XEXP (DECL_RTL (exp), 0);
3364 if (GET_CODE (addr) == MEM)
3365 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3367 addr = fix_lexical_addr (addr, exp);
3368 return change_address (DECL_RTL (exp), mode, addr);
3371 /* This is the case of an array whose size is to be determined
3372 from its initializer, while the initializer is still being parsed.
3374 if (GET_CODE (DECL_RTL (exp)) == MEM
3375 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3376 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3377 XEXP (DECL_RTL (exp), 0));
3378 if (GET_CODE (DECL_RTL (exp)) == MEM
3379 && modifier != EXPAND_CONST_ADDRESS
3380 && modifier != EXPAND_SUM
3381 && modifier != EXPAND_INITIALIZER)
3383 /* DECL_RTL probably contains a constant address.
3384 On RISC machines where a constant address isn't valid,
3385 make some insns to get that address into a register. */
3386 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3388 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3389 return change_address (DECL_RTL (exp), VOIDmode,
3390 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3393 /* If the mode of DECL_RTL does not match that of the decl, it
3394 must be a promoted value. We return a SUBREG of the wanted mode,
3395 but mark it so that we know that it was already extended. */
3397 if (GET_CODE (DECL_RTL (exp)) == REG
3398 && GET_MODE (DECL_RTL (exp)) != mode)
3400 enum machine_mode decl_mode = DECL_MODE (exp);
3402 /* Get the signedness used for this variable. Ensure we get the
3403 same mode we got when the variable was declared. */
3405 PROMOTE_MODE (decl_mode, unsignedp, type);
3407 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3410 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3411 SUBREG_PROMOTED_VAR_P (temp) = 1;
3412 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3416 return DECL_RTL (exp);
3419 return immed_double_const (TREE_INT_CST_LOW (exp),
3420 TREE_INT_CST_HIGH (exp),
3424 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3427 /* If optimized, generate immediate CONST_DOUBLE
3428 which will be turned into memory by reload if necessary.
3430 We used to force a register so that loop.c could see it. But
3431 this does not allow gen_* patterns to perform optimizations with
3432 the constants. It also produces two insns in cases like "x = 1.0;".
3433 On most machines, floating-point constants are not permitted in
3434 many insns, so we'd end up copying it to a register in any case.
3436 Now, we do the copying in expand_binop, if appropriate. */
3437 return immed_real_const (exp);
3441 if (! TREE_CST_RTL (exp))
3442 output_constant_def (exp);
3444 /* TREE_CST_RTL probably contains a constant address.
3445 On RISC machines where a constant address isn't valid,
3446 make some insns to get that address into a register. */
3447 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3448 && modifier != EXPAND_CONST_ADDRESS
3449 && modifier != EXPAND_INITIALIZER
3450 && modifier != EXPAND_SUM
3451 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3452 return change_address (TREE_CST_RTL (exp), VOIDmode,
3453 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3454 return TREE_CST_RTL (exp);
3457 context = decl_function_context (exp);
3458 /* We treat inline_function_decl as an alias for the current function
3459 because that is the inline function whose vars, types, etc.
3460 are being merged into the current function.
3461 See expand_inline_function. */
3462 if (context == current_function_decl || context == inline_function_decl)
3465 /* If this is non-local, handle it. */
3468 temp = SAVE_EXPR_RTL (exp);
3469 if (temp && GET_CODE (temp) == REG)
3471 put_var_into_stack (exp);
3472 temp = SAVE_EXPR_RTL (exp);
3474 if (temp == 0 || GET_CODE (temp) != MEM)
3476 return change_address (temp, mode,
3477 fix_lexical_addr (XEXP (temp, 0), exp));
3479 if (SAVE_EXPR_RTL (exp) == 0)
3481 if (mode == BLKmode)
3483 = assign_stack_temp (mode,
3484 int_size_in_bytes (TREE_TYPE (exp)), 0);
3487 enum machine_mode var_mode = mode;
3489 if (TREE_CODE (type) == INTEGER_TYPE
3490 || TREE_CODE (type) == ENUMERAL_TYPE
3491 || TREE_CODE (type) == BOOLEAN_TYPE
3492 || TREE_CODE (type) == CHAR_TYPE
3493 || TREE_CODE (type) == REAL_TYPE
3494 || TREE_CODE (type) == POINTER_TYPE
3495 || TREE_CODE (type) == OFFSET_TYPE)
3497 PROMOTE_MODE (var_mode, unsignedp, type);
3500 temp = gen_reg_rtx (var_mode);
3503 SAVE_EXPR_RTL (exp) = temp;
3504 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3505 if (!optimize && GET_CODE (temp) == REG)
3506 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3510 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3511 must be a promoted value. We return a SUBREG of the wanted mode,
3512 but mark it so that we know that it was already extended. Note
3513 that `unsignedp' was modified above in this case. */
3515 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3516 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3518 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3519 SUBREG_PROMOTED_VAR_P (temp) = 1;
3520 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3524 return SAVE_EXPR_RTL (exp);
3527 /* Exit the current loop if the body-expression is true. */
3529 rtx label = gen_label_rtx ();
3530 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3531 expand_exit_loop (NULL_PTR);
3537 expand_start_loop (1);
3538 expand_expr_stmt (TREE_OPERAND (exp, 0));
3545 tree vars = TREE_OPERAND (exp, 0);
3546 int vars_need_expansion = 0;
3548 /* Need to open a binding contour here because
3549 if there are any cleanups they most be contained here. */
3550 expand_start_bindings (0);
3552 /* Mark the corresponding BLOCK for output in its proper place. */
3553 if (TREE_OPERAND (exp, 2) != 0
3554 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3555 insert_block (TREE_OPERAND (exp, 2));
3557 /* If VARS have not yet been expanded, expand them now. */
3560 if (DECL_RTL (vars) == 0)
3562 vars_need_expansion = 1;
3565 expand_decl_init (vars);
3566 vars = TREE_CHAIN (vars);
3569 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3571 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3577 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3579 emit_insns (RTL_EXPR_SEQUENCE (exp));
3580 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3581 return RTL_EXPR_RTL (exp);
3584 /* All elts simple constants => refer to a constant in memory. But
3585 if this is a non-BLKmode mode, let it store a field at a time
3586 since that should make a CONST_INT or CONST_DOUBLE when we
3588 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3590 rtx constructor = output_constant_def (exp);
3591 if (modifier != EXPAND_CONST_ADDRESS
3592 && modifier != EXPAND_INITIALIZER
3593 && modifier != EXPAND_SUM
3594 && !memory_address_p (GET_MODE (constructor),
3595 XEXP (constructor, 0)))
3596 constructor = change_address (constructor, VOIDmode,
3597 XEXP (constructor, 0));
3604 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3605 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3610 if (target == 0 || ! safe_from_p (target, exp))
3612 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3613 target = gen_reg_rtx (mode);
3616 enum tree_code c = TREE_CODE (type);
3618 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3619 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
3620 MEM_IN_STRUCT_P (target) = 1;
3623 store_constructor (exp, target);
3629 tree exp1 = TREE_OPERAND (exp, 0);
3632 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3633 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3634 This code has the same general effect as simply doing
3635 expand_expr on the save expr, except that the expression PTR
3636 is computed for use as a memory address. This means different
3637 code, suitable for indexing, may be generated. */
3638 if (TREE_CODE (exp1) == SAVE_EXPR
3639 && SAVE_EXPR_RTL (exp1) == 0
3640 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3641 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3642 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3644 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3645 VOIDmode, EXPAND_SUM);
3646 op0 = memory_address (mode, temp);
3647 op0 = copy_all_regs (op0);
3648 SAVE_EXPR_RTL (exp1) = op0;
3652 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3653 op0 = memory_address (mode, op0);
3656 temp = gen_rtx (MEM, mode, op0);
3657 /* If address was computed by addition,
3658 mark this as an element of an aggregate. */
3659 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3660 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3661 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3662 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3663 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3664 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3665 || (TREE_CODE (exp1) == ADDR_EXPR
3666 && (exp2 = TREE_OPERAND (exp1, 0))
3667 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3668 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3669 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3670 MEM_IN_STRUCT_P (temp) = 1;
3671 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3672 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3673 a location is accessed through a pointer to const does not mean
3674 that the value there can never change. */
3675 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3681 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3685 tree array = TREE_OPERAND (exp, 0);
3686 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3687 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3688 tree index = TREE_OPERAND (exp, 1);
3689 tree index_type = TREE_TYPE (index);
3692 /* Optimize the special-case of a zero lower bound. */
3693 if (! integer_zerop (low_bound))
3694 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3696 if (TREE_CODE (index) != INTEGER_CST
3697 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3699 /* Nonconstant array index or nonconstant element size.
3700 Generate the tree for *(&array+index) and expand that,
3701 except do it in a language-independent way
3702 and don't complain about non-lvalue arrays.
3703 `mark_addressable' should already have been called
3704 for any array for which this case will be reached. */
3706 /* Don't forget the const or volatile flag from the array
3708 tree variant_type = build_type_variant (type,
3709 TREE_READONLY (exp),
3710 TREE_THIS_VOLATILE (exp));
3711 tree array_adr = build1 (ADDR_EXPR,
3712 build_pointer_type (variant_type), array);
3715 /* Convert the integer argument to a type the same size as a
3716 pointer so the multiply won't overflow spuriously. */
3717 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3718 index = convert (type_for_size (POINTER_SIZE, 0), index);
3720 /* Don't think the address has side effects
3721 just because the array does.
3722 (In some cases the address might have side effects,
3723 and we fail to record that fact here. However, it should not
3724 matter, since expand_expr should not care.) */
3725 TREE_SIDE_EFFECTS (array_adr) = 0;
3727 elt = build1 (INDIRECT_REF, type,
3728 fold (build (PLUS_EXPR,
3729 TYPE_POINTER_TO (variant_type),
3731 fold (build (MULT_EXPR,
3732 TYPE_POINTER_TO (variant_type),
3734 size_in_bytes (type))))));
3736 /* Volatility, etc., of new expression is same as old
3738 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3739 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3740 TREE_READONLY (elt) = TREE_READONLY (exp);
3742 return expand_expr (elt, target, tmode, modifier);
3745 /* Fold an expression like: "foo"[2].
3746 This is not done in fold so it won't happen inside &. */
3748 if (TREE_CODE (array) == STRING_CST
3749 && TREE_CODE (index) == INTEGER_CST
3750 && !TREE_INT_CST_HIGH (index)
3751 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3753 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3755 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3756 TREE_TYPE (exp) = integer_type_node;
3757 return expand_expr (exp, target, tmode, modifier);
3759 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3761 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3762 TREE_TYPE (exp) = integer_type_node;
3763 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3765 target, tmode, modifier);
3769 /* If this is a constant index into a constant array,
3770 just get the value from the array. Handle both the cases when
3771 we have an explicit constructor and when our operand is a variable
3772 that was declared const. */
3774 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3776 if (TREE_CODE (index) == INTEGER_CST
3777 && TREE_INT_CST_HIGH (index) == 0)
3779 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3781 i = TREE_INT_CST_LOW (index);
3783 elem = TREE_CHAIN (elem);
3785 return expand_expr (fold (TREE_VALUE (elem)), target,
3790 else if (optimize >= 1
3791 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3792 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3793 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3795 if (TREE_CODE (index) == INTEGER_CST
3796 && TREE_INT_CST_HIGH (index) == 0)
3798 tree init = DECL_INITIAL (array);
3800 i = TREE_INT_CST_LOW (index);
3801 if (TREE_CODE (init) == CONSTRUCTOR)
3803 tree elem = CONSTRUCTOR_ELTS (init);
3806 elem = TREE_CHAIN (elem);
3808 return expand_expr (fold (TREE_VALUE (elem)), target,
3811 else if (TREE_CODE (init) == STRING_CST
3812 && i < TREE_STRING_LENGTH (init))
3814 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3815 return convert_to_mode (mode, temp, 0);
3821 /* Treat array-ref with constant index as a component-ref. */
3825 /* If the operand is a CONSTRUCTOR, we can just extract the
3826 appropriate field if it is present. */
3827 if (code != ARRAY_REF
3828 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3832 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3833 elt = TREE_CHAIN (elt))
3834 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3835 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3839 enum machine_mode mode1;
3844 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3845 &mode1, &unsignedp, &volatilep);
3847 /* In some cases, we will be offsetting OP0's address by a constant.
3848 So get it as a sum, if possible. If we will be using it
3849 directly in an insn, we validate it. */
3850 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3852 /* If this is a constant, put it into a register if it is a
3853 legitimate constant and memory if it isn't. */
3854 if (CONSTANT_P (op0))
3856 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3857 if (LEGITIMATE_CONSTANT_P (op0))
3858 op0 = force_reg (mode, op0);
3860 op0 = validize_mem (force_const_mem (mode, op0));
3865 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3867 if (GET_CODE (op0) != MEM)
3869 op0 = change_address (op0, VOIDmode,
3870 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3871 force_reg (Pmode, offset_rtx)));
3874 /* Don't forget about volatility even if this is a bitfield. */
3875 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3877 op0 = copy_rtx (op0);
3878 MEM_VOLATILE_P (op0) = 1;
3881 if (mode1 == VOIDmode
3882 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3883 && modifier != EXPAND_CONST_ADDRESS
3884 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3885 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3887 /* In cases where an aligned union has an unaligned object
3888 as a field, we might be extracting a BLKmode value from
3889 an integer-mode (e.g., SImode) object. Handle this case
3890 by doing the extract into an object as wide as the field
3891 (which we know to be the width of a basic mode), then
3892 storing into memory, and changing the mode to BLKmode. */
3893 enum machine_mode ext_mode = mode;
3895 if (ext_mode == BLKmode)
3896 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3898 if (ext_mode == BLKmode)
3901 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3902 unsignedp, target, ext_mode, ext_mode,
3903 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3904 int_size_in_bytes (TREE_TYPE (tem)));
3905 if (mode == BLKmode)
3907 rtx new = assign_stack_temp (ext_mode,
3908 bitsize / BITS_PER_UNIT, 0);
3910 emit_move_insn (new, op0);
3911 op0 = copy_rtx (new);
3912 PUT_MODE (op0, BLKmode);
3918 /* Get a reference to just this component. */
3919 if (modifier == EXPAND_CONST_ADDRESS
3920 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3921 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3922 (bitpos / BITS_PER_UNIT)));
3924 op0 = change_address (op0, mode1,
3925 plus_constant (XEXP (op0, 0),
3926 (bitpos / BITS_PER_UNIT)));
3927 MEM_IN_STRUCT_P (op0) = 1;
3928 MEM_VOLATILE_P (op0) |= volatilep;
3929 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3932 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3933 convert_move (target, op0, unsignedp);
3939 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3940 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3941 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3942 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3943 MEM_IN_STRUCT_P (temp) = 1;
3944 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3945 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3946 a location is accessed through a pointer to const does not mean
3947 that the value there can never change. */
3948 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3953 /* Intended for a reference to a buffer of a file-object in Pascal.
3954 But it's not certain that a special tree code will really be
3955 necessary for these. INDIRECT_REF might work for them. */
3959 /* IN_EXPR: Inlined pascal set IN expression.
3962 rlo = set_low - (set_low%bits_per_word);
3963 the_word = set [ (index - rlo)/bits_per_word ];
3964 bit_index = index % bits_per_word;
3965 bitmask = 1 << bit_index;
3966 return !!(the_word & bitmask); */
3968 preexpand_calls (exp);
3970 tree set = TREE_OPERAND (exp, 0);
3971 tree index = TREE_OPERAND (exp, 1);
3972 tree set_type = TREE_TYPE (set);
3974 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3975 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3981 rtx diff, quo, rem, addr, bit, result;
3982 rtx setval, setaddr;
3983 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
3986 target = gen_reg_rtx (mode);
3988 /* If domain is empty, answer is no. */
3989 if (tree_int_cst_lt (set_high_bound, set_low_bound))
3992 index_val = expand_expr (index, 0, VOIDmode, 0);
3993 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
3994 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
3995 setval = expand_expr (set, 0, VOIDmode, 0);
3996 setaddr = XEXP (setval, 0);
3998 /* Compare index against bounds, if they are constant. */
3999 if (GET_CODE (index_val) == CONST_INT
4000 && GET_CODE (lo_r) == CONST_INT
4001 && INTVAL (index_val) < INTVAL (lo_r))
4004 if (GET_CODE (index_val) == CONST_INT
4005 && GET_CODE (hi_r) == CONST_INT
4006 && INTVAL (hi_r) < INTVAL (index_val))
4009 /* If we get here, we have to generate the code for both cases
4010 (in range and out of range). */
4012 op0 = gen_label_rtx ();
4013 op1 = gen_label_rtx ();
4015 if (! (GET_CODE (index_val) == CONST_INT
4016 && GET_CODE (lo_r) == CONST_INT))
4018 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4019 GET_MODE (index_val), 0, 0);
4020 emit_jump_insn (gen_blt (op1));
4023 if (! (GET_CODE (index_val) == CONST_INT
4024 && GET_CODE (hi_r) == CONST_INT))
4026 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4027 GET_MODE (index_val), 0, 0);
4028 emit_jump_insn (gen_bgt (op1));
4031 /* Calculate the element number of bit zero in the first word
4033 if (GET_CODE (lo_r) == CONST_INT)
4034 rlow = GEN_INT (INTVAL (lo_r)
4035 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4037 rlow = expand_binop (index_mode, and_optab, lo_r,
4038 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4039 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4041 diff = expand_binop (index_mode, sub_optab,
4042 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4044 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4045 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4046 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4047 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4048 addr = memory_address (byte_mode,
4049 expand_binop (index_mode, add_optab,
4050 diff, setaddr, NULL_RTX, 0,
4052 /* Extract the bit we want to examine */
4053 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4054 gen_rtx (MEM, byte_mode, addr),
4055 make_tree (TREE_TYPE (index), rem),
4057 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4058 GET_MODE (target) == byte_mode ? target : 0,
4059 1, OPTAB_LIB_WIDEN);
4061 if (result != target)
4062 convert_move (target, result, 1);
4064 /* Output the code to handle the out-of-range case. */
4067 emit_move_insn (target, const0_rtx);
4072 case WITH_CLEANUP_EXPR:
4073 if (RTL_EXPR_RTL (exp) == 0)
4076 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4078 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4079 /* That's it for this cleanup. */
4080 TREE_OPERAND (exp, 2) = 0;
4082 return RTL_EXPR_RTL (exp);
4085 /* Check for a built-in function. */
4086 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4087 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4088 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4089 return expand_builtin (exp, target, subtarget, tmode, ignore);
4090 /* If this call was expanded already by preexpand_calls,
4091 just return the result we got. */
4092 if (CALL_EXPR_RTL (exp) != 0)
4093 return CALL_EXPR_RTL (exp);
4094 return expand_call (exp, target, ignore);
4096 case NON_LVALUE_EXPR:
4099 case REFERENCE_EXPR:
4100 if (TREE_CODE (type) == VOID_TYPE || ignore)
4102 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4105 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4106 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4107 if (TREE_CODE (type) == UNION_TYPE)
4109 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4112 if (mode == BLKmode)
4114 if (TYPE_SIZE (type) == 0
4115 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4117 target = assign_stack_temp (BLKmode,
4118 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4119 + BITS_PER_UNIT - 1)
4120 / BITS_PER_UNIT, 0);
4123 target = gen_reg_rtx (mode);
4125 if (GET_CODE (target) == MEM)
4126 /* Store data into beginning of memory target. */
4127 store_expr (TREE_OPERAND (exp, 0),
4128 change_address (target, TYPE_MODE (valtype), 0), 0);
4130 else if (GET_CODE (target) == REG)
4131 /* Store this field into a union of the proper type. */
4132 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4133 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4135 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4139 /* Return the entire union. */
4142 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4143 if (GET_MODE (op0) == mode)
4145 /* If arg is a constant integer being extended from a narrower mode,
4146 we must really truncate to get the extended bits right. Otherwise
4147 (unsigned long) (unsigned char) ("\377"[0])
4148 would come out as ffffffff. */
4149 if (GET_MODE (op0) == VOIDmode
4150 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4151 < GET_MODE_BITSIZE (mode)))
4153 /* MODE must be narrower than HOST_BITS_PER_INT. */
4154 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4156 if (width < HOST_BITS_PER_WIDE_INT)
4158 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4159 : CONST_DOUBLE_LOW (op0));
4160 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4161 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4162 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4164 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4166 op0 = GEN_INT (val);
4170 op0 = (simplify_unary_operation
4171 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4172 ? ZERO_EXTEND : SIGN_EXTEND),
4174 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4179 if (GET_MODE (op0) == VOIDmode)
4181 if (modifier == EXPAND_INITIALIZER)
4182 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4183 if (flag_force_mem && GET_CODE (op0) == MEM)
4184 op0 = copy_to_reg (op0);
4187 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4189 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4193 /* We come here from MINUS_EXPR when the second operand is a constant. */
4195 this_optab = add_optab;
4197 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4198 something else, make sure we add the register to the constant and
4199 then to the other thing. This case can occur during strength
4200 reduction and doing it this way will produce better code if the
4201 frame pointer or argument pointer is eliminated.
4203 fold-const.c will ensure that the constant is always in the inner
4204 PLUS_EXPR, so the only case we need to do anything about is if
4205 sp, ap, or fp is our second argument, in which case we must swap
4206 the innermost first argument and our second argument. */
4208 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4209 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4210 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4211 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4212 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4213 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4215 tree t = TREE_OPERAND (exp, 1);
4217 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4218 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4221 /* If the result is to be Pmode and we are adding an integer to
4222 something, we might be forming a constant. So try to use
4223 plus_constant. If it produces a sum and we can't accept it,
4224 use force_operand. This allows P = &ARR[const] to generate
4225 efficient code on machines where a SYMBOL_REF is not a valid
4228 If this is an EXPAND_SUM call, always return the sum. */
4229 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4230 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4231 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4234 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4236 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4237 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4238 op1 = force_operand (op1, target);
4242 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4243 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4244 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4247 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4249 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4250 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4251 op0 = force_operand (op0, target);
4255 /* No sense saving up arithmetic to be done
4256 if it's all in the wrong mode to form part of an address.
4257 And force_operand won't know whether to sign-extend or
4259 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4260 || mode != Pmode) goto binop;
4262 preexpand_calls (exp);
4263 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4266 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4267 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4269 /* Make sure any term that's a sum with a constant comes last. */
4270 if (GET_CODE (op0) == PLUS
4271 && CONSTANT_P (XEXP (op0, 1)))
4277 /* If adding to a sum including a constant,
4278 associate it to put the constant outside. */
4279 if (GET_CODE (op1) == PLUS
4280 && CONSTANT_P (XEXP (op1, 1)))
4282 rtx constant_term = const0_rtx;
4284 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4287 /* Ensure that MULT comes first if there is one. */
4288 else if (GET_CODE (op0) == MULT)
4289 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4291 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4293 /* Let's also eliminate constants from op0 if possible. */
4294 op0 = eliminate_constant_term (op0, &constant_term);
4296 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4297 their sum should be a constant. Form it into OP1, since the
4298 result we want will then be OP0 + OP1. */
4300 temp = simplify_binary_operation (PLUS, mode, constant_term,
4305 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4308 /* Put a constant term last and put a multiplication first. */
4309 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4310 temp = op1, op1 = op0, op0 = temp;
4312 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4313 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4316 /* Handle difference of two symbolic constants,
4317 for the sake of an initializer. */
4318 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4319 && really_constant_p (TREE_OPERAND (exp, 0))
4320 && really_constant_p (TREE_OPERAND (exp, 1)))
4322 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4323 VOIDmode, modifier);
4324 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4325 VOIDmode, modifier);
4326 return gen_rtx (MINUS, mode, op0, op1);
4328 /* Convert A - const to A + (-const). */
4329 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4331 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4332 fold (build1 (NEGATE_EXPR, type,
4333 TREE_OPERAND (exp, 1))));
4336 this_optab = sub_optab;
4340 preexpand_calls (exp);
4341 /* If first operand is constant, swap them.
4342 Thus the following special case checks need only
4343 check the second operand. */
4344 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4346 register tree t1 = TREE_OPERAND (exp, 0);
4347 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4348 TREE_OPERAND (exp, 1) = t1;
4351 /* Attempt to return something suitable for generating an
4352 indexed address, for machines that support that. */
4354 if (modifier == EXPAND_SUM && mode == Pmode
4355 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4356 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4358 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4360 /* Apply distributive law if OP0 is x+c. */
4361 if (GET_CODE (op0) == PLUS
4362 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4363 return gen_rtx (PLUS, mode,
4364 gen_rtx (MULT, mode, XEXP (op0, 0),
4365 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4366 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4367 * INTVAL (XEXP (op0, 1))));
4369 if (GET_CODE (op0) != REG)
4370 op0 = force_operand (op0, NULL_RTX);
4371 if (GET_CODE (op0) != REG)
4372 op0 = copy_to_mode_reg (mode, op0);
4374 return gen_rtx (MULT, mode, op0,
4375 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4378 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4381 /* Check for multiplying things that have been extended
4382 from a narrower type. If this machine supports multiplying
4383 in that narrower type with a result in the desired type,
4384 do it that way, and avoid the explicit type-conversion. */
4385 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4386 && TREE_CODE (type) == INTEGER_TYPE
4387 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4388 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4389 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4390 && int_fits_type_p (TREE_OPERAND (exp, 1),
4391 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4392 /* Don't use a widening multiply if a shift will do. */
4393 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4394 > HOST_BITS_PER_WIDE_INT)
4395 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4397 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4398 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4400 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4401 /* If both operands are extended, they must either both
4402 be zero-extended or both be sign-extended. */
4403 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4405 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4407 enum machine_mode innermode
4408 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4409 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4410 ? umul_widen_optab : smul_widen_optab);
4411 if (mode == GET_MODE_WIDER_MODE (innermode)
4412 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4414 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4415 NULL_RTX, VOIDmode, 0);
4416 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4417 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4420 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4421 NULL_RTX, VOIDmode, 0);
4425 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4426 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4427 return expand_mult (mode, op0, op1, target, unsignedp);
4429 case TRUNC_DIV_EXPR:
4430 case FLOOR_DIV_EXPR:
4432 case ROUND_DIV_EXPR:
4433 case EXACT_DIV_EXPR:
4434 preexpand_calls (exp);
4435 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4437 /* Possible optimization: compute the dividend with EXPAND_SUM
4438 then if the divisor is constant can optimize the case
4439 where some terms of the dividend have coeffs divisible by it. */
4440 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4441 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4442 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4445 this_optab = flodiv_optab;
4448 case TRUNC_MOD_EXPR:
4449 case FLOOR_MOD_EXPR:
4451 case ROUND_MOD_EXPR:
4452 preexpand_calls (exp);
4453 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4455 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4456 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4457 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4459 case FIX_ROUND_EXPR:
4460 case FIX_FLOOR_EXPR:
4462 abort (); /* Not used for C. */
4464 case FIX_TRUNC_EXPR:
4465 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4467 target = gen_reg_rtx (mode);
4468 expand_fix (target, op0, unsignedp);
4472 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4474 target = gen_reg_rtx (mode);
4475 /* expand_float can't figure out what to do if FROM has VOIDmode.
4476 So give it the correct mode. With -O, cse will optimize this. */
4477 if (GET_MODE (op0) == VOIDmode)
4478 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4480 expand_float (target, op0,
4481 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4485 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4486 temp = expand_unop (mode, neg_optab, op0, target, 0);
4492 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4494 /* Handle complex values specially. */
4496 enum machine_mode opmode
4497 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4499 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4500 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4501 return expand_complex_abs (opmode, op0, target, unsignedp);
4504 /* Unsigned abs is simply the operand. Testing here means we don't
4505 risk generating incorrect code below. */
4506 if (TREE_UNSIGNED (type))
4509 /* First try to do it with a special abs instruction. */
4510 temp = expand_unop (mode, abs_optab, op0, target, 0);
4514 /* If this machine has expensive jumps, we can do integer absolute
4515 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4516 where W is the width of MODE. */
4518 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4520 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4521 size_int (GET_MODE_BITSIZE (mode) - 1),
4524 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4527 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4534 /* If that does not win, use conditional jump and negate. */
4535 target = original_target;
4536 temp = gen_label_rtx ();
4537 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4538 || (GET_CODE (target) == REG
4539 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4540 target = gen_reg_rtx (mode);
4541 emit_move_insn (target, op0);
4542 emit_cmp_insn (target,
4543 expand_expr (convert (type, integer_zero_node),
4544 NULL_RTX, VOIDmode, 0),
4545 GE, NULL_RTX, mode, 0, 0);
4547 emit_jump_insn (gen_bge (temp));
4548 op0 = expand_unop (mode, neg_optab, target, target, 0);
4550 emit_move_insn (target, op0);
4557 target = original_target;
4558 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4559 || (GET_CODE (target) == REG
4560 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4561 target = gen_reg_rtx (mode);
4562 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4563 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4565 /* First try to do it with a special MIN or MAX instruction.
4566 If that does not win, use a conditional jump to select the proper
4568 this_optab = (TREE_UNSIGNED (type)
4569 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4570 : (code == MIN_EXPR ? smin_optab : smax_optab));
4572 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4578 emit_move_insn (target, op0);
4579 op0 = gen_label_rtx ();
4580 if (code == MAX_EXPR)
4581 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4582 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4583 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4585 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4586 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4587 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4588 if (temp == const0_rtx)
4589 emit_move_insn (target, op1);
4590 else if (temp != const_true_rtx)
4592 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4593 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4596 emit_move_insn (target, op1);
4601 /* ??? Can optimize when the operand of this is a bitwise operation,
4602 by using a different bitwise operation. */
4604 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4605 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4611 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4612 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4617 /* ??? Can optimize bitwise operations with one arg constant.
4618 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4619 and (a bitwise1 b) bitwise2 b (etc)
4620 but that is probably not worth while. */
4622 /* BIT_AND_EXPR is for bitwise anding.
4623 TRUTH_AND_EXPR is for anding two boolean values
4624 when we want in all cases to compute both of them.
4625 In general it is fastest to do TRUTH_AND_EXPR by
4626 computing both operands as actual zero-or-1 values
4627 and then bitwise anding. In cases where there cannot
4628 be any side effects, better code would be made by
4629 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4630 but the question is how to recognize those cases. */
4632 case TRUTH_AND_EXPR:
4634 this_optab = and_optab;
4637 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4640 this_optab = ior_optab;
4643 case TRUTH_XOR_EXPR:
4645 this_optab = xor_optab;
4652 preexpand_calls (exp);
4653 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4655 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4656 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4659 /* Could determine the answer when only additive constants differ.
4660 Also, the addition of one can be handled by changing the condition. */
4667 preexpand_calls (exp);
4668 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4671 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4672 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4674 && GET_CODE (original_target) == REG
4675 && (GET_MODE (original_target)
4676 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4678 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4679 if (temp != original_target)
4680 temp = copy_to_reg (temp);
4681 op1 = gen_label_rtx ();
4682 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4683 GET_MODE (temp), unsignedp, 0);
4684 emit_jump_insn (gen_beq (op1));
4685 emit_move_insn (temp, const1_rtx);
4689 /* If no set-flag instruction, must generate a conditional
4690 store into a temporary variable. Drop through
4691 and handle this like && and ||. */
4693 case TRUTH_ANDIF_EXPR:
4694 case TRUTH_ORIF_EXPR:
4695 if (target == 0 || ! safe_from_p (target, exp)
4696 /* Make sure we don't have a hard reg (such as function's return
4697 value) live across basic blocks, if not optimizing. */
4698 || (!optimize && GET_CODE (target) == REG
4699 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4700 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4701 emit_clr_insn (target);
4702 op1 = gen_label_rtx ();
4703 jumpifnot (exp, op1);
4704 emit_0_to_1_insn (target);
4708 case TRUTH_NOT_EXPR:
4709 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4710 /* The parser is careful to generate TRUTH_NOT_EXPR
4711 only with operands that are always zero or one. */
4712 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4713 target, 1, OPTAB_LIB_WIDEN);
4719 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4721 return expand_expr (TREE_OPERAND (exp, 1),
4722 (ignore ? const0_rtx : target),
4727 /* Note that COND_EXPRs whose type is a structure or union
4728 are required to be constructed to contain assignments of
4729 a temporary variable, so that we can evaluate them here
4730 for side effect only. If type is void, we must do likewise. */
4732 /* If an arm of the branch requires a cleanup,
4733 only that cleanup is performed. */
4736 tree binary_op = 0, unary_op = 0;
4737 tree old_cleanups = cleanups_this_call;
4738 cleanups_this_call = 0;
4740 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4741 convert it to our mode, if necessary. */
4742 if (integer_onep (TREE_OPERAND (exp, 1))
4743 && integer_zerop (TREE_OPERAND (exp, 2))
4744 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4746 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4747 if (GET_MODE (op0) == mode)
4750 target = gen_reg_rtx (mode);
4751 convert_move (target, op0, unsignedp);
4755 /* If we are not to produce a result, we have no target. Otherwise,
4756 if a target was specified use it; it will not be used as an
4757 intermediate target unless it is safe. If no target, use a
4760 if (mode == VOIDmode || ignore)
4762 else if (original_target
4763 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4764 temp = original_target;
4765 else if (mode == BLKmode)
4767 if (TYPE_SIZE (type) == 0
4768 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4770 temp = assign_stack_temp (BLKmode,
4771 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4772 + BITS_PER_UNIT - 1)
4773 / BITS_PER_UNIT, 0);
4776 temp = gen_reg_rtx (mode);
4778 /* Check for X ? A + B : A. If we have this, we can copy
4779 A to the output and conditionally add B. Similarly for unary
4780 operations. Don't do this if X has side-effects because
4781 those side effects might affect A or B and the "?" operation is
4782 a sequence point in ANSI. (We test for side effects later.) */
4784 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4785 && operand_equal_p (TREE_OPERAND (exp, 2),
4786 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4787 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4788 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4789 && operand_equal_p (TREE_OPERAND (exp, 1),
4790 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4791 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4792 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4793 && operand_equal_p (TREE_OPERAND (exp, 2),
4794 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4795 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4796 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4797 && operand_equal_p (TREE_OPERAND (exp, 1),
4798 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4799 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4801 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4802 operation, do this as A + (X != 0). Similarly for other simple
4803 binary operators. */
4804 if (singleton && binary_op
4805 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4806 && (TREE_CODE (binary_op) == PLUS_EXPR
4807 || TREE_CODE (binary_op) == MINUS_EXPR
4808 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4809 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4810 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4811 && integer_onep (TREE_OPERAND (binary_op, 1))
4812 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4815 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4816 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4817 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4818 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4821 /* If we had X ? A : A + 1, do this as A + (X == 0).
4823 We have to invert the truth value here and then put it
4824 back later if do_store_flag fails. We cannot simply copy
4825 TREE_OPERAND (exp, 0) to another variable and modify that
4826 because invert_truthvalue can modify the tree pointed to
4828 if (singleton == TREE_OPERAND (exp, 1))
4829 TREE_OPERAND (exp, 0)
4830 = invert_truthvalue (TREE_OPERAND (exp, 0));
4832 result = do_store_flag (TREE_OPERAND (exp, 0),
4833 (safe_from_p (temp, singleton)
4835 mode, BRANCH_COST <= 1);
4839 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4840 return expand_binop (mode, boptab, op1, result, temp,
4841 unsignedp, OPTAB_LIB_WIDEN);
4843 else if (singleton == TREE_OPERAND (exp, 1))
4844 TREE_OPERAND (exp, 0)
4845 = invert_truthvalue (TREE_OPERAND (exp, 0));
4849 op0 = gen_label_rtx ();
4851 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4855 /* If the target conflicts with the other operand of the
4856 binary op, we can't use it. Also, we can't use the target
4857 if it is a hard register, because evaluating the condition
4858 might clobber it. */
4860 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4861 || (GET_CODE (temp) == REG
4862 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4863 temp = gen_reg_rtx (mode);
4864 store_expr (singleton, temp, 0);
4867 expand_expr (singleton,
4868 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4869 if (cleanups_this_call)
4871 sorry ("aggregate value in COND_EXPR");
4872 cleanups_this_call = 0;
4874 if (singleton == TREE_OPERAND (exp, 1))
4875 jumpif (TREE_OPERAND (exp, 0), op0);
4877 jumpifnot (TREE_OPERAND (exp, 0), op0);
4879 if (binary_op && temp == 0)
4880 /* Just touch the other operand. */
4881 expand_expr (TREE_OPERAND (binary_op, 1),
4882 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4884 store_expr (build (TREE_CODE (binary_op), type,
4885 make_tree (type, temp),
4886 TREE_OPERAND (binary_op, 1)),
4889 store_expr (build1 (TREE_CODE (unary_op), type,
4890 make_tree (type, temp)),
4895 /* This is now done in jump.c and is better done there because it
4896 produces shorter register lifetimes. */
4898 /* Check for both possibilities either constants or variables
4899 in registers (but not the same as the target!). If so, can
4900 save branches by assigning one, branching, and assigning the
4902 else if (temp && GET_MODE (temp) != BLKmode
4903 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4904 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4905 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4906 && DECL_RTL (TREE_OPERAND (exp, 1))
4907 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4908 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4909 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4910 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4911 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4912 && DECL_RTL (TREE_OPERAND (exp, 2))
4913 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4914 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4916 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4917 temp = gen_reg_rtx (mode);
4918 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4919 jumpifnot (TREE_OPERAND (exp, 0), op0);
4920 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4924 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4925 comparison operator. If we have one of these cases, set the
4926 output to A, branch on A (cse will merge these two references),
4927 then set the output to FOO. */
4929 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4930 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4931 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4932 TREE_OPERAND (exp, 1), 0)
4933 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4934 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4936 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4937 temp = gen_reg_rtx (mode);
4938 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4939 jumpif (TREE_OPERAND (exp, 0), op0);
4940 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4944 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4945 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4946 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4947 TREE_OPERAND (exp, 2), 0)
4948 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4949 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4951 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4952 temp = gen_reg_rtx (mode);
4953 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4954 jumpifnot (TREE_OPERAND (exp, 0), op0);
4955 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4960 op1 = gen_label_rtx ();
4961 jumpifnot (TREE_OPERAND (exp, 0), op0);
4963 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4965 expand_expr (TREE_OPERAND (exp, 1),
4966 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4967 if (cleanups_this_call)
4969 sorry ("aggregate value in COND_EXPR");
4970 cleanups_this_call = 0;
4974 emit_jump_insn (gen_jump (op1));
4978 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4980 expand_expr (TREE_OPERAND (exp, 2),
4981 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4984 if (cleanups_this_call)
4986 sorry ("aggregate value in COND_EXPR");
4987 cleanups_this_call = 0;
4993 cleanups_this_call = old_cleanups;
4999 /* Something needs to be initialized, but we didn't know
5000 where that thing was when building the tree. For example,
5001 it could be the return value of a function, or a parameter
5002 to a function which lays down in the stack, or a temporary
5003 variable which must be passed by reference.
5005 We guarantee that the expression will either be constructed
5006 or copied into our original target. */
5008 tree slot = TREE_OPERAND (exp, 0);
5011 if (TREE_CODE (slot) != VAR_DECL)
5016 if (DECL_RTL (slot) != 0)
5018 target = DECL_RTL (slot);
5019 /* If we have already expanded the slot, so don't do
5021 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5026 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5027 /* All temp slots at this level must not conflict. */
5028 preserve_temp_slots (target);
5029 DECL_RTL (slot) = target;
5033 /* I bet this needs to be done, and I bet that it needs to
5034 be above, inside the else clause. The reason is
5035 simple, how else is it going to get cleaned up? (mrs)
5037 The reason is probably did not work before, and was
5038 commented out is because this was re-expanding already
5039 expanded target_exprs (target == 0 and DECL_RTL (slot)
5040 != 0) also cleaning them up many times as well. :-( */
5042 /* Since SLOT is not known to the called function
5043 to belong to its stack frame, we must build an explicit
5044 cleanup. This case occurs when we must build up a reference
5045 to pass the reference as an argument. In this case,
5046 it is very likely that such a reference need not be
5049 if (TREE_OPERAND (exp, 2) == 0)
5050 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5051 if (TREE_OPERAND (exp, 2))
5052 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5053 cleanups_this_call);
5058 /* This case does occur, when expanding a parameter which
5059 needs to be constructed on the stack. The target
5060 is the actual stack address that we want to initialize.
5061 The function we call will perform the cleanup in this case. */
5063 DECL_RTL (slot) = target;
5066 exp1 = TREE_OPERAND (exp, 1);
5067 /* Mark it as expanded. */
5068 TREE_OPERAND (exp, 1) = NULL_TREE;
5070 return expand_expr (exp1, target, tmode, modifier);
5075 tree lhs = TREE_OPERAND (exp, 0);
5076 tree rhs = TREE_OPERAND (exp, 1);
5077 tree noncopied_parts = 0;
5078 tree lhs_type = TREE_TYPE (lhs);
5080 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5081 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5082 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5083 TYPE_NONCOPIED_PARTS (lhs_type));
5084 while (noncopied_parts != 0)
5086 expand_assignment (TREE_VALUE (noncopied_parts),
5087 TREE_PURPOSE (noncopied_parts), 0, 0);
5088 noncopied_parts = TREE_CHAIN (noncopied_parts);
5095 /* If lhs is complex, expand calls in rhs before computing it.
5096 That's so we don't compute a pointer and save it over a call.
5097 If lhs is simple, compute it first so we can give it as a
5098 target if the rhs is just a call. This avoids an extra temp and copy
5099 and that prevents a partial-subsumption which makes bad code.
5100 Actually we could treat component_ref's of vars like vars. */
5102 tree lhs = TREE_OPERAND (exp, 0);
5103 tree rhs = TREE_OPERAND (exp, 1);
5104 tree noncopied_parts = 0;
5105 tree lhs_type = TREE_TYPE (lhs);
5109 if (TREE_CODE (lhs) != VAR_DECL
5110 && TREE_CODE (lhs) != RESULT_DECL
5111 && TREE_CODE (lhs) != PARM_DECL)
5112 preexpand_calls (exp);
5114 /* Check for |= or &= of a bitfield of size one into another bitfield
5115 of size 1. In this case, (unless we need the result of the
5116 assignment) we can do this more efficiently with a
5117 test followed by an assignment, if necessary.
5119 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5120 things change so we do, this code should be enhanced to
5123 && TREE_CODE (lhs) == COMPONENT_REF
5124 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5125 || TREE_CODE (rhs) == BIT_AND_EXPR)
5126 && TREE_OPERAND (rhs, 0) == lhs
5127 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5128 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5129 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5131 rtx label = gen_label_rtx ();
5133 do_jump (TREE_OPERAND (rhs, 1),
5134 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5135 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5136 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5137 (TREE_CODE (rhs) == BIT_IOR_EXPR
5139 : integer_zero_node)),
5141 do_pending_stack_adjust ();
5146 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5147 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5148 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5149 TYPE_NONCOPIED_PARTS (lhs_type));
5151 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5152 while (noncopied_parts != 0)
5154 expand_assignment (TREE_PURPOSE (noncopied_parts),
5155 TREE_VALUE (noncopied_parts), 0, 0);
5156 noncopied_parts = TREE_CHAIN (noncopied_parts);
5161 case PREINCREMENT_EXPR:
5162 case PREDECREMENT_EXPR:
5163 return expand_increment (exp, 0);
5165 case POSTINCREMENT_EXPR:
5166 case POSTDECREMENT_EXPR:
5167 /* Faster to treat as pre-increment if result is not used. */
5168 return expand_increment (exp, ! ignore);
5171 /* Are we taking the address of a nested function? */
5172 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5173 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5175 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5176 op0 = force_operand (op0, target);
5180 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5181 (modifier == EXPAND_INITIALIZER
5182 ? modifier : EXPAND_CONST_ADDRESS));
5183 if (GET_CODE (op0) != MEM)
5186 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5187 return XEXP (op0, 0);
5188 op0 = force_operand (XEXP (op0, 0), target);
5190 if (flag_force_addr && GET_CODE (op0) != REG)
5191 return force_reg (Pmode, op0);
5194 case ENTRY_VALUE_EXPR:
5197 /* COMPLEX type for Extended Pascal & Fortran */
5200 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5204 /* Get the rtx code of the operands. */
5205 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5206 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5209 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5211 prev = get_last_insn ();
5213 /* Tell flow that the whole of the destination is being set. */
5214 if (GET_CODE (target) == REG)
5215 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5217 /* Move the real (op0) and imaginary (op1) parts to their location. */
5218 emit_move_insn (gen_realpart (mode, target), op0);
5219 emit_move_insn (gen_imagpart (mode, target), op1);
5221 /* Complex construction should appear as a single unit. */
5228 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5229 return gen_realpart (mode, op0);
5232 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5233 return gen_imagpart (mode, op0);
5237 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5241 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5244 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5246 prev = get_last_insn ();
5248 /* Tell flow that the whole of the destination is being set. */
5249 if (GET_CODE (target) == REG)
5250 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5252 /* Store the realpart and the negated imagpart to target. */
5253 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5255 imag_t = gen_imagpart (mode, target);
5256 temp = expand_unop (mode, neg_optab,
5257 gen_imagpart (mode, op0), imag_t, 0);
5259 emit_move_insn (imag_t, temp);
5261 /* Conjugate should appear as a single unit */
5268 op0 = CONST0_RTX (tmode);
5274 return (*lang_expand_expr) (exp, target, tmode, modifier);
5277 /* Here to do an ordinary binary operator, generating an instruction
5278 from the optab already placed in `this_optab'. */
5280 preexpand_calls (exp);
5281 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5283 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5284 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5286 temp = expand_binop (mode, this_optab, op0, op1, target,
5287 unsignedp, OPTAB_LIB_WIDEN);
5293 /* Return the alignment in bits of EXP, a pointer valued expression.
5294 But don't return more than MAX_ALIGN no matter what.
5295 The alignment returned is, by default, the alignment of the thing that
5296 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5298 Otherwise, look at the expression to see if we can do better, i.e., if the
5299 expression is actually pointing at an object whose alignment is tighter. */
5302 get_pointer_alignment (exp, max_align)
5306 unsigned align, inner;
5308 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5311 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5312 align = MIN (align, max_align);
5316 switch (TREE_CODE (exp))
5320 case NON_LVALUE_EXPR:
5321 exp = TREE_OPERAND (exp, 0);
5322 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5324 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5325 inner = MIN (inner, max_align);
5326 align = MAX (align, inner);
5330 /* If sum of pointer + int, restrict our maximum alignment to that
5331 imposed by the integer. If not, we can't do any better than
5333 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5336 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5341 exp = TREE_OPERAND (exp, 0);
5345 /* See what we are pointing at and look at its alignment. */
5346 exp = TREE_OPERAND (exp, 0);
5347 if (TREE_CODE (exp) == FUNCTION_DECL)
5348 align = MAX (align, FUNCTION_BOUNDARY);
5349 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5350 align = MAX (align, DECL_ALIGN (exp));
5351 #ifdef CONSTANT_ALIGNMENT
5352 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5353 align = CONSTANT_ALIGNMENT (exp, align);
5355 return MIN (align, max_align);
5363 /* Return the tree node and offset if a given argument corresponds to
5364 a string constant. */
5367 string_constant (arg, ptr_offset)
5373 if (TREE_CODE (arg) == ADDR_EXPR
5374 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5376 *ptr_offset = integer_zero_node;
5377 return TREE_OPERAND (arg, 0);
5379 else if (TREE_CODE (arg) == PLUS_EXPR)
5381 tree arg0 = TREE_OPERAND (arg, 0);
5382 tree arg1 = TREE_OPERAND (arg, 1);
5387 if (TREE_CODE (arg0) == ADDR_EXPR
5388 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5391 return TREE_OPERAND (arg0, 0);
5393 else if (TREE_CODE (arg1) == ADDR_EXPR
5394 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5397 return TREE_OPERAND (arg1, 0);
5404 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5405 way, because it could contain a zero byte in the middle.
5406 TREE_STRING_LENGTH is the size of the character array, not the string.
5408 Unfortunately, string_constant can't access the values of const char
5409 arrays with initializers, so neither can we do so here. */
5419 src = string_constant (src, &offset_node);
5422 max = TREE_STRING_LENGTH (src);
5423 ptr = TREE_STRING_POINTER (src);
5424 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5426 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5427 compute the offset to the following null if we don't know where to
5428 start searching for it. */
5430 for (i = 0; i < max; i++)
5433 /* We don't know the starting offset, but we do know that the string
5434 has no internal zero bytes. We can assume that the offset falls
5435 within the bounds of the string; otherwise, the programmer deserves
5436 what he gets. Subtract the offset from the length of the string,
5438 /* This would perhaps not be valid if we were dealing with named
5439 arrays in addition to literal string constants. */
5440 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5443 /* We have a known offset into the string. Start searching there for
5444 a null character. */
5445 if (offset_node == 0)
5449 /* Did we get a long long offset? If so, punt. */
5450 if (TREE_INT_CST_HIGH (offset_node) != 0)
5452 offset = TREE_INT_CST_LOW (offset_node);
5454 /* If the offset is known to be out of bounds, warn, and call strlen at
5456 if (offset < 0 || offset > max)
5458 warning ("offset outside bounds of constant string");
5461 /* Use strlen to search for the first zero byte. Since any strings
5462 constructed with build_string will have nulls appended, we win even
5463 if we get handed something like (char[4])"abcd".
5465 Since OFFSET is our starting index into the string, no further
5466 calculation is needed. */
5467 return size_int (strlen (ptr + offset));
5470 /* Expand an expression EXP that calls a built-in function,
5471 with result going to TARGET if that's convenient
5472 (and in mode MODE if that's convenient).
5473 SUBTARGET may be used as the target for computing one of EXP's operands.
5474 IGNORE is nonzero if the value is to be ignored. */
5477 expand_builtin (exp, target, subtarget, mode, ignore)
5481 enum machine_mode mode;
5484 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5485 tree arglist = TREE_OPERAND (exp, 1);
5488 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5489 optab builtin_optab;
5491 switch (DECL_FUNCTION_CODE (fndecl))
5496 /* build_function_call changes these into ABS_EXPR. */
5501 case BUILT_IN_FSQRT:
5502 /* If not optimizing, call the library function. */
5507 /* Arg could be wrong type if user redeclared this fcn wrong. */
5508 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5509 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5511 /* Stabilize and compute the argument. */
5512 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5513 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5515 exp = copy_node (exp);
5516 arglist = copy_node (arglist);
5517 TREE_OPERAND (exp, 1) = arglist;
5518 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5520 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5522 /* Make a suitable register to place result in. */
5523 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5528 switch (DECL_FUNCTION_CODE (fndecl))
5531 builtin_optab = sin_optab; break;
5533 builtin_optab = cos_optab; break;
5534 case BUILT_IN_FSQRT:
5535 builtin_optab = sqrt_optab; break;
5540 /* Compute into TARGET.
5541 Set TARGET to wherever the result comes back. */
5542 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5543 builtin_optab, op0, target, 0);
5545 /* If we were unable to expand via the builtin, stop the
5546 sequence (without outputting the insns) and break, causing
5547 a call the the library function. */
5554 /* Check the results by default. But if flag_fast_math is turned on,
5555 then assume sqrt will always be called with valid arguments. */
5557 if (! flag_fast_math)
5559 /* Don't define the builtin FP instructions
5560 if your machine is not IEEE. */
5561 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5564 lab1 = gen_label_rtx ();
5566 /* Test the result; if it is NaN, set errno=EDOM because
5567 the argument was not in the domain. */
5568 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5569 emit_jump_insn (gen_beq (lab1));
5573 #ifdef GEN_ERRNO_RTX
5574 rtx errno_rtx = GEN_ERRNO_RTX;
5577 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5580 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5583 /* We can't set errno=EDOM directly; let the library call do it.
5584 Pop the arguments right away in case the call gets deleted. */
5586 expand_call (exp, target, 0);
5593 /* Output the entire sequence. */
5594 insns = get_insns ();
5600 /* __builtin_apply_args returns block of memory allocated on
5601 the stack into which is stored the arg pointer, structure
5602 value address, static chain, and all the registers that might
5603 possibly be used in performing a function call. The code is
5604 moved to the start of the function so the incoming values are
5606 case BUILT_IN_APPLY_ARGS:
5607 /* Don't do __builtin_apply_args more than once in a function.
5608 Save the result of the first call and reuse it. */
5609 if (apply_args_value != 0)
5610 return apply_args_value;
5612 /* When this function is called, it means that registers must be
5613 saved on entry to this function. So we migrate the
5614 call to the first insn of this function. */
5619 temp = expand_builtin_apply_args ();
5623 apply_args_value = temp;
5625 /* Put the sequence after the NOTE that starts the function.
5626 If this is inside a SEQUENCE, make the outer-level insn
5627 chain current, so the code is placed at the start of the
5629 push_topmost_sequence ();
5630 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5631 pop_topmost_sequence ();
5635 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5636 FUNCTION with a copy of the parameters described by
5637 ARGUMENTS, and ARGSIZE. It returns a block of memory
5638 allocated on the stack into which is stored all the registers
5639 that might possibly be used for returning the result of a
5640 function. ARGUMENTS is the value returned by
5641 __builtin_apply_args. ARGSIZE is the number of bytes of
5642 arguments that must be copied. ??? How should this value be
5643 computed? We'll also need a safe worst case value for varargs
5645 case BUILT_IN_APPLY:
5647 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5648 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5649 || TREE_CHAIN (arglist) == 0
5650 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5651 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5652 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5660 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5661 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5663 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5666 /* __builtin_return (RESULT) causes the function to return the
5667 value described by RESULT. RESULT is address of the block of
5668 memory returned by __builtin_apply. */
5669 case BUILT_IN_RETURN:
5671 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5672 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5673 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5674 NULL_RTX, VOIDmode, 0));
5677 case BUILT_IN_SAVEREGS:
5678 /* Don't do __builtin_saveregs more than once in a function.
5679 Save the result of the first call and reuse it. */
5680 if (saveregs_value != 0)
5681 return saveregs_value;
5683 /* When this function is called, it means that registers must be
5684 saved on entry to this function. So we migrate the
5685 call to the first insn of this function. */
5688 rtx valreg, saved_valreg;
5690 /* Now really call the function. `expand_call' does not call
5691 expand_builtin, so there is no danger of infinite recursion here. */
5694 #ifdef EXPAND_BUILTIN_SAVEREGS
5695 /* Do whatever the machine needs done in this case. */
5696 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5698 /* The register where the function returns its value
5699 is likely to have something else in it, such as an argument.
5700 So preserve that register around the call. */
5701 if (value_mode != VOIDmode)
5703 valreg = hard_libcall_value (value_mode);
5704 saved_valreg = gen_reg_rtx (value_mode);
5705 emit_move_insn (saved_valreg, valreg);
5708 /* Generate the call, putting the value in a pseudo. */
5709 temp = expand_call (exp, target, ignore);
5711 if (value_mode != VOIDmode)
5712 emit_move_insn (valreg, saved_valreg);
5718 saveregs_value = temp;
5720 /* Put the sequence after the NOTE that starts the function.
5721 If this is inside a SEQUENCE, make the outer-level insn
5722 chain current, so the code is placed at the start of the
5724 push_topmost_sequence ();
5725 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5726 pop_topmost_sequence ();
5730 /* __builtin_args_info (N) returns word N of the arg space info
5731 for the current function. The number and meanings of words
5732 is controlled by the definition of CUMULATIVE_ARGS. */
5733 case BUILT_IN_ARGS_INFO:
5735 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5737 int *word_ptr = (int *) ¤t_function_args_info;
5738 tree type, elts, result;
5740 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5741 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5742 __FILE__, __LINE__);
5746 tree arg = TREE_VALUE (arglist);
5747 if (TREE_CODE (arg) != INTEGER_CST)
5748 error ("argument of `__builtin_args_info' must be constant");
5751 int wordnum = TREE_INT_CST_LOW (arg);
5753 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5754 error ("argument of `__builtin_args_info' out of range");
5756 return GEN_INT (word_ptr[wordnum]);
5760 error ("missing argument in `__builtin_args_info'");
5765 for (i = 0; i < nwords; i++)
5766 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5768 type = build_array_type (integer_type_node,
5769 build_index_type (build_int_2 (nwords, 0)));
5770 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5771 TREE_CONSTANT (result) = 1;
5772 TREE_STATIC (result) = 1;
5773 result = build (INDIRECT_REF, build_pointer_type (type), result);
5774 TREE_CONSTANT (result) = 1;
5775 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5779 /* Return the address of the first anonymous stack arg. */
5780 case BUILT_IN_NEXT_ARG:
5782 tree fntype = TREE_TYPE (current_function_decl);
5783 if (!(TYPE_ARG_TYPES (fntype) != 0
5784 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5785 != void_type_node)))
5787 error ("`va_start' used in function with fixed args");
5792 return expand_binop (Pmode, add_optab,
5793 current_function_internal_arg_pointer,
5794 current_function_arg_offset_rtx,
5795 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5797 case BUILT_IN_CLASSIFY_TYPE:
5800 tree type = TREE_TYPE (TREE_VALUE (arglist));
5801 enum tree_code code = TREE_CODE (type);
5802 if (code == VOID_TYPE)
5803 return GEN_INT (void_type_class);
5804 if (code == INTEGER_TYPE)
5805 return GEN_INT (integer_type_class);
5806 if (code == CHAR_TYPE)
5807 return GEN_INT (char_type_class);
5808 if (code == ENUMERAL_TYPE)
5809 return GEN_INT (enumeral_type_class);
5810 if (code == BOOLEAN_TYPE)
5811 return GEN_INT (boolean_type_class);
5812 if (code == POINTER_TYPE)
5813 return GEN_INT (pointer_type_class);
5814 if (code == REFERENCE_TYPE)
5815 return GEN_INT (reference_type_class);
5816 if (code == OFFSET_TYPE)
5817 return GEN_INT (offset_type_class);
5818 if (code == REAL_TYPE)
5819 return GEN_INT (real_type_class);
5820 if (code == COMPLEX_TYPE)
5821 return GEN_INT (complex_type_class);
5822 if (code == FUNCTION_TYPE)
5823 return GEN_INT (function_type_class);
5824 if (code == METHOD_TYPE)
5825 return GEN_INT (method_type_class);
5826 if (code == RECORD_TYPE)
5827 return GEN_INT (record_type_class);
5828 if (code == UNION_TYPE)
5829 return GEN_INT (union_type_class);
5830 if (code == ARRAY_TYPE)
5831 return GEN_INT (array_type_class);
5832 if (code == STRING_TYPE)
5833 return GEN_INT (string_type_class);
5834 if (code == SET_TYPE)
5835 return GEN_INT (set_type_class);
5836 if (code == FILE_TYPE)
5837 return GEN_INT (file_type_class);
5838 if (code == LANG_TYPE)
5839 return GEN_INT (lang_type_class);
5841 return GEN_INT (no_type_class);
5843 case BUILT_IN_CONSTANT_P:
5847 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5848 ? const1_rtx : const0_rtx);
5850 case BUILT_IN_FRAME_ADDRESS:
5851 /* The argument must be a nonnegative integer constant.
5852 It counts the number of frames to scan up the stack.
5853 The value is the address of that frame. */
5854 case BUILT_IN_RETURN_ADDRESS:
5855 /* The argument must be a nonnegative integer constant.
5856 It counts the number of frames to scan up the stack.
5857 The value is the return address saved in that frame. */
5859 /* Warning about missing arg was already issued. */
5861 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5863 error ("invalid arg to `__builtin_return_address'");
5866 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5868 error ("invalid arg to `__builtin_return_address'");
5873 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5874 rtx tem = frame_pointer_rtx;
5877 /* Some machines need special handling before we can access arbitrary
5878 frames. For example, on the sparc, we must first flush all
5879 register windows to the stack. */
5880 #ifdef SETUP_FRAME_ADDRESSES
5881 SETUP_FRAME_ADDRESSES ();
5884 /* On the sparc, the return address is not in the frame, it is
5885 in a register. There is no way to access it off of the current
5886 frame pointer, but it can be accessed off the previous frame
5887 pointer by reading the value from the register window save
5889 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
5890 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
5894 /* Scan back COUNT frames to the specified frame. */
5895 for (i = 0; i < count; i++)
5897 /* Assume the dynamic chain pointer is in the word that
5898 the frame address points to, unless otherwise specified. */
5899 #ifdef DYNAMIC_CHAIN_ADDRESS
5900 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5902 tem = memory_address (Pmode, tem);
5903 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5906 /* For __builtin_frame_address, return what we've got. */
5907 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5910 /* For __builtin_return_address,
5911 Get the return address from that frame. */
5912 #ifdef RETURN_ADDR_RTX
5913 return RETURN_ADDR_RTX (count, tem);
5915 tem = memory_address (Pmode,
5916 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5917 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5921 case BUILT_IN_ALLOCA:
5923 /* Arg could be non-integer if user redeclared this fcn wrong. */
5924 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5926 current_function_calls_alloca = 1;
5927 /* Compute the argument. */
5928 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5930 /* Allocate the desired space. */
5931 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5933 /* Record the new stack level for nonlocal gotos. */
5934 if (nonlocal_goto_handler_slot != 0)
5935 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5939 /* If not optimizing, call the library function. */
5944 /* Arg could be non-integer if user redeclared this fcn wrong. */
5945 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5948 /* Compute the argument. */
5949 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5950 /* Compute ffs, into TARGET if possible.
5951 Set TARGET to wherever the result comes back. */
5952 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5953 ffs_optab, op0, target, 1);
5958 case BUILT_IN_STRLEN:
5959 /* If not optimizing, call the library function. */
5964 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5965 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5969 tree src = TREE_VALUE (arglist);
5970 tree len = c_strlen (src);
5973 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5975 rtx result, src_rtx, char_rtx;
5976 enum machine_mode insn_mode = value_mode, char_mode;
5977 enum insn_code icode;
5979 /* If the length is known, just return it. */
5981 return expand_expr (len, target, mode, 0);
5983 /* If SRC is not a pointer type, don't do this operation inline. */
5987 /* Call a function if we can't compute strlen in the right mode. */
5989 while (insn_mode != VOIDmode)
5991 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5992 if (icode != CODE_FOR_nothing)
5995 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5997 if (insn_mode == VOIDmode)
6000 /* Make a place to write the result of the instruction. */
6003 && GET_CODE (result) == REG
6004 && GET_MODE (result) == insn_mode
6005 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6006 result = gen_reg_rtx (insn_mode);
6008 /* Make sure the operands are acceptable to the predicates. */
6010 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6011 result = gen_reg_rtx (insn_mode);
6013 src_rtx = memory_address (BLKmode,
6014 expand_expr (src, NULL_RTX, Pmode,
6016 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6017 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6019 char_rtx = const0_rtx;
6020 char_mode = insn_operand_mode[(int)icode][2];
6021 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6022 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6024 emit_insn (GEN_FCN (icode) (result,
6025 gen_rtx (MEM, BLKmode, src_rtx),
6026 char_rtx, GEN_INT (align)));
6028 /* Return the value in the proper mode for this function. */
6029 if (GET_MODE (result) == value_mode)
6031 else if (target != 0)
6033 convert_move (target, result, 0);
6037 return convert_to_mode (value_mode, result, 0);
6040 case BUILT_IN_STRCPY:
6041 /* If not optimizing, call the library function. */
6046 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6047 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6048 || TREE_CHAIN (arglist) == 0
6049 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6053 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6058 len = size_binop (PLUS_EXPR, len, integer_one_node);
6060 chainon (arglist, build_tree_list (NULL_TREE, len));
6064 case BUILT_IN_MEMCPY:
6065 /* If not optimizing, call the library function. */
6070 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6071 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6072 || TREE_CHAIN (arglist) == 0
6073 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6074 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6075 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6079 tree dest = TREE_VALUE (arglist);
6080 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6081 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6084 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6086 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6087 rtx dest_rtx, dest_mem, src_mem;
6089 /* If either SRC or DEST is not a pointer type, don't do
6090 this operation in-line. */
6091 if (src_align == 0 || dest_align == 0)
6093 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6094 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6098 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6099 dest_mem = gen_rtx (MEM, BLKmode,
6100 memory_address (BLKmode, dest_rtx));
6101 src_mem = gen_rtx (MEM, BLKmode,
6102 memory_address (BLKmode,
6103 expand_expr (src, NULL_RTX,
6107 /* Copy word part most expediently. */
6108 emit_block_move (dest_mem, src_mem,
6109 expand_expr (len, NULL_RTX, VOIDmode, 0),
6110 MIN (src_align, dest_align));
6114 /* These comparison functions need an instruction that returns an actual
6115 index. An ordinary compare that just sets the condition codes
6117 #ifdef HAVE_cmpstrsi
6118 case BUILT_IN_STRCMP:
6119 /* If not optimizing, call the library function. */
6124 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6125 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6126 || TREE_CHAIN (arglist) == 0
6127 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6129 else if (!HAVE_cmpstrsi)
6132 tree arg1 = TREE_VALUE (arglist);
6133 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6137 len = c_strlen (arg1);
6139 len = size_binop (PLUS_EXPR, integer_one_node, len);
6140 len2 = c_strlen (arg2);
6142 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6144 /* If we don't have a constant length for the first, use the length
6145 of the second, if we know it. We don't require a constant for
6146 this case; some cost analysis could be done if both are available
6147 but neither is constant. For now, assume they're equally cheap.
6149 If both strings have constant lengths, use the smaller. This
6150 could arise if optimization results in strcpy being called with
6151 two fixed strings, or if the code was machine-generated. We should
6152 add some code to the `memcmp' handler below to deal with such
6153 situations, someday. */
6154 if (!len || TREE_CODE (len) != INTEGER_CST)
6161 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6163 if (tree_int_cst_lt (len2, len))
6167 chainon (arglist, build_tree_list (NULL_TREE, len));
6171 case BUILT_IN_MEMCMP:
6172 /* If not optimizing, call the library function. */
6177 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6178 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6179 || TREE_CHAIN (arglist) == 0
6180 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6181 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6182 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6184 else if (!HAVE_cmpstrsi)
6187 tree arg1 = TREE_VALUE (arglist);
6188 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6189 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6193 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6195 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6196 enum machine_mode insn_mode
6197 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6199 /* If we don't have POINTER_TYPE, call the function. */
6200 if (arg1_align == 0 || arg2_align == 0)
6202 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6203 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6207 /* Make a place to write the result of the instruction. */
6210 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6211 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6212 result = gen_reg_rtx (insn_mode);
6214 emit_insn (gen_cmpstrsi (result,
6215 gen_rtx (MEM, BLKmode,
6216 expand_expr (arg1, NULL_RTX, Pmode,
6218 gen_rtx (MEM, BLKmode,
6219 expand_expr (arg2, NULL_RTX, Pmode,
6221 expand_expr (len, NULL_RTX, VOIDmode, 0),
6222 GEN_INT (MIN (arg1_align, arg2_align))));
6224 /* Return the value in the proper mode for this function. */
6225 mode = TYPE_MODE (TREE_TYPE (exp));
6226 if (GET_MODE (result) == mode)
6228 else if (target != 0)
6230 convert_move (target, result, 0);
6234 return convert_to_mode (mode, result, 0);
6237 case BUILT_IN_STRCMP:
6238 case BUILT_IN_MEMCMP:
6242 default: /* just do library call, if unknown builtin */
6243 error ("built-in function `%s' not currently supported",
6244 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6247 /* The switch statement above can drop through to cause the function
6248 to be called normally. */
6250 return expand_call (exp, target, ignore);
6253 /* Built-in functions to perform an untyped call and return. */
6255 /* For each register that may be used for calling a function, this
6256 gives a mode used to copy the register's value. VOIDmode indicates
6257 the register is not used for calling a function. If the machine
6258 has register windows, this gives only the outbound registers.
6259 INCOMING_REGNO gives the corresponding inbound register. */
6260 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6262 /* For each register that may be used for returning values, this gives
6263 a mode used to copy the register's value. VOIDmode indicates the
6264 register is not used for returning values. If the machine has
6265 register windows, this gives only the outbound registers.
6266 INCOMING_REGNO gives the corresponding inbound register. */
6267 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6269 /* Return the size required for the block returned by __builtin_apply_args,
6270 and initialize apply_args_mode. */
6274 static int size = -1;
6276 enum machine_mode mode;
6278 /* The values computed by this function never change. */
6281 /* The first value is the incoming arg-pointer. */
6282 size = GET_MODE_SIZE (Pmode);
6284 /* The second value is the structure value address unless this is
6285 passed as an "invisible" first argument. */
6286 if (struct_value_rtx)
6287 size += GET_MODE_SIZE (Pmode);
6289 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6290 if (FUNCTION_ARG_REGNO_P (regno))
6292 /* Search for the proper mode for copying this register's
6293 value. I'm not sure this is right, but it works so far. */
6294 enum machine_mode best_mode = VOIDmode;
6296 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6298 mode = GET_MODE_WIDER_MODE (mode))
6299 if (HARD_REGNO_MODE_OK (regno, mode)
6300 && HARD_REGNO_NREGS (regno, mode) == 1)
6303 if (best_mode == VOIDmode)
6304 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6306 mode = GET_MODE_WIDER_MODE (mode))
6307 if (HARD_REGNO_MODE_OK (regno, mode)
6308 && (mov_optab->handlers[(int) mode].insn_code
6309 != CODE_FOR_nothing))
6313 if (mode == VOIDmode)
6316 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6317 if (size % align != 0)
6318 size = CEIL (size, align) * align;
6319 size += GET_MODE_SIZE (mode);
6320 apply_args_mode[regno] = mode;
6323 apply_args_mode[regno] = VOIDmode;
6328 /* Return the size required for the block returned by __builtin_apply,
6329 and initialize apply_result_mode. */
6331 apply_result_size ()
6333 static int size = -1;
6335 enum machine_mode mode;
6337 /* The values computed by this function never change. */
6342 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6343 if (FUNCTION_VALUE_REGNO_P (regno))
6345 /* Search for the proper mode for copying this register's
6346 value. I'm not sure this is right, but it works so far. */
6347 enum machine_mode best_mode = VOIDmode;
6349 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6351 mode = GET_MODE_WIDER_MODE (mode))
6352 if (HARD_REGNO_MODE_OK (regno, mode))
6355 if (best_mode == VOIDmode)
6356 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6358 mode = GET_MODE_WIDER_MODE (mode))
6359 if (HARD_REGNO_MODE_OK (regno, mode)
6360 && (mov_optab->handlers[(int) mode].insn_code
6361 != CODE_FOR_nothing))
6365 if (mode == VOIDmode)
6368 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6369 if (size % align != 0)
6370 size = CEIL (size, align) * align;
6371 size += GET_MODE_SIZE (mode);
6372 apply_result_mode[regno] = mode;
6375 apply_result_mode[regno] = VOIDmode;
6377 /* Allow targets that use untyped_call and untyped_return to override
6378 the size so that machine-specific information can be stored here. */
6379 #ifdef APPLY_RESULT_SIZE
6380 size = APPLY_RESULT_SIZE;
6386 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6387 /* Create a vector describing the result block RESULT. If SAVEP is true,
6388 the result block is used to save the values; otherwise it is used to
6389 restore the values. */
6391 result_vector (savep, result)
6395 int regno, size, align, nelts;
6396 enum machine_mode mode;
6398 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6402 if ((mode = apply_result_mode[regno]) != VOIDmode)
6404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6405 if (size % align != 0)
6406 size = CEIL (size, align) * align;
6407 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6408 mem = change_address (result, mode,
6409 plus_constant (XEXP (result, 0), size));
6410 savevec[nelts++] = (savep
6411 ? gen_rtx (SET, VOIDmode, mem, reg)
6412 : gen_rtx (SET, VOIDmode, reg, mem));
6413 size += GET_MODE_SIZE (mode);
6415 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6417 #endif /* HAVE_untyped_call or HAVE_untyped_return */
6420 /* Save the state required to perform an untyped call with the same
6421 arguments as were passed to the current function. */
6423 expand_builtin_apply_args ()
6426 int size, align, regno;
6427 enum machine_mode mode;
6429 /* Create a block where the arg-pointer, structure value address,
6430 and argument registers can be saved. */
6431 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6433 /* Walk past the arg-pointer and structure value address. */
6434 size = GET_MODE_SIZE (Pmode);
6435 if (struct_value_rtx)
6436 size += GET_MODE_SIZE (Pmode);
6438 /* Save each register used in calling a function to the block. */
6439 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6440 if ((mode = apply_args_mode[regno]) != VOIDmode)
6442 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6443 if (size % align != 0)
6444 size = CEIL (size, align) * align;
6445 emit_move_insn (change_address (registers, mode,
6446 plus_constant (XEXP (registers, 0),
6448 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6449 size += GET_MODE_SIZE (mode);
6452 /* Save the arg pointer to the block. */
6453 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6454 copy_to_reg (virtual_incoming_args_rtx));
6455 size = GET_MODE_SIZE (Pmode);
6457 /* Save the structure value address unless this is passed as an
6458 "invisible" first argument. */
6459 if (struct_value_incoming_rtx)
6461 emit_move_insn (change_address (registers, Pmode,
6462 plus_constant (XEXP (registers, 0),
6464 copy_to_reg (struct_value_incoming_rtx));
6465 size += GET_MODE_SIZE (Pmode);
6468 /* Return the address of the block. */
6469 return copy_addr_to_reg (XEXP (registers, 0));
6472 /* Perform an untyped call and save the state required to perform an
6473 untyped return of whatever value was returned by the given function. */
6475 expand_builtin_apply (function, arguments, argsize)
6476 rtx function, arguments, argsize;
6478 int size, align, regno;
6479 enum machine_mode mode;
6480 rtx incoming_args, result, reg, dest, call_insn;
6481 rtx old_stack_level = 0;
6484 /* Create a block where the return registers can be saved. */
6485 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6487 /* ??? The argsize value should be adjusted here. */
6489 /* Fetch the arg pointer from the ARGUMENTS block. */
6490 incoming_args = gen_reg_rtx (Pmode);
6491 emit_move_insn (incoming_args,
6492 gen_rtx (MEM, Pmode, arguments));
6493 #ifndef STACK_GROWS_DOWNWARD
6494 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6495 incoming_args, 0, OPTAB_LIB_WIDEN);
6498 /* Perform postincrements before actually calling the function. */
6501 /* Push a new argument block and copy the arguments. */
6502 do_pending_stack_adjust ();
6503 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6505 /* Push a block of memory onto the stack to store the memory arguments.
6506 Save the address in a register, and copy the memory arguments. ??? I
6507 haven't figured out how the calling convention macros effect this,
6508 but it's likely that the source and/or destination addresses in
6509 the block copy will need updating in machine specific ways. */
6510 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6511 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6512 gen_rtx (MEM, BLKmode, incoming_args),
6514 PARM_BOUNDARY / BITS_PER_UNIT);
6516 /* Refer to the argument block. */
6518 arguments = gen_rtx (MEM, BLKmode, arguments);
6520 /* Walk past the arg-pointer and structure value address. */
6521 size = GET_MODE_SIZE (Pmode);
6522 if (struct_value_rtx)
6523 size += GET_MODE_SIZE (Pmode);
6525 /* Restore each of the registers previously saved. Make USE insns
6526 for each of these registers for use in making the call. */
6527 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6528 if ((mode = apply_args_mode[regno]) != VOIDmode)
6530 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6531 if (size % align != 0)
6532 size = CEIL (size, align) * align;
6533 reg = gen_rtx (REG, mode, regno);
6534 emit_move_insn (reg,
6535 change_address (arguments, mode,
6536 plus_constant (XEXP (arguments, 0),
6539 push_to_sequence (use_insns);
6540 emit_insn (gen_rtx (USE, VOIDmode, reg));
6541 use_insns = get_insns ();
6543 size += GET_MODE_SIZE (mode);
6546 /* Restore the structure value address unless this is passed as an
6547 "invisible" first argument. */
6548 size = GET_MODE_SIZE (Pmode);
6549 if (struct_value_rtx)
6551 rtx value = gen_reg_rtx (Pmode);
6552 emit_move_insn (value,
6553 change_address (arguments, Pmode,
6554 plus_constant (XEXP (arguments, 0),
6556 emit_move_insn (struct_value_rtx, value);
6557 if (GET_CODE (struct_value_rtx) == REG)
6559 push_to_sequence (use_insns);
6560 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6561 use_insns = get_insns ();
6564 size += GET_MODE_SIZE (Pmode);
6567 /* All arguments and registers used for the call are set up by now! */
6568 function = prepare_call_address (function, NULL_TREE, &use_insns);
6570 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
6571 and we don't want to load it into a register as an optimization,
6572 because prepare_call_address already did it if it should be done. */
6573 if (GET_CODE (function) != SYMBOL_REF)
6574 function = memory_address (FUNCTION_MODE, function);
6576 /* Generate the actual call instruction and save the return value. */
6577 #ifdef HAVE_untyped_call
6578 if (HAVE_untyped_call)
6579 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6580 result, result_vector (1, result)));
6583 #ifdef HAVE_call_value
6584 if (HAVE_call_value)
6588 /* Locate the unique return register. It is not possible to
6589 express a call that sets more than one return register using
6590 call_value; use untyped_call for that. In fact, untyped_call
6591 only needs to save the return registers in the given block. */
6592 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6593 if ((mode = apply_result_mode[regno]) != VOIDmode)
6596 abort (); /* HAVE_untyped_call required. */
6597 valreg = gen_rtx (REG, mode, regno);
6600 emit_call_insn (gen_call_value (valreg,
6601 gen_rtx (MEM, FUNCTION_MODE, function),
6602 const0_rtx, NULL_RTX, const0_rtx));
6604 emit_move_insn (change_address (result, GET_MODE (valreg),
6612 /* Find the CALL insn we just emitted and write the USE insns before it. */
6613 for (call_insn = get_last_insn ();
6614 call_insn && GET_CODE (call_insn) != CALL_INSN;
6615 call_insn = PREV_INSN (call_insn))
6621 /* Put the USE insns before the CALL. */
6622 emit_insns_before (use_insns, call_insn);
6624 /* Restore the stack. */
6625 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6627 /* Return the address of the result block. */
6628 return copy_addr_to_reg (XEXP (result, 0));
6631 /* Perform an untyped return. */
6633 expand_builtin_return (result)
6636 int size, align, regno;
6637 enum machine_mode mode;
6641 apply_result_size ();
6642 result = gen_rtx (MEM, BLKmode, result);
6644 #ifdef HAVE_untyped_return
6645 if (HAVE_untyped_return)
6647 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6653 /* Restore the return value and note that each value is used. */
6655 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6656 if ((mode = apply_result_mode[regno]) != VOIDmode)
6658 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6659 if (size % align != 0)
6660 size = CEIL (size, align) * align;
6661 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6662 emit_move_insn (reg,
6663 change_address (result, mode,
6664 plus_constant (XEXP (result, 0),
6667 push_to_sequence (use_insns);
6668 emit_insn (gen_rtx (USE, VOIDmode, reg));
6669 use_insns = get_insns ();
6671 size += GET_MODE_SIZE (mode);
6674 /* Put the USE insns before the return. */
6675 emit_insns (use_insns);
6677 /* Return whatever values was restored by jumping directly to the end
6679 expand_null_return ();
6682 /* Expand code for a post- or pre- increment or decrement
6683 and return the RTX for the result.
6684 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6687 expand_increment (exp, post)
6691 register rtx op0, op1;
6692 register rtx temp, value;
6693 register tree incremented = TREE_OPERAND (exp, 0);
6694 optab this_optab = add_optab;
6696 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6697 int op0_is_copy = 0;
6699 /* Stabilize any component ref that might need to be
6700 evaluated more than once below. */
6702 || TREE_CODE (incremented) == BIT_FIELD_REF
6703 || (TREE_CODE (incremented) == COMPONENT_REF
6704 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6705 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6706 incremented = stabilize_reference (incremented);
6708 /* Compute the operands as RTX.
6709 Note whether OP0 is the actual lvalue or a copy of it:
6710 I believe it is a copy iff it is a register or subreg
6711 and insns were generated in computing it. */
6713 temp = get_last_insn ();
6714 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6716 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6717 in place but intead must do sign- or zero-extension during assignment,
6718 so we copy it into a new register and let the code below use it as
6721 Note that we can safely modify this SUBREG since it is know not to be
6722 shared (it was made by the expand_expr call above). */
6724 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6725 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6727 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6728 && temp != get_last_insn ());
6729 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6731 /* Decide whether incrementing or decrementing. */
6732 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6733 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6734 this_optab = sub_optab;
6736 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6737 then we cannot just increment OP0. We must therefore contrive to
6738 increment the original value. Then, for postincrement, we can return
6739 OP0 since it is a copy of the old value. For preincrement, we want
6740 to always expand here, since this generates better or equivalent code. */
6741 if (!post || op0_is_copy)
6743 /* This is the easiest way to increment the value wherever it is.
6744 Problems with multiple evaluation of INCREMENTED are prevented
6745 because either (1) it is a component_ref or preincrement,
6746 in which case it was stabilized above, or (2) it is an array_ref
6747 with constant index in an array in a register, which is
6748 safe to reevaluate. */
6749 tree newexp = build ((this_optab == add_optab
6750 ? PLUS_EXPR : MINUS_EXPR),
6753 TREE_OPERAND (exp, 1));
6754 temp = expand_assignment (incremented, newexp, ! post, 0);
6755 return post ? op0 : temp;
6758 /* Convert decrement by a constant into a negative increment. */
6759 if (this_optab == sub_optab
6760 && GET_CODE (op1) == CONST_INT)
6762 op1 = GEN_INT (- INTVAL (op1));
6763 this_optab = add_optab;
6768 /* We have a true reference to the value in OP0.
6769 If there is an insn to add or subtract in this mode, queue it. */
6771 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6772 op0 = stabilize (op0);
6775 icode = (int) this_optab->handlers[(int) mode].insn_code;
6776 if (icode != (int) CODE_FOR_nothing
6777 /* Make sure that OP0 is valid for operands 0 and 1
6778 of the insn we want to queue. */
6779 && (*insn_operand_predicate[icode][0]) (op0, mode)
6780 && (*insn_operand_predicate[icode][1]) (op0, mode))
6782 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6783 op1 = force_reg (mode, op1);
6785 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6789 /* Preincrement, or we can't increment with one simple insn. */
6791 /* Save a copy of the value before inc or dec, to return it later. */
6792 temp = value = copy_to_reg (op0);
6794 /* Arrange to return the incremented value. */
6795 /* Copy the rtx because expand_binop will protect from the queue,
6796 and the results of that would be invalid for us to return
6797 if our caller does emit_queue before using our result. */
6798 temp = copy_rtx (value = op0);
6800 /* Increment however we can. */
6801 op1 = expand_binop (mode, this_optab, value, op1, op0,
6802 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6803 /* Make sure the value is stored into OP0. */
6805 emit_move_insn (op0, op1);
6810 /* Expand all function calls contained within EXP, innermost ones first.
6811 But don't look within expressions that have sequence points.
6812 For each CALL_EXPR, record the rtx for its value
6813 in the CALL_EXPR_RTL field. */
6816 preexpand_calls (exp)
6819 register int nops, i;
6820 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6822 if (! do_preexpand_calls)
6825 /* Only expressions and references can contain calls. */
6827 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6830 switch (TREE_CODE (exp))
6833 /* Do nothing if already expanded. */
6834 if (CALL_EXPR_RTL (exp) != 0)
6837 /* Do nothing to built-in functions. */
6838 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6839 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6840 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6841 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6846 case TRUTH_ANDIF_EXPR:
6847 case TRUTH_ORIF_EXPR:
6848 /* If we find one of these, then we can be sure
6849 the adjust will be done for it (since it makes jumps).
6850 Do it now, so that if this is inside an argument
6851 of a function, we don't get the stack adjustment
6852 after some other args have already been pushed. */
6853 do_pending_stack_adjust ();
6858 case WITH_CLEANUP_EXPR:
6862 if (SAVE_EXPR_RTL (exp) != 0)
6866 nops = tree_code_length[(int) TREE_CODE (exp)];
6867 for (i = 0; i < nops; i++)
6868 if (TREE_OPERAND (exp, i) != 0)
6870 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6871 if (type == 'e' || type == '<' || type == '1' || type == '2'
6873 preexpand_calls (TREE_OPERAND (exp, i));
6877 /* At the start of a function, record that we have no previously-pushed
6878 arguments waiting to be popped. */
6881 init_pending_stack_adjust ()
6883 pending_stack_adjust = 0;
6886 /* When exiting from function, if safe, clear out any pending stack adjust
6887 so the adjustment won't get done. */
6890 clear_pending_stack_adjust ()
6892 #ifdef EXIT_IGNORE_STACK
6893 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6894 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6895 && ! flag_inline_functions)
6896 pending_stack_adjust = 0;
6900 /* Pop any previously-pushed arguments that have not been popped yet. */
6903 do_pending_stack_adjust ()
6905 if (inhibit_defer_pop == 0)
6907 if (pending_stack_adjust != 0)
6908 adjust_stack (GEN_INT (pending_stack_adjust));
6909 pending_stack_adjust = 0;
6913 /* Expand all cleanups up to OLD_CLEANUPS.
6914 Needed here, and also for language-dependent calls. */
6917 expand_cleanups_to (old_cleanups)
6920 while (cleanups_this_call != old_cleanups)
6922 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6923 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6927 /* Expand conditional expressions. */
6929 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6930 LABEL is an rtx of code CODE_LABEL, in this function and all the
6934 jumpifnot (exp, label)
6938 do_jump (exp, label, NULL_RTX);
6941 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6948 do_jump (exp, NULL_RTX, label);
6951 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6952 the result is zero, or IF_TRUE_LABEL if the result is one.
6953 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6954 meaning fall through in that case.
6956 do_jump always does any pending stack adjust except when it does not
6957 actually perform a jump. An example where there is no jump
6958 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6960 This function is responsible for optimizing cases such as
6961 &&, || and comparison operators in EXP. */
6964 do_jump (exp, if_false_label, if_true_label)
6966 rtx if_false_label, if_true_label;
6968 register enum tree_code code = TREE_CODE (exp);
6969 /* Some cases need to create a label to jump to
6970 in order to properly fall through.
6971 These cases set DROP_THROUGH_LABEL nonzero. */
6972 rtx drop_through_label = 0;
6986 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6992 /* This is not true with #pragma weak */
6994 /* The address of something can never be zero. */
6996 emit_jump (if_true_label);
7001 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7002 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7003 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7006 /* If we are narrowing the operand, we have to do the compare in the
7008 if ((TYPE_PRECISION (TREE_TYPE (exp))
7009 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7011 case NON_LVALUE_EXPR:
7012 case REFERENCE_EXPR:
7017 /* These cannot change zero->non-zero or vice versa. */
7018 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7022 /* This is never less insns than evaluating the PLUS_EXPR followed by
7023 a test and can be longer if the test is eliminated. */
7025 /* Reduce to minus. */
7026 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7027 TREE_OPERAND (exp, 0),
7028 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7029 TREE_OPERAND (exp, 1))));
7030 /* Process as MINUS. */
7034 /* Non-zero iff operands of minus differ. */
7035 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7036 TREE_OPERAND (exp, 0),
7037 TREE_OPERAND (exp, 1)),
7042 /* If we are AND'ing with a small constant, do this comparison in the
7043 smallest type that fits. If the machine doesn't have comparisons
7044 that small, it will be converted back to the wider comparison.
7045 This helps if we are testing the sign bit of a narrower object.
7046 combine can't do this for us because it can't know whether a
7047 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7049 if (! SLOW_BYTE_ACCESS
7050 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7051 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7052 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7053 && (type = type_for_size (i + 1, 1)) != 0
7054 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7055 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7056 != CODE_FOR_nothing))
7058 do_jump (convert (type, exp), if_false_label, if_true_label);
7063 case TRUTH_NOT_EXPR:
7064 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7067 case TRUTH_ANDIF_EXPR:
7068 if (if_false_label == 0)
7069 if_false_label = drop_through_label = gen_label_rtx ();
7070 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7071 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7074 case TRUTH_ORIF_EXPR:
7075 if (if_true_label == 0)
7076 if_true_label = drop_through_label = gen_label_rtx ();
7077 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7078 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7082 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7085 do_pending_stack_adjust ();
7086 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7093 int bitsize, bitpos, unsignedp;
7094 enum machine_mode mode;
7099 /* Get description of this reference. We don't actually care
7100 about the underlying object here. */
7101 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7102 &mode, &unsignedp, &volatilep);
7104 type = type_for_size (bitsize, unsignedp);
7105 if (! SLOW_BYTE_ACCESS
7106 && type != 0 && bitsize >= 0
7107 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7108 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7109 != CODE_FOR_nothing))
7111 do_jump (convert (type, exp), if_false_label, if_true_label);
7118 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7119 if (integer_onep (TREE_OPERAND (exp, 1))
7120 && integer_zerop (TREE_OPERAND (exp, 2)))
7121 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7123 else if (integer_zerop (TREE_OPERAND (exp, 1))
7124 && integer_onep (TREE_OPERAND (exp, 2)))
7125 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7129 register rtx label1 = gen_label_rtx ();
7130 drop_through_label = gen_label_rtx ();
7131 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7132 /* Now the THEN-expression. */
7133 do_jump (TREE_OPERAND (exp, 1),
7134 if_false_label ? if_false_label : drop_through_label,
7135 if_true_label ? if_true_label : drop_through_label);
7136 /* In case the do_jump just above never jumps. */
7137 do_pending_stack_adjust ();
7138 emit_label (label1);
7139 /* Now the ELSE-expression. */
7140 do_jump (TREE_OPERAND (exp, 2),
7141 if_false_label ? if_false_label : drop_through_label,
7142 if_true_label ? if_true_label : drop_through_label);
7147 if (integer_zerop (TREE_OPERAND (exp, 1)))
7148 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7149 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7152 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7153 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7155 comparison = compare (exp, EQ, EQ);
7159 if (integer_zerop (TREE_OPERAND (exp, 1)))
7160 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7161 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7164 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7165 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7167 comparison = compare (exp, NE, NE);
7171 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7173 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7174 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7176 comparison = compare (exp, LT, LTU);
7180 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7182 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7183 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7185 comparison = compare (exp, LE, LEU);
7189 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7191 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7192 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7194 comparison = compare (exp, GT, GTU);
7198 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7200 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7201 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7203 comparison = compare (exp, GE, GEU);
7208 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7210 /* This is not needed any more and causes poor code since it causes
7211 comparisons and tests from non-SI objects to have different code
7213 /* Copy to register to avoid generating bad insns by cse
7214 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7215 if (!cse_not_expected && GET_CODE (temp) == MEM)
7216 temp = copy_to_reg (temp);
7218 do_pending_stack_adjust ();
7219 if (GET_CODE (temp) == CONST_INT)
7220 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7221 else if (GET_CODE (temp) == LABEL_REF)
7222 comparison = const_true_rtx;
7223 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7224 && !can_compare_p (GET_MODE (temp)))
7225 /* Note swapping the labels gives us not-equal. */
7226 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7227 else if (GET_MODE (temp) != VOIDmode)
7228 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7229 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7230 GET_MODE (temp), NULL_RTX, 0);
7235 /* Do any postincrements in the expression that was tested. */
7238 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7239 straight into a conditional jump instruction as the jump condition.
7240 Otherwise, all the work has been done already. */
7242 if (comparison == const_true_rtx)
7245 emit_jump (if_true_label);
7247 else if (comparison == const0_rtx)
7250 emit_jump (if_false_label);
7252 else if (comparison)
7253 do_jump_for_compare (comparison, if_false_label, if_true_label);
7257 if (drop_through_label)
7259 /* If do_jump produces code that might be jumped around,
7260 do any stack adjusts from that code, before the place
7261 where control merges in. */
7262 do_pending_stack_adjust ();
7263 emit_label (drop_through_label);
7267 /* Given a comparison expression EXP for values too wide to be compared
7268 with one insn, test the comparison and jump to the appropriate label.
7269 The code of EXP is ignored; we always test GT if SWAP is 0,
7270 and LT if SWAP is 1. */
7273 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7276 rtx if_false_label, if_true_label;
7278 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7279 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7280 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7281 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7282 rtx drop_through_label = 0;
7283 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7286 if (! if_true_label || ! if_false_label)
7287 drop_through_label = gen_label_rtx ();
7288 if (! if_true_label)
7289 if_true_label = drop_through_label;
7290 if (! if_false_label)
7291 if_false_label = drop_through_label;
7293 /* Compare a word at a time, high order first. */
7294 for (i = 0; i < nwords; i++)
7297 rtx op0_word, op1_word;
7299 if (WORDS_BIG_ENDIAN)
7301 op0_word = operand_subword_force (op0, i, mode);
7302 op1_word = operand_subword_force (op1, i, mode);
7306 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7307 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7310 /* All but high-order word must be compared as unsigned. */
7311 comp = compare_from_rtx (op0_word, op1_word,
7312 (unsignedp || i > 0) ? GTU : GT,
7313 unsignedp, word_mode, NULL_RTX, 0);
7314 if (comp == const_true_rtx)
7315 emit_jump (if_true_label);
7316 else if (comp != const0_rtx)
7317 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7319 /* Consider lower words only if these are equal. */
7320 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7322 if (comp == const_true_rtx)
7323 emit_jump (if_false_label);
7324 else if (comp != const0_rtx)
7325 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7329 emit_jump (if_false_label);
7330 if (drop_through_label)
7331 emit_label (drop_through_label);
7334 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7335 with one insn, test the comparison and jump to the appropriate label. */
7338 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7340 rtx if_false_label, if_true_label;
7342 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7343 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7344 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7345 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7347 rtx drop_through_label = 0;
7349 if (! if_false_label)
7350 drop_through_label = if_false_label = gen_label_rtx ();
7352 for (i = 0; i < nwords; i++)
7354 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7355 operand_subword_force (op1, i, mode),
7356 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7357 word_mode, NULL_RTX, 0);
7358 if (comp == const_true_rtx)
7359 emit_jump (if_false_label);
7360 else if (comp != const0_rtx)
7361 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7365 emit_jump (if_true_label);
7366 if (drop_through_label)
7367 emit_label (drop_through_label);
7370 /* Jump according to whether OP0 is 0.
7371 We assume that OP0 has an integer mode that is too wide
7372 for the available compare insns. */
7375 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7377 rtx if_false_label, if_true_label;
7379 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7381 rtx drop_through_label = 0;
7383 if (! if_false_label)
7384 drop_through_label = if_false_label = gen_label_rtx ();
7386 for (i = 0; i < nwords; i++)
7388 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7390 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7391 if (comp == const_true_rtx)
7392 emit_jump (if_false_label);
7393 else if (comp != const0_rtx)
7394 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7398 emit_jump (if_true_label);
7399 if (drop_through_label)
7400 emit_label (drop_through_label);
7403 /* Given a comparison expression in rtl form, output conditional branches to
7404 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7407 do_jump_for_compare (comparison, if_false_label, if_true_label)
7408 rtx comparison, if_false_label, if_true_label;
7412 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7413 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7418 emit_jump (if_false_label);
7420 else if (if_false_label)
7423 rtx prev = PREV_INSN (get_last_insn ());
7426 /* Output the branch with the opposite condition. Then try to invert
7427 what is generated. If more than one insn is a branch, or if the
7428 branch is not the last insn written, abort. If we can't invert
7429 the branch, emit make a true label, redirect this jump to that,
7430 emit a jump to the false label and define the true label. */
7432 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7433 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7437 /* Here we get the insn before what was just emitted.
7438 On some machines, emitting the branch can discard
7439 the previous compare insn and emit a replacement. */
7441 /* If there's only one preceding insn... */
7442 insn = get_insns ();
7444 insn = NEXT_INSN (prev);
7446 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7447 if (GET_CODE (insn) == JUMP_INSN)
7454 if (branch != get_last_insn ())
7457 if (! invert_jump (branch, if_false_label))
7459 if_true_label = gen_label_rtx ();
7460 redirect_jump (branch, if_true_label);
7461 emit_jump (if_false_label);
7462 emit_label (if_true_label);
7467 /* Generate code for a comparison expression EXP
7468 (including code to compute the values to be compared)
7469 and set (CC0) according to the result.
7470 SIGNED_CODE should be the rtx operation for this comparison for
7471 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7473 We force a stack adjustment unless there are currently
7474 things pushed on the stack that aren't yet used. */
7477 compare (exp, signed_code, unsigned_code)
7479 enum rtx_code signed_code, unsigned_code;
7482 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7484 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7485 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7486 register enum machine_mode mode = TYPE_MODE (type);
7487 int unsignedp = TREE_UNSIGNED (type);
7488 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7490 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7492 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7493 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7496 /* Like compare but expects the values to compare as two rtx's.
7497 The decision as to signed or unsigned comparison must be made by the caller.
7499 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7502 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7503 size of MODE should be used. */
7506 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7507 register rtx op0, op1;
7510 enum machine_mode mode;
7516 /* If one operand is constant, make it the second one. Only do this
7517 if the other operand is not constant as well. */
7519 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7520 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7525 code = swap_condition (code);
7530 op0 = force_not_mem (op0);
7531 op1 = force_not_mem (op1);
7534 do_pending_stack_adjust ();
7536 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7537 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7541 /* There's no need to do this now that combine.c can eliminate lots of
7542 sign extensions. This can be less efficient in certain cases on other
7545 /* If this is a signed equality comparison, we can do it as an
7546 unsigned comparison since zero-extension is cheaper than sign
7547 extension and comparisons with zero are done as unsigned. This is
7548 the case even on machines that can do fast sign extension, since
7549 zero-extension is easier to combine with other operations than
7550 sign-extension is. If we are comparing against a constant, we must
7551 convert it to what it would look like unsigned. */
7552 if ((code == EQ || code == NE) && ! unsignedp
7553 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7555 if (GET_CODE (op1) == CONST_INT
7556 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7557 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7562 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7564 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7567 /* Generate code to calculate EXP using a store-flag instruction
7568 and return an rtx for the result. EXP is either a comparison
7569 or a TRUTH_NOT_EXPR whose operand is a comparison.
7571 If TARGET is nonzero, store the result there if convenient.
7573 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7576 Return zero if there is no suitable set-flag instruction
7577 available on this machine.
7579 Once expand_expr has been called on the arguments of the comparison,
7580 we are committed to doing the store flag, since it is not safe to
7581 re-evaluate the expression. We emit the store-flag insn by calling
7582 emit_store_flag, but only expand the arguments if we have a reason
7583 to believe that emit_store_flag will be successful. If we think that
7584 it will, but it isn't, we have to simulate the store-flag with a
7585 set/jump/set sequence. */
7588 do_store_flag (exp, target, mode, only_cheap)
7591 enum machine_mode mode;
7595 tree arg0, arg1, type;
7597 enum machine_mode operand_mode;
7601 enum insn_code icode;
7602 rtx subtarget = target;
7603 rtx result, label, pattern, jump_pat;
7605 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7606 result at the end. We can't simply invert the test since it would
7607 have already been inverted if it were valid. This case occurs for
7608 some floating-point comparisons. */
7610 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7611 invert = 1, exp = TREE_OPERAND (exp, 0);
7613 arg0 = TREE_OPERAND (exp, 0);
7614 arg1 = TREE_OPERAND (exp, 1);
7615 type = TREE_TYPE (arg0);
7616 operand_mode = TYPE_MODE (type);
7617 unsignedp = TREE_UNSIGNED (type);
7619 /* We won't bother with BLKmode store-flag operations because it would mean
7620 passing a lot of information to emit_store_flag. */
7621 if (operand_mode == BLKmode)
7627 /* Get the rtx comparison code to use. We know that EXP is a comparison
7628 operation of some type. Some comparisons against 1 and -1 can be
7629 converted to comparisons with zero. Do so here so that the tests
7630 below will be aware that we have a comparison with zero. These
7631 tests will not catch constants in the first operand, but constants
7632 are rarely passed as the first operand. */
7634 switch (TREE_CODE (exp))
7643 if (integer_onep (arg1))
7644 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7646 code = unsignedp ? LTU : LT;
7649 if (integer_all_onesp (arg1))
7650 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7652 code = unsignedp ? LEU : LE;
7655 if (integer_all_onesp (arg1))
7656 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7658 code = unsignedp ? GTU : GT;
7661 if (integer_onep (arg1))
7662 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7664 code = unsignedp ? GEU : GE;
7670 /* Put a constant second. */
7671 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7673 tem = arg0; arg0 = arg1; arg1 = tem;
7674 code = swap_condition (code);
7677 /* If this is an equality or inequality test of a single bit, we can
7678 do this by shifting the bit being tested to the low-order bit and
7679 masking the result with the constant 1. If the condition was EQ,
7680 we xor it with 1. This does not require an scc insn and is faster
7681 than an scc insn even if we have it. */
7683 if ((code == NE || code == EQ)
7684 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7685 && integer_pow2p (TREE_OPERAND (arg0, 1))
7686 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7688 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7689 NULL_RTX, VOIDmode, 0)));
7691 if (subtarget == 0 || GET_CODE (subtarget) != REG
7692 || GET_MODE (subtarget) != operand_mode
7693 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7696 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7699 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7700 size_int (bitnum), target, 1);
7702 if (GET_MODE (op0) != mode)
7703 op0 = convert_to_mode (mode, op0, 1);
7705 if (bitnum != TYPE_PRECISION (type) - 1)
7706 op0 = expand_and (op0, const1_rtx, target);
7708 if ((code == EQ && ! invert) || (code == NE && invert))
7709 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7715 /* Now see if we are likely to be able to do this. Return if not. */
7716 if (! can_compare_p (operand_mode))
7718 icode = setcc_gen_code[(int) code];
7719 if (icode == CODE_FOR_nothing
7720 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7722 /* We can only do this if it is one of the special cases that
7723 can be handled without an scc insn. */
7724 if ((code == LT && integer_zerop (arg1))
7725 || (! only_cheap && code == GE && integer_zerop (arg1)))
7727 else if (BRANCH_COST >= 0
7728 && ! only_cheap && (code == NE || code == EQ)
7729 && TREE_CODE (type) != REAL_TYPE
7730 && ((abs_optab->handlers[(int) operand_mode].insn_code
7731 != CODE_FOR_nothing)
7732 || (ffs_optab->handlers[(int) operand_mode].insn_code
7733 != CODE_FOR_nothing)))
7739 preexpand_calls (exp);
7740 if (subtarget == 0 || GET_CODE (subtarget) != REG
7741 || GET_MODE (subtarget) != operand_mode
7742 || ! safe_from_p (subtarget, arg1))
7745 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7746 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7749 target = gen_reg_rtx (mode);
7751 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7752 because, if the emit_store_flag does anything it will succeed and
7753 OP0 and OP1 will not be used subsequently. */
7755 result = emit_store_flag (target, code,
7756 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7757 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7758 operand_mode, unsignedp, 1);
7763 result = expand_binop (mode, xor_optab, result, const1_rtx,
7764 result, 0, OPTAB_LIB_WIDEN);
7768 /* If this failed, we have to do this with set/compare/jump/set code. */
7769 if (target == 0 || GET_CODE (target) != REG
7770 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7771 target = gen_reg_rtx (GET_MODE (target));
7773 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7774 result = compare_from_rtx (op0, op1, code, unsignedp,
7775 operand_mode, NULL_RTX, 0);
7776 if (GET_CODE (result) == CONST_INT)
7777 return (((result == const0_rtx && ! invert)
7778 || (result != const0_rtx && invert))
7779 ? const0_rtx : const1_rtx);
7781 label = gen_label_rtx ();
7782 if (bcc_gen_fctn[(int) code] == 0)
7785 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7786 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7792 /* Generate a tablejump instruction (used for switch statements). */
7794 #ifdef HAVE_tablejump
7796 /* INDEX is the value being switched on, with the lowest value
7797 in the table already subtracted.
7798 MODE is its expected mode (needed if INDEX is constant).
7799 RANGE is the length of the jump table.
7800 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7802 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7803 index value is out of range. */
7806 do_tablejump (index, mode, range, table_label, default_label)
7807 rtx index, range, table_label, default_label;
7808 enum machine_mode mode;
7810 register rtx temp, vector;
7812 /* Do an unsigned comparison (in the proper mode) between the index
7813 expression and the value which represents the length of the range.
7814 Since we just finished subtracting the lower bound of the range
7815 from the index expression, this comparison allows us to simultaneously
7816 check that the original index expression value is both greater than
7817 or equal to the minimum value of the range and less than or equal to
7818 the maximum value of the range. */
7820 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
7821 emit_jump_insn (gen_bltu (default_label));
7823 /* If index is in range, it must fit in Pmode.
7824 Convert to Pmode so we can index with it. */
7826 index = convert_to_mode (Pmode, index, 1);
7828 /* If flag_force_addr were to affect this address
7829 it could interfere with the tricky assumptions made
7830 about addresses that contain label-refs,
7831 which may be valid only very near the tablejump itself. */
7832 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7833 GET_MODE_SIZE, because this indicates how large insns are. The other
7834 uses should all be Pmode, because they are addresses. This code
7835 could fail if addresses and insns are not the same size. */
7836 index = memory_address_noforce
7838 gen_rtx (PLUS, Pmode,
7839 gen_rtx (MULT, Pmode, index,
7840 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7841 gen_rtx (LABEL_REF, Pmode, table_label)));
7842 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7843 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7844 RTX_UNCHANGING_P (vector) = 1;
7845 convert_move (temp, vector, 0);
7847 emit_jump_insn (gen_tablejump (temp, table_label));
7849 #ifndef CASE_VECTOR_PC_RELATIVE
7850 /* If we are generating PIC code or if the table is PC-relative, the
7851 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7857 #endif /* HAVE_tablejump */