1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
40 #include "typeclass.h"
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
64 #define STACK_PUSH_CODE PRE_INC
68 /* Assume that case vectors are not pc-relative. */
69 #ifndef CASE_VECTOR_PC_RELATIVE
70 #define CASE_VECTOR_PC_RELATIVE 0
73 /* If this is nonzero, we do not bother generating VOLATILE
74 around volatile memory references, and we are willing to
75 output indirect addresses. If cse is to follow, we reject
76 indirect addresses so a useful potential cse is generated;
77 if it is used only once, instruction combination will produce
78 the same indirect address eventually. */
81 /* Nonzero to generate code for all the subroutines within an
82 expression before generating the upper levels of the expression.
83 Nowadays this is never zero. */
84 int do_preexpand_calls = 1;
86 /* Don't check memory usage, since code is being emitted to check a memory
87 usage. Used when current_function_check_memory_usage is true, to avoid
88 infinite recursion. */
89 static int in_check_memory_usage;
91 /* This structure is used by move_by_pieces to describe the move to
103 int explicit_inc_from;
110 /* This structure is used by clear_by_pieces to describe the clear to
113 struct clear_by_pieces
125 extern struct obstack permanent_obstack;
127 static rtx get_push_address PROTO ((int));
129 static rtx enqueue_insn PROTO((rtx, rtx));
130 static int move_by_pieces_ninsns PROTO((unsigned int, int));
131 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
132 struct move_by_pieces *));
133 static void clear_by_pieces PROTO((rtx, int, int));
134 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
135 struct clear_by_pieces *));
136 static int is_zeros_p PROTO((tree));
137 static int mostly_zeros_p PROTO((tree));
138 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
140 static void store_constructor PROTO((tree, rtx, int));
141 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
142 enum machine_mode, int, int,
144 static enum memory_use_mode
145 get_memory_usage_from_modifier PROTO((enum expand_modifier));
146 static tree save_noncopied_parts PROTO((tree, tree));
147 static tree init_noncopied_parts PROTO((tree, tree));
148 static int safe_from_p PROTO((rtx, tree, int));
149 static int fixed_type_p PROTO((tree));
150 static rtx var_rtx PROTO((tree));
151 static rtx expand_increment PROTO((tree, int, int));
152 static void preexpand_calls PROTO((tree));
153 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
154 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
155 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
156 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
157 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
159 /* Record for each mode whether we can move a register directly to or
160 from an object of that mode in memory. If we can't, we won't try
161 to use that mode directly when accessing a field of that mode. */
163 static char direct_load[NUM_MACHINE_MODES];
164 static char direct_store[NUM_MACHINE_MODES];
166 /* If a memory-to-memory move would take MOVE_RATIO or more simple
167 move-instruction sequences, we will do a movstr or libcall instead. */
170 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
173 /* If we are optimizing for space (-Os), cut down the default move ratio */
174 #define MOVE_RATIO (optimize_size ? 3 : 15)
178 /* This macro is used to determine whether move_by_pieces should be called
179 to perform a structure copy. */
180 #ifndef MOVE_BY_PIECES_P
181 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
182 (SIZE, ALIGN) < MOVE_RATIO)
185 /* This array records the insn_code of insns to perform block moves. */
186 enum insn_code movstr_optab[NUM_MACHINE_MODES];
188 /* This array records the insn_code of insns to perform block clears. */
189 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
191 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
193 #ifndef SLOW_UNALIGNED_ACCESS
194 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
197 /* This is run once per compilation to set up which modes can be used
198 directly in memory and to initialize the block move optab. */
204 enum machine_mode mode;
211 /* Since we are on the permanent obstack, we must be sure we save this
212 spot AFTER we call start_sequence, since it will reuse the rtl it
214 free_point = (char *) oballoc (0);
216 /* Try indexing by frame ptr and try by stack ptr.
217 It is known that on the Convex the stack ptr isn't a valid index.
218 With luck, one or the other is valid on any machine. */
219 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
220 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
222 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
223 pat = PATTERN (insn);
225 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
226 mode = (enum machine_mode) ((int) mode + 1))
231 direct_load[(int) mode] = direct_store[(int) mode] = 0;
232 PUT_MODE (mem, mode);
233 PUT_MODE (mem1, mode);
235 /* See if there is some register that can be used in this mode and
236 directly loaded or stored from memory. */
238 if (mode != VOIDmode && mode != BLKmode)
239 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
240 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
243 if (! HARD_REGNO_MODE_OK (regno, mode))
246 reg = gen_rtx_REG (mode, regno);
249 SET_DEST (pat) = reg;
250 if (recog (pat, insn, &num_clobbers) >= 0)
251 direct_load[(int) mode] = 1;
253 SET_SRC (pat) = mem1;
254 SET_DEST (pat) = reg;
255 if (recog (pat, insn, &num_clobbers) >= 0)
256 direct_load[(int) mode] = 1;
259 SET_DEST (pat) = mem;
260 if (recog (pat, insn, &num_clobbers) >= 0)
261 direct_store[(int) mode] = 1;
264 SET_DEST (pat) = mem1;
265 if (recog (pat, insn, &num_clobbers) >= 0)
266 direct_store[(int) mode] = 1;
274 /* This is run at the start of compiling a function. */
279 current_function->expr
280 = (struct expr_status *) xmalloc (sizeof (struct expr_status));
283 pending_stack_adjust = 0;
284 inhibit_defer_pop = 0;
286 apply_args_value = 0;
290 /* Small sanity check that the queue is empty at the end of a function. */
292 finish_expr_for_function ()
298 /* Manage the queue of increment instructions to be output
299 for POSTINCREMENT_EXPR expressions, etc. */
301 /* Queue up to increment (or change) VAR later. BODY says how:
302 BODY should be the same thing you would pass to emit_insn
303 to increment right away. It will go to emit_insn later on.
305 The value is a QUEUED expression to be used in place of VAR
306 where you want to guarantee the pre-incrementation value of VAR. */
/* Record a deferred change to VAR: BODY is the insn pattern that will
   perform the change when the queue is flushed.  The change is wrapped
   in a QUEUED rtx that is pushed onto pending_chain, and that QUEUED
   rtx is returned so callers can use it in place of VAR.
   NOTE(review): this view of the file is elided (the embedded original
   line numbers jump); declaration lines and the closing brace are not
   visible here, so the code lines below are kept byte-identical.  */
309 enqueue_insn (var, body)
312   pending_chain = gen_rtx_QUEUED (GET_MODE (var),
/* The QUEUED rtx carries VAR plus (per the accessors used elsewhere in
   this file) slots for the eventual insn and a pre-increment copy,
   both initially NULL_RTX, and the BODY to emit later.  */
313 var, NULL_RTX, NULL_RTX, body,
315   return pending_chain;
318 /* Use protect_from_queue to convert a QUEUED expression
319 into something that you can put immediately into an instruction.
320 If the queued incrementation has not happened yet,
321 protect_from_queue returns the variable itself.
322 If the incrementation has happened, protect_from_queue returns a temp
323 that contains a copy of the old value of the variable.
325 Any time an rtx which might possibly be a QUEUED is to be put
326 into an instruction, it must be passed through protect_from_queue first.
327 QUEUED expressions are not meaningful in instructions.
329 Do not pass a value through protect_from_queue and then hold
330 on to it for a while before putting it in an instruction!
331 If the queue is flushed in between, incorrect code will result. */
/* Convert X, which may be or contain a QUEUED rtx, into something that
   can be put directly into an instruction.  MODIFY is nonzero when the
   caller intends to write to X.  See the long comment above for the
   full contract.
   NOTE(review): many interior lines of this function are elided from
   this view (embedded line numbers jump, e.g. 342->348, 387->396);
   visible code lines are kept byte-identical.  */
334 protect_from_queue (x, modify)
338   register RTX_CODE code = GET_CODE (x);
340 #if 0 /* A QUEUED can hang around after the queue is forced out. */
341 /* Shortcut for most common case. */
342 if (pending_chain == 0)
348 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
349 use of autoincrement. Make a copy of the contents of the memory
350 location rather than a copy of the address, but not if the value is
351 of mode BLKmode. Don't modify X in place since it might be
353 if (code == MEM && GET_MODE (x) != BLKmode
354 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
356 register rtx y = XEXP (x, 0);
/* Build a new MEM that addresses the pre-increment variable rather
   than the QUEUED expression, copying the memory attributes of X.  */
357 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
359 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
360 MEM_COPY_ATTRIBUTES (new, x);
361 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
/* presumably taken when the queued increment has already been emitted:
   capture the old memory contents in a fresh pseudo before the insn --
   the guarding condition is elided from this view, confirm against the
   full source.  */
365 register rtx temp = gen_reg_rtx (GET_MODE (new));
366 emit_insn_before (gen_move_insn (temp, new),
372 /* Otherwise, recursively protect the subexpressions of all
373 the kinds of rtx's that can contain a QUEUED. */
/* Unary-operand case (the dispatching condition is elided here).  */
376 rtx tem = protect_from_queue (XEXP (x, 0), 0);
377 if (tem != XEXP (x, 0))
383 else if (code == PLUS || code == MULT)
385 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
386 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
/* Only rebuild X when a subexpression actually changed, so shared rtl
   is preserved (the rebuild itself is elided from this view).  */
387 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
/* From here on X itself is a QUEUED rtx.  */
396 /* If the increment has not happened, use the variable itself. */
397 if (QUEUED_INSN (x) == 0)
398 return QUEUED_VAR (x);
399 /* If the increment has happened and a pre-increment copy exists,
401 if (QUEUED_COPY (x) != 0)
402 return QUEUED_COPY (x);
403 /* The increment has happened but we haven't set up a pre-increment copy.
404 Set one up now, and use it. */
/* Insert the copy just before the queued insn so it captures the
   pre-increment value.  */
405 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
406 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
408 return QUEUED_COPY (x);
411 /* Return nonzero if X contains a QUEUED expression:
412 if it contains anything that will be altered by a queued increment.
413 We handle only combinations of MEM, PLUS, MINUS and MULT operators
414 since memory addresses generally contain only those. */
/* Return nonzero if X contains a QUEUED rtx anywhere inside it.
   Only MEM, PLUS, MINUS and MULT are recursed into (see the comment
   above); the switch/dispatch lines are elided from this view, so the
   code lines below are kept byte-identical.  */
420 register enum rtx_code code = GET_CODE (x);
/* presumably the MEM case: only the address can contain a QUEUED.  */
426 return queued_subexp_p (XEXP (x, 0));
/* presumably the binary cases (PLUS/MINUS/MULT): check both operands.  */
430 return (queued_subexp_p (XEXP (x, 0))
431 || queued_subexp_p (XEXP (x, 1)));
437 /* Perform all the pending incrementations. */
/* Flush the queue: emit every pending increment insn, in order,
   recording in each QUEUED rtx the insn that performed it.
   NOTE(review): the function header and closing brace are elided from
   this view; code lines below are kept byte-identical.  */
443 while ((p = pending_chain))
445 rtx body = QUEUED_BODY (p);
447 if (GET_CODE (body) == SEQUENCE)
/* For a SEQUENCE, the recorded insn is the first element; emitting the
   body happens separately below.  */
449 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
450 emit_insn (QUEUED_BODY (p));
/* Non-SEQUENCE case: emit_insn returns the insn directly.  */
453 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
/* Advance before the loop test re-reads pending_chain.  */
454 pending_chain = QUEUED_NEXT (p);
458 /* Copy data from FROM to TO, where the machine modes are not the same.
459 Both modes may be integer, or both may be floating.
460 UNSIGNEDP should be nonzero if FROM is an unsigned type.
461 This causes zero-extension instead of sign-extension. */
464 convert_move (to, from, unsignedp)
465 register rtx to, from;
468 enum machine_mode to_mode = GET_MODE (to);
469 enum machine_mode from_mode = GET_MODE (from);
470 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
471 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
475 /* rtx code for making an equivalent value. */
476 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
478 to = protect_from_queue (to, 1);
479 from = protect_from_queue (from, 0);
481 if (to_real != from_real)
484 /* If FROM is a SUBREG that indicates that we have already done at least
485 the required extension, strip it. We don't handle such SUBREGs as
488 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
489 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
490 >= GET_MODE_SIZE (to_mode))
491 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
492 from = gen_lowpart (to_mode, from), from_mode = to_mode;
494 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
497 if (to_mode == from_mode
498 || (from_mode == VOIDmode && CONSTANT_P (from)))
500 emit_move_insn (to, from);
508 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
510 /* Try converting directly if the insn is supported. */
511 if ((code = can_extend_p (to_mode, from_mode, 0))
514 emit_unop_insn (code, to, from, UNKNOWN);
519 #ifdef HAVE_trunchfqf2
520 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
522 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
526 #ifdef HAVE_trunctqfqf2
527 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
529 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
533 #ifdef HAVE_truncsfqf2
534 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
536 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
540 #ifdef HAVE_truncdfqf2
541 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
543 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
547 #ifdef HAVE_truncxfqf2
548 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
550 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
554 #ifdef HAVE_trunctfqf2
555 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
557 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
562 #ifdef HAVE_trunctqfhf2
563 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
565 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
569 #ifdef HAVE_truncsfhf2
570 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
572 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
576 #ifdef HAVE_truncdfhf2
577 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
579 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
583 #ifdef HAVE_truncxfhf2
584 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
586 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
590 #ifdef HAVE_trunctfhf2
591 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
593 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
598 #ifdef HAVE_truncsftqf2
599 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
601 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
605 #ifdef HAVE_truncdftqf2
606 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
608 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
612 #ifdef HAVE_truncxftqf2
613 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
615 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
619 #ifdef HAVE_trunctftqf2
620 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
622 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
627 #ifdef HAVE_truncdfsf2
628 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
630 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
634 #ifdef HAVE_truncxfsf2
635 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
637 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
641 #ifdef HAVE_trunctfsf2
642 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
644 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
648 #ifdef HAVE_truncxfdf2
649 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
651 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
655 #ifdef HAVE_trunctfdf2
656 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
658 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
670 libcall = extendsfdf2_libfunc;
674 libcall = extendsfxf2_libfunc;
678 libcall = extendsftf2_libfunc;
690 libcall = truncdfsf2_libfunc;
694 libcall = extenddfxf2_libfunc;
698 libcall = extenddftf2_libfunc;
710 libcall = truncxfsf2_libfunc;
714 libcall = truncxfdf2_libfunc;
726 libcall = trunctfsf2_libfunc;
730 libcall = trunctfdf2_libfunc;
742 if (libcall == (rtx) 0)
743 /* This conversion is not implemented yet. */
746 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
748 emit_move_insn (to, value);
752 /* Now both modes are integers. */
754 /* Handle expanding beyond a word. */
755 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
756 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
763 enum machine_mode lowpart_mode;
764 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
766 /* Try converting directly if the insn is supported. */
767 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
770 /* If FROM is a SUBREG, put it into a register. Do this
771 so that we always generate the same set of insns for
772 better cse'ing; if an intermediate assignment occurred,
773 we won't be doing the operation directly on the SUBREG. */
774 if (optimize > 0 && GET_CODE (from) == SUBREG)
775 from = force_reg (from_mode, from);
776 emit_unop_insn (code, to, from, equiv_code);
779 /* Next, try converting via full word. */
780 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
781 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
782 != CODE_FOR_nothing))
784 if (GET_CODE (to) == REG)
785 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
786 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
787 emit_unop_insn (code, to,
788 gen_lowpart (word_mode, to), equiv_code);
792 /* No special multiword conversion insn; do it by hand. */
795 /* Since we will turn this into a no conflict block, we must ensure
796 that the source does not overlap the target. */
798 if (reg_overlap_mentioned_p (to, from))
799 from = force_reg (from_mode, from);
801 /* Get a copy of FROM widened to a word, if necessary. */
802 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
803 lowpart_mode = word_mode;
805 lowpart_mode = from_mode;
807 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
809 lowpart = gen_lowpart (lowpart_mode, to);
810 emit_move_insn (lowpart, lowfrom);
812 /* Compute the value to put in each remaining word. */
814 fill_value = const0_rtx;
819 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
820 && STORE_FLAG_VALUE == -1)
822 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
824 fill_value = gen_reg_rtx (word_mode);
825 emit_insn (gen_slt (fill_value));
831 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
832 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
834 fill_value = convert_to_mode (word_mode, fill_value, 1);
838 /* Fill the remaining words. */
839 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
841 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
842 rtx subword = operand_subword (to, index, 1, to_mode);
847 if (fill_value != subword)
848 emit_move_insn (subword, fill_value);
851 insns = get_insns ();
854 emit_no_conflict_block (insns, to, from, NULL_RTX,
855 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
859 /* Truncating multi-word to a word or less. */
860 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
861 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
863 if (!((GET_CODE (from) == MEM
864 && ! MEM_VOLATILE_P (from)
865 && direct_load[(int) to_mode]
866 && ! mode_dependent_address_p (XEXP (from, 0)))
867 || GET_CODE (from) == REG
868 || GET_CODE (from) == SUBREG))
869 from = force_reg (from_mode, from);
870 convert_move (to, gen_lowpart (word_mode, from), 0);
874 /* Handle pointer conversion */ /* SPEE 900220 */
875 if (to_mode == PQImode)
877 if (from_mode != QImode)
878 from = convert_to_mode (QImode, from, unsignedp);
880 #ifdef HAVE_truncqipqi2
881 if (HAVE_truncqipqi2)
883 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
886 #endif /* HAVE_truncqipqi2 */
890 if (from_mode == PQImode)
892 if (to_mode != QImode)
894 from = convert_to_mode (QImode, from, unsignedp);
899 #ifdef HAVE_extendpqiqi2
900 if (HAVE_extendpqiqi2)
902 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
905 #endif /* HAVE_extendpqiqi2 */
910 if (to_mode == PSImode)
912 if (from_mode != SImode)
913 from = convert_to_mode (SImode, from, unsignedp);
915 #ifdef HAVE_truncsipsi2
916 if (HAVE_truncsipsi2)
918 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
921 #endif /* HAVE_truncsipsi2 */
925 if (from_mode == PSImode)
927 if (to_mode != SImode)
929 from = convert_to_mode (SImode, from, unsignedp);
934 #ifdef HAVE_extendpsisi2
935 if (HAVE_extendpsisi2)
937 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
940 #endif /* HAVE_extendpsisi2 */
945 if (to_mode == PDImode)
947 if (from_mode != DImode)
948 from = convert_to_mode (DImode, from, unsignedp);
950 #ifdef HAVE_truncdipdi2
951 if (HAVE_truncdipdi2)
953 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
956 #endif /* HAVE_truncdipdi2 */
960 if (from_mode == PDImode)
962 if (to_mode != DImode)
964 from = convert_to_mode (DImode, from, unsignedp);
969 #ifdef HAVE_extendpdidi2
970 if (HAVE_extendpdidi2)
972 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
975 #endif /* HAVE_extendpdidi2 */
980 /* Now follow all the conversions between integers
981 no more than a word long. */
983 /* For truncation, usually we can just refer to FROM in a narrower mode. */
984 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
985 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
986 GET_MODE_BITSIZE (from_mode)))
988 if (!((GET_CODE (from) == MEM
989 && ! MEM_VOLATILE_P (from)
990 && direct_load[(int) to_mode]
991 && ! mode_dependent_address_p (XEXP (from, 0)))
992 || GET_CODE (from) == REG
993 || GET_CODE (from) == SUBREG))
994 from = force_reg (from_mode, from);
995 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
996 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
997 from = copy_to_reg (from);
998 emit_move_insn (to, gen_lowpart (to_mode, from));
1002 /* Handle extension. */
1003 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1005 /* Convert directly if that works. */
1006 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1007 != CODE_FOR_nothing)
1009 emit_unop_insn (code, to, from, equiv_code);
1014 enum machine_mode intermediate;
1018 /* Search for a mode to convert via. */
1019 for (intermediate = from_mode; intermediate != VOIDmode;
1020 intermediate = GET_MODE_WIDER_MODE (intermediate))
1021 if (((can_extend_p (to_mode, intermediate, unsignedp)
1022 != CODE_FOR_nothing)
1023 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1024 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1025 && (can_extend_p (intermediate, from_mode, unsignedp)
1026 != CODE_FOR_nothing))
1028 convert_move (to, convert_to_mode (intermediate, from,
1029 unsignedp), unsignedp);
1033 /* No suitable intermediate mode.
1034 Generate what we need with shifts. */
1035 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1036 - GET_MODE_BITSIZE (from_mode), 0);
1037 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1038 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1040 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1043 emit_move_insn (to, tmp);
1048 /* Support special truncate insns for certain modes. */
1050 if (from_mode == DImode && to_mode == SImode)
1052 #ifdef HAVE_truncdisi2
1053 if (HAVE_truncdisi2)
1055 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1059 convert_move (to, force_reg (from_mode, from), unsignedp);
1063 if (from_mode == DImode && to_mode == HImode)
1065 #ifdef HAVE_truncdihi2
1066 if (HAVE_truncdihi2)
1068 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1072 convert_move (to, force_reg (from_mode, from), unsignedp);
1076 if (from_mode == DImode && to_mode == QImode)
1078 #ifdef HAVE_truncdiqi2
1079 if (HAVE_truncdiqi2)
1081 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1085 convert_move (to, force_reg (from_mode, from), unsignedp);
1089 if (from_mode == SImode && to_mode == HImode)
1091 #ifdef HAVE_truncsihi2
1092 if (HAVE_truncsihi2)
1094 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1098 convert_move (to, force_reg (from_mode, from), unsignedp);
1102 if (from_mode == SImode && to_mode == QImode)
1104 #ifdef HAVE_truncsiqi2
1105 if (HAVE_truncsiqi2)
1107 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1111 convert_move (to, force_reg (from_mode, from), unsignedp);
1115 if (from_mode == HImode && to_mode == QImode)
1117 #ifdef HAVE_trunchiqi2
1118 if (HAVE_trunchiqi2)
1120 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1124 convert_move (to, force_reg (from_mode, from), unsignedp);
1128 if (from_mode == TImode && to_mode == DImode)
1130 #ifdef HAVE_trunctidi2
1131 if (HAVE_trunctidi2)
1133 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1137 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 if (from_mode == TImode && to_mode == SImode)
1143 #ifdef HAVE_trunctisi2
1144 if (HAVE_trunctisi2)
1146 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1150 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 if (from_mode == TImode && to_mode == HImode)
1156 #ifdef HAVE_trunctihi2
1157 if (HAVE_trunctihi2)
1159 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1163 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 if (from_mode == TImode && to_mode == QImode)
1169 #ifdef HAVE_trunctiqi2
1170 if (HAVE_trunctiqi2)
1172 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1176 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 /* Handle truncation of volatile memrefs, and so on;
1181 the things that couldn't be truncated directly,
1182 and for which there was no special instruction. */
1183 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1185 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1186 emit_move_insn (to, temp);
1190 /* Mode combination is not recognized. */
1194 /* Return an rtx for a value that would result
1195 from converting X to mode MODE.
1196 Both X and MODE may be floating, or both integer.
1197 UNSIGNEDP is nonzero if X is an unsigned value.
1198 This can be done by referring to a part of X in place
1199 or by copying to a new temporary with conversion.
1201 This function *must not* call protect_from_queue
1202 except when putting X into an insn (in which case convert_move does it). */
/* Return an rtx for X converted to MODE (see contract comment above).
   Thin wrapper: passing VOIDmode as OLDMODE tells convert_modes to take
   the old mode from X itself.
   NOTE(review): the remaining parameter declarations and braces are
   elided from this view; code lines below are kept byte-identical.  */
1205 convert_to_mode (mode, x, unsignedp)
1206 enum machine_mode mode;
1210 return convert_modes (mode, VOIDmode, x, unsignedp);
1213 /* Return an rtx for a value that would result
1214 from converting X from mode OLDMODE to mode MODE.
1215 Both modes may be floating, or both integer.
1216 UNSIGNEDP is nonzero if X is an unsigned value.
1218 This can be done by referring to a part of X in place
1219 or by copying to a new temporary with conversion.
1221 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1223 This function *must not* call protect_from_queue
1224 except when putting X into an insn (in which case convert_move does it). */
1227 convert_modes (mode, oldmode, x, unsignedp)
1228 enum machine_mode mode, oldmode;
1234 /* If FROM is a SUBREG that indicates that we have already done at least
1235 the required extension, strip it. */
1237 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1238 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1239 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1240 x = gen_lowpart (mode, x);
1242 if (GET_MODE (x) != VOIDmode)
1243 oldmode = GET_MODE (x);
1245 if (mode == oldmode)
1248 /* There is one case that we must handle specially: If we are converting
1249 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1250 we are to interpret the constant as unsigned, gen_lowpart will do
1251 the wrong if the constant appears negative. What we want to do is
1252 make the high-order word of the constant zero, not all ones. */
1254 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1255 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1256 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1258 HOST_WIDE_INT val = INTVAL (x);
1260 if (oldmode != VOIDmode
1261 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1263 int width = GET_MODE_BITSIZE (oldmode);
1265 /* We need to zero extend VAL. */
1266 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1269 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1272 /* We can do this with a gen_lowpart if both desired and current modes
1273 are integer, and this is either a constant integer, a register, or a
1274 non-volatile MEM. Except for the constant case where MODE is no
1275 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1277 if ((GET_CODE (x) == CONST_INT
1278 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1279 || (GET_MODE_CLASS (mode) == MODE_INT
1280 && GET_MODE_CLASS (oldmode) == MODE_INT
1281 && (GET_CODE (x) == CONST_DOUBLE
1282 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1283 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1284 && direct_load[(int) mode])
1285 || (GET_CODE (x) == REG
1286 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1287 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1289 /* ?? If we don't know OLDMODE, we have to assume here that
1290 X does not need sign- or zero-extension. This may not be
1291 the case, but it's the best we can do. */
1292 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1293 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1295 HOST_WIDE_INT val = INTVAL (x);
1296 int width = GET_MODE_BITSIZE (oldmode);
1298 /* We must sign or zero-extend in this case. Start by
1299 zero-extending, then sign extend if we need to. */
1300 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1302 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1303 val |= (HOST_WIDE_INT) (-1) << width;
1305 return GEN_INT (val);
1308 return gen_lowpart (mode, x);
1311 temp = gen_reg_rtx (mode);
1312 convert_move (temp, x, unsignedp);
1317 /* This macro is used to determine what the largest unit size that
1318 move_by_pieces can use is. */
1320 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1321 move efficiently, as opposed to MOVE_MAX which is the maximum
1322 number of bytes we can move with a single instruction. */
1324 #ifndef MOVE_MAX_PIECES
1325 #define MOVE_MAX_PIECES MOVE_MAX
1328 /* Generate several move instructions to copy LEN bytes
1329 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1330 The caller must pass FROM and TO
1331 through protect_from_queue before calling.
1332 ALIGN (in bytes) is maximum alignment we can assume. */
/* NOTE(review): this extract has non-contiguous original line numbers, so
   the parameter declarations, braces and some statements of this K&R-style
   definition are not visible here.  Only comments have been added.  */
1335 move_by_pieces (to, from, len, align)
1339 struct move_by_pieces data;
1340 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1341 int max_size = MOVE_MAX_PIECES + 1;
1342 enum machine_mode mode = VOIDmode, tmode;
1343 enum insn_code icode;
1346 data.to_addr = to_addr;
1347 data.from_addr = from_addr;
/* Record whether each address already uses auto-increment addressing;
   if so we must not add explicit address arithmetic of our own.  */
1351 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1352 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1354 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1355 || GET_CODE (from_addr) == POST_INC
1356 || GET_CODE (from_addr) == POST_DEC);
1358 data.explicit_inc_from = 0;
1359 data.explicit_inc_to = 0;
/* Decrementing addresses mean we copy from the end of the block backward.  */
1361 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1362 if (data.reverse) data.offset = len;
1365 data.to_struct = MEM_IN_STRUCT_P (to);
1366 data.from_struct = MEM_IN_STRUCT_P (from);
1368 /* If copying requires more than two move insns,
1369 copy addresses to registers (to make displacements shorter)
1370 and use post-increment if available. */
1371 if (!(data.autinc_from && data.autinc_to)
1372 && move_by_pieces_ninsns (len, align) > 2)
1374 /* Find the mode of the largest move... */
1375 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1376 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1377 if (GET_MODE_SIZE (tmode) < max_size)
/* Prefer target auto-increment/decrement addressing when available;
   explicit_inc_* records the direction so move_by_pieces_1 can emit the
   matching add insns when the target lacks real auto-inc addressing.  */
1380 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1382 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1383 data.autinc_from = 1;
1384 data.explicit_inc_from = -1;
1386 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1388 data.from_addr = copy_addr_to_reg (from_addr);
1389 data.autinc_from = 1;
1390 data.explicit_inc_from = 1;
1392 if (!data.autinc_from && CONSTANT_P (from_addr))
1393 data.from_addr = copy_addr_to_reg (from_addr);
1394 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1396 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1398 data.explicit_inc_to = -1;
1400 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1402 data.to_addr = copy_addr_to_reg (to_addr);
1404 data.explicit_inc_to = 1;
1406 if (!data.autinc_to && CONSTANT_P (to_addr))
1407 data.to_addr = copy_addr_to_reg (to_addr);
/* If unaligned access is cheap (or the block is well aligned), widen
   max_size so the loop below may use modes wider than the alignment.  */
1410 if (! SLOW_UNALIGNED_ACCESS
1411 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1414 /* First move what we can in the largest integer mode, then go to
1415 successively smaller modes. */
1417 while (max_size > 1)
1419 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1420 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1421 if (GET_MODE_SIZE (tmode) < max_size)
1424 if (mode == VOIDmode)
1427 icode = mov_optab->handlers[(int) mode].insn_code;
1428 if (icode != CODE_FOR_nothing
1429 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1430 GET_MODE_SIZE (mode)))
1431 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1433 max_size = GET_MODE_SIZE (mode);
1436 /* The code above should have handled everything. */
1441 /* Return number of insns required to move L bytes by pieces.
1442 ALIGN (in bytes) is maximum alignment we can assume. */
/* Used by move_by_pieces and emit_block_move to decide whether a
   piecewise copy is cheap enough.  (K&R definition; parameter
   declarations are not visible in this extract.)  */
1445 move_by_pieces_ninsns (l, align)
1449 register int n_insns = 0;
1450 int max_size = MOVE_MAX + 1;
/* Mirror of the alignment widening done in move_by_pieces: if unaligned
   access is cheap, we may count moves in modes wider than ALIGN.  */
1452 if (! SLOW_UNALIGNED_ACCESS
1453 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1456 while (max_size > 1)
1458 enum machine_mode mode = VOIDmode, tmode;
1459 enum insn_code icode;
/* Find the widest integer mode narrower than max_size.  */
1461 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1462 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1463 if (GET_MODE_SIZE (tmode) < max_size)
1466 if (mode == VOIDmode)
1469 icode = mov_optab->handlers[(int) mode].insn_code;
1470 if (icode != CODE_FOR_nothing
1471 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1472 GET_MODE_SIZE (mode)))
/* Count as many moves as fit in this mode, then fall through to
   narrower modes for the remainder of L.  */
1473 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1475 max_size = GET_MODE_SIZE (mode);
1481 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1482 with move instructions for mode MODE. GENFUN is the gen_... function
1483 to make a move insn for that mode. DATA has all the other info. */
1486 move_by_pieces_1 (genfun, mode, data)
1487 rtx (*genfun) PROTO ((rtx, ...));
1488 enum machine_mode mode;
1489 struct move_by_pieces *data;
1491 register int size = GET_MODE_SIZE (mode);
1492 register rtx to1, from1;
1494 while (data->len >= size)
/* When copying backward, step the offset before the move.  */
1496 if (data->reverse) data->offset -= size;
/* Build the destination reference: reuse the auto-inc address directly,
   otherwise form an offsetted copy of the original MEM.  */
1498 to1 = (data->autinc_to
1499 ? gen_rtx_MEM (mode, data->to_addr)
1500 : copy_rtx (change_address (data->to, mode,
1501 plus_constant (data->to_addr,
1503 MEM_IN_STRUCT_P (to1) = data->to_struct;
1506 = (data->autinc_from
1507 ? gen_rtx_MEM (mode, data->from_addr)
1508 : copy_rtx (change_address (data->from, mode,
1509 plus_constant (data->from_addr,
1511 MEM_IN_STRUCT_P (from1) = data->from_struct;
/* Emit explicit address adjustments for targets whose auto-inc was
   simulated by move_by_pieces (explicit_inc_* nonzero): pre-decrement
   before the move, post-increment after it.  */
1513 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1514 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1515 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1516 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1518 emit_insn ((*genfun) (to1, from1));
1519 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1520 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1521 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1522 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1524 if (! data->reverse) data->offset += size;
1530 /* Emit code to move a block Y to a block X.
1531 This may be done with string-move instructions,
1532 with multiple scalar move instructions, or with a library call.
1534 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1536 SIZE is an rtx that says how long they are.
1537 ALIGN is the maximum alignment we can assume they have,
1540 Return the address of the new block, if memcpy is called and returns it,
1544 emit_block_move (x, y, size, align)
1550 #ifdef TARGET_MEM_FUNCTIONS
1552 tree call_expr, arg_list;
/* Sanity checks: both operands must be BLKmode MEMs.  (The abort calls
   that presumably follow these tests are not visible in this extract.)  */
1555 if (GET_MODE (x) != BLKmode)
1558 if (GET_MODE (y) != BLKmode)
1561 x = protect_from_queue (x, 1);
1562 y = protect_from_queue (y, 0);
1563 size = protect_from_queue (size, 0);
1565 if (GET_CODE (x) != MEM)
1567 if (GET_CODE (y) != MEM)
/* Strategy 1: small constant-size copies are expanded inline.  */
1572 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1573 move_by_pieces (x, y, INTVAL (size), align);
/* Strategy 2: try the target's movstr patterns, narrowest mode first.  */
1576 /* Try the most limited insn first, because there's no point
1577 including more than one in the machine description unless
1578 the more limited one has some advantage. */
1580 rtx opalign = GEN_INT (align);
1581 enum machine_mode mode;
1583 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1584 mode = GET_MODE_WIDER_MODE (mode))
1586 enum insn_code code = movstr_optab[(int) mode];
1588 if (code != CODE_FOR_nothing
1589 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1590 here because if SIZE is less than the mode mask, as it is
1591 returned by the macro, it will definitely be less than the
1592 actual mode mask. */
1593 && ((GET_CODE (size) == CONST_INT
1594 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1595 <= (GET_MODE_MASK (mode) >> 1)))
1596 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1597 && (insn_operand_predicate[(int) code][0] == 0
1598 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1599 && (insn_operand_predicate[(int) code][1] == 0
1600 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1601 && (insn_operand_predicate[(int) code][3] == 0
1602 || (*insn_operand_predicate[(int) code][3]) (opalign,
1606 rtx last = get_last_insn ();
1609 op2 = convert_to_mode (mode, size, 1);
1610 if (insn_operand_predicate[(int) code][2] != 0
1611 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1612 op2 = copy_to_mode_reg (mode, op2);
1614 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* Pattern expansion failed; discard any insns it emitted and try a
   wider mode (or fall through to the library call).  */
1621 delete_insns_since (last);
/* Strategy 3: call memcpy (or bcopy on targets without mem functions).  */
1625 /* X, Y, or SIZE may have been passed through protect_from_queue.
1627 It is unsafe to save the value generated by protect_from_queue
1628 and reuse it later. Consider what happens if emit_queue is
1629 called before the return value from protect_from_queue is used.
1631 Expansion of the CALL_EXPR below will call emit_queue before
1632 we are finished emitting RTL for argument setup. So if we are
1633 not careful we could get the wrong value for an argument.
1635 To avoid this problem we go ahead and emit code to copy X, Y &
1636 SIZE into new pseudos. We can then place those new pseudos
1637 into an RTL_EXPR and use them later, even after a call to
1640 Note this is not strictly needed for library calls since they
1641 do not call emit_queue before loading their arguments. However,
1642 we may need to have library calls call emit_queue in the future
1643 since failing to do so could cause problems for targets which
1644 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1645 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1646 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1648 #ifdef TARGET_MEM_FUNCTIONS
1649 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1651 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1652 TREE_UNSIGNED (integer_type_node));
1653 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1656 #ifdef TARGET_MEM_FUNCTIONS
1657 /* It is incorrect to use the libcall calling conventions to call
1658 memcpy in this context.
1660 This could be a user call to memcpy and the user may wish to
1661 examine the return value from memcpy.
1663 For targets where libcalls and normal calls have different conventions
1664 for returning pointers, we could end up generating incorrect code.
1666 So instead of using a libcall sequence we build up a suitable
1667 CALL_EXPR and expand the call in the normal fashion. */
1668 if (fn == NULL_TREE)
1672 /* This was copied from except.c, I don't know if all this is
1673 necessary in this context or not. */
1674 fn = get_identifier ("memcpy");
1675 push_obstacks_nochange ();
1676 end_temporary_allocation ();
1677 fntype = build_pointer_type (void_type_node);
1678 fntype = build_function_type (fntype, NULL_TREE);
1679 fn = build_decl (FUNCTION_DECL, fn, fntype);
1680 DECL_EXTERNAL (fn) = 1;
1681 TREE_PUBLIC (fn) = 1;
1682 DECL_ARTIFICIAL (fn) = 1;
1683 make_decl_rtl (fn, NULL_PTR, 1);
1684 assemble_external (fn);
1688 /* We need to make an argument list for the function call.
1690 memcpy has three arguments, the first two are void * addresses and
1691 the last is a size_t byte count for the copy. */
1693 = build_tree_list (NULL_TREE,
1694 make_tree (build_pointer_type (void_type_node), x));
1695 TREE_CHAIN (arg_list)
1696 = build_tree_list (NULL_TREE,
1697 make_tree (build_pointer_type (void_type_node), y));
1698 TREE_CHAIN (TREE_CHAIN (arg_list))
1699 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1700 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1702 /* Now we have to build up the CALL_EXPR itself. */
1703 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1704 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1705 call_expr, arg_list, NULL_TREE);
1706 TREE_SIDE_EFFECTS (call_expr) = 1;
1708 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
/* No TARGET_MEM_FUNCTIONS: bcopy takes (src, dst, len), hence Y first.  */
1710 emit_library_call (bcopy_libfunc, 0,
1711 VOIDmode, 3, y, Pmode, x, Pmode,
1712 convert_to_mode (TYPE_MODE (integer_type_node), size,
1713 TREE_UNSIGNED (integer_type_node)),
1714 TYPE_MODE (integer_type_node))
1721 /* Copy all or part of a value X into registers starting at REGNO.
1722 The number of registers to be filled is NREGS. */
1725 move_block_to_reg (regno, x, nregs, mode)
1729 enum machine_mode mode;
1732 #ifdef HAVE_load_multiple
/* Constants the target cannot accept directly are forced to memory.  */
1740 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1741 x = validize_mem (force_const_mem (mode, x));
1743 /* See if the machine can do this with a load multiple insn. */
1744 #ifdef HAVE_load_multiple
1745 if (HAVE_load_multiple)
1747 last = get_last_insn ();
1748 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
/* Load-multiple expansion failed; drop its insns and fall back to
   one move per word below.  */
1756 delete_insns_since (last);
1760 for (i = 0; i < nregs; i++)
1761 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1762 operand_subword_force (x, i, mode));
1765 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1766 The number of registers to be filled is NREGS. SIZE indicates the number
1767 of bytes in the object X. */
1771 move_block_from_reg (regno, x, nregs, size)
1778 #ifdef HAVE_store_multiple
1782 enum machine_mode mode;
1784 /* If SIZE is that of a mode no bigger than a word, just use that
1785 mode's store operation. */
1786 if (size <= UNITS_PER_WORD
1787 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1789 emit_move_insn (change_address (x, mode, NULL),
1790 gen_rtx_REG (mode, regno));
1794 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1795 to the left before storing to memory. Note that the previous test
1796 doesn't handle all cases (e.g. SIZE == 3). */
1797 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1799 rtx tem = operand_subword (x, 0, 1, BLKmode);
/* Shift the value into the high-order bytes so memory layout matches
   the in-register big-endian layout, then store the whole word.  */
1805 shift = expand_shift (LSHIFT_EXPR, word_mode,
1806 gen_rtx_REG (word_mode, regno),
1807 build_int_2 ((UNITS_PER_WORD - size)
1808 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1809 emit_move_insn (tem, shift);
1813 /* See if the machine can do this with a store multiple insn. */
1814 #ifdef HAVE_store_multiple
1815 if (HAVE_store_multiple)
1817 last = get_last_insn ();
1818 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
/* Store-multiple expansion failed; drop its insns and fall back to
   one move per word below.  */
1826 delete_insns_since (last);
1830 for (i = 0; i < nregs; i++)
1832 rtx tem = operand_subword (x, i, 1, BLKmode);
1837 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1841 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1842 registers represented by a PARALLEL. SSIZE represents the total size of
1843 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1845 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1846 the balance will be in what would be the low-order memory addresses, i.e.
1847 left justified for big endian, right justified for little endian. This
1848 happens to be true for the targets currently using this support. If this
1849 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1853 emit_group_load (dst, orig_src, ssize, align)
1860 if (GET_CODE (dst) != PARALLEL)
1863 /* Check for a NULL entry, used to indicate that the parameter goes
1864 both on the stack and in registers. */
1865 if (XEXP (XVECEXP (dst, 0, 0), 0))
/* tmps holds the extracted pieces until all are ready; the hard regs in
   DST are written only in the final loop, so a partially-overlapping
   source cannot be clobbered mid-extraction.  */
1870 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1872 /* If we won't be loading directly from memory, protect the real source
1873 from strange tricks we might play. */
1875 if (GET_CODE (src) != MEM)
1877 src = gen_reg_rtx (GET_MODE (orig_src));
1878 emit_move_insn (src, orig_src);
1881 /* Process the pieces. */
1882 for (i = start; i < XVECLEN (dst, 0); i++)
1884 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1885 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1886 int bytelen = GET_MODE_SIZE (mode);
1889 /* Handle trailing fragments that run over the size of the struct. */
1890 if (ssize >= 0 && bytepos + bytelen > ssize)
1892 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1893 bytelen = ssize - bytepos;
1898 /* Optimize the access just a bit. */
1899 if (GET_CODE (src) == MEM
1900 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1901 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1902 && bytelen == GET_MODE_SIZE (mode))
1904 tmps[i] = gen_reg_rtx (mode);
1905 emit_move_insn (tmps[i],
1906 change_address (src, mode,
1907 plus_constant (XEXP (src, 0),
/* A CONCAT source (e.g. a complex value) can sometimes hand us one of
   its halves directly, when the piece lines up exactly.  */
1910 else if (GET_CODE (src) == CONCAT)
1913 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1914 tmps[i] = XEXP (src, 0);
1915 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1916 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1917 tmps[i] = XEXP (src, 1);
/* General case: extract the piece as a bit field.  */
1923 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1924 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1925 mode, mode, align, ssize);
/* Left-justify a short trailing fragment on big-endian targets
   (see the ??? comment above the function).  */
1928 if (BYTES_BIG_ENDIAN && shift)
1930 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1931 tmps[i], 0, OPTAB_WIDEN);
1936 /* Copy the extracted pieces into the proper (probable) hard regs. */
1937 for (i = start; i < XVECLEN (dst, 0); i++)
1938 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1941 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1942 registers represented by a PARALLEL. SSIZE represents the total size of
1943 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1946 emit_group_store (orig_dst, src, ssize, align)
1953 if (GET_CODE (src) != PARALLEL)
1956 /* Check for a NULL entry, used to indicate that the parameter goes
1957 both on the stack and in registers. */
1958 if (XEXP (XVECEXP (src, 0, 0), 0))
1963 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1965 /* Copy the (probable) hard regs into pseudos. */
1966 for (i = start; i < XVECLEN (src, 0); i++)
1968 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1969 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1970 emit_move_insn (tmps[i], reg);
1974 /* If we won't be storing directly into memory, protect the real destination
1975 from strange tricks we might play. */
1977 if (GET_CODE (dst) == PARALLEL)
1981 /* We can get a PARALLEL dst if there is a conditional expression in
1982 a return statement. In that case, the dst and src are the same,
1983 so no action is necessary. */
1984 if (rtx_equal_p (dst, src))
1987 /* It is unclear if we can ever reach here, but we may as well handle
1988 it. Allocate a temporary, and split this into a store/load to/from
1991 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1992 emit_group_store (temp, src, ssize, align);
1993 emit_group_load (dst, temp, ssize, align);
1996 else if (GET_CODE (dst) != MEM)
1998 dst = gen_reg_rtx (GET_MODE (orig_dst));
1999 /* Make life a bit easier for combine. */
2000 emit_move_insn (dst, const0_rtx);
2002 else if (! MEM_IN_STRUCT_P (dst))
2004 /* store_bit_field requires that memory operations have
2005 mem_in_struct_p set; we might not. */
2007 dst = copy_rtx (orig_dst);
2008 MEM_SET_IN_STRUCT_P (dst, 1);
2011 /* Process the pieces. */
2012 for (i = start; i < XVECLEN (src, 0); i++)
2014 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2015 enum machine_mode mode = GET_MODE (tmps[i]);
2016 int bytelen = GET_MODE_SIZE (mode);
2018 /* Handle trailing fragments that run over the size of the struct. */
2019 if (ssize >= 0 && bytepos + bytelen > ssize)
2021 if (BYTES_BIG_ENDIAN)
/* Undo the left-justification applied by emit_group_load: shift the
   fragment back down before storing it.  */
2023 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2024 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2025 tmps[i], 0, OPTAB_WIDEN);
2027 bytelen = ssize - bytepos;
2030 /* Optimize the access just a bit. */
2031 if (GET_CODE (dst) == MEM
2032 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2033 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2034 && bytelen == GET_MODE_SIZE (mode))
2036 emit_move_insn (change_address (dst, mode,
2037 plus_constant (XEXP (dst, 0),
/* General case: store the piece as a bit field.  */
2043 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2044 mode, tmps[i], align, ssize);
2049 /* Copy from the pseudo into the (probable) hard reg. */
2050 if (GET_CODE (dst) == REG)
2051 emit_move_insn (orig_dst, dst);
2054 /* Generate code to copy a BLKmode object of TYPE out of a
2055 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2056 is null, a stack temporary is created. TGTBLK is returned.
2058 The primary purpose of this routine is to handle functions
2059 that return BLKmode structures in registers. Some machines
2060 (the PA for example) want to return all small structures
2061 in registers regardless of the structure's alignment.
2065 copy_blkmode_from_reg(tgtblk,srcreg,type)
2070 int bytes = int_size_in_bytes (type);
2071 rtx src = NULL, dst = NULL;
2072 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2073 int bitpos, xbitpos, big_endian_correction = 0;
/* No target block supplied: create (and preserve) a stack temporary.  */
2077 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2078 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2079 preserve_temp_slots (tgtblk);
2082 /* This code assumes srcreg is at least a full word. If it isn't,
2083 copy it into a new pseudo which is a full word. */
2084 if (GET_MODE (srcreg) != BLKmode
2085 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2086 srcreg = convert_to_mode (word_mode, srcreg,
2087 TREE_UNSIGNED (type));
2089 /* Structures whose size is not a multiple of a word are aligned
2090 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2091 machine, this means we must skip the empty high order bytes when
2092 calculating the bit offset. */
2093 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2094 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2097 /* Copy the structure BITSIZE bits at a time.
2099 We could probably emit more efficient code for machines
2100 which do not use strict alignment, but it doesn't seem
2101 worth the effort at the current time. */
2102 for (bitpos = 0, xbitpos = big_endian_correction;
2103 bitpos < bytes * BITS_PER_UNIT;
2104 bitpos += bitsize, xbitpos += bitsize)
2107 /* We need a new source operand each time xbitpos is on a
2108 word boundary and when xbitpos == big_endian_correction
2109 (the first time through). */
2110 if (xbitpos % BITS_PER_WORD == 0
2111 || xbitpos == big_endian_correction)
2112 src = operand_subword_force (srcreg,
2113 xbitpos / BITS_PER_WORD,
2116 /* We need a new destination operand each time bitpos is on
2118 if (bitpos % BITS_PER_WORD == 0)
2119 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2121 /* Use xbitpos for the source extraction (right justified) and
2122 xbitpos for the destination store (left justified). */
2123 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2124 extract_bit_field (src, bitsize,
2125 xbitpos % BITS_PER_WORD, 1,
2126 NULL_RTX, word_mode,
2128 bitsize / BITS_PER_UNIT,
2130 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2136 /* Add a USE expression for REG to the (possibly empty) list pointed
2137 to by CALL_FUSAGE. REG must denote a hard register. */
2140 use_reg (call_fusage, reg)
2141 rtx *call_fusage, reg;
/* Pseudos are rejected: CALL_INSN_FUNCTION_USAGE lists describe hard
   registers only.  (The abort on failure is not visible in this extract.) */
2143 if (GET_CODE (reg) != REG
2144 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
/* Prepend a (use reg) to the expr-list.  */
2148 = gen_rtx_EXPR_LIST (VOIDmode,
2149 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2152 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2153 starting at REGNO. All of these registers must be hard registers. */
2156 use_regs (call_fusage, regno, nregs)
/* Guard against running past the hard-register file.  */
2163 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2166 for (i = 0; i < nregs; i++)
2167 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2170 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2171 PARALLEL REGS. This is for calls that pass values in multiple
2172 non-contiguous locations. The Irix 6 ABI has examples of this. */
2175 use_group_regs (call_fusage, regs)
2181 for (i = 0; i < XVECLEN (regs, 0); i++)
2183 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2185 /* A NULL entry means the parameter goes both on the stack and in
2186 registers. This can also be a MEM for targets that pass values
2187 partially on the stack and partially in registers. */
2188 if (reg != 0 && GET_CODE (reg) == REG)
2189 use_reg (call_fusage, reg);
2193 /* Generate several move instructions to clear LEN bytes of block TO.
2194 (A MEM rtx with BLKmode). The caller must pass TO through
2195 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
/* Store-side counterpart of move_by_pieces: zeroes the block piecewise
   in the widest usable integer modes.  */
2199 clear_by_pieces (to, len, align)
2203 struct clear_by_pieces data;
2204 rtx to_addr = XEXP (to, 0);
2205 int max_size = MOVE_MAX_PIECES + 1;
2206 enum machine_mode mode = VOIDmode, tmode;
2207 enum insn_code icode;
2210 data.to_addr = to_addr;
/* Note whether the destination address already auto-increments.  */
2213 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2214 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2216 data.explicit_inc_to = 0;
2218 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2219 if (data.reverse) data.offset = len;
2222 data.to_struct = MEM_IN_STRUCT_P (to);
2224 /* If copying requires more than two move insns,
2225 copy addresses to registers (to make displacements shorter)
2226 and use post-increment if available. */
2228 && move_by_pieces_ninsns (len, align) > 2)
2230 /* Determine the main mode we'll be using */
2231 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2232 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2233 if (GET_MODE_SIZE (tmode) < max_size)
2236 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2238 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2240 data.explicit_inc_to = -1;
2242 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2244 data.to_addr = copy_addr_to_reg (to_addr);
2246 data.explicit_inc_to = 1;
2248 if (!data.autinc_to && CONSTANT_P (to_addr))
2249 data.to_addr = copy_addr_to_reg (to_addr);
/* If unaligned access is cheap (or the block is well aligned), allow
   modes wider than the stated alignment.  */
2252 if (! SLOW_UNALIGNED_ACCESS
2253 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2256 /* First move what we can in the largest integer mode, then go to
2257 successively smaller modes. */
2259 while (max_size > 1)
2261 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2262 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2263 if (GET_MODE_SIZE (tmode) < max_size)
2266 if (mode == VOIDmode)
2269 icode = mov_optab->handlers[(int) mode].insn_code;
2270 if (icode != CODE_FOR_nothing
2271 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2272 GET_MODE_SIZE (mode)))
2273 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2275 max_size = GET_MODE_SIZE (mode);
2278 /* The code above should have handled everything. */
2283 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2284 with move instructions for mode MODE. GENFUN is the gen_... function
2285 to make a move insn for that mode. DATA has all the other info. */
2288 clear_by_pieces_1 (genfun, mode, data)
2289 rtx (*genfun) PROTO ((rtx, ...));
2290 enum machine_mode mode;
2291 struct clear_by_pieces *data;
2293 register int size = GET_MODE_SIZE (mode);
2296 while (data->len >= size)
2298 if (data->reverse) data->offset -= size;
/* Destination reference: the raw auto-inc address, or an offsetted copy
   of the original MEM.  */
2300 to1 = (data->autinc_to
2301 ? gen_rtx_MEM (mode, data->to_addr)
2302 : copy_rtx (change_address (data->to, mode,
2303 plus_constant (data->to_addr,
2305 MEM_IN_STRUCT_P (to1) = data->to_struct;
/* Explicit pre-decrement / post-increment around the store, for targets
   where clear_by_pieces simulated auto-inc addressing.  */
2307 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2308 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
/* The stored value is always zero.  */
2310 emit_insn ((*genfun) (to1, const0_rtx));
2311 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2312 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2314 if (! data->reverse) data->offset += size;
2320 /* Write zeros through the storage of OBJECT.
2321 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2322 the maximum alignment we can assume it has, measured in bytes.
2324 If we call a function that returns the length of the block, return it. */
2327 clear_storage (object, size, align)
2332 #ifdef TARGET_MEM_FUNCTIONS
2334 tree call_expr, arg_list;
/* Mirrors the strategy ladder of emit_block_move: inline pieces, then a
   target clrstr pattern, then a memset/bzero call.  Non-BLKmode objects
   are handled by the plain move at the end.  */
2338 if (GET_MODE (object) == BLKmode)
2340 object = protect_from_queue (object, 1);
2341 size = protect_from_queue (size, 0);
2343 if (GET_CODE (size) == CONST_INT
2344 && MOVE_BY_PIECES_P (INTVAL (size), align))
2345 clear_by_pieces (object, INTVAL (size), align);
2349 /* Try the most limited insn first, because there's no point
2350 including more than one in the machine description unless
2351 the more limited one has some advantage. */
2353 rtx opalign = GEN_INT (align);
2354 enum machine_mode mode;
2356 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2357 mode = GET_MODE_WIDER_MODE (mode))
2359 enum insn_code code = clrstr_optab[(int) mode];
2361 if (code != CODE_FOR_nothing
2362 /* We don't need MODE to be narrower than
2363 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2364 the mode mask, as it is returned by the macro, it will
2365 definitely be less than the actual mode mask. */
2366 && ((GET_CODE (size) == CONST_INT
2367 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2368 <= (GET_MODE_MASK (mode) >> 1)))
2369 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2370 && (insn_operand_predicate[(int) code][0] == 0
2371 || (*insn_operand_predicate[(int) code][0]) (object,
2373 && (insn_operand_predicate[(int) code][2] == 0
2374 || (*insn_operand_predicate[(int) code][2]) (opalign,
2378 rtx last = get_last_insn ();
2381 op1 = convert_to_mode (mode, size, 1);
2382 if (insn_operand_predicate[(int) code][1] != 0
2383 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2385 op1 = copy_to_mode_reg (mode, op1);
2387 pat = GEN_FCN ((int) code) (object, op1, opalign);
/* Pattern expansion failed; discard its insns and try a wider mode
   (or fall through to the library call).  */
2394 delete_insns_since (last);
2398 /* OBJECT or SIZE may have been passed through protect_from_queue.
2400 It is unsafe to save the value generated by protect_from_queue
2401 and reuse it later. Consider what happens if emit_queue is
2402 called before the return value from protect_from_queue is used.
2404 Expansion of the CALL_EXPR below will call emit_queue before
2405 we are finished emitting RTL for argument setup. So if we are
2406 not careful we could get the wrong value for an argument.
2408 To avoid this problem we go ahead and emit code to copy OBJECT
2409 and SIZE into new pseudos. We can then place those new pseudos
2410 into an RTL_EXPR and use them later, even after a call to
2413 Note this is not strictly needed for library calls since they
2414 do not call emit_queue before loading their arguments. However,
2415 we may need to have library calls call emit_queue in the future
2416 since failing to do so could cause problems for targets which
2417 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2418 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2420 #ifdef TARGET_MEM_FUNCTIONS
2421 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2423 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2424 TREE_UNSIGNED (integer_type_node));
2425 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2429 #ifdef TARGET_MEM_FUNCTIONS
2430 /* It is incorrect to use the libcall calling conventions to call
2431 memset in this context.
2433 This could be a user call to memset and the user may wish to
2434 examine the return value from memset.
2436 For targets where libcalls and normal calls have different
2437 conventions for returning pointers, we could end up generating
2440 So instead of using a libcall sequence we build up a suitable
2441 CALL_EXPR and expand the call in the normal fashion. */
2442 if (fn == NULL_TREE)
2446 /* This was copied from except.c, I don't know if all this is
2447 necessary in this context or not. */
2448 fn = get_identifier ("memset");
2449 push_obstacks_nochange ();
2450 end_temporary_allocation ();
2451 fntype = build_pointer_type (void_type_node);
2452 fntype = build_function_type (fntype, NULL_TREE);
2453 fn = build_decl (FUNCTION_DECL, fn, fntype);
2454 DECL_EXTERNAL (fn) = 1;
2455 TREE_PUBLIC (fn) = 1;
2456 DECL_ARTIFICIAL (fn) = 1;
2457 make_decl_rtl (fn, NULL_PTR, 1);
2458 assemble_external (fn);
2462 /* We need to make an argument list for the function call.
2464 memset has three arguments, the first is a void * addresses, the
2465 second a integer with the initialization value, the last is a
2466 size_t byte count for the copy. */
2468 = build_tree_list (NULL_TREE,
2469 make_tree (build_pointer_type (void_type_node),
2471 TREE_CHAIN (arg_list)
2472 = build_tree_list (NULL_TREE,
2473 make_tree (integer_type_node, const0_rtx))
2474 TREE_CHAIN (TREE_CHAIN (arg_list))
2475 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2476 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2478 /* Now we have to build up the CALL_EXPR itself. */
2479 call_expr = build1 (ADDR_EXPR,
2480 build_pointer_type (TREE_TYPE (fn)), fn);
2481 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2482 call_expr, arg_list, NULL_TREE);
2483 TREE_SIDE_EFFECTS (call_expr) = 1;
2485 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
/* No TARGET_MEM_FUNCTIONS: bzero takes only (ptr, len).  */
2487 emit_library_call (bzero_libfunc, 0,
2488 VOIDmode, 2, object, Pmode, size,
2489 TYPE_MODE (integer_type_node));
/* Non-BLKmode object: clear it with a single move of the zero constant.  */
2494 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2499 /* Generate code to copy Y into X.
2500 Both Y and X must have the same mode, except that
2501 Y can be a constant with VOIDmode.
2502 This mode cannot be BLKmode; use emit_block_move for that.
2504 Return the last instruction emitted. */
2507 emit_move_insn (x, y)
2510 enum machine_mode mode = GET_MODE (x);
2512 x = protect_from_queue (x, 1);
2513 y = protect_from_queue (y, 0);
/* Mode mismatch or BLKmode is a caller error (abort not visible here).  */
2515 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2518 /* Never force constant_p_rtx to memory. */
2519 if (GET_CODE (y) == CONSTANT_P_RTX)
2521 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2522 y = force_const_mem (mode, y);
2524 /* If X or Y are memory references, verify that their addresses are valid
/* Re-legitimize invalid addresses before handing off to emit_move_insn_1;
   push operands keep their (pre-modify) destination address as-is.  */
2526 if (GET_CODE (x) == MEM
2527 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2528 && ! push_operand (x, GET_MODE (x)))
2530 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2531 x = change_address (x, VOIDmode, XEXP (x, 0));
2533 if (GET_CODE (y) == MEM
2534 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2536 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2537 y = change_address (y, VOIDmode, XEXP (y, 0));
2539 if (mode == BLKmode)
2542 return emit_move_insn_1 (x, y);
2545 /* Low level part of emit_move_insn.
2546 Called just like emit_move_insn, but assumes X and Y
2547 are basically valid. */
2550 emit_move_insn_1 (x, y)
2553 enum machine_mode mode = GET_MODE (x);
2554 enum machine_mode submode;
2555 enum mode_class class = GET_MODE_CLASS (mode);
2558 if (mode >= MAX_MACHINE_MODE)
2561 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2563 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2565 /* Expand complex moves by moving real part and imag part, if possible. */
2566 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2567 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2569 (class == MODE_COMPLEX_INT
2570 ? MODE_INT : MODE_FLOAT),
2572 && (mov_optab->handlers[(int) submode].insn_code
2573 != CODE_FOR_nothing))
2575 /* Don't split destination if it is a stack push. */
2576 int stack = push_operand (x, GET_MODE (x));
2578 /* If this is a stack, push the highpart first, so it
2579 will be in the argument order.
2581 In that case, change_address is used only to convert
2582 the mode, not to change the address. */
2585 /* Note that the real part always precedes the imag part in memory
2586 regardless of machine's endianness. */
2587 #ifdef STACK_GROWS_DOWNWARD
2588 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2589 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2590 gen_imagpart (submode, y)));
2591 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2592 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2593 gen_realpart (submode, y)));
2595 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2596 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2597 gen_realpart (submode, y)));
2598 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2599 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2600 gen_imagpart (submode, y)));
2605 /* Show the output dies here. This is necessary for pseudos;
2606 hard regs shouldn't appear here except as return values.
2607 We never want to emit such a clobber after reload. */
2609 && ! (reload_in_progress || reload_completed))
2611 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2614 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2615 (gen_realpart (submode, x), gen_realpart (submode, y)));
2616 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2617 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2620 return get_last_insn ();
2623 /* This will handle any multi-word mode that lacks a move_insn pattern.
2624 However, you will get better code if you define such patterns,
2625 even if they must turn into multiple assembler instructions. */
2626 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2630 #ifdef PUSH_ROUNDING
2632 /* If X is a push on the stack, do the push now and replace
2633 X with a reference to the stack pointer. */
2634 if (push_operand (x, GET_MODE (x)))
2636 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2637 x = change_address (x, VOIDmode, stack_pointer_rtx);
2641 /* Show the output dies here. This is necessary for pseudos;
2642 hard regs shouldn't appear here except as return values.
2643 We never want to emit such a clobber after reload. */
2645 && ! (reload_in_progress || reload_completed))
2647 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2651 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2654 rtx xpart = operand_subword (x, i, 1, mode);
2655 rtx ypart = operand_subword (y, i, 1, mode);
2657 /* If we can't get a part of Y, put Y into memory if it is a
2658 constant. Otherwise, force it into a register. If we still
2659 can't get a part of Y, abort. */
2660 if (ypart == 0 && CONSTANT_P (y))
2662 y = force_const_mem (mode, y);
2663 ypart = operand_subword (y, i, 1, mode);
2665 else if (ypart == 0)
2666 ypart = operand_subword_force (y, i, mode);
2668 if (xpart == 0 || ypart == 0)
2671 last_insn = emit_move_insn (xpart, ypart);
2680 /* Pushing data onto the stack. */
2682 /* Push a block of length SIZE (perhaps variable)
2683 and return an rtx to address the beginning of the block.
2684 Note that it is not possible for the value returned to be a QUEUED.
2685 The value may be virtual_outgoing_args_rtx.
2687 EXTRA is the number of bytes of padding to push in addition to SIZE.
2688 BELOW nonzero means this padding comes at low addresses;
2689 otherwise, the padding comes at high addresses. */
2692 push_block (size, extra, below)
2698 size = convert_modes (Pmode, ptr_mode, size, 1);
2699 if (CONSTANT_P (size))
2700 anti_adjust_stack (plus_constant (size, extra));
2701 else if (GET_CODE (size) == REG && extra == 0)
2702 anti_adjust_stack (size);
2705 rtx temp = copy_to_mode_reg (Pmode, size);
2707 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2708 temp, 0, OPTAB_LIB_WIDEN);
2709 anti_adjust_stack (temp);
2712 #if defined (STACK_GROWS_DOWNWARD) \
2713 || (defined (ARGS_GROW_DOWNWARD) \
2714 && !defined (ACCUMULATE_OUTGOING_ARGS))
2716 /* Return the lowest stack address when STACK or ARGS grow downward and
2717 we are not aaccumulating outgoing arguments (the c4x port uses such
2719 temp = virtual_outgoing_args_rtx;
2720 if (extra != 0 && below)
2721 temp = plus_constant (temp, extra);
2723 if (GET_CODE (size) == CONST_INT)
2724 temp = plus_constant (virtual_outgoing_args_rtx,
2725 - INTVAL (size) - (below ? 0 : extra));
2726 else if (extra != 0 && !below)
2727 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2728 negate_rtx (Pmode, plus_constant (size, extra)));
2730 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2731 negate_rtx (Pmode, size));
2734 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2740 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2743 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2744 block of SIZE bytes. */
2747 get_push_address (size)
2752 if (STACK_PUSH_CODE == POST_DEC)
2753 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2754 else if (STACK_PUSH_CODE == POST_INC)
2755 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2757 temp = stack_pointer_rtx;
2759 return copy_to_reg (temp);
2762 /* Generate code to push X onto the stack, assuming it has mode MODE and
2764 MODE is redundant except when X is a CONST_INT (since they don't
2766 SIZE is an rtx for the size of data to be copied (in bytes),
2767 needed only if X is BLKmode.
2769 ALIGN (in bytes) is maximum alignment we can assume.
2771 If PARTIAL and REG are both nonzero, then copy that many of the first
2772 words of X into registers starting with REG, and push the rest of X.
2773 The amount of space pushed is decreased by PARTIAL words,
2774 rounded *down* to a multiple of PARM_BOUNDARY.
2775 REG must be a hard register in this case.
2776 If REG is zero but PARTIAL is not, take all other actions for an
2777 argument partially in registers, but do not actually load any
2780 EXTRA is the amount in bytes of extra space to leave next to this arg.
2781 This is ignored if an argument block has already been allocated.
2783 On a machine that lacks real push insns, ARGS_ADDR is the address of
2784 the bottom of the argument block for this call. We use indexing off there
2785 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2786 argument block has not been preallocated.
2788 ARGS_SO_FAR is the size of args previously pushed for this call.
2790 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2791 for arguments passed in registers. If nonzero, it will be the number
2792 of bytes required. */
2795 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2796 args_addr, args_so_far, reg_parm_stack_space)
2798 enum machine_mode mode;
2807 int reg_parm_stack_space;
2810 enum direction stack_direction
2811 #ifdef STACK_GROWS_DOWNWARD
2817 /* Decide where to pad the argument: `downward' for below,
2818 `upward' for above, or `none' for don't pad it.
2819 Default is below for small data on big-endian machines; else above. */
2820 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2822 /* Invert direction if stack is post-update. */
2823 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2824 if (where_pad != none)
2825 where_pad = (where_pad == downward ? upward : downward);
2827 xinner = x = protect_from_queue (x, 0);
2829 if (mode == BLKmode)
2831 /* Copy a block into the stack, entirely or partially. */
2834 int used = partial * UNITS_PER_WORD;
2835 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2843 /* USED is now the # of bytes we need not copy to the stack
2844 because registers will take care of them. */
2847 xinner = change_address (xinner, BLKmode,
2848 plus_constant (XEXP (xinner, 0), used));
2850 /* If the partial register-part of the arg counts in its stack size,
2851 skip the part of stack space corresponding to the registers.
2852 Otherwise, start copying to the beginning of the stack space,
2853 by setting SKIP to 0. */
2854 skip = (reg_parm_stack_space == 0) ? 0 : used;
2856 #ifdef PUSH_ROUNDING
2857 /* Do it with several push insns if that doesn't take lots of insns
2858 and if there is no difficulty with push insns that skip bytes
2859 on the stack for alignment purposes. */
2861 && GET_CODE (size) == CONST_INT
2863 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2864 /* Here we avoid the case of a structure whose weak alignment
2865 forces many pushes of a small amount of data,
2866 and such small pushes do rounding that causes trouble. */
2867 && ((! SLOW_UNALIGNED_ACCESS)
2868 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2869 || PUSH_ROUNDING (align) == align)
2870 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2872 /* Push padding now if padding above and stack grows down,
2873 or if padding below and stack grows up.
2874 But if space already allocated, this has already been done. */
2875 if (extra && args_addr == 0
2876 && where_pad != none && where_pad != stack_direction)
2877 anti_adjust_stack (GEN_INT (extra));
2879 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2880 INTVAL (size) - used, align);
2882 if (current_function_check_memory_usage && ! in_check_memory_usage)
2886 in_check_memory_usage = 1;
2887 temp = get_push_address (INTVAL(size) - used);
2888 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2889 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2891 XEXP (xinner, 0), Pmode,
2892 GEN_INT (INTVAL(size) - used),
2893 TYPE_MODE (sizetype));
2895 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2897 GEN_INT (INTVAL(size) - used),
2898 TYPE_MODE (sizetype),
2899 GEN_INT (MEMORY_USE_RW),
2900 TYPE_MODE (integer_type_node));
2901 in_check_memory_usage = 0;
2905 #endif /* PUSH_ROUNDING */
2907 /* Otherwise make space on the stack and copy the data
2908 to the address of that space. */
2910 /* Deduct words put into registers from the size we must copy. */
2913 if (GET_CODE (size) == CONST_INT)
2914 size = GEN_INT (INTVAL (size) - used);
2916 size = expand_binop (GET_MODE (size), sub_optab, size,
2917 GEN_INT (used), NULL_RTX, 0,
2921 /* Get the address of the stack space.
2922 In this case, we do not deal with EXTRA separately.
2923 A single stack adjust will do. */
2926 temp = push_block (size, extra, where_pad == downward);
2929 else if (GET_CODE (args_so_far) == CONST_INT)
2930 temp = memory_address (BLKmode,
2931 plus_constant (args_addr,
2932 skip + INTVAL (args_so_far)));
2934 temp = memory_address (BLKmode,
2935 plus_constant (gen_rtx_PLUS (Pmode,
2939 if (current_function_check_memory_usage && ! in_check_memory_usage)
2943 in_check_memory_usage = 1;
2944 target = copy_to_reg (temp);
2945 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2946 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2948 XEXP (xinner, 0), Pmode,
2949 size, TYPE_MODE (sizetype));
2951 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2953 size, TYPE_MODE (sizetype),
2954 GEN_INT (MEMORY_USE_RW),
2955 TYPE_MODE (integer_type_node));
2956 in_check_memory_usage = 0;
2959 /* TEMP is the address of the block. Copy the data there. */
2960 if (GET_CODE (size) == CONST_INT
2961 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
2963 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2964 INTVAL (size), align);
2969 rtx opalign = GEN_INT (align);
2970 enum machine_mode mode;
2971 rtx target = gen_rtx_MEM (BLKmode, temp);
2973 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2975 mode = GET_MODE_WIDER_MODE (mode))
2977 enum insn_code code = movstr_optab[(int) mode];
2979 if (code != CODE_FOR_nothing
2980 && ((GET_CODE (size) == CONST_INT
2981 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2982 <= (GET_MODE_MASK (mode) >> 1)))
2983 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2984 && (insn_operand_predicate[(int) code][0] == 0
2985 || ((*insn_operand_predicate[(int) code][0])
2987 && (insn_operand_predicate[(int) code][1] == 0
2988 || ((*insn_operand_predicate[(int) code][1])
2990 && (insn_operand_predicate[(int) code][3] == 0
2991 || ((*insn_operand_predicate[(int) code][3])
2992 (opalign, VOIDmode))))
2994 rtx op2 = convert_to_mode (mode, size, 1);
2995 rtx last = get_last_insn ();
2998 if (insn_operand_predicate[(int) code][2] != 0
2999 && ! ((*insn_operand_predicate[(int) code][2])
3001 op2 = copy_to_mode_reg (mode, op2);
3003 pat = GEN_FCN ((int) code) (target, xinner,
3011 delete_insns_since (last);
3016 #ifndef ACCUMULATE_OUTGOING_ARGS
3017 /* If the source is referenced relative to the stack pointer,
3018 copy it to another register to stabilize it. We do not need
3019 to do this if we know that we won't be changing sp. */
3021 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3022 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3023 temp = copy_to_reg (temp);
3026 /* Make inhibit_defer_pop nonzero around the library call
3027 to force it to pop the bcopy-arguments right away. */
3029 #ifdef TARGET_MEM_FUNCTIONS
3030 emit_library_call (memcpy_libfunc, 0,
3031 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3032 convert_to_mode (TYPE_MODE (sizetype),
3033 size, TREE_UNSIGNED (sizetype)),
3034 TYPE_MODE (sizetype));
3036 emit_library_call (bcopy_libfunc, 0,
3037 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3038 convert_to_mode (TYPE_MODE (integer_type_node),
3040 TREE_UNSIGNED (integer_type_node)),
3041 TYPE_MODE (integer_type_node));
3046 else if (partial > 0)
3048 /* Scalar partly in registers. */
3050 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3053 /* # words of start of argument
3054 that we must make space for but need not store. */
3055 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3056 int args_offset = INTVAL (args_so_far);
3059 /* Push padding now if padding above and stack grows down,
3060 or if padding below and stack grows up.
3061 But if space already allocated, this has already been done. */
3062 if (extra && args_addr == 0
3063 && where_pad != none && where_pad != stack_direction)
3064 anti_adjust_stack (GEN_INT (extra));
3066 /* If we make space by pushing it, we might as well push
3067 the real data. Otherwise, we can leave OFFSET nonzero
3068 and leave the space uninitialized. */
3072 /* Now NOT_STACK gets the number of words that we don't need to
3073 allocate on the stack. */
3074 not_stack = partial - offset;
3076 /* If the partial register-part of the arg counts in its stack size,
3077 skip the part of stack space corresponding to the registers.
3078 Otherwise, start copying to the beginning of the stack space,
3079 by setting SKIP to 0. */
3080 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3082 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3083 x = validize_mem (force_const_mem (mode, x));
3085 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3086 SUBREGs of such registers are not allowed. */
3087 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3088 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3089 x = copy_to_reg (x);
3091 /* Loop over all the words allocated on the stack for this arg. */
3092 /* We can do it by words, because any scalar bigger than a word
3093 has a size a multiple of a word. */
3094 #ifndef PUSH_ARGS_REVERSED
3095 for (i = not_stack; i < size; i++)
3097 for (i = size - 1; i >= not_stack; i--)
3099 if (i >= not_stack + offset)
3100 emit_push_insn (operand_subword_force (x, i, mode),
3101 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3103 GEN_INT (args_offset + ((i - not_stack + skip)
3105 reg_parm_stack_space);
3110 rtx target = NULL_RTX;
3112 /* Push padding now if padding above and stack grows down,
3113 or if padding below and stack grows up.
3114 But if space already allocated, this has already been done. */
3115 if (extra && args_addr == 0
3116 && where_pad != none && where_pad != stack_direction)
3117 anti_adjust_stack (GEN_INT (extra));
3119 #ifdef PUSH_ROUNDING
3121 addr = gen_push_operand ();
3125 if (GET_CODE (args_so_far) == CONST_INT)
3127 = memory_address (mode,
3128 plus_constant (args_addr,
3129 INTVAL (args_so_far)));
3131 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3136 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3138 if (current_function_check_memory_usage && ! in_check_memory_usage)
3140 in_check_memory_usage = 1;
3142 target = get_push_address (GET_MODE_SIZE (mode));
3144 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3145 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3148 GEN_INT (GET_MODE_SIZE (mode)),
3149 TYPE_MODE (sizetype));
3151 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3153 GEN_INT (GET_MODE_SIZE (mode)),
3154 TYPE_MODE (sizetype),
3155 GEN_INT (MEMORY_USE_RW),
3156 TYPE_MODE (integer_type_node));
3157 in_check_memory_usage = 0;
3162 /* If part should go in registers, copy that part
3163 into the appropriate registers. Do this now, at the end,
3164 since mem-to-mem copies above may do function calls. */
3165 if (partial > 0 && reg != 0)
3167 /* Handle calls that pass values in multiple non-contiguous locations.
3168 The Irix 6 ABI has examples of this. */
3169 if (GET_CODE (reg) == PARALLEL)
3170 emit_group_load (reg, x, -1, align); /* ??? size? */
3172 move_block_to_reg (REGNO (reg), x, partial, mode);
3175 if (extra && args_addr == 0 && where_pad == stack_direction)
3176 anti_adjust_stack (GEN_INT (extra));
3179 /* Expand an assignment that stores the value of FROM into TO.
3180 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3181 (This may contain a QUEUED rtx;
3182 if the value is constant, this rtx is a constant.)
3183 Otherwise, the returned value is NULL_RTX.
3185 SUGGEST_REG is no longer actually used.
3186 It used to mean, copy the value through a register
3187 and return that register, if that is possible.
3188 We now use WANT_VALUE to decide whether to do this. */
3191 expand_assignment (to, from, want_value, suggest_reg)
3196 register rtx to_rtx = 0;
3199 /* Don't crash if the lhs of the assignment was erroneous. */
3201 if (TREE_CODE (to) == ERROR_MARK)
3203 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3204 return want_value ? result : NULL_RTX;
3207 /* Assignment of a structure component needs special treatment
3208 if the structure component's rtx is not simply a MEM.
3209 Assignment of an array element at a constant index, and assignment of
3210 an array element in an unaligned packed structure field, has the same
3213 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3214 || TREE_CODE (to) == ARRAY_REF)
3216 enum machine_mode mode1;
3226 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3227 &unsignedp, &volatilep, &alignment);
3229 /* If we are going to use store_bit_field and extract_bit_field,
3230 make sure to_rtx will be safe for multiple use. */
3232 if (mode1 == VOIDmode && want_value)
3233 tem = stabilize_reference (tem);
3235 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3238 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3240 if (GET_CODE (to_rtx) != MEM)
3243 if (GET_MODE (offset_rtx) != ptr_mode)
3245 #ifdef POINTERS_EXTEND_UNSIGNED
3246 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3248 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3252 /* A constant address in TO_RTX can have VOIDmode, we must not try
3253 to call force_reg for that case. Avoid that case. */
3254 if (GET_CODE (to_rtx) == MEM
3255 && GET_MODE (to_rtx) == BLKmode
3256 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3258 && (bitpos % bitsize) == 0
3259 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3260 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3262 rtx temp = change_address (to_rtx, mode1,
3263 plus_constant (XEXP (to_rtx, 0),
3266 if (GET_CODE (XEXP (temp, 0)) == REG)
3269 to_rtx = change_address (to_rtx, mode1,
3270 force_reg (GET_MODE (XEXP (temp, 0)),
3275 to_rtx = change_address (to_rtx, VOIDmode,
3276 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3277 force_reg (ptr_mode, offset_rtx)));
3281 if (GET_CODE (to_rtx) == MEM)
3283 /* When the offset is zero, to_rtx is the address of the
3284 structure we are storing into, and hence may be shared.
3285 We must make a new MEM before setting the volatile bit. */
3287 to_rtx = copy_rtx (to_rtx);
3289 MEM_VOLATILE_P (to_rtx) = 1;
3291 #if 0 /* This was turned off because, when a field is volatile
3292 in an object which is not volatile, the object may be in a register,
3293 and then we would abort over here. */
3299 if (TREE_CODE (to) == COMPONENT_REF
3300 && TREE_READONLY (TREE_OPERAND (to, 1)))
3303 to_rtx = copy_rtx (to_rtx);
3305 RTX_UNCHANGING_P (to_rtx) = 1;
3308 /* Check the access. */
3309 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3314 enum machine_mode best_mode;
3316 best_mode = get_best_mode (bitsize, bitpos,
3317 TYPE_ALIGN (TREE_TYPE (tem)),
3319 if (best_mode == VOIDmode)
3322 best_mode_size = GET_MODE_BITSIZE (best_mode);
3323 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3324 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3325 size *= GET_MODE_SIZE (best_mode);
3327 /* Check the access right of the pointer. */
3329 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3331 GEN_INT (size), TYPE_MODE (sizetype),
3332 GEN_INT (MEMORY_USE_WO),
3333 TYPE_MODE (integer_type_node));
3336 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3338 /* Spurious cast makes HPUX compiler happy. */
3339 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3342 /* Required alignment of containing datum. */
3344 int_size_in_bytes (TREE_TYPE (tem)),
3345 get_alias_set (to));
3346 preserve_temp_slots (result);
3350 /* If the value is meaningful, convert RESULT to the proper mode.
3351 Otherwise, return nothing. */
3352 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3353 TYPE_MODE (TREE_TYPE (from)),
3355 TREE_UNSIGNED (TREE_TYPE (to)))
3359 /* If the rhs is a function call and its value is not an aggregate,
3360 call the function before we start to compute the lhs.
3361 This is needed for correct code for cases such as
3362 val = setjmp (buf) on machines where reference to val
3363 requires loading up part of an address in a separate insn.
3365 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3366 a promoted variable where the zero- or sign- extension needs to be done.
3367 Handling this in the normal way is safe because no computation is done
3369 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3370 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3371 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3376 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3378 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3380 /* Handle calls that return values in multiple non-contiguous locations.
3381 The Irix 6 ABI has examples of this. */
3382 if (GET_CODE (to_rtx) == PARALLEL)
3383 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3384 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3385 else if (GET_MODE (to_rtx) == BLKmode)
3386 emit_block_move (to_rtx, value, expr_size (from),
3387 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3390 #ifdef POINTERS_EXTEND_UNSIGNED
3391 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3392 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3393 value = convert_memory_address (GET_MODE (to_rtx), value);
3395 emit_move_insn (to_rtx, value);
3397 preserve_temp_slots (to_rtx);
3400 return want_value ? to_rtx : NULL_RTX;
3403 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3404 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3408 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3409 if (GET_CODE (to_rtx) == MEM)
3410 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3413 /* Don't move directly into a return register. */
3414 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3419 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3420 emit_move_insn (to_rtx, temp);
3421 preserve_temp_slots (to_rtx);
3424 return want_value ? to_rtx : NULL_RTX;
3427 /* In case we are returning the contents of an object which overlaps
3428 the place the value is being stored, use a safe function when copying
3429 a value through a pointer into a structure value return block. */
3430 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3431 && current_function_returns_struct
3432 && !current_function_returns_pcc_struct)
3437 size = expr_size (from);
3438 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3439 EXPAND_MEMORY_USE_DONT);
3441 /* Copy the rights of the bitmap. */
3442 if (current_function_check_memory_usage)
3443 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3444 XEXP (to_rtx, 0), Pmode,
3445 XEXP (from_rtx, 0), Pmode,
3446 convert_to_mode (TYPE_MODE (sizetype),
3447 size, TREE_UNSIGNED (sizetype)),
3448 TYPE_MODE (sizetype));
3450 #ifdef TARGET_MEM_FUNCTIONS
3451 emit_library_call (memcpy_libfunc, 0,
3452 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3453 XEXP (from_rtx, 0), Pmode,
3454 convert_to_mode (TYPE_MODE (sizetype),
3455 size, TREE_UNSIGNED (sizetype)),
3456 TYPE_MODE (sizetype));
3458 emit_library_call (bcopy_libfunc, 0,
3459 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3460 XEXP (to_rtx, 0), Pmode,
3461 convert_to_mode (TYPE_MODE (integer_type_node),
3462 size, TREE_UNSIGNED (integer_type_node)),
3463 TYPE_MODE (integer_type_node));
3466 preserve_temp_slots (to_rtx);
3469 return want_value ? to_rtx : NULL_RTX;
3472 /* Compute FROM and store the value in the rtx we got. */
3475 result = store_expr (from, to_rtx, want_value);
3476 preserve_temp_slots (result);
3479 return want_value ? result : NULL_RTX;
3482 /* Generate code for computing expression EXP,
3483 and storing the value into TARGET.
3484 TARGET may contain a QUEUED rtx.
3486 If WANT_VALUE is nonzero, return a copy of the value
3487 not in TARGET, so that we can be sure to use the proper
3488 value in a containing expression even if TARGET has something
3489 else stored in it. If possible, we copy the value through a pseudo
3490 and return that pseudo. Or, if the value is constant, we try to
3491 return the constant. In some cases, we return a pseudo
3492 copied *from* TARGET.
3494 If the mode is BLKmode then we may return TARGET itself.
3495 It turns out that in BLKmode it doesn't cause a problem.
3496 because C has no operators that could combine two different
3497 assignments into the same BLKmode object with different values
3498 with no sequence point. Will other languages need this to
3501 If WANT_VALUE is 0, we return NULL, to make sure
3502 to catch quickly any cases where the caller uses the value
3503 and fails to set WANT_VALUE. */
3506 store_expr (exp, target, want_value)
3508 register rtx target;
3512 int dont_return_target = 0;
3514 if (TREE_CODE (exp) == COMPOUND_EXPR)
3516 /* Perform first part of compound expression, then assign from second
3518 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3520 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3522 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3524 /* For conditional expression, get safe form of the target. Then
3525 test the condition, doing the appropriate assignment on either
3526 side. This avoids the creation of unnecessary temporaries.
3527 For non-BLKmode, it is more efficient not to do this. */
3529 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3532 target = protect_from_queue (target, 1);
3534 do_pending_stack_adjust ();
3536 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3537 start_cleanup_deferral ();
3538 store_expr (TREE_OPERAND (exp, 1), target, 0);
3539 end_cleanup_deferral ();
3541 emit_jump_insn (gen_jump (lab2));
3544 start_cleanup_deferral ();
3545 store_expr (TREE_OPERAND (exp, 2), target, 0);
3546 end_cleanup_deferral ();
3551 return want_value ? target : NULL_RTX;
3553 else if (queued_subexp_p (target))
3554 /* If target contains a postincrement, let's not risk
3555 using it as the place to generate the rhs. */
3557 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3559 /* Expand EXP into a new pseudo. */
3560 temp = gen_reg_rtx (GET_MODE (target));
3561 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3564 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3566 /* If target is volatile, ANSI requires accessing the value
3567 *from* the target, if it is accessed. So make that happen.
3568 In no case return the target itself. */
3569 if (! MEM_VOLATILE_P (target) && want_value)
3570 dont_return_target = 1;
3572 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3573 && GET_MODE (target) != BLKmode)
3574 /* If target is in memory and caller wants value in a register instead,
3575 arrange that. Pass TARGET as target for expand_expr so that,
3576 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3577 We know expand_expr will not use the target in that case.
3578 Don't do this if TARGET is volatile because we are supposed
3579 to write it and then read it. */
3581 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3582 GET_MODE (target), 0);
3583 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3584 temp = copy_to_reg (temp);
3585 dont_return_target = 1;
3587 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3588 /* If this is a scalar in a register that is stored in a wider mode
3589 than the declared mode, compute the result into its declared mode
3590 and then convert to the wider mode. Our value is the computed
3593 /* If we don't want a value, we can do the conversion inside EXP,
3594 which will often result in some optimizations. Do the conversion
3595 in two steps: first change the signedness, if needed, then
3596 the extend. But don't do this if the type of EXP is a subtype
3597 of something else since then the conversion might involve
3598 more than just converting modes. */
3599 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3600 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3602 if (TREE_UNSIGNED (TREE_TYPE (exp))
3603 != SUBREG_PROMOTED_UNSIGNED_P (target))
3606 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3610 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3611 SUBREG_PROMOTED_UNSIGNED_P (target)),
3615 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3617 /* If TEMP is a volatile MEM and we want a result value, make
3618 the access now so it gets done only once. Likewise if
3619 it contains TARGET. */
3620 if (GET_CODE (temp) == MEM && want_value
3621 && (MEM_VOLATILE_P (temp)
3622 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3623 temp = copy_to_reg (temp);
3625 /* If TEMP is a VOIDmode constant, use convert_modes to make
3626 sure that we properly convert it. */
3627 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3628 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3629 TYPE_MODE (TREE_TYPE (exp)), temp,
3630 SUBREG_PROMOTED_UNSIGNED_P (target));
3632 convert_move (SUBREG_REG (target), temp,
3633 SUBREG_PROMOTED_UNSIGNED_P (target));
3635 /* If we promoted a constant, change the mode back down to match
3636 target. Otherwise, the caller might get confused by a result whose
3637 mode is larger than expected. */
3639 if (want_value && GET_MODE (temp) != GET_MODE (target)
3640 && GET_MODE (temp) != VOIDmode)
3642 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3643 SUBREG_PROMOTED_VAR_P (temp) = 1;
3644 SUBREG_PROMOTED_UNSIGNED_P (temp)
3645 = SUBREG_PROMOTED_UNSIGNED_P (target);
3648 return want_value ? temp : NULL_RTX;
3652 temp = expand_expr (exp, target, GET_MODE (target), 0);
3653 /* Return TARGET if it's a specified hardware register.
3654 If TARGET is a volatile mem ref, either return TARGET
3655 or return a reg copied *from* TARGET; ANSI requires this.
3657 Otherwise, if TEMP is not TARGET, return TEMP
3658 if it is constant (for efficiency),
3659 or if we really want the correct value. */
3660 if (!(target && GET_CODE (target) == REG
3661 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3662 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3663 && ! rtx_equal_p (temp, target)
3664 && (CONSTANT_P (temp) || want_value))
3665 dont_return_target = 1;
3668 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3669 the same as that of TARGET, adjust the constant. This is needed, for
3670 example, in case it is a CONST_DOUBLE and we want only a word-sized
3672 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3673 && TREE_CODE (exp) != ERROR_MARK
3674 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3675 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3676 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3678 if (current_function_check_memory_usage
3679 && GET_CODE (target) == MEM
3680 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3682 if (GET_CODE (temp) == MEM)
3683 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3684 XEXP (target, 0), Pmode,
3685 XEXP (temp, 0), Pmode,
3686 expr_size (exp), TYPE_MODE (sizetype));
3688 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3689 XEXP (target, 0), Pmode,
3690 expr_size (exp), TYPE_MODE (sizetype),
3691 GEN_INT (MEMORY_USE_WO),
3692 TYPE_MODE (integer_type_node));
3695 /* If value was not generated in the target, store it there.
3696 Convert the value to TARGET's type first if nec. */
3697 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3698 one or both of them are volatile memory refs, we have to distinguish
3700 - expand_expr has used TARGET. In this case, we must not generate
3701 another copy. This can be detected by TARGET being equal according
3703 - expand_expr has not used TARGET - that means that the source just
3704 happens to have the same RTX form. Since temp will have been created
3705 by expand_expr, it will compare unequal according to == .
3706 We must generate a copy in this case, to reach the correct number
3707 of volatile memory references. */
3709 if ((! rtx_equal_p (temp, target)
3710 || (temp != target && (side_effects_p (temp)
3711 || side_effects_p (target))))
3712 && TREE_CODE (exp) != ERROR_MARK)
3714 target = protect_from_queue (target, 1);
3715 if (GET_MODE (temp) != GET_MODE (target)
3716 && GET_MODE (temp) != VOIDmode)
3718 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3719 if (dont_return_target)
3721 /* In this case, we will return TEMP,
3722 so make sure it has the proper mode.
3723 But don't forget to store the value into TARGET. */
3724 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3725 emit_move_insn (target, temp);
3728 convert_move (target, temp, unsignedp);
3731 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3733 /* Handle copying a string constant into an array.
3734 The string constant may be shorter than the array.
3735 So copy just the string's actual length, and clear the rest. */
3739 /* Get the size of the data type of the string,
3740 which is actually the size of the target. */
3741 size = expr_size (exp);
3742 if (GET_CODE (size) == CONST_INT
3743 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3744 emit_block_move (target, temp, size,
3745 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3748 /* Compute the size of the data to copy from the string. */
3750 = size_binop (MIN_EXPR,
3751 make_tree (sizetype, size),
3753 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3754 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3758 /* Copy that much. */
3759 emit_block_move (target, temp, copy_size_rtx,
3760 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3762 /* Figure out how much is left in TARGET that we have to clear.
3763 Do all calculations in ptr_mode. */
3765 addr = XEXP (target, 0);
3766 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3768 if (GET_CODE (copy_size_rtx) == CONST_INT)
3770 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3771 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3775 addr = force_reg (ptr_mode, addr);
3776 addr = expand_binop (ptr_mode, add_optab, addr,
3777 copy_size_rtx, NULL_RTX, 0,
3780 size = expand_binop (ptr_mode, sub_optab, size,
3781 copy_size_rtx, NULL_RTX, 0,
3784 label = gen_label_rtx ();
3785 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3786 GET_MODE (size), 0, 0, label);
3789 if (size != const0_rtx)
3791 /* Be sure we can write on ADDR. */
3792 if (current_function_check_memory_usage)
3793 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3795 size, TYPE_MODE (sizetype),
3796 GEN_INT (MEMORY_USE_WO),
3797 TYPE_MODE (integer_type_node));
3798 #ifdef TARGET_MEM_FUNCTIONS
3799 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3801 const0_rtx, TYPE_MODE (integer_type_node),
3802 convert_to_mode (TYPE_MODE (sizetype),
3804 TREE_UNSIGNED (sizetype)),
3805 TYPE_MODE (sizetype));
3807 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3809 convert_to_mode (TYPE_MODE (integer_type_node),
3811 TREE_UNSIGNED (integer_type_node)),
3812 TYPE_MODE (integer_type_node));
3820 /* Handle calls that return values in multiple non-contiguous locations.
3821 The Irix 6 ABI has examples of this. */
3822 else if (GET_CODE (target) == PARALLEL)
3823 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3824 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3825 else if (GET_MODE (temp) == BLKmode)
3826 emit_block_move (target, temp, expr_size (exp),
3827 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3829 emit_move_insn (target, temp);
3832 /* If we don't want a value, return NULL_RTX. */
3836 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3837 ??? The latter test doesn't seem to make sense. */
3838 else if (dont_return_target && GET_CODE (temp) != MEM)
3841 /* Return TARGET itself if it is a hard register. */
3842 else if (want_value && GET_MODE (target) != BLKmode
3843 && ! (GET_CODE (target) == REG
3844 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3845 return copy_to_reg (target);
3851 /* Return 1 if EXP just contains zeros. */
3859 switch (TREE_CODE (exp))
3863 case NON_LVALUE_EXPR:
3864 return is_zeros_p (TREE_OPERAND (exp, 0));
3867 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3871 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3874 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3877 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3878 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3879 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3880 if (! is_zeros_p (TREE_VALUE (elt)))
3890 /* Return 1 if EXP contains mostly (3/4) zeros. */
3893 mostly_zeros_p (exp)
3896 if (TREE_CODE (exp) == CONSTRUCTOR)
3898 int elts = 0, zeros = 0;
3899 tree elt = CONSTRUCTOR_ELTS (exp);
3900 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3902 /* If there are no ranges of true bits, it is all zero. */
3903 return elt == NULL_TREE;
3905 for (; elt; elt = TREE_CHAIN (elt))
3907 /* We do not handle the case where the index is a RANGE_EXPR,
3908 so the statistic will be somewhat inaccurate.
3909 We do make a more accurate count in store_constructor itself,
3910 so since this function is only used for nested array elements,
3911 this should be close enough. */
3912 if (mostly_zeros_p (TREE_VALUE (elt)))
3917 return 4 * zeros >= 3 * elts;
3920 return is_zeros_p (exp);
3923 /* Helper function for store_constructor.
3924 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3925 TYPE is the type of the CONSTRUCTOR, not the element type.
3926 CLEARED is as for store_constructor.
3928 This provides a recursive shortcut back to store_constructor when it isn't
3929 necessary to go through store_field. This is so that we can pass through
3930 the cleared field to let store_constructor know that we may not have to
3931 clear a substructure if the outer structure has already been cleared. */
3934 store_constructor_field (target, bitsize, bitpos,
3935 mode, exp, type, cleared)
3937 int bitsize, bitpos;
3938 enum machine_mode mode;
3942 if (TREE_CODE (exp) == CONSTRUCTOR
3943 && bitpos % BITS_PER_UNIT == 0
3944 /* If we have a non-zero bitpos for a register target, then we just
3945 let store_field do the bitfield handling. This is unlikely to
3946 generate unnecessary clear instructions anyways. */
3947 && (bitpos == 0 || GET_CODE (target) == MEM))
3950 target = change_address (target, VOIDmode,
3951 plus_constant (XEXP (target, 0),
3952 bitpos / BITS_PER_UNIT));
3953 store_constructor (exp, target, cleared);
3956 store_field (target, bitsize, bitpos, mode, exp,
3957 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3958 int_size_in_bytes (type), 0);
3961 /* Store the value of constructor EXP into the rtx TARGET.
3962 TARGET is either a REG or a MEM.
3963 CLEARED is true if TARGET is known to have been zero'd. */
/* NOTE(review): this region is an extracted listing — brace-only and blank
   lines of the original were dropped and each line keeps its original line
   number.  Code tokens below are preserved verbatim; only comments added.  */
3966 store_constructor (exp, target, cleared)
3971 tree type = TREE_TYPE (exp);
3972 #ifdef WORD_REGISTER_OPERATIONS
3973 rtx exp_size = expr_size (exp);
3976 /* We know our target cannot conflict, since safe_from_p has been called. */
3978 /* Don't try copying piece by piece into a hard register
3979 since that is vulnerable to being clobbered by EXP.
3980 Instead, construct in a pseudo register and then copy it all. */
3981 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3983 rtx temp = gen_reg_rtx (GET_MODE (target));
3984 store_constructor (exp, temp, 0);
3985 emit_move_insn (target, temp);
/* Case 1: struct/union constructors — decide whether to pre-clear, then
   store the constructor field by field.  */
3990 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3991 || TREE_CODE (type) == QUAL_UNION_TYPE)
3995 /* Inform later passes that the whole union value is dead. */
3996 if (TREE_CODE (type) == UNION_TYPE
3997 || TREE_CODE (type) == QUAL_UNION_TYPE)
3998 emit_insn (gen_rtx_CLOBBER (VOIDmode, target))
4000 /* If we are building a static constructor into a register,
4001 set the initial value as zero so we can fold the value into
4002 a constant. But if more than one register is involved,
4003 this probably loses. */
4004 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4005 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4008 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4013 /* If the constructor has fewer fields than the structure
4014 or if we are initializing the structure to mostly zeros,
4015 clear the whole structure first. */
4016 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4017 != list_length (TYPE_FIELDS (type)))
4018 || mostly_zeros_p (exp))
4021 clear_storage (target, expr_size (exp),
4022 TYPE_ALIGN (type) / BITS_PER_UNIT);
4027 /* Inform later passes that the old value is dead. */
4028 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4030 /* Store each element of the constructor into
4031 the corresponding field of TARGET. */
4033 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4035 register tree field = TREE_PURPOSE (elt);
4036 tree value = TREE_VALUE (elt);
4037 register enum machine_mode mode;
4041 tree pos, constant = 0, offset = 0;
4042 rtx to_rtx = target;
4044 /* Just ignore missing fields.
4045 We cleared the whole structure, above,
4046 if any fields are missing. */
/* Skip elements already known zero when the target was pre-cleared.  */
4050 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4053 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4054 unsignedp = TREE_UNSIGNED (field);
4055 mode = DECL_MODE (field);
4056 if (DECL_BIT_FIELD (field))
/* Split the field position into a constant bit offset plus an optional
   variable byte offset (for variable-position fields).  */
4059 pos = DECL_FIELD_BITPOS (field);
4060 if (TREE_CODE (pos) == INTEGER_CST)
4062 else if (TREE_CODE (pos) == PLUS_EXPR
4063 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4064 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4069 bitpos = TREE_INT_CST_LOW (constant);
4075 if (contains_placeholder_p (offset))
4076 offset = build (WITH_RECORD_EXPR, sizetype,
4077 offset, make_tree (TREE_TYPE (exp), target));
4079 offset = size_binop (FLOOR_DIV_EXPR, offset,
4080 size_int (BITS_PER_UNIT));
4082 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4083 if (GET_CODE (to_rtx) != MEM)
4086 if (GET_MODE (offset_rtx) != ptr_mode)
4088 #ifdef POINTERS_EXTEND_UNSIGNED
4089 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4091 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4096 = change_address (to_rtx, VOIDmode,
4097 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4098 force_reg (ptr_mode, offset_rtx)));
4100 if (TREE_READONLY (field))
4102 if (GET_CODE (to_rtx) == MEM)
4103 to_rtx = copy_rtx (to_rtx);
4105 RTX_UNCHANGING_P (to_rtx) = 1;
4108 #ifdef WORD_REGISTER_OPERATIONS
4109 /* If this initializes a field that is smaller than a word, at the
4110 start of a word, try to widen it to a full word.
4111 This special case allows us to output C++ member function
4112 initializations in a form that the optimizers can understand. */
4114 && GET_CODE (target) == REG
4115 && bitsize < BITS_PER_WORD
4116 && bitpos % BITS_PER_WORD == 0
4117 && GET_MODE_CLASS (mode) == MODE_INT
4118 && TREE_CODE (value) == INTEGER_CST
4119 && GET_CODE (exp_size) == CONST_INT
4120 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4122 tree type = TREE_TYPE (value);
4123 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4125 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4126 value = convert (type, value);
4128 if (BYTES_BIG_ENDIAN)
4130 = fold (build (LSHIFT_EXPR, type, value,
4131 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4132 bitsize = BITS_PER_WORD;
4136 store_constructor_field (to_rtx, bitsize, bitpos,
4137 mode, value, type, cleared);
/* Case 2: array constructors — count elements/zeros to decide on a
   pre-clear, then store each element (unrolling small constant RANGE_EXPR
   index ranges, emitting a runtime loop for large/non-constant ones).  */
4140 else if (TREE_CODE (type) == ARRAY_TYPE)
4145 tree domain = TYPE_DOMAIN (type);
4146 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4147 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4148 tree elttype = TREE_TYPE (type);
4150 /* If the constructor has fewer elements than the array,
4151 clear the whole array first. Similarly if this is
4152 static constructor of a non-BLKmode object. */
4153 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4157 HOST_WIDE_INT count = 0, zero_count = 0;
4159 /* This loop is a more accurate version of the loop in
4160 mostly_zeros_p (it handles RANGE_EXPR in an index).
4161 It is also needed to check for missing elements. */
4162 for (elt = CONSTRUCTOR_ELTS (exp);
4164 elt = TREE_CHAIN (elt))
4166 tree index = TREE_PURPOSE (elt);
4167 HOST_WIDE_INT this_node_count;
4168 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4170 tree lo_index = TREE_OPERAND (index, 0);
4171 tree hi_index = TREE_OPERAND (index, 1);
4172 if (TREE_CODE (lo_index) != INTEGER_CST
4173 || TREE_CODE (hi_index) != INTEGER_CST)
4178 this_node_count = TREE_INT_CST_LOW (hi_index)
4179 - TREE_INT_CST_LOW (lo_index) + 1;
4182 this_node_count = 1;
4183 count += this_node_count;
4184 if (mostly_zeros_p (TREE_VALUE (elt)))
4185 zero_count += this_node_count;
4187 /* Clear the entire array first if there are any missing elements,
4188 or if the incidence of zero elements is >= 75%. */
4189 if (count < maxelt - minelt + 1
4190 || 4 * zero_count >= 3 * count)
4196 clear_storage (target, expr_size (exp),
4197 TYPE_ALIGN (type) / BITS_PER_UNIT);
4201 /* Inform later passes that the old value is dead. */
4202 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4204 /* Store each element of the constructor into
4205 the corresponding element of TARGET, determined
4206 by counting the elements. */
4207 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4209 elt = TREE_CHAIN (elt), i++)
4211 register enum machine_mode mode;
4215 tree value = TREE_VALUE (elt);
4216 tree index = TREE_PURPOSE (elt);
4217 rtx xtarget = target;
4219 if (cleared && is_zeros_p (value))
4222 mode = TYPE_MODE (elttype);
4223 bitsize = GET_MODE_BITSIZE (mode);
4224 unsignedp = TREE_UNSIGNED (elttype);
4226 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4228 tree lo_index = TREE_OPERAND (index, 0);
4229 tree hi_index = TREE_OPERAND (index, 1);
4230 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4231 struct nesting *loop;
4232 HOST_WIDE_INT lo, hi, count;
4235 /* If the range is constant and "small", unroll the loop. */
4236 if (TREE_CODE (lo_index) == INTEGER_CST
4237 && TREE_CODE (hi_index) == INTEGER_CST
4238 && (lo = TREE_INT_CST_LOW (lo_index),
4239 hi = TREE_INT_CST_LOW (hi_index),
4240 count = hi - lo + 1,
4241 (GET_CODE (target) != MEM
4243 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4244 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4247 lo -= minelt; hi -= minelt;
4248 for (; lo <= hi; lo++)
4250 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4251 store_constructor_field (target, bitsize, bitpos,
4252 mode, value, type, cleared);
/* Non-constant or large range: emit a runtime loop over an index pseudo.  */
4257 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4258 loop_top = gen_label_rtx ();
4259 loop_end = gen_label_rtx ();
4261 unsignedp = TREE_UNSIGNED (domain);
4263 index = build_decl (VAR_DECL, NULL_TREE, domain);
4265 DECL_RTL (index) = index_r
4266 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4269 if (TREE_CODE (value) == SAVE_EXPR
4270 && SAVE_EXPR_RTL (value) == 0)
4272 /* Make sure value gets expanded once before the
4274 expand_expr (value, const0_rtx, VOIDmode, 0);
4277 store_expr (lo_index, index_r, 0);
4278 loop = expand_start_loop (0);
4280 /* Assign value to element index. */
4281 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4282 size_int (BITS_PER_UNIT));
4283 position = size_binop (MULT_EXPR,
4284 size_binop (MINUS_EXPR, index,
4285 TYPE_MIN_VALUE (domain)),
4287 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4288 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4289 xtarget = change_address (target, mode, addr);
4290 if (TREE_CODE (value) == CONSTRUCTOR)
4291 store_constructor (value, xtarget, cleared);
4293 store_expr (value, xtarget, 0);
4295 expand_exit_loop_if_false (loop,
4296 build (LT_EXPR, integer_type_node,
4299 expand_increment (build (PREINCREMENT_EXPR,
4301 index, integer_one_node), 0, 0);
4303 emit_label (loop_end);
4305 /* Needed by stupid register allocation. to extend the
4306 lifetime of pseudo-regs used by target past the end
4308 emit_insn (gen_rtx_USE (GET_MODE (target), target));
/* Non-constant single index or variable-sized element: compute the
   address at runtime and use store_expr.  */
4311 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4312 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4318 index = size_int (i);
4321 index = size_binop (MINUS_EXPR, index,
4322 TYPE_MIN_VALUE (domain));
4323 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4324 size_int (BITS_PER_UNIT));
4325 position = size_binop (MULT_EXPR, index, position);
4326 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4327 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4328 xtarget = change_address (target, mode, addr);
4329 store_expr (value, xtarget, 0);
/* Constant index and constant element size: compute the bit position
   directly.  */
4334 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4335 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4337 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4338 store_constructor_field (target, bitsize, bitpos,
4339 mode, value, type, cleared);
/* Case 3: SET_TYPE (Pascal/Chill-style bit set) constructors.  */
4343 /* set constructor assignments */
4344 else if (TREE_CODE (type) == SET_TYPE)
4346 tree elt = CONSTRUCTOR_ELTS (exp);
4347 int nbytes = int_size_in_bytes (type), nbits;
4348 tree domain = TYPE_DOMAIN (type);
4349 tree domain_min, domain_max, bitlength;
4351 /* The default implementation strategy is to extract the constant
4352 parts of the constructor, use that to initialize the target,
4353 and then "or" in whatever non-constant ranges we need in addition.
4355 If a large set is all zero or all ones, it is
4356 probably better to set it using memset (if available) or bzero.
4357 Also, if a large set has just a single range, it may also be
4358 better to first clear all the first clear the set (using
4359 bzero/memset), and set the bits we want. */
4361 /* Check for all zeros. */
4362 if (elt == NULL_TREE)
4365 clear_storage (target, expr_size (exp),
4366 TYPE_ALIGN (type) / BITS_PER_UNIT);
4370 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4371 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4372 bitlength = size_binop (PLUS_EXPR,
4373 size_binop (MINUS_EXPR, domain_max, domain_min),
4376 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4378 nbits = TREE_INT_CST_LOW (bitlength);
4380 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4381 are "complicated" (more than one range), initialize (the
4382 constant parts) by copying from a constant. */
4383 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4384 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4386 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4387 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4388 char *bit_buffer = (char *) alloca (nbits);
4389 HOST_WIDE_INT word = 0;
4392 int offset = 0; /* In bytes from beginning of set. */
4393 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
/* Pack the constant bits one word at a time and move each word out.  */
4396 if (bit_buffer[ibit])
4398 if (BYTES_BIG_ENDIAN)
4399 word |= (1 << (set_word_size - 1 - bit_pos));
4401 word |= 1 << bit_pos;
4404 if (bit_pos >= set_word_size || ibit == nbits)
4406 if (word != 0 || ! cleared)
4408 rtx datum = GEN_INT (word);
4410 /* The assumption here is that it is safe to use
4411 XEXP if the set is multi-word, but not if
4412 it's single-word. */
4413 if (GET_CODE (target) == MEM)
4415 to_rtx = plus_constant (XEXP (target, 0), offset);
4416 to_rtx = change_address (target, mode, to_rtx);
4418 else if (offset == 0)
4422 emit_move_insn (to_rtx, datum);
4428 offset += set_word_size / BITS_PER_UNIT;
4434 /* Don't bother clearing storage if the set is all ones. */
4435 if (TREE_CHAIN (elt) != NULL_TREE
4436 || (TREE_PURPOSE (elt) == NULL_TREE
4438 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4439 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4440 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4441 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4443 clear_storage (target, expr_size (exp),
4444 TYPE_ALIGN (type) / BITS_PER_UNIT);
/* OR in each (possibly non-constant) bit range via memset or __setbits.  */
4447 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4449 /* start of range of element or NULL */
4450 tree startbit = TREE_PURPOSE (elt);
4451 /* end of range of element, or element value */
4452 tree endbit = TREE_VALUE (elt);
4453 #ifdef TARGET_MEM_FUNCTIONS
4454 HOST_WIDE_INT startb, endb;
4456 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4458 bitlength_rtx = expand_expr (bitlength,
4459 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4461 /* handle non-range tuple element like [ expr ] */
4462 if (startbit == NULL_TREE)
4464 startbit = save_expr (endbit);
4467 startbit = convert (sizetype, startbit);
4468 endbit = convert (sizetype, endbit);
4469 if (! integer_zerop (domain_min))
4471 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4472 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4474 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4475 EXPAND_CONST_ADDRESS);
4476 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4477 EXPAND_CONST_ADDRESS);
/* The library calls need an addressable object: spill a register
   target to a stack temporary first, copy back afterwards.  */
4481 targetx = assign_stack_temp (GET_MODE (target),
4482 GET_MODE_SIZE (GET_MODE (target)),
4484 emit_move_insn (targetx, target);
4486 else if (GET_CODE (target) == MEM)
4491 #ifdef TARGET_MEM_FUNCTIONS
4492 /* Optimization: If startbit and endbit are
4493 constants divisible by BITS_PER_UNIT,
4494 call memset instead. */
4495 if (TREE_CODE (startbit) == INTEGER_CST
4496 && TREE_CODE (endbit) == INTEGER_CST
4497 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4498 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4500 emit_library_call (memset_libfunc, 0,
4502 plus_constant (XEXP (targetx, 0),
4503 startb / BITS_PER_UNIT),
4505 constm1_rtx, TYPE_MODE (integer_type_node),
4506 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4507 TYPE_MODE (sizetype));
4512 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4513 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4514 bitlength_rtx, TYPE_MODE (sizetype),
4515 startbit_rtx, TYPE_MODE (sizetype),
4516 endbit_rtx, TYPE_MODE (sizetype));
4519 emit_move_insn (target, targetx);
4527 /* Store the value of EXP (an expression tree)
4528 into a subfield of TARGET which has mode MODE and occupies
4529 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4530 If MODE is VOIDmode, it means that we are storing into a bit-field.
4532 If VALUE_MODE is VOIDmode, return nothing in particular.
4533 UNSIGNEDP is not used in this case.
4535 Otherwise, return an rtx for the value stored. This rtx
4536 has mode VALUE_MODE if that is convenient to do.
4537 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4539 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4540 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4542 ALIAS_SET is the alias set for the destination. This value will
4543 (in general) be different from that for TARGET, since TARGET is a
4544 reference to the containing structure. */
/* NOTE(review): extracted listing — brace-only/blank lines of the original
   are absent and each line keeps its original line number.  Code tokens
   below are preserved verbatim; only comments added.  */
4547 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4548 unsignedp, align, total_size, alias_set)
4550 int bitsize, bitpos;
4551 enum machine_mode mode;
4553 enum machine_mode value_mode;
4559 HOST_WIDE_INT width_mask = 0;
4561 if (TREE_CODE (exp) == ERROR_MARK)
4564 if (bitsize < HOST_BITS_PER_WIDE_INT)
4565 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4567 /* If we are storing into an unaligned field of an aligned union that is
4568 in a register, we may have the mode of TARGET being an integer mode but
4569 MODE == BLKmode. In that case, get an aligned object whose size and
4570 alignment are the same as TARGET and store TARGET into it (we can avoid
4571 the store if the field being stored is the entire width of TARGET). Then
4572 call ourselves recursively to store the field into a BLKmode version of
4573 that object. Finally, load from the object into TARGET. This is not
4574 very efficient in general, but should only be slightly more expensive
4575 than the otherwise-required unaligned accesses. Perhaps this can be
4576 cleaned up later. */
4579 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4581 rtx object = assign_stack_temp (GET_MODE (target),
4582 GET_MODE_SIZE (GET_MODE (target)), 0);
4583 rtx blk_object = copy_rtx (object);
4585 MEM_SET_IN_STRUCT_P (object, 1);
4586 MEM_SET_IN_STRUCT_P (blk_object, 1);
4587 PUT_MODE (blk_object, BLKmode);
4589 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4590 emit_move_insn (object, target);
4592 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4593 align, total_size, alias_set);
4595 /* Even though we aren't returning target, we need to
4596 give it the updated value. */
4597 emit_move_insn (target, object);
4602 /* If the structure is in a register or if the component
4603 is a bit field, we cannot use addressing to access it.
4604 Use bit-field techniques or SUBREG to store in it. */
4606 if (mode == VOIDmode
4607 || (mode != BLKmode && ! direct_store[(int) mode]
4608 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4609 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4610 || GET_CODE (target) == REG
4611 || GET_CODE (target) == SUBREG
4612 /* If the field isn't aligned enough to store as an ordinary memref,
4613 store it as a bit field. */
4614 || (SLOW_UNALIGNED_ACCESS
4615 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4616 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4618 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4620 /* If BITSIZE is narrower than the size of the type of EXP
4621 we will be narrowing TEMP. Normally, what's wanted are the
4622 low-order bits. However, if EXP's type is a record and this is
4623 big-endian machine, we want the upper BITSIZE bits. */
4624 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4625 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4626 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4627 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4628 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4632 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4634 if (mode != VOIDmode && mode != BLKmode
4635 && mode != TYPE_MODE (TREE_TYPE (exp)))
4636 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4638 /* If the modes of TARGET and TEMP are both BLKmode, both
4639 must be in memory and BITPOS must be aligned on a byte
4640 boundary. If so, we simply do a block copy. */
4641 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4643 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4644 || bitpos % BITS_PER_UNIT != 0)
4647 target = change_address (target, VOIDmode,
4648 plus_constant (XEXP (target, 0),
4649 bitpos / BITS_PER_UNIT));
4651 emit_block_move (target, temp,
4652 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4656 return value_mode == VOIDmode ? const0_rtx : target;
4659 /* Store the value in the bitfield. */
4660 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4661 if (value_mode != VOIDmode)
4663 /* The caller wants an rtx for the value. */
4664 /* If possible, avoid refetching from the bitfield itself. */
4666 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4669 enum machine_mode tmode;
/* Reuse TEMP: mask to WIDTH_MASK, or sign-extend via shift pair.  */
4672 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4673 tmode = GET_MODE (temp);
4674 if (tmode == VOIDmode)
4676 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4677 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4678 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4680 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4681 NULL_RTX, value_mode, 0, align,
/* Ordinary memory case: address the component directly, then store
   with store_expr (which also supplies the return value if wanted).  */
4688 rtx addr = XEXP (target, 0);
4691 /* If a value is wanted, it must be the lhs;
4692 so make the address stable for multiple use. */
4694 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4695 && ! CONSTANT_ADDRESS_P (addr)
4696 /* A frame-pointer reference is already stable. */
4697 && ! (GET_CODE (addr) == PLUS
4698 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4699 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4700 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4701 addr = copy_to_reg (addr);
4703 /* Now build a reference to just the desired component. */
4705 to_rtx = copy_rtx (change_address (target, mode,
4706 plus_constant (addr,
4708 / BITS_PER_UNIT))));
4709 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4710 MEM_ALIAS_SET (to_rtx) = alias_set;
4712 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4716 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4717 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4718 ARRAY_REFs and find the ultimate containing object, which we return.
4720 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4721 bit position, and *PUNSIGNEDP to the signedness of the field.
4722 If the position of the field is variable, we store a tree
4723 giving the variable offset (in units) in *POFFSET.
4724 This offset is in addition to the bit position.
4725 If the position is not variable, we store 0 in *POFFSET.
4726 We set *PALIGNMENT to the alignment in bytes of the address that will be
4727 computed. This is the alignment of the thing we return if *POFFSET
4728 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4730 If any of the extraction expressions is volatile,
4731 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4733 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4734 is a mode that can be used to access the field. In that case, *PBITSIZE
4737 If the field describes a variable-sized object, *PMODE is set to
4738 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4739 this case, but the address of the object can be found. */
4742 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4743 punsignedp, pvolatilep, palignment)
4748 enum machine_mode *pmode;
4753 tree orig_exp = exp;
4755 enum machine_mode mode = VOIDmode;
4756 tree offset = integer_zero_node;
4757 unsigned int alignment = BIGGEST_ALIGNMENT;
 /* First, determine the size (in bits) of the innermost reference.
    For a COMPONENT_REF use the field decl's size; for a BIT_FIELD_REF
    use its explicit size operand; otherwise fall back to the type.  */
4759 if (TREE_CODE (exp) == COMPONENT_REF)
4761 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4762 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4763 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4764 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4766 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4768 size_tree = TREE_OPERAND (exp, 1);
4769 *punsignedp = TREE_UNSIGNED (exp);
4773 mode = TYPE_MODE (TREE_TYPE (exp));
4774 if (mode == BLKmode)
4775 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4777 *pbitsize = GET_MODE_BITSIZE (mode);
4778 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
 /* A non-constant size means a variable-sized object: report
    BLKmode / -1 so callers know only the address is usable.  */
4783 if (TREE_CODE (size_tree) != INTEGER_CST)
4784 mode = BLKmode, *pbitsize = -1;
4786 *pbitsize = TREE_INT_CST_LOW (size_tree);
4789 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4790 and find the ultimate containing object. */
 /* Walk down through nested references, accumulating the constant part
    of the position in *PBITPOS and the variable part (in units) in
    OFFSET, until we hit something that is not a reference.  */
4796 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4798 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4799 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4800 : TREE_OPERAND (exp, 2))
4801 tree constant = integer_zero_node, var = pos;
4803 /* If this field hasn't been filled in yet, don't go
4804 past it. This should only happen when folding expressions
4805 made during type construction. */
4809 /* Assume here that the offset is a multiple of a unit.
4810 If not, there should be an explicitly added constant. */
4811 if (TREE_CODE (pos) == PLUS_EXPR
4812 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4813 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4814 else if (TREE_CODE (pos) == INTEGER_CST)
4815 constant = pos, var = integer_zero_node;
4817 *pbitpos += TREE_INT_CST_LOW (constant);
4818 offset = size_binop (PLUS_EXPR, offset,
4819 size_binop (EXACT_DIV_EXPR, var,
4820 size_int (BITS_PER_UNIT)));
4823 else if (TREE_CODE (exp) == ARRAY_REF)
4825 /* This code is based on the code in case ARRAY_REF in expand_expr
4826 below. We assume here that the size of an array element is
4827 always an integral multiple of BITS_PER_UNIT. */
4829 tree index = TREE_OPERAND (exp, 1);
4830 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4832 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4833 tree index_type = TREE_TYPE (index);
 /* Widen the index to sizetype precision before doing arithmetic
    on it, so narrow index types cannot overflow.  */
4836 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4838 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4840 index_type = TREE_TYPE (index);
4843 /* Optimize the special-case of a zero lower bound.
4845 We convert the low_bound to sizetype to avoid some problems
4846 with constant folding. (E.g. suppose the lower bound is 1,
4847 and its mode is QI. Without the conversion, (ARRAY
4848 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4849 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4851 But sizetype isn't quite right either (especially if
4852 the lowbound is negative). FIXME */
4854 if (! integer_zerop (low_bound))
4855 index = fold (build (MINUS_EXPR, index_type, index,
4856 convert (sizetype, low_bound)));
4858 if (TREE_CODE (index) == INTEGER_CST)
4860 index = convert (sbitsizetype, index);
4861 index_type = TREE_TYPE (index);
 /* Try to fold INDEX * element-size-in-bits to a constant; if the
    product fits (high word zero), add it to the constant bit
    position directly.  */
4864 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4865 convert (sbitsizetype,
4866 TYPE_SIZE (TREE_TYPE (exp)))));
4868 if (TREE_CODE (xindex) == INTEGER_CST
4869 && TREE_INT_CST_HIGH (xindex) == 0)
4870 *pbitpos += TREE_INT_CST_LOW (xindex);
4873 /* Either the bit offset calculated above is not constant, or
4874 it overflowed. In either case, redo the multiplication
4875 against the size in units. This is especially important
4876 in the non-constant case to avoid a division at runtime. */
4877 xindex = fold (build (MULT_EXPR, ssizetype, index,
4879 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4881 if (contains_placeholder_p (xindex))
4882 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4884 offset = size_binop (PLUS_EXPR, offset, xindex);
 /* Stop at anything that is not a reference or a no-op conversion;
    union-type NOP/CONVERT with the same mode is looked through.  */
4887 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4888 && ! ((TREE_CODE (exp) == NOP_EXPR
4889 || TREE_CODE (exp) == CONVERT_EXPR)
4890 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4891 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4893 && (TYPE_MODE (TREE_TYPE (exp))
4894 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4897 /* If any reference in the chain is volatile, the effect is volatile. */
4898 if (TREE_THIS_VOLATILE (exp))
4901 /* If the offset is non-constant already, then we can't assume any
4902 alignment more than the alignment here. */
4903 if (! integer_zerop (offset))
4904 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4906 exp = TREE_OPERAND (exp, 0);
 /* Alignment of the result: the decl's alignment if we ended on a
    decl, else the alignment of its type.  */
4909 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4910 alignment = MIN (alignment, DECL_ALIGN (exp));
4911 else if (TREE_TYPE (exp) != 0)
4912 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4914 if (integer_zerop (offset))
4917 if (offset != 0 && contains_placeholder_p (offset))
4918 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4922 *palignment = alignment / BITS_PER_UNIT;
4926 /* Subroutine of expand_expr: compute memory_usage from modifier.
 Maps an expand_modifier (how the caller intends to use the
 expression) to the memory access kind recorded for the
 -fcheck-memory-usage instrumentation. */
4927 static enum memory_use_mode
4928 get_memory_usage_from_modifier (modifier)
4929 enum expand_modifier modifier;
4935 return MEMORY_USE_RO;
4937 case EXPAND_MEMORY_USE_WO:
4938 return MEMORY_USE_WO;
4940 case EXPAND_MEMORY_USE_RW:
4941 return MEMORY_USE_RW;
4943 case EXPAND_MEMORY_USE_DONT:
4944 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4945 MEMORY_USE_DONT, because they are modifiers to a call of
4946 expand_expr in the ADDR_EXPR case of expand_expr. */
4947 case EXPAND_CONST_ADDRESS:
4948 case EXPAND_INITIALIZER:
4949 return MEMORY_USE_DONT;
4950 case EXPAND_MEMORY_USE_BAD:
4956 /* Given an rtx VALUE that may contain additions and multiplications,
4957 return an equivalent value that just refers to a register or memory.
4958 This is done by generating instructions to perform the arithmetic
4959 and returning a pseudo-register containing the value.
4961 The returned value may be a REG, SUBREG, MEM or constant.
 TARGET is a suggestion for where to put the result; it may be 0. */
4964 force_operand (value, target)
4967 register optab binoptab = 0;
4968 /* Use a temporary to force order of execution of calls to
4972 /* Use subtarget as the target for operand 0 of a binary operation. */
4973 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4975 /* Check for a PIC address load. */
 /* A (plus/minus pic_offset_table_rtx (symbol/label/const)) is a
    PIC address computation; emit it as a single move to a fresh
    pseudo rather than decomposing the arithmetic.  */
4977 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4978 && XEXP (value, 0) == pic_offset_table_rtx
4979 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4980 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4981 || GET_CODE (XEXP (value, 1)) == CONST))
4984 subtarget = gen_reg_rtx (GET_MODE (value));
4985 emit_move_insn (subtarget, value);
 /* Select the optab for a top-level PLUS/MINUS; MULT is handled
    separately below via expand_mult.  */
4989 if (GET_CODE (value) == PLUS)
4990 binoptab = add_optab;
4991 else if (GET_CODE (value) == MINUS)
4992 binoptab = sub_optab;
4993 else if (GET_CODE (value) == MULT)
4995 op2 = XEXP (value, 1);
4996 if (!CONSTANT_P (op2)
4997 && !(GET_CODE (op2) == REG && op2 != subtarget))
4999 tmp = force_operand (XEXP (value, 0), subtarget);
5000 return expand_mult (GET_MODE (value), tmp,
5001 force_operand (op2, NULL_RTX),
5007 op2 = XEXP (value, 1);
5008 if (!CONSTANT_P (op2)
5009 && !(GET_CODE (op2) == REG && op2 != subtarget))
 /* Prefer addition of a negated constant over subtraction; adds
    combine better during later optimization.  */
5011 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5013 binoptab = add_optab;
5014 op2 = negate_rtx (GET_MODE (value), op2);
5017 /* Check for an addition with OP2 a constant integer and our first
5018 operand a PLUS of a virtual register and something else. In that
5019 case, we want to emit the sum of the virtual register and the
5020 constant first and then add the other value. This allows virtual
5021 register instantiation to simply modify the constant rather than
5022 creating another one around this addition. */
5023 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5024 && GET_CODE (XEXP (value, 0)) == PLUS
5025 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5026 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5027 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5029 rtx temp = expand_binop (GET_MODE (value), binoptab,
5030 XEXP (XEXP (value, 0), 0), op2,
5031 subtarget, 0, OPTAB_LIB_WIDEN);
5032 return expand_binop (GET_MODE (value), binoptab, temp,
5033 force_operand (XEXP (XEXP (value, 0), 1), 0),
5034 target, 0, OPTAB_LIB_WIDEN);
 /* General binary case: force each operand, then expand the op.  */
5037 tmp = force_operand (XEXP (value, 0), subtarget);
5038 return expand_binop (GET_MODE (value), binoptab, tmp,
5039 force_operand (op2, NULL_RTX),
5040 target, 0, OPTAB_LIB_WIDEN);
5041 /* We give UNSIGNEDP = 0 to expand_binop
5042 because the only operations we are expanding here are signed ones. */
5047 /* Subroutine of expand_expr:
5048 save the non-copied parts (LIST) of an expr (LHS), and return a list
5049 which can restore these values to their previous values,
5050 should something modify their storage.
 Each returned TREE_LIST node pairs a COMPONENT_REF into LHS
 (TREE_PURPOSE) with an RTL_EXPR holding the saved value
 (TREE_VALUE). */
5053 save_noncopied_parts (lhs, list)
5060 for (tail = list; tail; tail = TREE_CHAIN (tail))
 /* A nested TREE_LIST is a sub-list of parts; recurse into it.  */
5061 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5062 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5065 tree part = TREE_VALUE (tail);
5066 tree part_type = TREE_TYPE (part);
5067 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5068 rtx target = assign_temp (part_type, 0, 1, 1);
 /* Make sure the temporary's address is valid for this mode.  */
5069 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5070 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5071 parts = tree_cons (to_be_saved,
5072 build (RTL_EXPR, part_type, NULL_TREE,
5075 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5080 /* Subroutine of expand_expr:
5081 record the non-copied parts (LIST) of an expr (LHS), and return a list
5082 which specifies the initial values of these parts.
 Unlike save_noncopied_parts, nothing is expanded here; each result
 node pairs the initializing value (TREE_PURPOSE of the input) with
 a COMPONENT_REF into LHS to be initialized later. */
5085 init_noncopied_parts (lhs, list)
5092 for (tail = list; tail; tail = TREE_CHAIN (tail))
 /* A nested TREE_LIST is a sub-list of parts; recurse into it.  */
5093 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5094 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5095 else if (TREE_PURPOSE (tail))
5097 tree part = TREE_VALUE (tail);
5098 tree part_type = TREE_TYPE (part);
5099 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5100 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5105 /* Subroutine of expand_expr: return nonzero iff there is no way that
5106 EXP can reference X, which is being modified. TOP_P is nonzero if this
5107 call is going to be used to determine whether we need a temporary
5108 for EXP, as opposed to a recursive call to this function.
5110 It is always safe for this routine to return zero since it merely
5111 searches for optimization opportunities. */
5114 safe_from_p (x, exp, top_p)
 /* Static state used to de-duplicate SAVE_EXPRs across the whole
    top-level analysis; see the SAVE_EXPR case below.  */
5121 static int save_expr_count;
5122 static int save_expr_size = 0;
5123 static tree *save_expr_rewritten;
5124 static tree save_expr_trees[256];
5127 /* If EXP has varying size, we MUST use a target since we currently
5128 have no way of allocating temporaries of variable size
5129 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5130 So we assume here that something at a higher level has prevented a
5131 clash. This is somewhat bogus, but the best we can do. Only
5132 do this when X is BLKmode and when we are at the top level. */
5133 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5134 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5135 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5136 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5137 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5139 && GET_MODE (x) == BLKmode))
 /* On the outermost call, initialize the SAVE_EXPR bookkeeping, run
    the real analysis, then restore any SAVE_EXPRs that were
    temporarily rewritten to ERROR_MARK.  */
5142 if (top_p && save_expr_size == 0)
5146 save_expr_count = 0;
5147 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5148 save_expr_rewritten = &save_expr_trees[0];
5150 rtn = safe_from_p (x, exp, 1);
5152 for (i = 0; i < save_expr_count; ++i)
5154 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5156 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5164 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5165 find the underlying pseudo. */
5166 if (GET_CODE (x) == SUBREG)
5169 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5173 /* If X is a location in the outgoing argument area, it is always safe. */
5174 if (GET_CODE (x) == MEM
5175 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5176 || (GET_CODE (XEXP (x, 0)) == PLUS
5177 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
 /* Dispatch on the class of tree code first ('d' = declaration,
    '1'/'2' = unary/binary operator, etc.).  */
5180 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5183 exp_rtl = DECL_RTL (exp);
5190 if (TREE_CODE (exp) == TREE_LIST)
5191 return ((TREE_VALUE (exp) == 0
5192 || safe_from_p (x, TREE_VALUE (exp), 0))
5193 && (TREE_CHAIN (exp) == 0
5194 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5195 else if (TREE_CODE (exp) == ERROR_MARK)
5196 return 1; /* An already-visited SAVE_EXPR? */
5201 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5205 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5206 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5210 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5211 the expression. If it is set, we conflict iff we are that rtx or
5212 both are in memory. Otherwise, we check all operands of the
5213 expression recursively. */
5215 switch (TREE_CODE (exp))
5218 return (staticp (TREE_OPERAND (exp, 0))
5219 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5220 || TREE_STATIC (exp));
5223 if (GET_CODE (x) == MEM)
5228 exp_rtl = CALL_EXPR_RTL (exp);
5231 /* Assume that the call will clobber all hard registers and
5233 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5234 || GET_CODE (x) == MEM)
5241 /* If a sequence exists, we would have to scan every instruction
5242 in the sequence to see if it was safe. This is probably not
5244 if (RTL_EXPR_SEQUENCE (exp))
5247 exp_rtl = RTL_EXPR_RTL (exp);
5250 case WITH_CLEANUP_EXPR:
5251 exp_rtl = RTL_EXPR_RTL (exp);
5254 case CLEANUP_POINT_EXPR:
5255 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5258 exp_rtl = SAVE_EXPR_RTL (exp);
5262 /* This SAVE_EXPR might appear many times in the top-level
5263 safe_from_p() expression, and if it has a complex
5264 subexpression, examining it multiple times could result
5265 in a combinatorial explosion. E.g. on an Alpha
5266 running at least 200MHz, a Fortran test case compiled with
5267 optimization took about 28 minutes to compile -- even though
5268 it was only a few lines long, and the complicated line causing
5269 so much time to be spent in the earlier version of safe_from_p()
5270 had only 293 or so unique nodes.
5272 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5273 where it is so we can turn it back in the top-level safe_from_p()
5276 /* For now, don't bother re-sizing the array. */
5277 if (save_expr_count >= save_expr_size)
5279 save_expr_rewritten[save_expr_count++] = exp;
5281 nops = tree_code_length[(int) SAVE_EXPR];
5282 for (i = 0; i < nops; i++)
5284 tree operand = TREE_OPERAND (exp, i);
5285 if (operand == NULL_TREE)
 /* Mark visited before recursing so a cyclic reappearance of this
    SAVE_EXPR is treated as already safe (the ERROR_MARK case).  */
5287 TREE_SET_CODE (exp, ERROR_MARK);
5288 if (!safe_from_p (x, operand, 0))
5290 TREE_SET_CODE (exp, SAVE_EXPR);
5292 TREE_SET_CODE (exp, ERROR_MARK);
5296 /* The only operand we look at is operand 1. The rest aren't
5297 part of the expression. */
5298 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5300 case METHOD_CALL_EXPR:
5301 /* This takes a rtx argument, but shouldn't appear here. */
5308 /* If we have an rtx, we do not need to scan our operands. */
5312 nops = tree_code_length[(int) TREE_CODE (exp)];
5313 for (i = 0; i < nops; i++)
5314 if (TREE_OPERAND (exp, i) != 0
5315 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5319 /* If we have an rtl, find any enclosed object. Then see if we conflict
5323 if (GET_CODE (exp_rtl) == SUBREG)
5325 exp_rtl = SUBREG_REG (exp_rtl);
5326 if (GET_CODE (exp_rtl) == REG
5327 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5331 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5332 are memory and EXP is not readonly. */
5333 return ! (rtx_equal_p (x, exp_rtl)
5334 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5335 && ! TREE_READONLY (exp)));
5338 /* If we reach here, it is safe. */
5342 /* Subroutine of expand_expr: return nonzero iff EXP is an
5343 expression whose type is statically determinable.
 NOTE(review): the expression kinds listed below (decls, calls,
 target-exprs and component/array references) are the ones whose
 dynamic type is presumed to match the static type -- confirm
 against the callers. */
5349 if (TREE_CODE (exp) == PARM_DECL
5350 || TREE_CODE (exp) == VAR_DECL
5351 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5352 || TREE_CODE (exp) == COMPONENT_REF
5353 || TREE_CODE (exp) == ARRAY_REF)
5358 /* Subroutine of expand_expr: return rtx if EXP is a
5359 variable or parameter; else return 0. */
5366 switch (TREE_CODE (exp))
 /* For variable/parameter decls, the DECL_RTL is the answer.  */
5370 return DECL_RTL (exp);
5376 #ifdef MAX_INTEGER_COMPUTATION_MODE
 /* Verify that no integer operation in EXP uses a mode wider than the
    target's MAX_INTEGER_COMPUTATION_MODE; issue a fatal diagnostic
    if one does.  Checks the result type and each operand of unary,
    binary and relational operations.  */
5378 check_max_integer_computation_mode (exp)
5381 enum tree_code code;
5382 enum machine_mode mode;
5384 /* Strip any NOPs that don't change the mode. */
5386 code = TREE_CODE (exp);
5388 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5389 if (code == NOP_EXPR
5390 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5393 /* First check the type of the overall operation. We need only look at
5394 unary, binary and relational operations. */
5395 if (TREE_CODE_CLASS (code) == '1'
5396 || TREE_CODE_CLASS (code) == '2'
5397 || TREE_CODE_CLASS (code) == '<')
5399 mode = TYPE_MODE (TREE_TYPE (exp));
5400 if (GET_MODE_CLASS (mode) == MODE_INT
5401 && mode > MAX_INTEGER_COMPUTATION_MODE)
5402 fatal ("unsupported wide integer operation");
5405 /* Check operand of a unary op. */
5406 if (TREE_CODE_CLASS (code) == '1')
5408 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5409 if (GET_MODE_CLASS (mode) == MODE_INT
5410 && mode > MAX_INTEGER_COMPUTATION_MODE)
5411 fatal ("unsupported wide integer operation");
5414 /* Check operands of a binary/comparison op. */
5415 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5417 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5418 if (GET_MODE_CLASS (mode) == MODE_INT
5419 && mode > MAX_INTEGER_COMPUTATION_MODE)
5420 fatal ("unsupported wide integer operation");
5422 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5423 if (GET_MODE_CLASS (mode) == MODE_INT
5424 && mode > MAX_INTEGER_COMPUTATION_MODE)
5425 fatal ("unsupported wide integer operation");
5431 /* expand_expr: generate code for computing expression EXP.
5432 An rtx for the computed value is returned. The value is never null.
5433 In the case of a void EXP, const0_rtx is returned.
5435 The value may be stored in TARGET if TARGET is nonzero.
5436 TARGET is just a suggestion; callers must assume that
5437 the rtx returned may not be the same as TARGET.
5439 If TARGET is CONST0_RTX, it means that the value will be ignored.
5441 If TMODE is not VOIDmode, it suggests generating the
5442 result in mode TMODE. But this is done only when convenient.
5443 Otherwise, TMODE is ignored and the value generated in its natural mode.
5444 TMODE is just a suggestion; callers must assume that
5445 the rtx returned may not have mode TMODE.
5447 Note that TARGET may have neither TMODE nor MODE. In that case, it
5448 probably will not be used.
5450 If MODIFIER is EXPAND_SUM then when EXP is an addition
5451 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5452 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5453 products as above, or REG or MEM, or constant.
5454 Ordinarily in such cases we would output mul or add instructions
5455 and then return a pseudo reg containing the sum.
5457 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5458 it also marks a label as absolutely required (it can't be dead).
5459 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5460 This is used for outputting expressions used in initializers.
5462 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5463 with a constant address even if that address is not normally legitimate.
5464 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5467 expand_expr (exp, target, tmode, modifier)
5470 enum machine_mode tmode;
5471 enum expand_modifier modifier;
5473 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5474 This is static so it will be accessible to our recursive callees. */
5475 static tree placeholder_list = 0;
5476 register rtx op0, op1, temp;
5477 tree type = TREE_TYPE (exp);
5478 int unsignedp = TREE_UNSIGNED (type);
5479 register enum machine_mode mode;
5480 register enum tree_code code = TREE_CODE (exp);
5482 rtx subtarget, original_target;
5485 /* Used by check-memory-usage to make modifier read only. */
5486 enum expand_modifier ro_modifier;
5488 /* Handle ERROR_MARK before anybody tries to access its type. */
5489 if (TREE_CODE (exp) == ERROR_MARK)
5491 op0 = CONST0_RTX (tmode);
5497 mode = TYPE_MODE (type);
5498 /* Use subtarget as the target for operand 0 of a binary operation. */
5499 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5500 original_target = target;
5501 ignore = (target == const0_rtx
5502 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5503 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5504 || code == COND_EXPR)
5505 && TREE_CODE (type) == VOID_TYPE));
5507 /* Make a read-only version of the modifier. */
5508 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5509 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5510 ro_modifier = modifier;
5512 ro_modifier = EXPAND_NORMAL;
5514 /* Don't use hard regs as subtargets, because the combiner
5515 can only handle pseudo regs. */
5516 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5518 /* Avoid subtargets inside loops,
5519 since they hide some invariant expressions. */
5520 if (preserve_subexpressions_p ())
5523 /* If we are going to ignore this result, we need only do something
5524 if there is a side-effect somewhere in the expression. If there
5525 is, short-circuit the most common cases here. Note that we must
5526 not call expand_expr with anything but const0_rtx in case this
5527 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5531 if (! TREE_SIDE_EFFECTS (exp))
5534 /* Ensure we reference a volatile object even if value is ignored. */
5535 if (TREE_THIS_VOLATILE (exp)
5536 && TREE_CODE (exp) != FUNCTION_DECL
5537 && mode != VOIDmode && mode != BLKmode)
5539 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5540 if (GET_CODE (temp) == MEM)
5541 temp = copy_to_reg (temp);
5545 if (TREE_CODE_CLASS (code) == '1')
5546 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5547 VOIDmode, ro_modifier);
5548 else if (TREE_CODE_CLASS (code) == '2'
5549 || TREE_CODE_CLASS (code) == '<')
5551 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5552 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5555 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5556 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5557 /* If the second operand has no side effects, just evaluate
5559 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5560 VOIDmode, ro_modifier);
5565 #ifdef MAX_INTEGER_COMPUTATION_MODE
5566 /* Only check stuff here if the mode we want is different from the mode
5567 of the expression; if it's the same, check_max_integer_computation_mode
5568 will handle it. Do we really need to check this stuff at all? */
5571 && GET_MODE (target) != mode
5572 && TREE_CODE (exp) != INTEGER_CST
5573 && TREE_CODE (exp) != PARM_DECL
5574 && TREE_CODE (exp) != ARRAY_REF
5575 && TREE_CODE (exp) != COMPONENT_REF
5576 && TREE_CODE (exp) != BIT_FIELD_REF
5577 && TREE_CODE (exp) != INDIRECT_REF
5578 && TREE_CODE (exp) != CALL_EXPR
5579 && TREE_CODE (exp) != VAR_DECL
5580 && TREE_CODE (exp) != RTL_EXPR)
5582 enum machine_mode mode = GET_MODE (target);
5584 if (GET_MODE_CLASS (mode) == MODE_INT
5585 && mode > MAX_INTEGER_COMPUTATION_MODE)
5586 fatal ("unsupported wide integer operation");
5590 && TREE_CODE (exp) != INTEGER_CST
5591 && TREE_CODE (exp) != PARM_DECL
5592 && TREE_CODE (exp) != ARRAY_REF
5593 && TREE_CODE (exp) != COMPONENT_REF
5594 && TREE_CODE (exp) != BIT_FIELD_REF
5595 && TREE_CODE (exp) != INDIRECT_REF
5596 && TREE_CODE (exp) != VAR_DECL
5597 && TREE_CODE (exp) != CALL_EXPR
5598 && TREE_CODE (exp) != RTL_EXPR
5599 && GET_MODE_CLASS (tmode) == MODE_INT
5600 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5601 fatal ("unsupported wide integer operation");
5603 check_max_integer_computation_mode (exp);
5606 /* If will do cse, generate all results into pseudo registers
5607 since 1) that allows cse to find more things
5608 and 2) otherwise cse could produce an insn the machine
5611 if (! cse_not_expected && mode != BLKmode && target
5612 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5619 tree function = decl_function_context (exp);
5620 /* Handle using a label in a containing function. */
5621 if (function != current_function_decl
5622 && function != inline_function_decl && function != 0)
5624 struct function *p = find_function_data (function);
5625 /* Allocate in the memory associated with the function
5626 that the label is in. */
5627 push_obstacks (p->function_obstack,
5628 p->function_maybepermanent_obstack);
5630 p->expr->x_forced_labels
5631 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5632 p->expr->x_forced_labels);
5637 if (modifier == EXPAND_INITIALIZER)
5638 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5642 temp = gen_rtx_MEM (FUNCTION_MODE,
5643 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5644 if (function != current_function_decl
5645 && function != inline_function_decl && function != 0)
5646 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5651 if (DECL_RTL (exp) == 0)
5653 error_with_decl (exp, "prior parameter's size depends on `%s'");
5654 return CONST0_RTX (mode);
5657 /* ... fall through ... */
5660 /* If a static var's type was incomplete when the decl was written,
5661 but the type is complete now, lay out the decl now. */
5662 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5663 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5665 push_obstacks_nochange ();
5666 end_temporary_allocation ();
5667 layout_decl (exp, 0);
5668 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5672 /* Although static-storage variables start off initialized, according to
5673 ANSI C, a memcpy could overwrite them with uninitialized values. So
5674 we check them too. This also lets us check for read-only variables
5675 accessed via a non-const declaration, in case it won't be detected
5676 any other way (e.g., in an embedded system or OS kernel without
5679 Aggregates are not checked here; they're handled elsewhere. */
5680 if (current_function && current_function_check_memory_usage
5682 && GET_CODE (DECL_RTL (exp)) == MEM
5683 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5685 enum memory_use_mode memory_usage;
5686 memory_usage = get_memory_usage_from_modifier (modifier);
5688 if (memory_usage != MEMORY_USE_DONT)
5689 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5690 XEXP (DECL_RTL (exp), 0), Pmode,
5691 GEN_INT (int_size_in_bytes (type)),
5692 TYPE_MODE (sizetype),
5693 GEN_INT (memory_usage),
5694 TYPE_MODE (integer_type_node));
5697 /* ... fall through ... */
5701 if (DECL_RTL (exp) == 0)
5704 /* Ensure variable marked as used even if it doesn't go through
5705 a parser. If it hasn't be used yet, write out an external
5707 if (! TREE_USED (exp))
5709 assemble_external (exp);
5710 TREE_USED (exp) = 1;
5713 /* Show we haven't gotten RTL for this yet. */
5716 /* Handle variables inherited from containing functions. */
5717 context = decl_function_context (exp);
5719 /* We treat inline_function_decl as an alias for the current function
5720 because that is the inline function whose vars, types, etc.
5721 are being merged into the current function.
5722 See expand_inline_function. */
5724 if (context != 0 && context != current_function_decl
5725 && context != inline_function_decl
5726 /* If var is static, we don't need a static chain to access it. */
5727 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5728 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5732 /* Mark as non-local and addressable. */
5733 DECL_NONLOCAL (exp) = 1;
5734 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5736 mark_addressable (exp);
5737 if (GET_CODE (DECL_RTL (exp)) != MEM)
5739 addr = XEXP (DECL_RTL (exp), 0);
5740 if (GET_CODE (addr) == MEM)
5741 addr = gen_rtx_MEM (Pmode,
5742 fix_lexical_addr (XEXP (addr, 0), exp));
5744 addr = fix_lexical_addr (addr, exp);
5745 temp = change_address (DECL_RTL (exp), mode, addr);
5748 /* This is the case of an array whose size is to be determined
5749 from its initializer, while the initializer is still being parsed.
5752 else if (GET_CODE (DECL_RTL (exp)) == MEM
5753 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5754 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5755 XEXP (DECL_RTL (exp), 0));
5757 /* If DECL_RTL is memory, we are in the normal case and either
5758 the address is not valid or it is not a register and -fforce-addr
5759 is specified, get the address into a register. */
5761 else if (GET_CODE (DECL_RTL (exp)) == MEM
5762 && modifier != EXPAND_CONST_ADDRESS
5763 && modifier != EXPAND_SUM
5764 && modifier != EXPAND_INITIALIZER
5765 && (! memory_address_p (DECL_MODE (exp),
5766 XEXP (DECL_RTL (exp), 0))
5768 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5769 temp = change_address (DECL_RTL (exp), VOIDmode,
5770 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5772 /* If we got something, return it. But first, set the alignment
5773 the address is a register. */
5776 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5777 mark_reg_pointer (XEXP (temp, 0),
5778 DECL_ALIGN (exp) / BITS_PER_UNIT);
5783 /* If the mode of DECL_RTL does not match that of the decl, it
5784 must be a promoted value. We return a SUBREG of the wanted mode,
5785 but mark it so that we know that it was already extended. */
5787 if (GET_CODE (DECL_RTL (exp)) == REG
5788 && GET_MODE (DECL_RTL (exp)) != mode)
5790 /* Get the signedness used for this variable. Ensure we get the
5791 same mode we got when the variable was declared. */
5792 if (GET_MODE (DECL_RTL (exp))
5793 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5796 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5797 SUBREG_PROMOTED_VAR_P (temp) = 1;
5798 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5802 return DECL_RTL (exp);
5805 return immed_double_const (TREE_INT_CST_LOW (exp),
5806 TREE_INT_CST_HIGH (exp),
5810 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5811 EXPAND_MEMORY_USE_BAD);
5814 /* If optimized, generate immediate CONST_DOUBLE
5815 which will be turned into memory by reload if necessary.
5817 We used to force a register so that loop.c could see it. But
5818 this does not allow gen_* patterns to perform optimizations with
5819 the constants. It also produces two insns in cases like "x = 1.0;".
5820 On most machines, floating-point constants are not permitted in
5821 many insns, so we'd end up copying it to a register in any case.
5823 Now, we do the copying in expand_binop, if appropriate. */
5824 return immed_real_const (exp);
5828 if (! TREE_CST_RTL (exp))
5829 output_constant_def (exp);
5831 /* TREE_CST_RTL probably contains a constant address.
5832 On RISC machines where a constant address isn't valid,
5833 make some insns to get that address into a register. */
5834 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5835 && modifier != EXPAND_CONST_ADDRESS
5836 && modifier != EXPAND_INITIALIZER
5837 && modifier != EXPAND_SUM
5838 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5840 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5841 return change_address (TREE_CST_RTL (exp), VOIDmode,
5842 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5843 return TREE_CST_RTL (exp);
5845 case EXPR_WITH_FILE_LOCATION:
5848 char *saved_input_filename = input_filename;
5849 int saved_lineno = lineno;
5850 input_filename = EXPR_WFL_FILENAME (exp);
5851 lineno = EXPR_WFL_LINENO (exp);
5852 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5853 emit_line_note (input_filename, lineno);
5854 /* Possibly avoid switching back and forth here */
5855 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5856 input_filename = saved_input_filename;
5857 lineno = saved_lineno;
5862 context = decl_function_context (exp);
5864 /* If this SAVE_EXPR was at global context, assume we are an
5865 initialization function and move it into our context. */
5867 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5869 /* We treat inline_function_decl as an alias for the current function
5870 because that is the inline function whose vars, types, etc.
5871 are being merged into the current function.
5872 See expand_inline_function. */
5873 if (context == current_function_decl || context == inline_function_decl)
5876 /* If this is non-local, handle it. */
5879 /* The following call just exists to abort if the context is
5880 not of a containing function. */
5881 find_function_data (context);
5883 temp = SAVE_EXPR_RTL (exp);
5884 if (temp && GET_CODE (temp) == REG)
5886 put_var_into_stack (exp);
5887 temp = SAVE_EXPR_RTL (exp);
5889 if (temp == 0 || GET_CODE (temp) != MEM)
5891 return change_address (temp, mode,
5892 fix_lexical_addr (XEXP (temp, 0), exp));
5894 if (SAVE_EXPR_RTL (exp) == 0)
5896 if (mode == VOIDmode)
5899 temp = assign_temp (type, 3, 0, 0);
5901 SAVE_EXPR_RTL (exp) = temp;
5902 if (!optimize && GET_CODE (temp) == REG)
5903 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5906 /* If the mode of TEMP does not match that of the expression, it
5907 must be a promoted value. We pass store_expr a SUBREG of the
5908 wanted mode but mark it so that we know that it was already
5909 extended. Note that `unsignedp' was modified above in
5912 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5914 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5915 SUBREG_PROMOTED_VAR_P (temp) = 1;
5916 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5919 if (temp == const0_rtx)
5920 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5921 EXPAND_MEMORY_USE_BAD);
5923 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5925 TREE_USED (exp) = 1;
5928 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5929 must be a promoted value. We return a SUBREG of the wanted mode,
5930 but mark it so that we know that it was already extended. */
5932 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5933 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5935 /* Compute the signedness and make the proper SUBREG. */
5936 promote_mode (type, mode, &unsignedp, 0);
5937 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5938 SUBREG_PROMOTED_VAR_P (temp) = 1;
5939 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5943 return SAVE_EXPR_RTL (exp);
5948 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5949 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5953 case PLACEHOLDER_EXPR:
5955 tree placeholder_expr;
5957 /* If there is an object on the head of the placeholder list,
5958 see if some object in it is of type TYPE or a pointer to it. For
5959 further information, see tree.def. */
5960 for (placeholder_expr = placeholder_list;
5961 placeholder_expr != 0;
5962 placeholder_expr = TREE_CHAIN (placeholder_expr))
5964 tree need_type = TYPE_MAIN_VARIANT (type);
5966 tree old_list = placeholder_list;
5969 /* Find the outermost reference that is of the type we want.
5970 If none, see if any object has a type that is a pointer to
5971 the type we want. */
5972 for (elt = TREE_PURPOSE (placeholder_expr);
5973 elt != 0 && object == 0;
5975 = ((TREE_CODE (elt) == COMPOUND_EXPR
5976 || TREE_CODE (elt) == COND_EXPR)
5977 ? TREE_OPERAND (elt, 1)
5978 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5979 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5980 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5981 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5982 ? TREE_OPERAND (elt, 0) : 0))
5983 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5986 for (elt = TREE_PURPOSE (placeholder_expr);
5987 elt != 0 && object == 0;
5989 = ((TREE_CODE (elt) == COMPOUND_EXPR
5990 || TREE_CODE (elt) == COND_EXPR)
5991 ? TREE_OPERAND (elt, 1)
5992 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5993 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5994 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5995 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5996 ? TREE_OPERAND (elt, 0) : 0))
5997 if (POINTER_TYPE_P (TREE_TYPE (elt))
5998 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6000 object = build1 (INDIRECT_REF, need_type, elt);
6004 /* Expand this object skipping the list entries before
6005 it was found in case it is also a PLACEHOLDER_EXPR.
6006 In that case, we want to translate it using subsequent
6008 placeholder_list = TREE_CHAIN (placeholder_expr);
6009 temp = expand_expr (object, original_target, tmode,
6011 placeholder_list = old_list;
6017 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6020 case WITH_RECORD_EXPR:
6021 /* Put the object on the placeholder list, expand our first operand,
6022 and pop the list. */
6023 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6025 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6026 tmode, ro_modifier);
6027 placeholder_list = TREE_CHAIN (placeholder_list);
6031 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6032 expand_goto (TREE_OPERAND (exp, 0));
6034 expand_computed_goto (TREE_OPERAND (exp, 0));
6038 expand_exit_loop_if_false (NULL_PTR,
6039 invert_truthvalue (TREE_OPERAND (exp, 0)));
6042 case LABELED_BLOCK_EXPR:
6043 if (LABELED_BLOCK_BODY (exp))
6044 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6045 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6048 case EXIT_BLOCK_EXPR:
6049 if (EXIT_BLOCK_RETURN (exp))
6050 sorry ("returned value in block_exit_expr");
6051 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6056 expand_start_loop (1);
6057 expand_expr_stmt (TREE_OPERAND (exp, 0));
6065 tree vars = TREE_OPERAND (exp, 0);
6066 int vars_need_expansion = 0;
6068 /* Need to open a binding contour here because
6069 if there are any cleanups they must be contained here. */
6070 expand_start_bindings (0);
6072 /* Mark the corresponding BLOCK for output in its proper place. */
6073 if (TREE_OPERAND (exp, 2) != 0
6074 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6075 insert_block (TREE_OPERAND (exp, 2));
6077 /* If VARS have not yet been expanded, expand them now. */
6080 if (DECL_RTL (vars) == 0)
6082 vars_need_expansion = 1;
6085 expand_decl_init (vars);
6086 vars = TREE_CHAIN (vars);
6089 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6091 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6097 if (RTL_EXPR_SEQUENCE (exp))
6099 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6101 emit_insns (RTL_EXPR_SEQUENCE (exp));
6102 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6104 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6105 free_temps_for_rtl_expr (exp);
6106 return RTL_EXPR_RTL (exp);
6109 /* If we don't need the result, just ensure we evaluate any
6114 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6115 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6116 EXPAND_MEMORY_USE_BAD);
6120 /* All elts simple constants => refer to a constant in memory. But
6121 if this is a non-BLKmode mode, let it store a field at a time
6122 since that should make a CONST_INT or CONST_DOUBLE when we
6123 fold. Likewise, if we have a target we can use, it is best to
6124 store directly into the target unless the type is large enough
6125 that memcpy will be used. If we are making an initializer and
6126 all operands are constant, put it in memory as well. */
6127 else if ((TREE_STATIC (exp)
6128 && ((mode == BLKmode
6129 && ! (target != 0 && safe_from_p (target, exp, 1)))
6130 || TREE_ADDRESSABLE (exp)
6131 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6132 && (!MOVE_BY_PIECES_P
6133 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6134 TYPE_ALIGN (type) / BITS_PER_UNIT))
6135 && ! mostly_zeros_p (exp))))
6136 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6138 rtx constructor = output_constant_def (exp);
6139 if (modifier != EXPAND_CONST_ADDRESS
6140 && modifier != EXPAND_INITIALIZER
6141 && modifier != EXPAND_SUM
6142 && (! memory_address_p (GET_MODE (constructor),
6143 XEXP (constructor, 0))
6145 && GET_CODE (XEXP (constructor, 0)) != REG)))
6146 constructor = change_address (constructor, VOIDmode,
6147 XEXP (constructor, 0));
6153 /* Handle calls that pass values in multiple non-contiguous
6154 locations. The Irix 6 ABI has examples of this. */
6155 if (target == 0 || ! safe_from_p (target, exp, 1)
6156 || GET_CODE (target) == PARALLEL)
6158 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6159 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6161 target = assign_temp (type, 0, 1, 1);
6164 if (TREE_READONLY (exp))
6166 if (GET_CODE (target) == MEM)
6167 target = copy_rtx (target);
6169 RTX_UNCHANGING_P (target) = 1;
6172 store_constructor (exp, target, 0);
6178 tree exp1 = TREE_OPERAND (exp, 0);
6181 tree string = string_constant (exp1, &index);
6184 /* Try to optimize reads from const strings. */
6186 && TREE_CODE (string) == STRING_CST
6187 && TREE_CODE (index) == INTEGER_CST
6188 && !TREE_INT_CST_HIGH (index)
6189 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6190 && GET_MODE_CLASS (mode) == MODE_INT
6191 && GET_MODE_SIZE (mode) == 1
6192 && modifier != EXPAND_MEMORY_USE_WO)
6193 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6195 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6196 op0 = memory_address (mode, op0);
6198 if (current_function && current_function_check_memory_usage
6199 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6201 enum memory_use_mode memory_usage;
6202 memory_usage = get_memory_usage_from_modifier (modifier);
6204 if (memory_usage != MEMORY_USE_DONT)
6206 in_check_memory_usage = 1;
6207 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6209 GEN_INT (int_size_in_bytes (type)),
6210 TYPE_MODE (sizetype),
6211 GEN_INT (memory_usage),
6212 TYPE_MODE (integer_type_node));
6213 in_check_memory_usage = 0;
6217 temp = gen_rtx_MEM (mode, op0);
6218 /* If address was computed by addition,
6219 mark this as an element of an aggregate. */
6220 if (TREE_CODE (exp1) == PLUS_EXPR
6221 || (TREE_CODE (exp1) == SAVE_EXPR
6222 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6223 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6224 || (TREE_CODE (exp1) == ADDR_EXPR
6225 && (exp2 = TREE_OPERAND (exp1, 0))
6226 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6227 MEM_SET_IN_STRUCT_P (temp, 1);
6229 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6230 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6232 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6233 here, because, in C and C++, the fact that a location is accessed
6234 through a pointer to const does not mean that the value there can
6235 never change. Languages where it can never change should
6236 also set TREE_STATIC. */
6237 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6242 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6246 tree array = TREE_OPERAND (exp, 0);
6247 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6248 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6249 tree index = TREE_OPERAND (exp, 1);
6250 tree index_type = TREE_TYPE (index);
6253 /* Optimize the special-case of a zero lower bound.
6255 We convert the low_bound to sizetype to avoid some problems
6256 with constant folding. (E.g. suppose the lower bound is 1,
6257 and its mode is QI. Without the conversion, (ARRAY
6258 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6259 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6261 But sizetype isn't quite right either (especially if
6262 the lowbound is negative). FIXME */
6264 if (! integer_zerop (low_bound))
6265 index = fold (build (MINUS_EXPR, index_type, index,
6266 convert (sizetype, low_bound)));
6268 /* Fold an expression like: "foo"[2].
6269 This is not done in fold so it won't happen inside &.
6270 Don't fold if this is for wide characters since it's too
6271 difficult to do correctly and this is a very rare case. */
6273 if (TREE_CODE (array) == STRING_CST
6274 && TREE_CODE (index) == INTEGER_CST
6275 && !TREE_INT_CST_HIGH (index)
6276 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6277 && GET_MODE_CLASS (mode) == MODE_INT
6278 && GET_MODE_SIZE (mode) == 1)
6279 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6281 /* If this is a constant index into a constant array,
6282 just get the value from the array. Handle both the cases when
6283 we have an explicit constructor and when our operand is a variable
6284 that was declared const. */
6286 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6288 if (TREE_CODE (index) == INTEGER_CST
6289 && TREE_INT_CST_HIGH (index) == 0)
6291 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6293 i = TREE_INT_CST_LOW (index);
6295 elem = TREE_CHAIN (elem);
6297 return expand_expr (fold (TREE_VALUE (elem)), target,
6298 tmode, ro_modifier);
6302 else if (optimize >= 1
6303 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6304 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6305 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6307 if (TREE_CODE (index) == INTEGER_CST)
6309 tree init = DECL_INITIAL (array);
6311 i = TREE_INT_CST_LOW (index);
6312 if (TREE_CODE (init) == CONSTRUCTOR)
6314 tree elem = CONSTRUCTOR_ELTS (init);
6317 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6318 elem = TREE_CHAIN (elem);
6320 return expand_expr (fold (TREE_VALUE (elem)), target,
6321 tmode, ro_modifier);
6323 else if (TREE_CODE (init) == STRING_CST
6324 && TREE_INT_CST_HIGH (index) == 0
6325 && (TREE_INT_CST_LOW (index)
6326 < TREE_STRING_LENGTH (init)))
6328 (TREE_STRING_POINTER
6329 (init)[TREE_INT_CST_LOW (index)]));
6334 /* ... fall through ... */
6338 /* If the operand is a CONSTRUCTOR, we can just extract the
6339 appropriate field if it is present. Don't do this if we have
6340 already written the data since we want to refer to that copy
6341 and varasm.c assumes that's what we'll do. */
6342 if (code != ARRAY_REF
6343 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6344 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6348 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6349 elt = TREE_CHAIN (elt))
6350 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6351 /* We can normally use the value of the field in the
6352 CONSTRUCTOR. However, if this is a bitfield in
6353 an integral mode that we can fit in a HOST_WIDE_INT,
6354 we must mask only the number of bits in the bitfield,
6355 since this is done implicitly by the constructor. If
6356 the bitfield does not meet either of those conditions,
6357 we can't do this optimization. */
6358 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6359 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6361 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6362 <= HOST_BITS_PER_WIDE_INT))))
6364 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6365 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6367 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6369 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6371 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6372 op0 = expand_and (op0, op1, target);
6376 enum machine_mode imode
6377 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6379 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6382 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6384 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6394 enum machine_mode mode1;
6400 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6401 &mode1, &unsignedp, &volatilep,
6404 /* If we got back the original object, something is wrong. Perhaps
6405 we are evaluating an expression too early. In any event, don't
6406 infinitely recurse. */
6410 /* If TEM's type is a union of variable size, pass TARGET to the inner
6411 computation, since it will need a temporary and TARGET is known
6412 to have to do. This occurs in unchecked conversion in Ada. */
6414 op0 = expand_expr (tem,
6415 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6416 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6418 ? target : NULL_RTX),
6420 modifier == EXPAND_INITIALIZER
6421 ? modifier : EXPAND_NORMAL);
6423 /* If this is a constant, put it into a register if it is a
6424 legitimate constant and memory if it isn't. */
6425 if (CONSTANT_P (op0))
6427 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6428 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6429 op0 = force_reg (mode, op0);
6431 op0 = validize_mem (force_const_mem (mode, op0));
6436 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6438 if (GET_CODE (op0) != MEM)
6441 if (GET_MODE (offset_rtx) != ptr_mode)
6443 #ifdef POINTERS_EXTEND_UNSIGNED
6444 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6446 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6450 /* A constant address in TO_RTX can have VOIDmode, we must not try
6451 to call force_reg for that case. Avoid that case. */
6452 if (GET_CODE (op0) == MEM
6453 && GET_MODE (op0) == BLKmode
6454 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6456 && (bitpos % bitsize) == 0
6457 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6458 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6460 rtx temp = change_address (op0, mode1,
6461 plus_constant (XEXP (op0, 0),
6464 if (GET_CODE (XEXP (temp, 0)) == REG)
6467 op0 = change_address (op0, mode1,
6468 force_reg (GET_MODE (XEXP (temp, 0)),
6474 op0 = change_address (op0, VOIDmode,
6475 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6476 force_reg (ptr_mode, offset_rtx)));
6479 /* Don't forget about volatility even if this is a bitfield. */
6480 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6482 op0 = copy_rtx (op0);
6483 MEM_VOLATILE_P (op0) = 1;
6486 /* Check the access. */
6487 if (current_function && current_function_check_memory_usage
6488 && GET_CODE (op0) == MEM)
6490 enum memory_use_mode memory_usage;
6491 memory_usage = get_memory_usage_from_modifier (modifier);
6493 if (memory_usage != MEMORY_USE_DONT)
6498 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6499 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6501 /* Check the access right of the pointer. */
6502 if (size > BITS_PER_UNIT)
6503 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6505 GEN_INT (size / BITS_PER_UNIT),
6506 TYPE_MODE (sizetype),
6507 GEN_INT (memory_usage),
6508 TYPE_MODE (integer_type_node));
6512 /* In cases where an aligned union has an unaligned object
6513 as a field, we might be extracting a BLKmode value from
6514 an integer-mode (e.g., SImode) object. Handle this case
6515 by doing the extract into an object as wide as the field
6516 (which we know to be the width of a basic mode), then
6517 storing into memory, and changing the mode to BLKmode.
6518 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6519 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6520 if (mode1 == VOIDmode
6521 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6522 || (modifier != EXPAND_CONST_ADDRESS
6523 && modifier != EXPAND_INITIALIZER
6524 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6525 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6526 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6527 /* If the field isn't aligned enough to fetch as a memref,
6528 fetch it as a bit field. */
6529 || (SLOW_UNALIGNED_ACCESS
6530 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6531 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6533 enum machine_mode ext_mode = mode;
6535 if (ext_mode == BLKmode)
6536 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6538 if (ext_mode == BLKmode)
6540 /* In this case, BITPOS must start at a byte boundary and
6541 TARGET, if specified, must be a MEM. */
6542 if (GET_CODE (op0) != MEM
6543 || (target != 0 && GET_CODE (target) != MEM)
6544 || bitpos % BITS_PER_UNIT != 0)
6547 op0 = change_address (op0, VOIDmode,
6548 plus_constant (XEXP (op0, 0),
6549 bitpos / BITS_PER_UNIT));
6551 target = assign_temp (type, 0, 1, 1);
6553 emit_block_move (target, op0,
6554 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6561 op0 = validize_mem (op0);
6563 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6564 mark_reg_pointer (XEXP (op0, 0), alignment);
6566 op0 = extract_bit_field (op0, bitsize, bitpos,
6567 unsignedp, target, ext_mode, ext_mode,
6569 int_size_in_bytes (TREE_TYPE (tem)));
6571 /* If the result is a record type and BITSIZE is narrower than
6572 the mode of OP0, an integral mode, and this is a big endian
6573 machine, we must put the field into the high-order bits. */
6574 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6575 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6576 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6577 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6578 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6582 if (mode == BLKmode)
6584 rtx new = assign_stack_temp (ext_mode,
6585 bitsize / BITS_PER_UNIT, 0);
6587 emit_move_insn (new, op0);
6588 op0 = copy_rtx (new);
6589 PUT_MODE (op0, BLKmode);
6590 MEM_SET_IN_STRUCT_P (op0, 1);
6596 /* If the result is BLKmode, use that to access the object
6598 if (mode == BLKmode)
6601 /* Get a reference to just this component. */
6602 if (modifier == EXPAND_CONST_ADDRESS
6603 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6604 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6605 (bitpos / BITS_PER_UNIT)));
6607 op0 = change_address (op0, mode1,
6608 plus_constant (XEXP (op0, 0),
6609 (bitpos / BITS_PER_UNIT)));
6611 if (GET_CODE (op0) == MEM)
6612 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6614 if (GET_CODE (XEXP (op0, 0)) == REG)
6615 mark_reg_pointer (XEXP (op0, 0), alignment);
6617 MEM_SET_IN_STRUCT_P (op0, 1);
6618 MEM_VOLATILE_P (op0) |= volatilep;
6619 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6620 || modifier == EXPAND_CONST_ADDRESS
6621 || modifier == EXPAND_INITIALIZER)
6623 else if (target == 0)
6624 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6626 convert_move (target, op0, unsignedp);
6630 /* Intended for a reference to a buffer of a file-object in Pascal.
6631 But it's not certain that a special tree code will really be
6632 necessary for these. INDIRECT_REF might work for them. */
6638 /* Pascal set IN expression.
6641 rlo = set_low - (set_low%bits_per_word);
6642 the_word = set [ (index - rlo)/bits_per_word ];
6643 bit_index = index % bits_per_word;
6644 bitmask = 1 << bit_index;
6645 return !!(the_word & bitmask); */
6647 tree set = TREE_OPERAND (exp, 0);
6648 tree index = TREE_OPERAND (exp, 1);
6649 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6650 tree set_type = TREE_TYPE (set);
6651 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6652 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6653 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6654 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6655 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6656 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6657 rtx setaddr = XEXP (setval, 0);
6658 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6660 rtx diff, quo, rem, addr, bit, result;
6662 preexpand_calls (exp);
6664 /* If domain is empty, answer is no. Likewise if index is constant
6665 and out of bounds. */
6666 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6667 && TREE_CODE (set_low_bound) == INTEGER_CST
6668 && tree_int_cst_lt (set_high_bound, set_low_bound))
6669 || (TREE_CODE (index) == INTEGER_CST
6670 && TREE_CODE (set_low_bound) == INTEGER_CST
6671 && tree_int_cst_lt (index, set_low_bound))
6672 || (TREE_CODE (set_high_bound) == INTEGER_CST
6673 && TREE_CODE (index) == INTEGER_CST
6674 && tree_int_cst_lt (set_high_bound, index))))
6678 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6680 /* If we get here, we have to generate the code for both cases
6681 (in range and out of range). */
6683 op0 = gen_label_rtx ();
6684 op1 = gen_label_rtx ();
6686 if (! (GET_CODE (index_val) == CONST_INT
6687 && GET_CODE (lo_r) == CONST_INT))
6689 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6690 GET_MODE (index_val), iunsignedp, 0, op1);
6693 if (! (GET_CODE (index_val) == CONST_INT
6694 && GET_CODE (hi_r) == CONST_INT))
6696 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6697 GET_MODE (index_val), iunsignedp, 0, op1);
6700 /* Calculate the element number of bit zero in the first word
6702 if (GET_CODE (lo_r) == CONST_INT)
6703 rlow = GEN_INT (INTVAL (lo_r)
6704 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6706 rlow = expand_binop (index_mode, and_optab, lo_r,
6707 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6708 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6710 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6711 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6713 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6714 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6715 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6716 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6718 addr = memory_address (byte_mode,
6719 expand_binop (index_mode, add_optab, diff,
6720 setaddr, NULL_RTX, iunsignedp,
6723 /* Extract the bit we want to examine */
6724 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6725 gen_rtx_MEM (byte_mode, addr),
6726 make_tree (TREE_TYPE (index), rem),
6728 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6729 GET_MODE (target) == byte_mode ? target : 0,
6730 1, OPTAB_LIB_WIDEN);
6732 if (result != target)
6733 convert_move (target, result, 1);
6735 /* Output the code to handle the out-of-range case. */
6738 emit_move_insn (target, const0_rtx);
6743 case WITH_CLEANUP_EXPR:
6744 if (RTL_EXPR_RTL (exp) == 0)
6747 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6748 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6750 /* That's it for this cleanup. */
6751 TREE_OPERAND (exp, 2) = 0;
6753 return RTL_EXPR_RTL (exp);
6755 case CLEANUP_POINT_EXPR:
6757 /* Start a new binding layer that will keep track of all cleanup
6758 actions to be performed. */
6759 expand_start_bindings (0);
6761 target_temp_slot_level = temp_slot_level;
6763 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6764 /* If we're going to use this value, load it up now. */
6766 op0 = force_not_mem (op0);
6767 preserve_temp_slots (op0);
6768 expand_end_bindings (NULL_TREE, 0, 0);
6773 /* Check for a built-in function. */
6774 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6775 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6777 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6778 return expand_builtin (exp, target, subtarget, tmode, ignore);
6780 /* If this call was expanded already by preexpand_calls,
6781 just return the result we got. */
6782 if (CALL_EXPR_RTL (exp) != 0)
6783 return CALL_EXPR_RTL (exp);
6785 return expand_call (exp, target, ignore);
6787 case NON_LVALUE_EXPR:
6790 case REFERENCE_EXPR:
6791 if (TREE_CODE (type) == UNION_TYPE)
6793 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6796 if (mode != BLKmode)
6797 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6799 target = assign_temp (type, 0, 1, 1);
6802 if (GET_CODE (target) == MEM)
6803 /* Store data into beginning of memory target. */
6804 store_expr (TREE_OPERAND (exp, 0),
6805 change_address (target, TYPE_MODE (valtype), 0), 0);
6807 else if (GET_CODE (target) == REG)
6808 /* Store this field into a union of the proper type. */
6809 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6810 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6812 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6817 /* Return the entire union. */
6821 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6823 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6826 /* If the signedness of the conversion differs and OP0 is
6827 a promoted SUBREG, clear that indication since we now
6828 have to do the proper extension. */
6829 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6830 && GET_CODE (op0) == SUBREG)
6831 SUBREG_PROMOTED_VAR_P (op0) = 0;
6836 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6837 if (GET_MODE (op0) == mode)
6840 /* If OP0 is a constant, just convert it into the proper mode. */
6841 if (CONSTANT_P (op0))
6843 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6844 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6846 if (modifier == EXPAND_INITIALIZER)
6847 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6851 convert_to_mode (mode, op0,
6852 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6854 convert_move (target, op0,
6855 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6859 /* We come here from MINUS_EXPR when the second operand is a
6862 this_optab = add_optab;
6864 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6865 something else, make sure we add the register to the constant and
6866 then to the other thing. This case can occur during strength
6867 reduction and doing it this way will produce better code if the
6868 frame pointer or argument pointer is eliminated.
6870 fold-const.c will ensure that the constant is always in the inner
6871 PLUS_EXPR, so the only case we need to do anything about is if
6872 sp, ap, or fp is our second argument, in which case we must swap
6873 the innermost first argument and our second argument. */
6875 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6876 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6877 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6878 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6879 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6880 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6882 tree t = TREE_OPERAND (exp, 1);
6884 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6885 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6888 /* If the result is to be ptr_mode and we are adding an integer to
6889 something, we might be forming a constant. So try to use
6890 plus_constant. If it produces a sum and we can't accept it,
6891 use force_operand. This allows P = &ARR[const] to generate
6892 efficient code on machines where a SYMBOL_REF is not a valid
6895 If this is an EXPAND_SUM call, always return the sum. */
6896 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6897 || mode == ptr_mode)
6899 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6900 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6901 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6905 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6907 /* Use immed_double_const to ensure that the constant is
6908 truncated according to the mode of OP1, then sign extended
6909 to a HOST_WIDE_INT. Using the constant directly can result
6910 in non-canonical RTL in a 64x32 cross compile. */
6912 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
6914 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
6915 op1 = plus_constant (op1, INTVAL (constant_part));
6916 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6917 op1 = force_operand (op1, target);
6921 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6922 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6923 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6927 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6929 if (! CONSTANT_P (op0))
6931 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6932 VOIDmode, modifier);
6933 /* Don't go to both_summands if modifier
6934 says it's not right to return a PLUS. */
6935 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6939 /* Use immed_double_const to ensure that the constant is
6940 truncated according to the mode of OP1, then sign extended
6941 to a HOST_WIDE_INT. Using the constant directly can result
6942 in non-canonical RTL in a 64x32 cross compile. */
6944 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
6946 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6947 op0 = plus_constant (op0, INTVAL (constant_part));
6948 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6949 op0 = force_operand (op0, target);
6954 /* No sense saving up arithmetic to be done
6955 if it's all in the wrong mode to form part of an address.
6956 And force_operand won't know whether to sign-extend or
6958 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6959 || mode != ptr_mode)
6962 preexpand_calls (exp);
6963 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6966 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6967 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6970 /* Make sure any term that's a sum with a constant comes last. */
6971 if (GET_CODE (op0) == PLUS
6972 && CONSTANT_P (XEXP (op0, 1)))
6978 /* If adding to a sum including a constant,
6979 associate it to put the constant outside. */
6980 if (GET_CODE (op1) == PLUS
6981 && CONSTANT_P (XEXP (op1, 1)))
6983 rtx constant_term = const0_rtx;
6985 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6988 /* Ensure that MULT comes first if there is one. */
6989 else if (GET_CODE (op0) == MULT)
6990 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6992 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6994 /* Let's also eliminate constants from op0 if possible. */
6995 op0 = eliminate_constant_term (op0, &constant_term);
6997 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6998 their sum should be a constant. Form it into OP1, since the
6999 result we want will then be OP0 + OP1. */
7001 temp = simplify_binary_operation (PLUS, mode, constant_term,
7006 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7009 /* Put a constant term last and put a multiplication first. */
7010 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7011 temp = op1, op1 = op0, op0 = temp;
7013 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7014 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7017 /* For initializers, we are allowed to return a MINUS of two
7018 symbolic constants. Here we handle all cases when both operands
7020 /* Handle difference of two symbolic constants,
7021 for the sake of an initializer. */
7022 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7023 && really_constant_p (TREE_OPERAND (exp, 0))
7024 && really_constant_p (TREE_OPERAND (exp, 1)))
7026 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7027 VOIDmode, ro_modifier);
7028 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7029 VOIDmode, ro_modifier);
7031 /* If the last operand is a CONST_INT, use plus_constant of
7032 the negated constant. Else make the MINUS. */
7033 if (GET_CODE (op1) == CONST_INT)
7034 return plus_constant (op0, - INTVAL (op1));
7036 return gen_rtx_MINUS (mode, op0, op1);
7038 /* Convert A - const to A + (-const). */
7039 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7041 tree negated = fold (build1 (NEGATE_EXPR, type,
7042 TREE_OPERAND (exp, 1)));
7044 /* Deal with the case where we can't negate the constant
7046 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7048 tree newtype = signed_type (type);
7049 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7050 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7051 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7053 if (! TREE_OVERFLOW (newneg))
7054 return expand_expr (convert (type,
7055 build (PLUS_EXPR, newtype,
7057 target, tmode, ro_modifier);
7061 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7065 this_optab = sub_optab;
7069 preexpand_calls (exp);
7070 /* If first operand is constant, swap them.
7071 Thus the following special case checks need only
7072 check the second operand. */
7073 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7075 register tree t1 = TREE_OPERAND (exp, 0);
7076 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7077 TREE_OPERAND (exp, 1) = t1;
7080 /* Attempt to return something suitable for generating an
7081 indexed address, for machines that support that. */
7083 if (modifier == EXPAND_SUM && mode == ptr_mode
7084 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7085 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7087 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7090 /* Apply distributive law if OP0 is x+c. */
7091 if (GET_CODE (op0) == PLUS
7092 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7093 return gen_rtx_PLUS (mode,
7094 gen_rtx_MULT (mode, XEXP (op0, 0),
7095 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7096 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7097 * INTVAL (XEXP (op0, 1))));
7099 if (GET_CODE (op0) != REG)
7100 op0 = force_operand (op0, NULL_RTX);
7101 if (GET_CODE (op0) != REG)
7102 op0 = copy_to_mode_reg (mode, op0);
7104 return gen_rtx_MULT (mode, op0,
7105 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7108 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7111 /* Check for multiplying things that have been extended
7112 from a narrower type. If this machine supports multiplying
7113 in that narrower type with a result in the desired type,
7114 do it that way, and avoid the explicit type-conversion. */
7115 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7116 && TREE_CODE (type) == INTEGER_TYPE
7117 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7118 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7119 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7120 && int_fits_type_p (TREE_OPERAND (exp, 1),
7121 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7122 /* Don't use a widening multiply if a shift will do. */
7123 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7124 > HOST_BITS_PER_WIDE_INT)
7125 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7127 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7128 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7130 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7131 /* If both operands are extended, they must either both
7132 be zero-extended or both be sign-extended. */
7133 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7135 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7137 enum machine_mode innermode
7138 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7139 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7140 ? smul_widen_optab : umul_widen_optab);
7141 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7142 ? umul_widen_optab : smul_widen_optab);
7143 if (mode == GET_MODE_WIDER_MODE (innermode))
7145 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7147 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7148 NULL_RTX, VOIDmode, 0);
7149 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7150 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7153 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7154 NULL_RTX, VOIDmode, 0);
7157 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7158 && innermode == word_mode)
7161 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7162 NULL_RTX, VOIDmode, 0);
7163 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7164 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7167 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7168 NULL_RTX, VOIDmode, 0);
7169 temp = expand_binop (mode, other_optab, op0, op1, target,
7170 unsignedp, OPTAB_LIB_WIDEN);
7171 htem = expand_mult_highpart_adjust (innermode,
7172 gen_highpart (innermode, temp),
7174 gen_highpart (innermode, temp),
7176 emit_move_insn (gen_highpart (innermode, temp), htem);
7181 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7182 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7183 return expand_mult (mode, op0, op1, target, unsignedp);
7185 case TRUNC_DIV_EXPR:
7186 case FLOOR_DIV_EXPR:
7188 case ROUND_DIV_EXPR:
7189 case EXACT_DIV_EXPR:
7190 preexpand_calls (exp);
7191 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7193 /* Possible optimization: compute the dividend with EXPAND_SUM
7194 then if the divisor is constant can optimize the case
7195 where some terms of the dividend have coeffs divisible by it. */
7196 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7197 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7198 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7201 this_optab = flodiv_optab;
7204 case TRUNC_MOD_EXPR:
7205 case FLOOR_MOD_EXPR:
7207 case ROUND_MOD_EXPR:
7208 preexpand_calls (exp);
7209 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7211 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7212 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7213 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7215 case FIX_ROUND_EXPR:
7216 case FIX_FLOOR_EXPR:
7218 abort (); /* Not used for C. */
7220 case FIX_TRUNC_EXPR:
7221 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7223 target = gen_reg_rtx (mode);
7224 expand_fix (target, op0, unsignedp);
7228 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7230 target = gen_reg_rtx (mode);
7231 /* expand_float can't figure out what to do if FROM has VOIDmode.
7232 So give it the correct mode. With -O, cse will optimize this. */
7233 if (GET_MODE (op0) == VOIDmode)
7234 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7236 expand_float (target, op0,
7237 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7241 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7242 temp = expand_unop (mode, neg_optab, op0, target, 0);
7248 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7250 /* Handle complex values specially. */
7251 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7252 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7253 return expand_complex_abs (mode, op0, target, unsignedp);
7255 /* Unsigned abs is simply the operand. Testing here means we don't
7256 risk generating incorrect code below. */
7257 if (TREE_UNSIGNED (type))
7260 return expand_abs (mode, op0, target,
7261 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7265 target = original_target;
7266 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7267 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7268 || GET_MODE (target) != mode
7269 || (GET_CODE (target) == REG
7270 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7271 target = gen_reg_rtx (mode);
7272 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7273 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7275 /* First try to do it with a special MIN or MAX instruction.
7276 If that does not win, use a conditional jump to select the proper
7278 this_optab = (TREE_UNSIGNED (type)
7279 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7280 : (code == MIN_EXPR ? smin_optab : smax_optab));
7282 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7287 /* At this point, a MEM target is no longer useful; we will get better
7290 if (GET_CODE (target) == MEM)
7291 target = gen_reg_rtx (mode);
7294 emit_move_insn (target, op0);
7296 op0 = gen_label_rtx ();
7298 /* If this mode is an integer too wide to compare properly,
7299 compare word by word. Rely on cse to optimize constant cases. */
7300 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7302 if (code == MAX_EXPR)
7303 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7304 target, op1, NULL_RTX, op0);
7306 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7307 op1, target, NULL_RTX, op0);
7308 emit_move_insn (target, op1);
7312 if (code == MAX_EXPR)
7313 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7314 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7315 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7317 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7318 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7319 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7320 if (temp == const0_rtx)
7321 emit_move_insn (target, op1);
7322 else if (temp != const_true_rtx)
7324 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7325 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7328 emit_move_insn (target, op1);
7335 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7336 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7342 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7343 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7348 /* ??? Can optimize bitwise operations with one arg constant.
7349 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7350 and (a bitwise1 b) bitwise2 b (etc)
7351 but that is probably not worth while. */
7353 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7354 boolean values when we want in all cases to compute both of them. In
7355 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7356 as actual zero-or-1 values and then bitwise anding. In cases where
7357 there cannot be any side effects, better code would be made by
7358 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7359 how to recognize those cases. */
7361 case TRUTH_AND_EXPR:
7363 this_optab = and_optab;
7368 this_optab = ior_optab;
7371 case TRUTH_XOR_EXPR:
7373 this_optab = xor_optab;
7380 preexpand_calls (exp);
7381 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7383 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7384 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7387 /* Could determine the answer when only additive constants differ. Also,
7388 the addition of one can be handled by changing the condition. */
7395 preexpand_calls (exp);
7396 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7400 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7401 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7403 && GET_CODE (original_target) == REG
7404 && (GET_MODE (original_target)
7405 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7407 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7410 if (temp != original_target)
7411 temp = copy_to_reg (temp);
7413 op1 = gen_label_rtx ();
7414 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7415 GET_MODE (temp), unsignedp, 0, op1);
7416 emit_move_insn (temp, const1_rtx);
7421 /* If no set-flag instruction, must generate a conditional
7422 store into a temporary variable. Drop through
7423 and handle this like && and ||. */
7425 case TRUTH_ANDIF_EXPR:
7426 case TRUTH_ORIF_EXPR:
7428 && (target == 0 || ! safe_from_p (target, exp, 1)
7429 /* Make sure we don't have a hard reg (such as function's return
7430 value) live across basic blocks, if not optimizing. */
7431 || (!optimize && GET_CODE (target) == REG
7432 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7433 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7436 emit_clr_insn (target);
7438 op1 = gen_label_rtx ();
7439 jumpifnot (exp, op1);
7442 emit_0_to_1_insn (target);
7445 return ignore ? const0_rtx : target;
7447 case TRUTH_NOT_EXPR:
7448 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7449 /* The parser is careful to generate TRUTH_NOT_EXPR
7450 only with operands that are always zero or one. */
7451 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7452 target, 1, OPTAB_LIB_WIDEN);
7458 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7460 return expand_expr (TREE_OPERAND (exp, 1),
7461 (ignore ? const0_rtx : target),
7465 /* If we would have a "singleton" (see below) were it not for a
7466 conversion in each arm, bring that conversion back out. */
7467 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7468 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7469 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7470 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7472 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7473 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7475 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7476 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7477 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7478 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7479 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7480 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7481 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7482 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7483 return expand_expr (build1 (NOP_EXPR, type,
7484 build (COND_EXPR, TREE_TYPE (true),
7485 TREE_OPERAND (exp, 0),
7487 target, tmode, modifier);
7491 /* Note that COND_EXPRs whose type is a structure or union
7492 are required to be constructed to contain assignments of
7493 a temporary variable, so that we can evaluate them here
7494 for side effect only. If type is void, we must do likewise. */
7496 /* If an arm of the branch requires a cleanup,
7497 only that cleanup is performed. */
7500 tree binary_op = 0, unary_op = 0;
7502 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7503 convert it to our mode, if necessary. */
7504 if (integer_onep (TREE_OPERAND (exp, 1))
7505 && integer_zerop (TREE_OPERAND (exp, 2))
7506 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7510 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7515 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7516 if (GET_MODE (op0) == mode)
7520 target = gen_reg_rtx (mode);
7521 convert_move (target, op0, unsignedp);
7525 /* Check for X ? A + B : A. If we have this, we can copy A to the
7526 output and conditionally add B. Similarly for unary operations.
7527 Don't do this if X has side-effects because those side effects
7528 might affect A or B and the "?" operation is a sequence point in
7529 ANSI. (operand_equal_p tests for side effects.) */
7531 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7532 && operand_equal_p (TREE_OPERAND (exp, 2),
7533 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7534 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7535 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7536 && operand_equal_p (TREE_OPERAND (exp, 1),
7537 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7538 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7539 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7540 && operand_equal_p (TREE_OPERAND (exp, 2),
7541 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7542 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7543 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7544 && operand_equal_p (TREE_OPERAND (exp, 1),
7545 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7546 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7548 /* If we are not to produce a result, we have no target. Otherwise,
7549 if a target was specified use it; it will not be used as an
7550 intermediate target unless it is safe. If no target, use a
7555 else if (original_target
7556 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7557 || (singleton && GET_CODE (original_target) == REG
7558 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7559 && original_target == var_rtx (singleton)))
7560 && GET_MODE (original_target) == mode
7561 #ifdef HAVE_conditional_move
7562 && (! can_conditionally_move_p (mode)
7563 || GET_CODE (original_target) == REG
7564 || TREE_ADDRESSABLE (type))
7566 && ! (GET_CODE (original_target) == MEM
7567 && MEM_VOLATILE_P (original_target)))
7568 temp = original_target;
7569 else if (TREE_ADDRESSABLE (type))
7572 temp = assign_temp (type, 0, 0, 1);
7574 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7575 do the test of X as a store-flag operation, do this as
7576 A + ((X != 0) << log C). Similarly for other simple binary
7577 operators. Only do for C == 1 if BRANCH_COST is low. */
7578 if (temp && singleton && binary_op
7579 && (TREE_CODE (binary_op) == PLUS_EXPR
7580 || TREE_CODE (binary_op) == MINUS_EXPR
7581 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7582 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7583 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7584 : integer_onep (TREE_OPERAND (binary_op, 1)))
7585 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7588 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7589 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7590 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7593 /* If we had X ? A : A + 1, do this as A + (X == 0).
7595 We have to invert the truth value here and then put it
7596 back later if do_store_flag fails. We cannot simply copy
7597 TREE_OPERAND (exp, 0) to another variable and modify that
7598 because invert_truthvalue can modify the tree pointed to
7600 if (singleton == TREE_OPERAND (exp, 1))
7601 TREE_OPERAND (exp, 0)
7602 = invert_truthvalue (TREE_OPERAND (exp, 0));
7604 result = do_store_flag (TREE_OPERAND (exp, 0),
7605 (safe_from_p (temp, singleton, 1)
7607 mode, BRANCH_COST <= 1);
7609 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7610 result = expand_shift (LSHIFT_EXPR, mode, result,
7611 build_int_2 (tree_log2
7615 (safe_from_p (temp, singleton, 1)
7616 ? temp : NULL_RTX), 0);
7620 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7621 return expand_binop (mode, boptab, op1, result, temp,
7622 unsignedp, OPTAB_LIB_WIDEN);
7624 else if (singleton == TREE_OPERAND (exp, 1))
7625 TREE_OPERAND (exp, 0)
7626 = invert_truthvalue (TREE_OPERAND (exp, 0));
7629 do_pending_stack_adjust ();
7631 op0 = gen_label_rtx ();
7633 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7637 /* If the target conflicts with the other operand of the
7638 binary op, we can't use it. Also, we can't use the target
7639 if it is a hard register, because evaluating the condition
7640 might clobber it. */
7642 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7643 || (GET_CODE (temp) == REG
7644 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7645 temp = gen_reg_rtx (mode);
7646 store_expr (singleton, temp, 0);
7649 expand_expr (singleton,
7650 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7651 if (singleton == TREE_OPERAND (exp, 1))
7652 jumpif (TREE_OPERAND (exp, 0), op0);
7654 jumpifnot (TREE_OPERAND (exp, 0), op0);
7656 start_cleanup_deferral ();
7657 if (binary_op && temp == 0)
7658 /* Just touch the other operand. */
7659 expand_expr (TREE_OPERAND (binary_op, 1),
7660 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7662 store_expr (build (TREE_CODE (binary_op), type,
7663 make_tree (type, temp),
7664 TREE_OPERAND (binary_op, 1)),
7667 store_expr (build1 (TREE_CODE (unary_op), type,
7668 make_tree (type, temp)),
7672 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7673 comparison operator. If we have one of these cases, set the
7674 output to A, branch on A (cse will merge these two references),
7675 then set the output to FOO. */
7677 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7678 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7679 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7680 TREE_OPERAND (exp, 1), 0)
7681 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7682 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7683 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7685 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7686 temp = gen_reg_rtx (mode);
7687 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7688 jumpif (TREE_OPERAND (exp, 0), op0);
7690 start_cleanup_deferral ();
7691 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7695 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7696 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7697 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7698 TREE_OPERAND (exp, 2), 0)
7699 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7700 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7701 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7703 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7704 temp = gen_reg_rtx (mode);
7705 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7706 jumpifnot (TREE_OPERAND (exp, 0), op0);
7708 start_cleanup_deferral ();
7709 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7714 op1 = gen_label_rtx ();
7715 jumpifnot (TREE_OPERAND (exp, 0), op0);
7717 start_cleanup_deferral ();
7719 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7721 expand_expr (TREE_OPERAND (exp, 1),
7722 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7723 end_cleanup_deferral ();
7725 emit_jump_insn (gen_jump (op1));
7728 start_cleanup_deferral ();
7730 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7732 expand_expr (TREE_OPERAND (exp, 2),
7733 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7736 end_cleanup_deferral ();
7747 /* Something needs to be initialized, but we didn't know
7748 where that thing was when building the tree. For example,
7749 it could be the return value of a function, or a parameter
7750 to a function which lays down in the stack, or a temporary
7751 variable which must be passed by reference.
7753 We guarantee that the expression will either be constructed
7754 or copied into our original target. */
7756 tree slot = TREE_OPERAND (exp, 0);
7757 tree cleanups = NULL_TREE;
7760 if (TREE_CODE (slot) != VAR_DECL)
7764 target = original_target;
7768 if (DECL_RTL (slot) != 0)
7770 target = DECL_RTL (slot);
7771 /* If we have already expanded the slot, so don't do
7773 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7778 target = assign_temp (type, 2, 0, 1);
7779 /* All temp slots at this level must not conflict. */
7780 preserve_temp_slots (target);
7781 DECL_RTL (slot) = target;
7782 if (TREE_ADDRESSABLE (slot))
7784 TREE_ADDRESSABLE (slot) = 0;
7785 mark_addressable (slot);
7788 /* Since SLOT is not known to the called function
7789 to belong to its stack frame, we must build an explicit
7790 cleanup. This case occurs when we must build up a reference
7791 to pass the reference as an argument. In this case,
7792 it is very likely that such a reference need not be
7795 if (TREE_OPERAND (exp, 2) == 0)
7796 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7797 cleanups = TREE_OPERAND (exp, 2);
7802 /* This case does occur, when expanding a parameter which
7803 needs to be constructed on the stack. The target
7804 is the actual stack address that we want to initialize.
7805 The function we call will perform the cleanup in this case. */
7807 /* If we have already assigned it space, use that space,
7808 not target that we were passed in, as our target
7809 parameter is only a hint. */
7810 if (DECL_RTL (slot) != 0)
7812 target = DECL_RTL (slot);
7813 /* If we have already expanded the slot, so don't do
7815 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7820 DECL_RTL (slot) = target;
7821 /* If we must have an addressable slot, then make sure that
7822 the RTL that we just stored in slot is OK. */
7823 if (TREE_ADDRESSABLE (slot))
7825 TREE_ADDRESSABLE (slot) = 0;
7826 mark_addressable (slot);
7831 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7832 /* Mark it as expanded. */
7833 TREE_OPERAND (exp, 1) = NULL_TREE;
7835 TREE_USED (slot) = 1;
7836 store_expr (exp1, target, 0);
7838 expand_decl_cleanup (NULL_TREE, cleanups);
7845 tree lhs = TREE_OPERAND (exp, 0);
7846 tree rhs = TREE_OPERAND (exp, 1);
7847 tree noncopied_parts = 0;
7848 tree lhs_type = TREE_TYPE (lhs);
7850 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7851 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7852 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7853 TYPE_NONCOPIED_PARTS (lhs_type));
7854 while (noncopied_parts != 0)
7856 expand_assignment (TREE_VALUE (noncopied_parts),
7857 TREE_PURPOSE (noncopied_parts), 0, 0);
7858 noncopied_parts = TREE_CHAIN (noncopied_parts);
7865 /* If lhs is complex, expand calls in rhs before computing it.
7866 That's so we don't compute a pointer and save it over a call.
7867 If lhs is simple, compute it first so we can give it as a
7868 target if the rhs is just a call. This avoids an extra temp and copy
7869 and that prevents a partial-subsumption which makes bad code.
7870 Actually we could treat component_ref's of vars like vars. */
7872 tree lhs = TREE_OPERAND (exp, 0);
7873 tree rhs = TREE_OPERAND (exp, 1);
7874 tree noncopied_parts = 0;
7875 tree lhs_type = TREE_TYPE (lhs);
7879 if (TREE_CODE (lhs) != VAR_DECL
7880 && TREE_CODE (lhs) != RESULT_DECL
7881 && TREE_CODE (lhs) != PARM_DECL
7882 && ! (TREE_CODE (lhs) == INDIRECT_REF
7883 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7884 preexpand_calls (exp);
7886 /* Check for |= or &= of a bitfield of size one into another bitfield
7887 of size 1. In this case, (unless we need the result of the
7888 assignment) we can do this more efficiently with a
7889 test followed by an assignment, if necessary.
7891 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7892 things change so we do, this code should be enhanced to
7895 && TREE_CODE (lhs) == COMPONENT_REF
7896 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7897 || TREE_CODE (rhs) == BIT_AND_EXPR)
7898 && TREE_OPERAND (rhs, 0) == lhs
7899 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7900 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7901 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7903 rtx label = gen_label_rtx ();
7905 do_jump (TREE_OPERAND (rhs, 1),
7906 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7907 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7908 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7909 (TREE_CODE (rhs) == BIT_IOR_EXPR
7911 : integer_zero_node)),
7913 do_pending_stack_adjust ();
7918 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7919 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7920 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7921 TYPE_NONCOPIED_PARTS (lhs_type));
7923 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7924 while (noncopied_parts != 0)
7926 expand_assignment (TREE_PURPOSE (noncopied_parts),
7927 TREE_VALUE (noncopied_parts), 0, 0);
7928 noncopied_parts = TREE_CHAIN (noncopied_parts);
7934 if (!TREE_OPERAND (exp, 0))
7935 expand_null_return ();
7937 expand_return (TREE_OPERAND (exp, 0));
7940 case PREINCREMENT_EXPR:
7941 case PREDECREMENT_EXPR:
7942 return expand_increment (exp, 0, ignore);
7944 case POSTINCREMENT_EXPR:
7945 case POSTDECREMENT_EXPR:
7946 /* Faster to treat as pre-increment if result is not used. */
7947 return expand_increment (exp, ! ignore, ignore);
7950 /* If nonzero, TEMP will be set to the address of something that might
7951 be a MEM corresponding to a stack slot. */
7954 /* Are we taking the address of a nested function? */
7955 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7956 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7957 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7958 && ! TREE_STATIC (exp))
7960 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7961 op0 = force_operand (op0, target);
7963 /* If we are taking the address of something erroneous, just
7965 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7969 /* We make sure to pass const0_rtx down if we came in with
7970 ignore set, to avoid doing the cleanups twice for something. */
7971 op0 = expand_expr (TREE_OPERAND (exp, 0),
7972 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7973 (modifier == EXPAND_INITIALIZER
7974 ? modifier : EXPAND_CONST_ADDRESS));
7976 /* If we are going to ignore the result, OP0 will have been set
7977 to const0_rtx, so just return it. Don't get confused and
7978 think we are taking the address of the constant. */
7982 op0 = protect_from_queue (op0, 0);
7984 /* We would like the object in memory. If it is a constant,
7985 we can have it be statically allocated into memory. For
7986 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7987 memory and store the value into it. */
7989 if (CONSTANT_P (op0))
7990 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7992 else if (GET_CODE (op0) == MEM)
7994 mark_temp_addr_taken (op0);
7995 temp = XEXP (op0, 0);
7998 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7999 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8001 /* If this object is in a register, it must be not
8003 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8004 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8006 mark_temp_addr_taken (memloc);
8007 emit_move_insn (memloc, op0);
8011 if (GET_CODE (op0) != MEM)
8014 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8016 temp = XEXP (op0, 0);
8017 #ifdef POINTERS_EXTEND_UNSIGNED
8018 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8019 && mode == ptr_mode)
8020 temp = convert_memory_address (ptr_mode, temp);
8025 op0 = force_operand (XEXP (op0, 0), target);
8028 if (flag_force_addr && GET_CODE (op0) != REG)
8029 op0 = force_reg (Pmode, op0);
8031 if (GET_CODE (op0) == REG
8032 && ! REG_USERVAR_P (op0))
8033 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8035 /* If we might have had a temp slot, add an equivalent address
8038 update_temp_slot_address (temp, op0);
8040 #ifdef POINTERS_EXTEND_UNSIGNED
8041 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8042 && mode == ptr_mode)
8043 op0 = convert_memory_address (ptr_mode, op0);
8048 case ENTRY_VALUE_EXPR:
8051 /* COMPLEX type for Extended Pascal & Fortran */
8054 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8057 /* Get the rtx code of the operands. */
8058 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8059 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8062 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8066 /* Move the real (op0) and imaginary (op1) parts to their location. */
8067 emit_move_insn (gen_realpart (mode, target), op0);
8068 emit_move_insn (gen_imagpart (mode, target), op1);
8070 insns = get_insns ();
8073 /* Complex construction should appear as a single unit. */
8074 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8075 each with a separate pseudo as destination.
8076 It's not correct for flow to treat them as a unit. */
8077 if (GET_CODE (target) != CONCAT)
8078 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8086 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8087 return gen_realpart (mode, op0);
8090 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8091 return gen_imagpart (mode, op0);
8095 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8099 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8102 target = gen_reg_rtx (mode);
8106 /* Store the realpart and the negated imagpart to target. */
8107 emit_move_insn (gen_realpart (partmode, target),
8108 gen_realpart (partmode, op0));
8110 imag_t = gen_imagpart (partmode, target);
8111 temp = expand_unop (partmode, neg_optab,
8112 gen_imagpart (partmode, op0), imag_t, 0);
8114 emit_move_insn (imag_t, temp);
8116 insns = get_insns ();
8119 /* Conjugate should appear as a single unit
8120 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8121 each with a separate pseudo as destination.
8122 It's not correct for flow to treat them as a unit. */
8123 if (GET_CODE (target) != CONCAT)
8124 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8131 case TRY_CATCH_EXPR:
8133 tree handler = TREE_OPERAND (exp, 1);
8135 expand_eh_region_start ();
8137 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8139 expand_eh_region_end (handler);
8144 case TRY_FINALLY_EXPR:
8146 tree try_block = TREE_OPERAND (exp, 0);
8147 tree finally_block = TREE_OPERAND (exp, 1);
8148 rtx finally_label = gen_label_rtx ();
8149 rtx done_label = gen_label_rtx ();
8150 rtx return_link = gen_reg_rtx (Pmode);
8151 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8152 (tree) finally_label, (tree) return_link);
8153 TREE_SIDE_EFFECTS (cleanup) = 1;
8155 /* Start a new binding layer that will keep track of all cleanup
8156 actions to be performed. */
8157 expand_start_bindings (0);
8159 target_temp_slot_level = temp_slot_level;
8161 expand_decl_cleanup (NULL_TREE, cleanup);
8162 op0 = expand_expr (try_block, target, tmode, modifier);
8164 preserve_temp_slots (op0);
8165 expand_end_bindings (NULL_TREE, 0, 0);
8166 emit_jump (done_label);
8167 emit_label (finally_label);
8168 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8169 emit_indirect_jump (return_link);
8170 emit_label (done_label);
8174 case GOTO_SUBROUTINE_EXPR:
8176 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8177 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8178 rtx return_address = gen_label_rtx ();
8179 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8181 emit_label (return_address);
8187 rtx dcc = get_dynamic_cleanup_chain ();
8188 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8194 rtx dhc = get_dynamic_handler_chain ();
8195 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8200 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8203 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8206 /* Here to do an ordinary binary operator, generating an instruction
8207 from the optab already placed in `this_optab'. */
8209 preexpand_calls (exp);
8210 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8212 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8213 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8215 temp = expand_binop (mode, this_optab, op0, op1, target,
8216 unsignedp, OPTAB_LIB_WIDEN);
8222 /* Return the tree node and offset if a given argument corresponds to
8223 a string constant. */
8226 string_constant (arg, ptr_offset)
8232 if (TREE_CODE (arg) == ADDR_EXPR
8233 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8235 *ptr_offset = integer_zero_node;
8236 return TREE_OPERAND (arg, 0);
8238 else if (TREE_CODE (arg) == PLUS_EXPR)
8240 tree arg0 = TREE_OPERAND (arg, 0);
8241 tree arg1 = TREE_OPERAND (arg, 1);
8246 if (TREE_CODE (arg0) == ADDR_EXPR
8247 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8250 return TREE_OPERAND (arg0, 0);
8252 else if (TREE_CODE (arg1) == ADDR_EXPR
8253 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8256 return TREE_OPERAND (arg1, 0);
8263 /* Expand code for a post- or pre- increment or decrement
8264 and return the RTX for the result.
8265 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8268 expand_increment (exp, post, ignore)
/* NOTE(review): this listing elides blank lines, braces, and several
   declarations/statements (e.g. ICODE, BAD_SUBREG, the SINGLE_INSN
   assignments, and the final return); comments below annotate only the
   code that is visible.  */
8272 register rtx op0, op1;
8273 register rtx temp, value;
8274 register tree incremented = TREE_OPERAND (exp, 0);
8275 optab this_optab = add_optab;
8277 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
/* OP0_IS_COPY: nonzero when OP0 is a register copy of the lvalue
   rather than the lvalue itself (see the get_last_insn test below).  */
8278 int op0_is_copy = 0;
8279 int single_insn = 0;
8280 /* 1 means we can't store into OP0 directly,
8281 because it is a subreg narrower than a word,
8282 and we don't dare clobber the rest of the word. */
8285 /* Stabilize any component ref that might need to be
8286 evaluated more than once below. */
8288 || TREE_CODE (incremented) == BIT_FIELD_REF
8289 || (TREE_CODE (incremented) == COMPONENT_REF
8290 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8291 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8292 incremented = stabilize_reference (incremented);
8293 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8294 ones into save exprs so that they don't accidentally get evaluated
8295 more than once by the code below. */
8296 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8297 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8298 incremented = save_expr (incremented);
8300 /* Compute the operands as RTX.
8301 Note whether OP0 is the actual lvalue or a copy of it:
8302 I believe it is a copy iff it is a register or subreg
8303 and insns were generated in computing it. */
8305 temp = get_last_insn ();
8306 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8308 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8309 in place but instead must do sign- or zero-extension during assignment,
8310 so we copy it into a new register and let the code below use it as
8313 Note that we can safely modify this SUBREG since it is know not to be
8314 shared (it was made by the expand_expr call above). */
8316 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8319 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8323 else if (GET_CODE (op0) == SUBREG
8324 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8326 /* We cannot increment this SUBREG in place. If we are
8327 post-incrementing, get a copy of the old value. Otherwise,
8328 just mark that we cannot increment in place. */
8330 op0 = copy_to_reg (op0);
/* Copy iff a register/subreg AND new insns were emitted above.  */
8335 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8336 && temp != get_last_insn ());
8337 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8338 EXPAND_MEMORY_USE_BAD);
8340 /* Decide whether incrementing or decrementing. */
8341 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8342 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8343 this_optab = sub_optab;
8345 /* Convert decrement by a constant into a negative increment. */
8346 if (this_optab == sub_optab
8347 && GET_CODE (op1) == CONST_INT)
8349 op1 = GEN_INT (- INTVAL (op1));
8350 this_optab = add_optab;
8353 /* For a preincrement, see if we can do this with a single instruction. */
8356 icode = (int) this_optab->handlers[(int) mode].insn_code;
8357 if (icode != (int) CODE_FOR_nothing
8358 /* Make sure that OP0 is valid for operands 0 and 1
8359 of the insn we want to queue. */
8360 && (*insn_operand_predicate[icode][0]) (op0, mode)
8361 && (*insn_operand_predicate[icode][1]) (op0, mode)
8362 && (*insn_operand_predicate[icode][2]) (op1, mode))
8366 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8367 then we cannot just increment OP0. We must therefore contrive to
8368 increment the original value. Then, for postincrement, we can return
8369 OP0 since it is a copy of the old value. For preincrement, expand here
8370 unless we can do it with a single insn.
8372 Likewise if storing directly into OP0 would clobber high bits
8373 we need to preserve (bad_subreg). */
8374 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8376 /* This is the easiest way to increment the value wherever it is.
8377 Problems with multiple evaluation of INCREMENTED are prevented
8378 because either (1) it is a component_ref or preincrement,
8379 in which case it was stabilized above, or (2) it is an array_ref
8380 with constant index in an array in a register, which is
8381 safe to reevaluate. */
8382 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8383 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8384 ? MINUS_EXPR : PLUS_EXPR),
8387 TREE_OPERAND (exp, 1));
/* Strip conversions so the assignment is done in the inner type.  */
8389 while (TREE_CODE (incremented) == NOP_EXPR
8390 || TREE_CODE (incremented) == CONVERT_EXPR)
8392 newexp = convert (TREE_TYPE (incremented), newexp);
8393 incremented = TREE_OPERAND (incremented, 0);
8396 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8397 return post ? op0 : temp;
8402 /* We have a true reference to the value in OP0.
8403 If there is an insn to add or subtract in this mode, queue it.
8404 Queueing the increment insn avoids the register shuffling
8405 that often results if we must increment now and first save
8406 the old value for subsequent use. */
8408 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8409 op0 = stabilize (op0);
8412 icode = (int) this_optab->handlers[(int) mode].insn_code;
8413 if (icode != (int) CODE_FOR_nothing
8414 /* Make sure that OP0 is valid for operands 0 and 1
8415 of the insn we want to queue. */
8416 && (*insn_operand_predicate[icode][0]) (op0, mode)
8417 && (*insn_operand_predicate[icode][1]) (op0, mode))
8419 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8420 op1 = force_reg (mode, op1);
8422 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
/* MEM whose address the add insn can't take directly: do the add on a
   register copy, then queue the store back into the MEM.  */
8424 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8426 rtx addr = (general_operand (XEXP (op0, 0), mode)
8427 ? force_reg (Pmode, XEXP (op0, 0))
8428 : copy_to_reg (XEXP (op0, 0)));
8431 op0 = change_address (op0, VOIDmode, addr);
8432 temp = force_reg (GET_MODE (op0), op0);
8433 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8434 op1 = force_reg (mode, op1);
8436 /* The increment queue is LIFO, thus we have to `queue'
8437 the instructions in reverse order. */
8438 enqueue_insn (op0, gen_move_insn (op0, temp));
8439 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8444 /* Preincrement, or we can't increment with one simple insn. */
8446 /* Save a copy of the value before inc or dec, to return it later. */
8447 temp = value = copy_to_reg (op0);
8449 /* Arrange to return the incremented value. */
8450 /* Copy the rtx because expand_binop will protect from the queue,
8451 and the results of that would be invalid for us to return
8452 if our caller does emit_queue before using our result. */
8453 temp = copy_rtx (value = op0);
8455 /* Increment however we can. */
8456 op1 = expand_binop (mode, this_optab, value, op1,
8457 current_function_check_memory_usage ? NULL_RTX : op0,
8458 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8459 /* Make sure the value is stored into OP0. */
8461 emit_move_insn (op0, op1);
8466 /* Expand all function calls contained within EXP, innermost ones first.
8467 But don't look within expressions that have sequence points.
8468 For each CALL_EXPR, record the rtx for its value
8469 in the CALL_EXPR_RTL field. */
8472 preexpand_calls (exp)
/* NOTE(review): the parameter declaration (tree exp), braces, return
   statements, and some case labels (including, presumably, the
   CALL_EXPR case) are elided in this extracted listing.  */
8475 register int nops, i;
8476 int type = TREE_CODE_CLASS (TREE_CODE (exp));
/* Global switch: callers can disable pre-expansion entirely.  */
8478 if (! do_preexpand_calls)
8481 /* Only expressions and references can contain calls. */
8483 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8486 switch (TREE_CODE (exp))
8489 /* Do nothing if already expanded. */
8490 if (CALL_EXPR_RTL (exp) != 0
8491 /* Do nothing if the call returns a variable-sized object. */
8492 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
8493 /* Do nothing to built-in functions. */
8494 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
8495 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8497 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
/* Expand the call now and remember its value rtx on the node.  */
8500 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8505 case TRUTH_ANDIF_EXPR:
8506 case TRUTH_ORIF_EXPR:
8507 /* If we find one of these, then we can be sure
8508 the adjust will be done for it (since it makes jumps).
8509 Do it now, so that if this is inside an argument
8510 of a function, we don't get the stack adjustment
8511 after some other args have already been pushed. */
8512 do_pending_stack_adjust ();
/* Expressions with sequence points: do not look inside them.  */
8517 case WITH_CLEANUP_EXPR:
8518 case CLEANUP_POINT_EXPR:
8519 case TRY_CATCH_EXPR:
/* A SAVE_EXPR already expanded must not be re-scanned (its case
   label is elided in this listing).  */
8523 if (SAVE_EXPR_RTL (exp) != 0)
/* Fall-through/default handling: recurse into every operand whose
   tree-code class can itself contain calls.  */
8530 nops = tree_code_length[(int) TREE_CODE (exp)];
8531 for (i = 0; i < nops; i++)
8532 if (TREE_OPERAND (exp, i) != 0)
8534 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8535 if (type == 'e' || type == '<' || type == '1' || type == '2'
8537 preexpand_calls (TREE_OPERAND (exp, i));
8541 /* At the start of a function, record that we have no previously-pushed
8542 arguments waiting to be popped. */
8545 init_pending_stack_adjust ()
8547 pending_stack_adjust = 0;
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.

   NOTE(review): the extracted listing had dropped the leading
   `if (optimize > 0' operand of this condition, leaving a dangling
   `&&' chain; restored per the upstream source.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      /* Inlining may re-expand this body, which would then need the
	 adjustment after all.  */
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}
8569 /* Pop any previously-pushed arguments that have not been popped yet. */
8572 do_pending_stack_adjust ()
8574 if (inhibit_defer_pop == 0)
8576 if (pending_stack_adjust != 0)
8577 adjust_stack (GEN_INT (pending_stack_adjust));
8578 pending_stack_adjust = 0;
8582 /* Expand conditional expressions. */
8584 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8585 LABEL is an rtx of code CODE_LABEL, in this function and all the
8589 jumpifnot (exp, label)
8593 do_jump (exp, label, NULL_RTX);
8596 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8603 do_jump (exp, NULL_RTX, label);
8606 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8607 the result is zero, or IF_TRUE_LABEL if the result is one.
8608 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8609 meaning fall through in that case.
8611 do_jump always does any pending stack adjust except when it does not
8612 actually perform a jump. An example where there is no jump
8613 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8615 This function is responsible for optimizing cases such as
8616 &&, || and comparison operators in EXP. */
8619 do_jump (exp, if_false_label, if_true_label)
/* NOTE(review): this extracted listing elides braces, many case labels
   of the big switch on CODE, local declarations (e.g. TEMP, TYPE, I,
   COMPARISON), and `break' statements.  Comments below annotate the
   visible code; elided case labels are identified tentatively.  */
8621 rtx if_false_label, if_true_label;
8623 register enum tree_code code = TREE_CODE (exp);
8624 /* Some cases need to create a label to jump to
8625 in order to properly fall through.
8626 These cases set DROP_THROUGH_LABEL nonzero. */
8627 rtx drop_through_label = 0;
8632 enum machine_mode mode;
8634 #ifdef MAX_INTEGER_COMPUTATION_MODE
8635 check_max_integer_computation_mode (exp);
/* Constant operand (presumably the INTEGER_CST case — label elided):
   the branch target is known at compile time.  */
8646 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8652 /* This is not true with #pragma weak */
8654 /* The address of something can never be zero. */
8656 emit_jump (if_true_label);
/* Presumably the NOP_EXPR/conversion case (label elided): a conversion
   of a memory reference, or a narrowing conversion, cannot simply be
   looked through.  */
8661 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8662 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8663 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8666 /* If we are narrowing the operand, we have to do the compare in the
8668 if ((TYPE_PRECISION (TREE_TYPE (exp))
8669 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8671 case NON_LVALUE_EXPR:
8672 case REFERENCE_EXPR:
8677 /* These cannot change zero->non-zero or vice versa. */
8678 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
/* MINUS_EXPR/PLUS_EXPR handling (labels elided).  */
8682 /* This is never less insns than evaluating the PLUS_EXPR followed by
8683 a test and can be longer if the test is eliminated. */
8685 /* Reduce to minus. */
8686 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8687 TREE_OPERAND (exp, 0),
8688 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8689 TREE_OPERAND (exp, 1))));
8690 /* Process as MINUS. */
8694 /* Non-zero iff operands of minus differ. */
8695 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8696 TREE_OPERAND (exp, 0),
8697 TREE_OPERAND (exp, 1)),
/* BIT_AND_EXPR case (label elided).  */
8702 /* If we are AND'ing with a small constant, do this comparison in the
8703 smallest type that fits. If the machine doesn't have comparisons
8704 that small, it will be converted back to the wider comparison.
8705 This helps if we are testing the sign bit of a narrower object.
8706 combine can't do this for us because it can't know whether a
8707 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8709 if (! SLOW_BYTE_ACCESS
8710 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8711 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8712 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8713 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8714 && (type = type_for_mode (mode, 1)) != 0
8715 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8716 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8717 != CODE_FOR_nothing))
8719 do_jump (convert (type, exp), if_false_label, if_true_label);
8724 case TRUTH_NOT_EXPR:
/* Logical NOT: simply swap the two targets.  */
8725 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8728 case TRUTH_ANDIF_EXPR:
8729 if (if_false_label == 0)
8730 if_false_label = drop_through_label = gen_label_rtx ();
8731 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8732 start_cleanup_deferral ();
8733 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8734 end_cleanup_deferral ();
8737 case TRUTH_ORIF_EXPR:
8738 if (if_true_label == 0)
8739 if_true_label = drop_through_label = gen_label_rtx ();
8740 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8741 start_cleanup_deferral ();
8742 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8743 end_cleanup_deferral ();
/* COMPOUND_EXPR, presumably (label elided): evaluate operand 0 for
   side effects only, then jump on operand 1.  */
8748 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8749 preserve_temp_slots (NULL_RTX);
8753 do_pending_stack_adjust ();
8754 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* COMPONENT_REF/BIT_FIELD_REF/ARRAY_REF cases (labels elided):
   try doing the test in the narrow type of the field itself.  */
8761 int bitsize, bitpos, unsignedp;
8762 enum machine_mode mode;
8768 /* Get description of this reference. We don't actually care
8769 about the underlying object here. */
8770 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8771 &mode, &unsignedp, &volatilep,
8774 type = type_for_size (bitsize, unsignedp);
8775 if (! SLOW_BYTE_ACCESS
8776 && type != 0 && bitsize >= 0
8777 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8778 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8779 != CODE_FOR_nothing))
8781 do_jump (convert (type, exp), if_false_label, if_true_label);
/* COND_EXPR case (label elided).  */
8788 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8789 if (integer_onep (TREE_OPERAND (exp, 1))
8790 && integer_zerop (TREE_OPERAND (exp, 2)))
8791 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8793 else if (integer_zerop (TREE_OPERAND (exp, 1))
8794 && integer_onep (TREE_OPERAND (exp, 2)))
8795 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8799 register rtx label1 = gen_label_rtx ();
8800 drop_through_label = gen_label_rtx ();
8802 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8804 start_cleanup_deferral ();
8805 /* Now the THEN-expression. */
8806 do_jump (TREE_OPERAND (exp, 1),
8807 if_false_label ? if_false_label : drop_through_label,
8808 if_true_label ? if_true_label : drop_through_label);
8809 /* In case the do_jump just above never jumps. */
8810 do_pending_stack_adjust ();
8811 emit_label (label1);
8813 /* Now the ELSE-expression. */
8814 do_jump (TREE_OPERAND (exp, 2),
8815 if_false_label ? if_false_label : drop_through_label,
8816 if_true_label ? if_true_label : drop_through_label);
8817 end_cleanup_deferral ();
/* EQ_EXPR case (label elided): complex values are compared
   part-by-part via real/imag decomposition.  */
8823 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8825 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8826 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8828 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8829 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8832 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
8833 fold (build (EQ_EXPR, TREE_TYPE (exp),
8834 fold (build1 (REALPART_EXPR,
8835 TREE_TYPE (inner_type),
8837 fold (build1 (REALPART_EXPR,
8838 TREE_TYPE (inner_type),
8840 fold (build (EQ_EXPR, TREE_TYPE (exp),
8841 fold (build1 (IMAGPART_EXPR,
8842 TREE_TYPE (inner_type),
8844 fold (build1 (IMAGPART_EXPR,
8845 TREE_TYPE (inner_type),
8847 if_false_label, if_true_label);
8850 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8851 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8853 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8854 && !can_compare_p (TYPE_MODE (inner_type)))
8855 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8857 comparison = compare (exp, EQ, EQ);
/* NE_EXPR case (label elided): mirror of EQ_EXPR above.  */
8863 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8865 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8866 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8868 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8869 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8872 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
8873 fold (build (NE_EXPR, TREE_TYPE (exp),
8874 fold (build1 (REALPART_EXPR,
8875 TREE_TYPE (inner_type),
8877 fold (build1 (REALPART_EXPR,
8878 TREE_TYPE (inner_type),
8880 fold (build (NE_EXPR, TREE_TYPE (exp),
8881 fold (build1 (IMAGPART_EXPR,
8882 TREE_TYPE (inner_type),
8884 fold (build1 (IMAGPART_EXPR,
8885 TREE_TYPE (inner_type),
8887 if_false_label, if_true_label);
8890 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8891 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8893 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8894 && !can_compare_p (TYPE_MODE (inner_type)))
8895 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8897 comparison = compare (exp, NE, NE);
/* Relational cases (LT/LE/GT/GE — labels elided): fall back to a
   word-by-word comparison when the mode has no direct compare insn.  */
8902 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8904 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8905 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8907 comparison = compare (exp, LT, LTU);
8911 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8913 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8914 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8916 comparison = compare (exp, LE, LEU);
8920 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8922 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8923 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8925 comparison = compare (exp, GT, GTU);
8929 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8931 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8932 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8934 comparison = compare (exp, GE, GEU);
/* default case (label elided): evaluate EXP and test it against 0.  */
8939 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8941 /* This is not needed any more and causes poor code since it causes
8942 comparisons and tests from non-SI objects to have different code
8944 /* Copy to register to avoid generating bad insns by cse
8945 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8946 if (!cse_not_expected && GET_CODE (temp) == MEM)
8947 temp = copy_to_reg (temp);
8949 do_pending_stack_adjust ();
8950 if (GET_CODE (temp) == CONST_INT)
8951 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8952 else if (GET_CODE (temp) == LABEL_REF)
8953 comparison = const_true_rtx;
8954 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8955 && !can_compare_p (GET_MODE (temp)))
8956 /* Note swapping the labels gives us not-equal. */
8957 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8958 else if (GET_MODE (temp) != VOIDmode)
8959 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8960 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8961 GET_MODE (temp), NULL_RTX, 0);
8966 /* Do any postincrements in the expression that was tested. */
8969 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8970 straight into a conditional jump instruction as the jump condition.
8971 Otherwise, all the work has been done already. */
8973 if (comparison == const_true_rtx)
8976 emit_jump (if_true_label);
8978 else if (comparison == const0_rtx)
8981 emit_jump (if_false_label);
8983 else if (comparison)
8984 do_jump_for_compare (comparison, if_false_label, if_true_label);
8986 if (drop_through_label)
8988 /* If do_jump produces code that might be jumped around,
8989 do any stack adjusts from that code, before the place
8990 where control merges in. */
8991 do_pending_stack_adjust ();
8992 emit_label (drop_through_label);
8996 /* Given a comparison expression EXP for values too wide to be compared
8997 with one insn, test the comparison and jump to the appropriate label.
8998 The code of EXP is ignored; we always test GT if SWAP is 0,
8999 and LT if SWAP is 1. */
9002 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9005 rtx if_false_label, if_true_label;
9007 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9008 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9009 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9010 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9011 rtx drop_through_label = 0;
9012 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9015 if (! if_true_label || ! if_false_label)
9016 drop_through_label = gen_label_rtx ();
9017 if (! if_true_label)
9018 if_true_label = drop_through_label;
9019 if (! if_false_label)
9020 if_false_label = drop_through_label;
9022 /* Compare a word at a time, high order first. */
9023 for (i = 0; i < nwords; i++)
9026 rtx op0_word, op1_word;
9028 if (WORDS_BIG_ENDIAN)
9030 op0_word = operand_subword_force (op0, i, mode);
9031 op1_word = operand_subword_force (op1, i, mode);
9035 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9036 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9039 /* All but high-order word must be compared as unsigned. */
9040 comp = compare_from_rtx (op0_word, op1_word,
9041 (unsignedp || i > 0) ? GTU : GT,
9042 unsignedp, word_mode, NULL_RTX, 0);
9043 if (comp == const_true_rtx)
9044 emit_jump (if_true_label);
9045 else if (comp != const0_rtx)
9046 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9048 /* Consider lower words only if these are equal. */
9049 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9051 if (comp == const_true_rtx)
9052 emit_jump (if_false_label);
9053 else if (comp != const0_rtx)
9054 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9058 emit_jump (if_false_label);
9059 if (drop_through_label)
9060 emit_label (drop_through_label);
9063 /* Compare OP0 with OP1, word at a time, in mode MODE.
9064 UNSIGNEDP says to do unsigned comparison.
9065 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9068 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9069 enum machine_mode mode;
9072 rtx if_false_label, if_true_label;
9074 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9075 rtx drop_through_label = 0;
9078 if (! if_true_label || ! if_false_label)
9079 drop_through_label = gen_label_rtx ();
9080 if (! if_true_label)
9081 if_true_label = drop_through_label;
9082 if (! if_false_label)
9083 if_false_label = drop_through_label;
9085 /* Compare a word at a time, high order first. */
9086 for (i = 0; i < nwords; i++)
9089 rtx op0_word, op1_word;
9091 if (WORDS_BIG_ENDIAN)
9093 op0_word = operand_subword_force (op0, i, mode);
9094 op1_word = operand_subword_force (op1, i, mode);
9098 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9099 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9102 /* All but high-order word must be compared as unsigned. */
9103 comp = compare_from_rtx (op0_word, op1_word,
9104 (unsignedp || i > 0) ? GTU : GT,
9105 unsignedp, word_mode, NULL_RTX, 0);
9106 if (comp == const_true_rtx)
9107 emit_jump (if_true_label);
9108 else if (comp != const0_rtx)
9109 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9111 /* Consider lower words only if these are equal. */
9112 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9114 if (comp == const_true_rtx)
9115 emit_jump (if_false_label);
9116 else if (comp != const0_rtx)
9117 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9121 emit_jump (if_false_label);
9122 if (drop_through_label)
9123 emit_label (drop_through_label);
9126 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9127 with one insn, test the comparison and jump to the appropriate label. */
9130 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9132 rtx if_false_label, if_true_label;
9134 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9135 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9136 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9137 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9139 rtx drop_through_label = 0;
9141 if (! if_false_label)
9142 drop_through_label = if_false_label = gen_label_rtx ();
9144 for (i = 0; i < nwords; i++)
9146 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9147 operand_subword_force (op1, i, mode),
9148 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9149 word_mode, NULL_RTX, 0);
9150 if (comp == const_true_rtx)
9151 emit_jump (if_false_label);
9152 else if (comp != const0_rtx)
9153 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9157 emit_jump (if_true_label);
9158 if (drop_through_label)
9159 emit_label (drop_through_label);
9162 /* Jump according to whether OP0 is 0.
9163 We assume that OP0 has an integer mode that is too wide
9164 for the available compare insns. */
9167 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9169 rtx if_false_label, if_true_label;
9171 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9174 rtx drop_through_label = 0;
9176 /* The fastest way of doing this comparison on almost any machine is to
9177 "or" all the words and compare the result. If all have to be loaded
9178 from memory and this is a very wide item, it's possible this may
9179 be slower, but that's highly unlikely. */
9181 part = gen_reg_rtx (word_mode);
9182 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9183 for (i = 1; i < nwords && part != 0; i++)
9184 part = expand_binop (word_mode, ior_optab, part,
9185 operand_subword_force (op0, i, GET_MODE (op0)),
9186 part, 1, OPTAB_WIDEN);
9190 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
9193 if (comp == const_true_rtx)
9194 emit_jump (if_false_label);
9195 else if (comp == const0_rtx)
9196 emit_jump (if_true_label);
9198 do_jump_for_compare (comp, if_false_label, if_true_label);
9203 /* If we couldn't do the "or" simply, do this with a series of compares. */
9204 if (! if_false_label)
9205 drop_through_label = if_false_label = gen_label_rtx ();
9207 for (i = 0; i < nwords; i++)
9209 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
9211 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9212 if (comp == const_true_rtx)
9213 emit_jump (if_false_label);
9214 else if (comp != const0_rtx)
9215 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9219 emit_jump (if_true_label);
9221 if (drop_through_label)
9222 emit_label (drop_through_label);
9225 /* Given a comparison expression in rtl form, output conditional branches to
9226 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
9229 do_jump_for_compare (comparison, if_false_label, if_true_label)
9230 rtx comparison, if_false_label, if_true_label;
9234 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9235 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
9241 emit_jump (if_false_label);
9243 else if (if_false_label)
9245 rtx first = get_last_insn (), insn, branch;
9248 /* Output the branch with the opposite condition. Then try to invert
9249 what is generated. If more than one insn is a branch, or if the
9250 branch is not the last insn written, abort. If we can't invert
9251 the branch, emit make a true label, redirect this jump to that,
9252 emit a jump to the false label and define the true label. */
9253 /* ??? Note that we wouldn't have to do any of this nonsense if
9254 we passed both labels into a combined compare-and-branch.
9255 Ah well, jump threading does a good job of repairing the damage. */
9257 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9258 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
9263 /* Here we get the first insn that was just emitted. It used to be the
9264 case that, on some machines, emitting the branch would discard
9265 the previous compare insn and emit a replacement. This isn't
9266 done anymore, but abort if we see that FIRST is deleted. */
9269 first = get_insns ();
9270 else if (INSN_DELETED_P (first))
9273 first = NEXT_INSN (first);
9275 /* Look for multiple branches in this sequence, as might be generated
9276 for a multi-word integer comparison. */
9280 for (insn = first; insn ; insn = NEXT_INSN (insn))
9281 if (GET_CODE (insn) == JUMP_INSN)
9287 /* If we've got one branch at the end of the sequence,
9288 we can try to reverse it. */
9290 if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
9293 insn_label = XEXP (condjump_label (branch), 0);
9294 JUMP_LABEL (branch) = insn_label;
9296 if (insn_label != if_false_label)
9299 if (invert_jump (branch, if_false_label))
9303 /* Multiple branches, or reversion failed. Convert to branches
9304 around an unconditional jump. */
9306 if_true_label = gen_label_rtx ();
9307 for (insn = first; insn; insn = NEXT_INSN (insn))
9308 if (GET_CODE (insn) == JUMP_INSN)
9311 insn_label = XEXP (condjump_label (insn), 0);
9312 JUMP_LABEL (insn) = insn_label;
9314 if (insn_label == if_false_label)
9315 redirect_jump (insn, if_true_label);
9317 emit_jump (if_false_label);
9318 emit_label (if_true_label);
9322 /* Generate code for a comparison expression EXP
9323 (including code to compute the values to be compared)
9324 and set (CC0) according to the result.
9325 SIGNED_CODE should be the rtx operation for this comparison for
9326 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9328 We force a stack adjustment unless there are currently
9329 things pushed on the stack that aren't yet used. */
9332 compare (exp, signed_code, unsigned_code)
9334 enum rtx_code signed_code, unsigned_code;
9336 register rtx op0, op1;
9338 register enum machine_mode mode;
9342 /* Don't crash if the comparison was erroneous. */
9343 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9344 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9347 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9348 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9349 mode = TYPE_MODE (type);
9350 unsignedp = TREE_UNSIGNED (type);
9351 code = unsignedp ? unsigned_code : signed_code;
9353 #ifdef HAVE_canonicalize_funcptr_for_compare
9354 /* If function pointers need to be "canonicalized" before they can
9355 be reliably compared, then canonicalize them. */
9356 if (HAVE_canonicalize_funcptr_for_compare
9357 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9358 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9361 rtx new_op0 = gen_reg_rtx (mode);
9363 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9367 if (HAVE_canonicalize_funcptr_for_compare
9368 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9369 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9372 rtx new_op1 = gen_reg_rtx (mode);
9374 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9379 return compare_from_rtx (op0, op1, code, unsignedp, mode,
9381 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9382 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9385 /* Like compare but expects the values to compare as two rtx's.
9386 The decision as to signed or unsigned comparison must be made by the caller.
9388 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9391 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9392 size of MODE should be used. */
9395 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9396 register rtx op0, op1;
9399 enum machine_mode mode;
9405 /* If one operand is constant, make it the second one. Only do this
9406 if the other operand is not constant as well. */
9408 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9409 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9414 code = swap_condition (code);
9419 op0 = force_not_mem (op0);
9420 op1 = force_not_mem (op1);
9423 do_pending_stack_adjust ();
9425 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9426 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9430 /* There's no need to do this now that combine.c can eliminate lots of
9431 sign extensions. This can be less efficient in certain cases on other
9434 /* If this is a signed equality comparison, we can do it as an
9435 unsigned comparison since zero-extension is cheaper than sign
9436 extension and comparisons with zero are done as unsigned. This is
9437 the case even on machines that can do fast sign extension, since
9438 zero-extension is easier to combine with other operations than
9439 sign-extension is. If we are comparing against a constant, we must
9440 convert it to what it would look like unsigned. */
9441 if ((code == EQ || code == NE) && ! unsignedp
9442 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9444 if (GET_CODE (op1) == CONST_INT
9445 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9446 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9451 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9453 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9456 /* Generate code to calculate EXP using a store-flag instruction
9457 and return an rtx for the result. EXP is either a comparison
9458 or a TRUTH_NOT_EXPR whose operand is a comparison.
9460 If TARGET is nonzero, store the result there if convenient.
9462 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9465 Return zero if there is no suitable set-flag instruction
9466 available on this machine.
9468 Once expand_expr has been called on the arguments of the comparison,
9469 we are committed to doing the store flag, since it is not safe to
9470 re-evaluate the expression. We emit the store-flag insn by calling
9471 emit_store_flag, but only expand the arguments if we have a reason
9472 to believe that emit_store_flag will be successful. If we think that
9473 it will, but it isn't, we have to simulate the store-flag with a
9474 set/jump/set sequence. */
9477 do_store_flag (exp, target, mode, only_cheap)
9480 enum machine_mode mode;
9484 tree arg0, arg1, type;
9486 enum machine_mode operand_mode;
9490 enum insn_code icode;
9491 rtx subtarget = target;
9494 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9495 result at the end. We can't simply invert the test since it would
9496 have already been inverted if it were valid. This case occurs for
9497 some floating-point comparisons. */
9499 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9500 invert = 1, exp = TREE_OPERAND (exp, 0);
9502 arg0 = TREE_OPERAND (exp, 0);
9503 arg1 = TREE_OPERAND (exp, 1);
9504 type = TREE_TYPE (arg0);
9505 operand_mode = TYPE_MODE (type);
9506 unsignedp = TREE_UNSIGNED (type);
9508 /* We won't bother with BLKmode store-flag operations because it would mean
9509 passing a lot of information to emit_store_flag. */
9510 if (operand_mode == BLKmode)
9513 /* We won't bother with store-flag operations involving function pointers
9514 when function pointers must be canonicalized before comparisons. */
9515 #ifdef HAVE_canonicalize_funcptr_for_compare
9516 if (HAVE_canonicalize_funcptr_for_compare
9517 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9518 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9520 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9521 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9522 == FUNCTION_TYPE))))
9529 /* Get the rtx comparison code to use. We know that EXP is a comparison
9530 operation of some type. Some comparisons against 1 and -1 can be
9531 converted to comparisons with zero. Do so here so that the tests
9532 below will be aware that we have a comparison with zero. These
9533 tests will not catch constants in the first operand, but constants
9534 are rarely passed as the first operand. */
9536 switch (TREE_CODE (exp))
9545 if (integer_onep (arg1))
9546 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9548 code = unsignedp ? LTU : LT;
9551 if (! unsignedp && integer_all_onesp (arg1))
9552 arg1 = integer_zero_node, code = LT;
9554 code = unsignedp ? LEU : LE;
9557 if (! unsignedp && integer_all_onesp (arg1))
9558 arg1 = integer_zero_node, code = GE;
9560 code = unsignedp ? GTU : GT;
9563 if (integer_onep (arg1))
9564 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9566 code = unsignedp ? GEU : GE;
9572 /* Put a constant second. */
9573 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9575 tem = arg0; arg0 = arg1; arg1 = tem;
9576 code = swap_condition (code);
9579 /* If this is an equality or inequality test of a single bit, we can
9580 do this by shifting the bit being tested to the low-order bit and
9581 masking the result with the constant 1. If the condition was EQ,
9582 we xor it with 1. This does not require an scc insn and is faster
9583 than an scc insn even if we have it. */
9585 if ((code == NE || code == EQ)
9586 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9587 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9589 tree inner = TREE_OPERAND (arg0, 0);
9590 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9593 /* If INNER is a right shift of a constant and it plus BITNUM does
9594 not overflow, adjust BITNUM and INNER. */
9596 if (TREE_CODE (inner) == RSHIFT_EXPR
9597 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9598 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9599 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9600 < TYPE_PRECISION (type)))
9602 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9603 inner = TREE_OPERAND (inner, 0);
9606 /* If we are going to be able to omit the AND below, we must do our
9607 operations as unsigned. If we must use the AND, we have a choice.
9608 Normally unsigned is faster, but for some machines signed is. */
9609 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9610 #ifdef LOAD_EXTEND_OP
9611 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9617 if (subtarget == 0 || GET_CODE (subtarget) != REG
9618 || GET_MODE (subtarget) != operand_mode
9619 || ! safe_from_p (subtarget, inner, 1))
9622 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9625 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9626 size_int (bitnum), subtarget, ops_unsignedp);
9628 if (GET_MODE (op0) != mode)
9629 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9631 if ((code == EQ && ! invert) || (code == NE && invert))
9632 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9633 ops_unsignedp, OPTAB_LIB_WIDEN);
9635 /* Put the AND last so it can combine with more things. */
9636 if (bitnum != TYPE_PRECISION (type) - 1)
9637 op0 = expand_and (op0, const1_rtx, subtarget);
9642 /* Now see if we are likely to be able to do this. Return if not. */
9643 if (! can_compare_p (operand_mode))
9645 icode = setcc_gen_code[(int) code];
9646 if (icode == CODE_FOR_nothing
9647 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9649 /* We can only do this if it is one of the special cases that
9650 can be handled without an scc insn. */
9651 if ((code == LT && integer_zerop (arg1))
9652 || (! only_cheap && code == GE && integer_zerop (arg1)))
9654 else if (BRANCH_COST >= 0
9655 && ! only_cheap && (code == NE || code == EQ)
9656 && TREE_CODE (type) != REAL_TYPE
9657 && ((abs_optab->handlers[(int) operand_mode].insn_code
9658 != CODE_FOR_nothing)
9659 || (ffs_optab->handlers[(int) operand_mode].insn_code
9660 != CODE_FOR_nothing)))
9666 preexpand_calls (exp);
9667 if (subtarget == 0 || GET_CODE (subtarget) != REG
9668 || GET_MODE (subtarget) != operand_mode
9669 || ! safe_from_p (subtarget, arg1, 1))
9672 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9673 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9676 target = gen_reg_rtx (mode);
9678 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9679 because, if the emit_store_flag does anything it will succeed and
9680 OP0 and OP1 will not be used subsequently. */
9682 result = emit_store_flag (target, code,
9683 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9684 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9685 operand_mode, unsignedp, 1);
9690 result = expand_binop (mode, xor_optab, result, const1_rtx,
9691 result, 0, OPTAB_LIB_WIDEN);
9695 /* If this failed, we have to do this with set/compare/jump/set code. */
9696 if (GET_CODE (target) != REG
9697 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9698 target = gen_reg_rtx (GET_MODE (target));
9700 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9701 result = compare_from_rtx (op0, op1, code, unsignedp,
9702 operand_mode, NULL_RTX, 0);
9703 if (GET_CODE (result) == CONST_INT)
9704 return (((result == const0_rtx && ! invert)
9705 || (result != const0_rtx && invert))
9706 ? const0_rtx : const1_rtx);
9708 label = gen_label_rtx ();
9709 if (bcc_gen_fctn[(int) code] == 0)
9712 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9713 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9719 /* Generate a tablejump instruction (used for switch statements). */
9721 #ifdef HAVE_tablejump
9723 /* INDEX is the value being switched on, with the lowest value
9724 in the table already subtracted.
9725 MODE is its expected mode (needed if INDEX is constant).
9726 RANGE is the length of the jump table.
9727 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9729 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9730 index value is out of range. */
9733 do_tablejump (index, mode, range, table_label, default_label)
9734 rtx index, range, table_label, default_label;
9735 enum machine_mode mode;
9737 register rtx temp, vector;
9739 /* Do an unsigned comparison (in the proper mode) between the index
9740 expression and the value which represents the length of the range.
9741 Since we just finished subtracting the lower bound of the range
9742 from the index expression, this comparison allows us to simultaneously
9743 check that the original index expression value is both greater than
9744 or equal to the minimum value of the range and less than or equal to
9745 the maximum value of the range. */
9747 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9750 /* If index is in range, it must fit in Pmode.
9751 Convert to Pmode so we can index with it. */
9753 index = convert_to_mode (Pmode, index, 1);
9755 /* Don't let a MEM slip thru, because then INDEX that comes
9756 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9757 and break_out_memory_refs will go to work on it and mess it up. */
9758 #ifdef PIC_CASE_VECTOR_ADDRESS
9759 if (flag_pic && GET_CODE (index) != REG)
9760 index = copy_to_mode_reg (Pmode, index);
9763 /* If flag_force_addr were to affect this address
9764 it could interfere with the tricky assumptions made
9765 about addresses that contain label-refs,
9766 which may be valid only very near the tablejump itself. */
9767 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9768 GET_MODE_SIZE, because this indicates how large insns are. The other
9769 uses should all be Pmode, because they are addresses. This code
9770 could fail if addresses and insns are not the same size. */
9771 index = gen_rtx_PLUS (Pmode,
9772 gen_rtx_MULT (Pmode, index,
9773 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9774 gen_rtx_LABEL_REF (Pmode, table_label));
9775 #ifdef PIC_CASE_VECTOR_ADDRESS
9777 index = PIC_CASE_VECTOR_ADDRESS (index);
9780 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9781 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9782 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9783 RTX_UNCHANGING_P (vector) = 1;
9784 convert_move (temp, vector, 0);
9786 emit_jump_insn (gen_tablejump (temp, table_label));
9788 /* If we are generating PIC code or if the table is PC-relative, the
9789 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9790 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9794 #endif /* HAVE_tablejump */