1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
40 #include "typeclass.h"
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
64 #define STACK_PUSH_CODE PRE_INC
68 /* Assume that case vectors are not pc-relative. */
69 #ifndef CASE_VECTOR_PC_RELATIVE
70 #define CASE_VECTOR_PC_RELATIVE 0
73 /* If this is nonzero, we do not bother generating VOLATILE
74 around volatile memory references, and we are willing to
75 output indirect addresses. If cse is to follow, we reject
76 indirect addresses so a useful potential cse is generated;
77 if it is used only once, instruction combination will produce
78 the same indirect address eventually. */
81 /* Nonzero to generate code for all the subroutines within an
82 expression before generating the upper levels of the expression.
83 Nowadays this is never zero. */
84 int do_preexpand_calls = 1;
86 /* Don't check memory usage, since code is being emitted to check a memory
87 usage. Used when current_function_check_memory_usage is true, to avoid
88 infinite recursion. */
89 static int in_check_memory_usage;
91 /* This structure is used by move_by_pieces to describe the move to
103 int explicit_inc_from;
110 /* This structure is used by clear_by_pieces to describe the clear to
113 struct clear_by_pieces
125 extern struct obstack permanent_obstack;
127 static rtx get_push_address PROTO ((int));
129 static rtx enqueue_insn PROTO((rtx, rtx));
130 static int move_by_pieces_ninsns PROTO((unsigned int, int));
131 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
132 struct move_by_pieces *));
133 static void clear_by_pieces PROTO((rtx, int, int));
134 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
135 struct clear_by_pieces *));
136 static int is_zeros_p PROTO((tree));
137 static int mostly_zeros_p PROTO((tree));
138 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
140 static void store_constructor PROTO((tree, rtx, int));
141 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
142 enum machine_mode, int, int,
144 static enum memory_use_mode
145 get_memory_usage_from_modifier PROTO((enum expand_modifier));
146 static tree save_noncopied_parts PROTO((tree, tree));
147 static tree init_noncopied_parts PROTO((tree, tree));
148 static int safe_from_p PROTO((rtx, tree, int));
149 static int fixed_type_p PROTO((tree));
150 static rtx var_rtx PROTO((tree));
151 static rtx expand_increment PROTO((tree, int, int));
152 static void preexpand_calls PROTO((tree));
153 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
154 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
155 static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
156 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
158 /* Record for each mode whether we can move a register directly to or
159 from an object of that mode in memory. If we can't, we won't try
160 to use that mode directly when accessing a field of that mode. */
162 static char direct_load[NUM_MACHINE_MODES];
163 static char direct_store[NUM_MACHINE_MODES];
165 /* If a memory-to-memory move would take MOVE_RATIO or more simple
166 move-instruction sequences, we will do a movstr or libcall instead. */
169 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
172 /* If we are optimizing for space (-Os), cut down the default move ratio */
173 #define MOVE_RATIO (optimize_size ? 3 : 15)
177 /* This macro is used to determine whether move_by_pieces should be called
178 to perform a structure copy. */
179 #ifndef MOVE_BY_PIECES_P
180 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
181 (SIZE, ALIGN) < MOVE_RATIO)
184 /* This array records the insn_code of insns to perform block moves. */
185 enum insn_code movstr_optab[NUM_MACHINE_MODES];
187 /* This array records the insn_code of insns to perform block clears. */
188 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
190 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
192 #ifndef SLOW_UNALIGNED_ACCESS
193 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
196 /* This is run once per compilation to set up which modes can be used
197 directly in memory and to initialize the block move optab. */
203 enum machine_mode mode;
210 /* Since we are on the permanent obstack, we must be sure we save this
211 spot AFTER we call start_sequence, since it will reuse the rtl it
213 free_point = (char *) oballoc (0);
215 /* Try indexing by frame ptr and try by stack ptr.
216 It is known that on the Convex the stack ptr isn't a valid index.
217 With luck, one or the other is valid on any machine. */
218 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
219 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
221 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
222 pat = PATTERN (insn);
224 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
225 mode = (enum machine_mode) ((int) mode + 1))
230 direct_load[(int) mode] = direct_store[(int) mode] = 0;
231 PUT_MODE (mem, mode);
232 PUT_MODE (mem1, mode);
234 /* See if there is some register that can be used in this mode and
235 directly loaded or stored from memory. */
237 if (mode != VOIDmode && mode != BLKmode)
238 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
239 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
242 if (! HARD_REGNO_MODE_OK (regno, mode))
245 reg = gen_rtx_REG (mode, regno);
248 SET_DEST (pat) = reg;
249 if (recog (pat, insn, &num_clobbers) >= 0)
250 direct_load[(int) mode] = 1;
252 SET_SRC (pat) = mem1;
253 SET_DEST (pat) = reg;
254 if (recog (pat, insn, &num_clobbers) >= 0)
255 direct_load[(int) mode] = 1;
258 SET_DEST (pat) = mem;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_store[(int) mode] = 1;
263 SET_DEST (pat) = mem1;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_store[(int) mode] = 1;
273 /* This is run at the start of compiling a function. */
278 current_function->expr
279 = (struct expr_status *) xmalloc (sizeof (struct expr_status));
282 pending_stack_adjust = 0;
283 inhibit_defer_pop = 0;
285 apply_args_value = 0;
289 /* Small sanity check that the queue is empty at the end of a function. */
291 finish_expr_for_function ()
297 /* Manage the queue of increment instructions to be output
298 for POSTINCREMENT_EXPR expressions, etc. */
300 /* Queue up to increment (or change) VAR later. BODY says how:
301 BODY should be the same thing you would pass to emit_insn
302 to increment right away. It will go to emit_insn later on.
304 The value is a QUEUED expression to be used in place of VAR
305 where you want to guarantee the pre-incrementation value of VAR. */
308 enqueue_insn (var, body)
311 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
312 var, NULL_RTX, NULL_RTX, body,
314 return pending_chain;
317 /* Use protect_from_queue to convert a QUEUED expression
318 into something that you can put immediately into an instruction.
319 If the queued incrementation has not happened yet,
320 protect_from_queue returns the variable itself.
321 If the incrementation has happened, protect_from_queue returns a temp
322 that contains a copy of the old value of the variable.
324 Any time an rtx which might possibly be a QUEUED is to be put
325 into an instruction, it must be passed through protect_from_queue first.
326 QUEUED expressions are not meaningful in instructions.
328 Do not pass a value through protect_from_queue and then hold
329 on to it for a while before putting it in an instruction!
330 If the queue is flushed in between, incorrect code will result. */
333 protect_from_queue (x, modify)
337 register RTX_CODE code = GET_CODE (x);
339 #if 0 /* A QUEUED can hang around after the queue is forced out. */
340 /* Shortcut for most common case. */
341 if (pending_chain == 0)
347 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
348 use of autoincrement. Make a copy of the contents of the memory
349 location rather than a copy of the address, but not if the value is
350 of mode BLKmode. Don't modify X in place since it might be
352 if (code == MEM && GET_MODE (x) != BLKmode
353 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
355 register rtx y = XEXP (x, 0);
356 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
358 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
359 MEM_COPY_ATTRIBUTES (new, x);
360 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
364 register rtx temp = gen_reg_rtx (GET_MODE (new));
365 emit_insn_before (gen_move_insn (temp, new),
371 /* Otherwise, recursively protect the subexpressions of all
372 the kinds of rtx's that can contain a QUEUED. */
375 rtx tem = protect_from_queue (XEXP (x, 0), 0);
376 if (tem != XEXP (x, 0))
382 else if (code == PLUS || code == MULT)
384 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
385 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
386 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
395 /* If the increment has not happened, use the variable itself. */
396 if (QUEUED_INSN (x) == 0)
397 return QUEUED_VAR (x);
398 /* If the increment has happened and a pre-increment copy exists,
400 if (QUEUED_COPY (x) != 0)
401 return QUEUED_COPY (x);
402 /* The increment has happened but we haven't set up a pre-increment copy.
403 Set one up now, and use it. */
404 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
405 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
407 return QUEUED_COPY (x);
410 /* Return nonzero if X contains a QUEUED expression:
411 if it contains anything that will be altered by a queued increment.
412 We handle only combinations of MEM, PLUS, MINUS and MULT operators
413 since memory addresses generally contain only those. */
419 register enum rtx_code code = GET_CODE (x);
425 return queued_subexp_p (XEXP (x, 0));
429 return (queued_subexp_p (XEXP (x, 0))
430 || queued_subexp_p (XEXP (x, 1)));
436 /* Perform all the pending incrementations. */
442 while ((p = pending_chain))
444 rtx body = QUEUED_BODY (p);
446 if (GET_CODE (body) == SEQUENCE)
448 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
449 emit_insn (QUEUED_BODY (p));
452 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
453 pending_chain = QUEUED_NEXT (p);
457 /* Copy data from FROM to TO, where the machine modes are not the same.
458 Both modes may be integer, or both may be floating.
459 UNSIGNEDP should be nonzero if FROM is an unsigned type.
460 This causes zero-extension instead of sign-extension. */
463 convert_move (to, from, unsignedp)
464 register rtx to, from;
467 enum machine_mode to_mode = GET_MODE (to);
468 enum machine_mode from_mode = GET_MODE (from);
469 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
470 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
474 /* rtx code for making an equivalent value. */
475 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
477 to = protect_from_queue (to, 1);
478 from = protect_from_queue (from, 0);
480 if (to_real != from_real)
483 /* If FROM is a SUBREG that indicates that we have already done at least
484 the required extension, strip it. We don't handle such SUBREGs as
487 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
488 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
489 >= GET_MODE_SIZE (to_mode))
490 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
491 from = gen_lowpart (to_mode, from), from_mode = to_mode;
493 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
496 if (to_mode == from_mode
497 || (from_mode == VOIDmode && CONSTANT_P (from)))
499 emit_move_insn (to, from);
507 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
509 /* Try converting directly if the insn is supported. */
510 if ((code = can_extend_p (to_mode, from_mode, 0))
513 emit_unop_insn (code, to, from, UNKNOWN);
518 #ifdef HAVE_trunchfqf2
519 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
521 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
525 #ifdef HAVE_trunctqfqf2
526 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
528 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
532 #ifdef HAVE_truncsfqf2
533 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
535 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
539 #ifdef HAVE_truncdfqf2
540 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
542 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
546 #ifdef HAVE_truncxfqf2
547 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
549 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
553 #ifdef HAVE_trunctfqf2
554 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
556 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
561 #ifdef HAVE_trunctqfhf2
562 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
564 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
568 #ifdef HAVE_truncsfhf2
569 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
571 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
575 #ifdef HAVE_truncdfhf2
576 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
578 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
582 #ifdef HAVE_truncxfhf2
583 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
585 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
589 #ifdef HAVE_trunctfhf2
590 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
592 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
597 #ifdef HAVE_truncsftqf2
598 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
600 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
604 #ifdef HAVE_truncdftqf2
605 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
607 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
611 #ifdef HAVE_truncxftqf2
612 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
614 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
618 #ifdef HAVE_trunctftqf2
619 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
621 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
626 #ifdef HAVE_truncdfsf2
627 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
629 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncxfsf2
634 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
636 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
640 #ifdef HAVE_trunctfsf2
641 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
643 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxfdf2
648 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
650 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctfdf2
655 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
657 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
669 libcall = extendsfdf2_libfunc;
673 libcall = extendsfxf2_libfunc;
677 libcall = extendsftf2_libfunc;
689 libcall = truncdfsf2_libfunc;
693 libcall = extenddfxf2_libfunc;
697 libcall = extenddftf2_libfunc;
709 libcall = truncxfsf2_libfunc;
713 libcall = truncxfdf2_libfunc;
725 libcall = trunctfsf2_libfunc;
729 libcall = trunctfdf2_libfunc;
741 if (libcall == (rtx) 0)
742 /* This conversion is not implemented yet. */
745 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
747 emit_move_insn (to, value);
751 /* Now both modes are integers. */
753 /* Handle expanding beyond a word. */
754 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
755 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
762 enum machine_mode lowpart_mode;
763 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
765 /* Try converting directly if the insn is supported. */
766 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
769 /* If FROM is a SUBREG, put it into a register. Do this
770 so that we always generate the same set of insns for
771 better cse'ing; if an intermediate assignment occurred,
772 we won't be doing the operation directly on the SUBREG. */
773 if (optimize > 0 && GET_CODE (from) == SUBREG)
774 from = force_reg (from_mode, from);
775 emit_unop_insn (code, to, from, equiv_code);
778 /* Next, try converting via full word. */
779 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
780 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
781 != CODE_FOR_nothing))
783 if (GET_CODE (to) == REG)
784 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
785 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
786 emit_unop_insn (code, to,
787 gen_lowpart (word_mode, to), equiv_code);
791 /* No special multiword conversion insn; do it by hand. */
794 /* Since we will turn this into a no conflict block, we must ensure
795 that the source does not overlap the target. */
797 if (reg_overlap_mentioned_p (to, from))
798 from = force_reg (from_mode, from);
800 /* Get a copy of FROM widened to a word, if necessary. */
801 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
802 lowpart_mode = word_mode;
804 lowpart_mode = from_mode;
806 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
808 lowpart = gen_lowpart (lowpart_mode, to);
809 emit_move_insn (lowpart, lowfrom);
811 /* Compute the value to put in each remaining word. */
813 fill_value = const0_rtx;
818 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
819 && STORE_FLAG_VALUE == -1)
821 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
823 fill_value = gen_reg_rtx (word_mode);
824 emit_insn (gen_slt (fill_value));
830 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
831 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
833 fill_value = convert_to_mode (word_mode, fill_value, 1);
837 /* Fill the remaining words. */
838 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
840 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
841 rtx subword = operand_subword (to, index, 1, to_mode);
846 if (fill_value != subword)
847 emit_move_insn (subword, fill_value);
850 insns = get_insns ();
853 emit_no_conflict_block (insns, to, from, NULL_RTX,
854 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
858 /* Truncating multi-word to a word or less. */
859 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
860 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
862 if (!((GET_CODE (from) == MEM
863 && ! MEM_VOLATILE_P (from)
864 && direct_load[(int) to_mode]
865 && ! mode_dependent_address_p (XEXP (from, 0)))
866 || GET_CODE (from) == REG
867 || GET_CODE (from) == SUBREG))
868 from = force_reg (from_mode, from);
869 convert_move (to, gen_lowpart (word_mode, from), 0);
873 /* Handle pointer conversion */ /* SPEE 900220 */
874 if (to_mode == PQImode)
876 if (from_mode != QImode)
877 from = convert_to_mode (QImode, from, unsignedp);
879 #ifdef HAVE_truncqipqi2
880 if (HAVE_truncqipqi2)
882 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
885 #endif /* HAVE_truncqipqi2 */
889 if (from_mode == PQImode)
891 if (to_mode != QImode)
893 from = convert_to_mode (QImode, from, unsignedp);
898 #ifdef HAVE_extendpqiqi2
899 if (HAVE_extendpqiqi2)
901 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
904 #endif /* HAVE_extendpqiqi2 */
909 if (to_mode == PSImode)
911 if (from_mode != SImode)
912 from = convert_to_mode (SImode, from, unsignedp);
914 #ifdef HAVE_truncsipsi2
915 if (HAVE_truncsipsi2)
917 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
920 #endif /* HAVE_truncsipsi2 */
924 if (from_mode == PSImode)
926 if (to_mode != SImode)
928 from = convert_to_mode (SImode, from, unsignedp);
933 #ifdef HAVE_extendpsisi2
934 if (HAVE_extendpsisi2)
936 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
939 #endif /* HAVE_extendpsisi2 */
944 if (to_mode == PDImode)
946 if (from_mode != DImode)
947 from = convert_to_mode (DImode, from, unsignedp);
949 #ifdef HAVE_truncdipdi2
950 if (HAVE_truncdipdi2)
952 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
955 #endif /* HAVE_truncdipdi2 */
959 if (from_mode == PDImode)
961 if (to_mode != DImode)
963 from = convert_to_mode (DImode, from, unsignedp);
968 #ifdef HAVE_extendpdidi2
969 if (HAVE_extendpdidi2)
971 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
974 #endif /* HAVE_extendpdidi2 */
979 /* Now follow all the conversions between integers
980 no more than a word long. */
982 /* For truncation, usually we can just refer to FROM in a narrower mode. */
983 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
984 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
985 GET_MODE_BITSIZE (from_mode)))
987 if (!((GET_CODE (from) == MEM
988 && ! MEM_VOLATILE_P (from)
989 && direct_load[(int) to_mode]
990 && ! mode_dependent_address_p (XEXP (from, 0)))
991 || GET_CODE (from) == REG
992 || GET_CODE (from) == SUBREG))
993 from = force_reg (from_mode, from);
994 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
995 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
996 from = copy_to_reg (from);
997 emit_move_insn (to, gen_lowpart (to_mode, from));
1001 /* Handle extension. */
1002 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1004 /* Convert directly if that works. */
1005 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1006 != CODE_FOR_nothing)
1008 emit_unop_insn (code, to, from, equiv_code);
1013 enum machine_mode intermediate;
1017 /* Search for a mode to convert via. */
1018 for (intermediate = from_mode; intermediate != VOIDmode;
1019 intermediate = GET_MODE_WIDER_MODE (intermediate))
1020 if (((can_extend_p (to_mode, intermediate, unsignedp)
1021 != CODE_FOR_nothing)
1022 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1023 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1024 && (can_extend_p (intermediate, from_mode, unsignedp)
1025 != CODE_FOR_nothing))
1027 convert_move (to, convert_to_mode (intermediate, from,
1028 unsignedp), unsignedp);
1032 /* No suitable intermediate mode.
1033 Generate what we need with shifts. */
1034 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1035 - GET_MODE_BITSIZE (from_mode), 0);
1036 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1037 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1039 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1042 emit_move_insn (to, tmp);
1047 /* Support special truncate insns for certain modes. */
1049 if (from_mode == DImode && to_mode == SImode)
1051 #ifdef HAVE_truncdisi2
1052 if (HAVE_truncdisi2)
1054 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1058 convert_move (to, force_reg (from_mode, from), unsignedp);
1062 if (from_mode == DImode && to_mode == HImode)
1064 #ifdef HAVE_truncdihi2
1065 if (HAVE_truncdihi2)
1067 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1071 convert_move (to, force_reg (from_mode, from), unsignedp);
1075 if (from_mode == DImode && to_mode == QImode)
1077 #ifdef HAVE_truncdiqi2
1078 if (HAVE_truncdiqi2)
1080 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1084 convert_move (to, force_reg (from_mode, from), unsignedp);
1088 if (from_mode == SImode && to_mode == HImode)
1090 #ifdef HAVE_truncsihi2
1091 if (HAVE_truncsihi2)
1093 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1097 convert_move (to, force_reg (from_mode, from), unsignedp);
1101 if (from_mode == SImode && to_mode == QImode)
1103 #ifdef HAVE_truncsiqi2
1104 if (HAVE_truncsiqi2)
1106 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1110 convert_move (to, force_reg (from_mode, from), unsignedp);
1114 if (from_mode == HImode && to_mode == QImode)
1116 #ifdef HAVE_trunchiqi2
1117 if (HAVE_trunchiqi2)
1119 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1123 convert_move (to, force_reg (from_mode, from), unsignedp);
1127 if (from_mode == TImode && to_mode == DImode)
1129 #ifdef HAVE_trunctidi2
1130 if (HAVE_trunctidi2)
1132 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1140 if (from_mode == TImode && to_mode == SImode)
1142 #ifdef HAVE_trunctisi2
1143 if (HAVE_trunctisi2)
1145 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1153 if (from_mode == TImode && to_mode == HImode)
1155 #ifdef HAVE_trunctihi2
1156 if (HAVE_trunctihi2)
1158 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1166 if (from_mode == TImode && to_mode == QImode)
1168 #ifdef HAVE_trunctiqi2
1169 if (HAVE_trunctiqi2)
1171 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1179 /* Handle truncation of volatile memrefs, and so on;
1180 the things that couldn't be truncated directly,
1181 and for which there was no special instruction. */
1182 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1184 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1185 emit_move_insn (to, temp);
1189 /* Mode combination is not recognized. */
1193 /* Return an rtx for a value that would result
1194 from converting X to mode MODE.
1195 Both X and MODE may be floating, or both integer.
1196 UNSIGNEDP is nonzero if X is an unsigned value.
1197 This can be done by referring to a part of X in place
1198 or by copying to a new temporary with conversion.
1200 This function *must not* call protect_from_queue
1201 except when putting X into an insn (in which case convert_move does it). */
1204 convert_to_mode (mode, x, unsignedp)
1205 enum machine_mode mode;
1209 return convert_modes (mode, VOIDmode, x, unsignedp);
1212 /* Return an rtx for a value that would result
1213 from converting X from mode OLDMODE to mode MODE.
1214 Both modes may be floating, or both integer.
1215 UNSIGNEDP is nonzero if X is an unsigned value.
1217 This can be done by referring to a part of X in place
1218 or by copying to a new temporary with conversion.
1220 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1222 This function *must not* call protect_from_queue
1223 except when putting X into an insn (in which case convert_move does it). */
1226 convert_modes (mode, oldmode, x, unsignedp)
1227 enum machine_mode mode, oldmode;
1233 /* If FROM is a SUBREG that indicates that we have already done at least
1234 the required extension, strip it. */
1236 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1237 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1238 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1239 x = gen_lowpart (mode, x);
1241 if (GET_MODE (x) != VOIDmode)
1242 oldmode = GET_MODE (x);
1244 if (mode == oldmode)
1247 /* There is one case that we must handle specially: If we are converting
1248 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1249 we are to interpret the constant as unsigned, gen_lowpart will do
1250 the wrong if the constant appears negative. What we want to do is
1251 make the high-order word of the constant zero, not all ones. */
1253 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1254 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1255 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1257 HOST_WIDE_INT val = INTVAL (x);
1259 if (oldmode != VOIDmode
1260 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1262 int width = GET_MODE_BITSIZE (oldmode);
1264 /* We need to zero extend VAL. */
1265 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1268 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1271 /* We can do this with a gen_lowpart if both desired and current modes
1272 are integer, and this is either a constant integer, a register, or a
1273 non-volatile MEM. Except for the constant case where MODE is no
1274 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1276 if ((GET_CODE (x) == CONST_INT
1277 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1278 || (GET_MODE_CLASS (mode) == MODE_INT
1279 && GET_MODE_CLASS (oldmode) == MODE_INT
1280 && (GET_CODE (x) == CONST_DOUBLE
1281 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1282 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1283 && direct_load[(int) mode])
1284 || (GET_CODE (x) == REG
1285 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1286 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1288 /* ?? If we don't know OLDMODE, we have to assume here that
1289 X does not need sign- or zero-extension. This may not be
1290 the case, but it's the best we can do. */
1291 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1292 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1294 HOST_WIDE_INT val = INTVAL (x);
1295 int width = GET_MODE_BITSIZE (oldmode);
1297 /* We must sign or zero-extend in this case. Start by
1298 zero-extending, then sign extend if we need to. */
1299 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1301 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1302 val |= (HOST_WIDE_INT) (-1) << width;
1304 return GEN_INT (val);
1307 return gen_lowpart (mode, x);
1310 temp = gen_reg_rtx (mode);
1311 convert_move (temp, x, unsignedp);
1316 /* This macro is used to determine what the largest unit size that
1317 move_by_pieces can use is. */
1319 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1320 move efficiently, as opposed to MOVE_MAX which is the maximum
1321 number of bhytes we can move with a single instruction. */
1323 #ifndef MOVE_MAX_PIECES
1324 #define MOVE_MAX_PIECES MOVE_MAX
1327 /* Generate several move instructions to copy LEN bytes
1328 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1329 The caller must pass FROM and TO
1330 through protect_from_queue before calling.
1331 ALIGN (in bytes) is maximum alignment we can assume. */
1334 move_by_pieces (to, from, len, align)
1338 struct move_by_pieces data;
1339 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1340 int max_size = MOVE_MAX_PIECES + 1;
1341 enum machine_mode mode = VOIDmode, tmode;
1342 enum insn_code icode;
1345 data.to_addr = to_addr;
1346 data.from_addr = from_addr;
1350 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1351 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1353 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1354 || GET_CODE (from_addr) == POST_INC
1355 || GET_CODE (from_addr) == POST_DEC);
1357 data.explicit_inc_from = 0;
1358 data.explicit_inc_to = 0;
1360 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1361 if (data.reverse) data.offset = len;
1364 data.to_struct = MEM_IN_STRUCT_P (to);
1365 data.from_struct = MEM_IN_STRUCT_P (from);
1367 /* If copying requires more than two move insns,
1368 copy addresses to registers (to make displacements shorter)
1369 and use post-increment if available. */
1370 if (!(data.autinc_from && data.autinc_to)
1371 && move_by_pieces_ninsns (len, align) > 2)
1373 /* Find the mode of the largest move... */
1374 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1375 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1376 if (GET_MODE_SIZE (tmode) < max_size)
1379 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1381 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1382 data.autinc_from = 1;
1383 data.explicit_inc_from = -1;
1385 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1387 data.from_addr = copy_addr_to_reg (from_addr);
1388 data.autinc_from = 1;
1389 data.explicit_inc_from = 1;
1391 if (!data.autinc_from && CONSTANT_P (from_addr))
1392 data.from_addr = copy_addr_to_reg (from_addr);
1393 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1395 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1397 data.explicit_inc_to = -1;
1399 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1401 data.to_addr = copy_addr_to_reg (to_addr);
1403 data.explicit_inc_to = 1;
1405 if (!data.autinc_to && CONSTANT_P (to_addr))
1406 data.to_addr = copy_addr_to_reg (to_addr);
1409 if (! SLOW_UNALIGNED_ACCESS
1410 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1413 /* First move what we can in the largest integer mode, then go to
1414 successively smaller modes. */
1416 while (max_size > 1)
1418 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1419 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1420 if (GET_MODE_SIZE (tmode) < max_size)
1423 if (mode == VOIDmode)
1426 icode = mov_optab->handlers[(int) mode].insn_code;
1427 if (icode != CODE_FOR_nothing
1428 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1429 GET_MODE_SIZE (mode)))
1430 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1432 max_size = GET_MODE_SIZE (mode);
1435 /* The code above should have handled everything. */
1440 /* Return number of insns required to move L bytes by pieces.
1441 ALIGN (in bytes) is maximum alignment we can assume. */
1444 move_by_pieces_ninsns (l, align)
1448 register int n_insns = 0;
1449 int max_size = MOVE_MAX + 1;
1451 if (! SLOW_UNALIGNED_ACCESS
1452 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1455 while (max_size > 1)
1457 enum machine_mode mode = VOIDmode, tmode;
1458 enum insn_code icode;
1460 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1461 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1462 if (GET_MODE_SIZE (tmode) < max_size)
1465 if (mode == VOIDmode)
1468 icode = mov_optab->handlers[(int) mode].insn_code;
1469 if (icode != CODE_FOR_nothing
1470 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1471 GET_MODE_SIZE (mode)))
1472 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1474 max_size = GET_MODE_SIZE (mode);
1480 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1481 with move instructions for mode MODE. GENFUN is the gen_... function
1482 to make a move insn for that mode. DATA has all the other info. */
1485 move_by_pieces_1 (genfun, mode, data)
1486 rtx (*genfun) PROTO ((rtx, ...));
1487 enum machine_mode mode;
1488 struct move_by_pieces *data;
1490 register int size = GET_MODE_SIZE (mode);
1491 register rtx to1, from1;
1493 while (data->len >= size)
1495 if (data->reverse) data->offset -= size;
1497 to1 = (data->autinc_to
1498 ? gen_rtx_MEM (mode, data->to_addr)
1499 : copy_rtx (change_address (data->to, mode,
1500 plus_constant (data->to_addr,
1502 MEM_IN_STRUCT_P (to1) = data->to_struct;
1505 = (data->autinc_from
1506 ? gen_rtx_MEM (mode, data->from_addr)
1507 : copy_rtx (change_address (data->from, mode,
1508 plus_constant (data->from_addr,
1510 MEM_IN_STRUCT_P (from1) = data->from_struct;
1512 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1513 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1514 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1515 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1517 emit_insn ((*genfun) (to1, from1));
1518 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1519 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1520 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1521 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1523 if (! data->reverse) data->offset += size;
1529 /* Emit code to move a block Y to a block X.
1530 This may be done with string-move instructions,
1531 with multiple scalar move instructions, or with a library call.
1533 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1535 SIZE is an rtx that says how long they are.
1536 ALIGN is the maximum alignment we can assume they have,
1539 Return the address of the new block, if memcpy is called and returns it,
1543 emit_block_move (x, y, size, align)
1549 #ifdef TARGET_MEM_FUNCTIONS
1551 tree call_expr, arg_list;
1554 if (GET_MODE (x) != BLKmode)
1557 if (GET_MODE (y) != BLKmode)
1560 x = protect_from_queue (x, 1);
1561 y = protect_from_queue (y, 0);
1562 size = protect_from_queue (size, 0);
1564 if (GET_CODE (x) != MEM)
1566 if (GET_CODE (y) != MEM)
1571 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1572 move_by_pieces (x, y, INTVAL (size), align);
1575 /* Try the most limited insn first, because there's no point
1576 including more than one in the machine description unless
1577 the more limited one has some advantage. */
1579 rtx opalign = GEN_INT (align);
1580 enum machine_mode mode;
1582 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1583 mode = GET_MODE_WIDER_MODE (mode))
1585 enum insn_code code = movstr_optab[(int) mode];
1587 if (code != CODE_FOR_nothing
1588 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1589 here because if SIZE is less than the mode mask, as it is
1590 returned by the macro, it will definitely be less than the
1591 actual mode mask. */
1592 && ((GET_CODE (size) == CONST_INT
1593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1594 <= (GET_MODE_MASK (mode) >> 1)))
1595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1596 && (insn_operand_predicate[(int) code][0] == 0
1597 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1598 && (insn_operand_predicate[(int) code][1] == 0
1599 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1600 && (insn_operand_predicate[(int) code][3] == 0
1601 || (*insn_operand_predicate[(int) code][3]) (opalign,
1605 rtx last = get_last_insn ();
1608 op2 = convert_to_mode (mode, size, 1);
1609 if (insn_operand_predicate[(int) code][2] != 0
1610 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1611 op2 = copy_to_mode_reg (mode, op2);
1613 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1620 delete_insns_since (last);
1624 /* X, Y, or SIZE may have been passed through protect_from_queue.
1626 It is unsafe to save the value generated by protect_from_queue
1627 and reuse it later. Consider what happens if emit_queue is
1628 called before the return value from protect_from_queue is used.
1630 Expansion of the CALL_EXPR below will call emit_queue before
1631 we are finished emitting RTL for argument setup. So if we are
1632 not careful we could get the wrong value for an argument.
1634 To avoid this problem we go ahead and emit code to copy X, Y &
1635 SIZE into new pseudos. We can then place those new pseudos
1636 into an RTL_EXPR and use them later, even after a call to
1639 Note this is not strictly needed for library calls since they
1640 do not call emit_queue before loading their arguments. However,
1641 we may need to have library calls call emit_queue in the future
1642 since failing to do so could cause problems for targets which
1643 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1644 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1645 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1647 #ifdef TARGET_MEM_FUNCTIONS
1648 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1650 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1651 TREE_UNSIGNED (integer_type_node));
1652 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1655 #ifdef TARGET_MEM_FUNCTIONS
1656 /* It is incorrect to use the libcall calling conventions to call
1657 memcpy in this context.
1659 This could be a user call to memcpy and the user may wish to
1660 examine the return value from memcpy.
1662 For targets where libcalls and normal calls have different conventions
1663 for returning pointers, we could end up generating incorrect code.
1665 So instead of using a libcall sequence we build up a suitable
1666 CALL_EXPR and expand the call in the normal fashion. */
1667 if (fn == NULL_TREE)
1671 /* This was copied from except.c, I don't know if all this is
1672 necessary in this context or not. */
1673 fn = get_identifier ("memcpy");
1674 push_obstacks_nochange ();
1675 end_temporary_allocation ();
1676 fntype = build_pointer_type (void_type_node);
1677 fntype = build_function_type (fntype, NULL_TREE);
1678 fn = build_decl (FUNCTION_DECL, fn, fntype);
1679 DECL_EXTERNAL (fn) = 1;
1680 TREE_PUBLIC (fn) = 1;
1681 DECL_ARTIFICIAL (fn) = 1;
1682 make_decl_rtl (fn, NULL_PTR, 1);
1683 assemble_external (fn);
1687 /* We need to make an argument list for the function call.
1689 memcpy has three arguments, the first two are void * addresses and
1690 the last is a size_t byte count for the copy. */
1692 = build_tree_list (NULL_TREE,
1693 make_tree (build_pointer_type (void_type_node), x));
1694 TREE_CHAIN (arg_list)
1695 = build_tree_list (NULL_TREE,
1696 make_tree (build_pointer_type (void_type_node), y));
1697 TREE_CHAIN (TREE_CHAIN (arg_list))
1698 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1699 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1701 /* Now we have to build up the CALL_EXPR itself. */
1702 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1703 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1704 call_expr, arg_list, NULL_TREE);
1705 TREE_SIDE_EFFECTS (call_expr) = 1;
1707 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1709 emit_library_call (bcopy_libfunc, 0,
1710 VOIDmode, 3, y, Pmode, x, Pmode,
1711 convert_to_mode (TYPE_MODE (integer_type_node), size,
1712 TREE_UNSIGNED (integer_type_node)),
1713 TYPE_MODE (integer_type_node));
1720 /* Copy all or part of a value X into registers starting at REGNO.
1721 The number of registers to be filled is NREGS. */
1724 move_block_to_reg (regno, x, nregs, mode)
1728 enum machine_mode mode;
1731 #ifdef HAVE_load_multiple
1739 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1740 x = validize_mem (force_const_mem (mode, x));
1742 /* See if the machine can do this with a load multiple insn. */
1743 #ifdef HAVE_load_multiple
1744 if (HAVE_load_multiple)
1746 last = get_last_insn ();
1747 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1755 delete_insns_since (last);
1759 for (i = 0; i < nregs; i++)
1760 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1761 operand_subword_force (x, i, mode));
1764 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1765 The number of registers to be filled is NREGS. SIZE indicates the number
1766 of bytes in the object X. */
1770 move_block_from_reg (regno, x, nregs, size)
1777 #ifdef HAVE_store_multiple
1781 enum machine_mode mode;
1783 /* If SIZE is that of a mode no bigger than a word, just use that
1784 mode's store operation. */
1785 if (size <= UNITS_PER_WORD
1786 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1788 emit_move_insn (change_address (x, mode, NULL),
1789 gen_rtx_REG (mode, regno));
1793 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1794 to the left before storing to memory. Note that the previous test
1795 doesn't handle all cases (e.g. SIZE == 3). */
1796 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1798 rtx tem = operand_subword (x, 0, 1, BLKmode);
1804 shift = expand_shift (LSHIFT_EXPR, word_mode,
1805 gen_rtx_REG (word_mode, regno),
1806 build_int_2 ((UNITS_PER_WORD - size)
1807 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1808 emit_move_insn (tem, shift);
1812 /* See if the machine can do this with a store multiple insn. */
1813 #ifdef HAVE_store_multiple
1814 if (HAVE_store_multiple)
1816 last = get_last_insn ();
1817 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1825 delete_insns_since (last);
1829 for (i = 0; i < nregs; i++)
1831 rtx tem = operand_subword (x, i, 1, BLKmode);
1836 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1840 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1841 registers represented by a PARALLEL. SSIZE represents the total size of
1842 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1844 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1845 the balance will be in what would be the low-order memory addresses, i.e.
1846 left justified for big endian, right justified for little endian. This
1847 happens to be true for the targets currently using this support. If this
1848 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1852 emit_group_load (dst, orig_src, ssize, align)
1859 if (GET_CODE (dst) != PARALLEL)
1862 /* Check for a NULL entry, used to indicate that the parameter goes
1863 both on the stack and in registers. */
1864 if (XEXP (XVECEXP (dst, 0, 0), 0))
1869 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1871 /* If we won't be loading directly from memory, protect the real source
1872 from strange tricks we might play. */
1874 if (GET_CODE (src) != MEM)
1876 src = gen_reg_rtx (GET_MODE (orig_src));
1877 emit_move_insn (src, orig_src);
1880 /* Process the pieces. */
1881 for (i = start; i < XVECLEN (dst, 0); i++)
1883 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1884 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1885 int bytelen = GET_MODE_SIZE (mode);
1888 /* Handle trailing fragments that run over the size of the struct. */
1889 if (ssize >= 0 && bytepos + bytelen > ssize)
1891 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1892 bytelen = ssize - bytepos;
1897 /* Optimize the access just a bit. */
1898 if (GET_CODE (src) == MEM
1899 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1900 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1901 && bytelen == GET_MODE_SIZE (mode))
1903 tmps[i] = gen_reg_rtx (mode);
1904 emit_move_insn (tmps[i],
1905 change_address (src, mode,
1906 plus_constant (XEXP (src, 0),
1909 else if (GET_CODE (src) == CONCAT)
1912 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1913 tmps[i] = XEXP (src, 0);
1914 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1915 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1916 tmps[i] = XEXP (src, 1);
1922 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1923 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1924 mode, mode, align, ssize);
1927 if (BYTES_BIG_ENDIAN && shift)
1929 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1930 tmps[i], 0, OPTAB_WIDEN);
1935 /* Copy the extracted pieces into the proper (probable) hard regs. */
1936 for (i = start; i < XVECLEN (dst, 0); i++)
1937 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1940 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1941 registers represented by a PARALLEL. SSIZE represents the total size of
1942 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1945 emit_group_store (orig_dst, src, ssize, align)
1952 if (GET_CODE (src) != PARALLEL)
1955 /* Check for a NULL entry, used to indicate that the parameter goes
1956 both on the stack and in registers. */
1957 if (XEXP (XVECEXP (src, 0, 0), 0))
1962 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1964 /* Copy the (probable) hard regs into pseudos. */
1965 for (i = start; i < XVECLEN (src, 0); i++)
1967 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1968 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1969 emit_move_insn (tmps[i], reg);
1973 /* If we won't be storing directly into memory, protect the real destination
1974 from strange tricks we might play. */
1976 if (GET_CODE (dst) == PARALLEL)
1980 /* We can get a PARALLEL dst if there is a conditional expression in
1981 a return statement. In that case, the dst and src are the same,
1982 so no action is necessary. */
1983 if (rtx_equal_p (dst, src))
1986 /* It is unclear if we can ever reach here, but we may as well handle
1987 it. Allocate a temporary, and split this into a store/load to/from
1990 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1991 emit_group_store (temp, src, ssize, align);
1992 emit_group_load (dst, temp, ssize, align);
1995 else if (GET_CODE (dst) != MEM)
1997 dst = gen_reg_rtx (GET_MODE (orig_dst));
1998 /* Make life a bit easier for combine. */
1999 emit_move_insn (dst, const0_rtx);
2001 else if (! MEM_IN_STRUCT_P (dst))
2003 /* store_bit_field requires that memory operations have
2004 mem_in_struct_p set; we might not. */
2006 dst = copy_rtx (orig_dst);
2007 MEM_SET_IN_STRUCT_P (dst, 1);
2010 /* Process the pieces. */
2011 for (i = start; i < XVECLEN (src, 0); i++)
2013 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2014 enum machine_mode mode = GET_MODE (tmps[i]);
2015 int bytelen = GET_MODE_SIZE (mode);
2017 /* Handle trailing fragments that run over the size of the struct. */
2018 if (ssize >= 0 && bytepos + bytelen > ssize)
2020 if (BYTES_BIG_ENDIAN)
2022 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2023 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2024 tmps[i], 0, OPTAB_WIDEN);
2026 bytelen = ssize - bytepos;
2029 /* Optimize the access just a bit. */
2030 if (GET_CODE (dst) == MEM
2031 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2032 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2033 && bytelen == GET_MODE_SIZE (mode))
2035 emit_move_insn (change_address (dst, mode,
2036 plus_constant (XEXP (dst, 0),
2042 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2043 mode, tmps[i], align, ssize);
2048 /* Copy from the pseudo into the (probable) hard reg. */
2049 if (GET_CODE (dst) == REG)
2050 emit_move_insn (orig_dst, dst);
2053 /* Generate code to copy a BLKmode object of TYPE out of a
2054 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2055 is null, a stack temporary is created. TGTBLK is returned.
2057 The primary purpose of this routine is to handle functions
2058 that return BLKmode structures in registers. Some machines
2059 (the PA for example) want to return all small structures
2060 in registers regardless of the structure's alignment.
2064 copy_blkmode_from_reg(tgtblk,srcreg,type)
2069 int bytes = int_size_in_bytes (type);
2070 rtx src = NULL, dst = NULL;
2071 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2072 int bitpos, xbitpos, big_endian_correction = 0;
2076 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2077 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2078 preserve_temp_slots (tgtblk);
2081 /* This code assumes srcreg is at least a full word. If it isn't,
2082 copy it into a new pseudo which is a full word. */
2083 if (GET_MODE (srcreg) != BLKmode
2084 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2085 srcreg = convert_to_mode (word_mode, srcreg,
2086 TREE_UNSIGNED (type));
2088 /* Structures whose size is not a multiple of a word are aligned
2089 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2090 machine, this means we must skip the empty high order bytes when
2091 calculating the bit offset. */
2092 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2093 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2096 /* Copy the structure BITSIZE bites at a time.
2098 We could probably emit more efficient code for machines
2099 which do not use strict alignment, but it doesn't seem
2100 worth the effort at the current time. */
2101 for (bitpos = 0, xbitpos = big_endian_correction;
2102 bitpos < bytes * BITS_PER_UNIT;
2103 bitpos += bitsize, xbitpos += bitsize)
2106 /* We need a new source operand each time xbitpos is on a
2107 word boundary and when xbitpos == big_endian_correction
2108 (the first time through). */
2109 if (xbitpos % BITS_PER_WORD == 0
2110 || xbitpos == big_endian_correction)
2111 src = operand_subword_force (srcreg,
2112 xbitpos / BITS_PER_WORD,
2115 /* We need a new destination operand each time bitpos is on
2117 if (bitpos % BITS_PER_WORD == 0)
2118 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2120 /* Use xbitpos for the source extraction (right justified) and
2121 xbitpos for the destination store (left justified). */
2122 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2123 extract_bit_field (src, bitsize,
2124 xbitpos % BITS_PER_WORD, 1,
2125 NULL_RTX, word_mode,
2127 bitsize / BITS_PER_UNIT,
2129 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2135 /* Add a USE expression for REG to the (possibly empty) list pointed
2136 to by CALL_FUSAGE. REG must denote a hard register. */
2139 use_reg (call_fusage, reg)
2140 rtx *call_fusage, reg;
2142 if (GET_CODE (reg) != REG
2143 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2147 = gen_rtx_EXPR_LIST (VOIDmode,
2148 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2151 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2152 starting at REGNO. All of these registers must be hard registers. */
2155 use_regs (call_fusage, regno, nregs)
2162 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2165 for (i = 0; i < nregs; i++)
2166 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2169 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2170 PARALLEL REGS. This is for calls that pass values in multiple
2171 non-contiguous locations. The Irix 6 ABI has examples of this. */
2174 use_group_regs (call_fusage, regs)
2180 for (i = 0; i < XVECLEN (regs, 0); i++)
2182 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2184 /* A NULL entry means the parameter goes both on the stack and in
2185 registers. This can also be a MEM for targets that pass values
2186 partially on the stack and partially in registers. */
2187 if (reg != 0 && GET_CODE (reg) == REG)
2188 use_reg (call_fusage, reg);
2192 /* Generate several move instructions to clear LEN bytes of block TO.
2193 (A MEM rtx with BLKmode). The caller must pass TO through
2194 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2198 clear_by_pieces (to, len, align)
2202 struct clear_by_pieces data;
2203 rtx to_addr = XEXP (to, 0);
2204 int max_size = MOVE_MAX_PIECES + 1;
2205 enum machine_mode mode = VOIDmode, tmode;
2206 enum insn_code icode;
2209 data.to_addr = to_addr;
2212 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2213 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2215 data.explicit_inc_to = 0;
2217 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2218 if (data.reverse) data.offset = len;
2221 data.to_struct = MEM_IN_STRUCT_P (to);
2223 /* If copying requires more than two move insns,
2224 copy addresses to registers (to make displacements shorter)
2225 and use post-increment if available. */
2227 && move_by_pieces_ninsns (len, align) > 2)
2229 /* Determine the main mode we'll be using */
2230 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2231 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2232 if (GET_MODE_SIZE (tmode) < max_size)
2235 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2237 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2239 data.explicit_inc_to = -1;
2241 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2243 data.to_addr = copy_addr_to_reg (to_addr);
2245 data.explicit_inc_to = 1;
2247 if (!data.autinc_to && CONSTANT_P (to_addr))
2248 data.to_addr = copy_addr_to_reg (to_addr);
2251 if (! SLOW_UNALIGNED_ACCESS
2252 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2255 /* First move what we can in the largest integer mode, then go to
2256 successively smaller modes. */
2258 while (max_size > 1)
2260 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2261 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2262 if (GET_MODE_SIZE (tmode) < max_size)
2265 if (mode == VOIDmode)
2268 icode = mov_optab->handlers[(int) mode].insn_code;
2269 if (icode != CODE_FOR_nothing
2270 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2271 GET_MODE_SIZE (mode)))
2272 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2274 max_size = GET_MODE_SIZE (mode);
2277 /* The code above should have handled everything. */
2282 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2283 with move instructions for mode MODE. GENFUN is the gen_... function
2284 to make a move insn for that mode. DATA has all the other info. */
2287 clear_by_pieces_1 (genfun, mode, data)
2288 rtx (*genfun) PROTO ((rtx, ...));
2289 enum machine_mode mode;
2290 struct clear_by_pieces *data;
2292 register int size = GET_MODE_SIZE (mode);
2295 while (data->len >= size)
2297 if (data->reverse) data->offset -= size;
2299 to1 = (data->autinc_to
2300 ? gen_rtx_MEM (mode, data->to_addr)
2301 : copy_rtx (change_address (data->to, mode,
2302 plus_constant (data->to_addr,
2304 MEM_IN_STRUCT_P (to1) = data->to_struct;
2306 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2307 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2309 emit_insn ((*genfun) (to1, const0_rtx));
2310 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2311 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2313 if (! data->reverse) data->offset += size;
2319 /* Write zeros through the storage of OBJECT.
2320 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2321 the maximum alignment we can is has, measured in bytes.
2323 If we call a function that returns the length of the block, return it. */
2326 clear_storage (object, size, align)
2331 #ifdef TARGET_MEM_FUNCTIONS
2333 tree call_expr, arg_list;
2337 if (GET_MODE (object) == BLKmode)
2339 object = protect_from_queue (object, 1);
2340 size = protect_from_queue (size, 0);
2342 if (GET_CODE (size) == CONST_INT
2343 && MOVE_BY_PIECES_P (INTVAL (size), align))
2344 clear_by_pieces (object, INTVAL (size), align);
2348 /* Try the most limited insn first, because there's no point
2349 including more than one in the machine description unless
2350 the more limited one has some advantage. */
2352 rtx opalign = GEN_INT (align);
2353 enum machine_mode mode;
2355 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2356 mode = GET_MODE_WIDER_MODE (mode))
2358 enum insn_code code = clrstr_optab[(int) mode];
2360 if (code != CODE_FOR_nothing
2361 /* We don't need MODE to be narrower than
2362 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2363 the mode mask, as it is returned by the macro, it will
2364 definitely be less than the actual mode mask. */
2365 && ((GET_CODE (size) == CONST_INT
2366 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2367 <= (GET_MODE_MASK (mode) >> 1)))
2368 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2369 && (insn_operand_predicate[(int) code][0] == 0
2370 || (*insn_operand_predicate[(int) code][0]) (object,
2372 && (insn_operand_predicate[(int) code][2] == 0
2373 || (*insn_operand_predicate[(int) code][2]) (opalign,
2377 rtx last = get_last_insn ();
2380 op1 = convert_to_mode (mode, size, 1);
2381 if (insn_operand_predicate[(int) code][1] != 0
2382 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2384 op1 = copy_to_mode_reg (mode, op1);
2386 pat = GEN_FCN ((int) code) (object, op1, opalign);
2393 delete_insns_since (last);
2397 /* OBJECT or SIZE may have been passed through protect_from_queue.
2399 It is unsafe to save the value generated by protect_from_queue
2400 and reuse it later. Consider what happens if emit_queue is
2401 called before the return value from protect_from_queue is used.
2403 Expansion of the CALL_EXPR below will call emit_queue before
2404 we are finished emitting RTL for argument setup. So if we are
2405 not careful we could get the wrong value for an argument.
2407 To avoid this problem we go ahead and emit code to copy OBJECT
2408 and SIZE into new pseudos. We can then place those new pseudos
2409 into an RTL_EXPR and use them later, even after a call to
2412 Note this is not strictly needed for library calls since they
2413 do not call emit_queue before loading their arguments. However,
2414 we may need to have library calls call emit_queue in the future
2415 since failing to do so could cause problems for targets which
2416 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2417 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2419 #ifdef TARGET_MEM_FUNCTIONS
2420 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2422 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2423 TREE_UNSIGNED (integer_type_node));
2424 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2428 #ifdef TARGET_MEM_FUNCTIONS
2429 /* It is incorrect to use the libcall calling conventions to call
2430 memset in this context.
2432 This could be a user call to memset and the user may wish to
2433 examine the return value from memset.
2435 For targets where libcalls and normal calls have different
2436 conventions for returning pointers, we could end up generating
2439 So instead of using a libcall sequence we build up a suitable
2440 CALL_EXPR and expand the call in the normal fashion. */
2441 if (fn == NULL_TREE)
2445 /* This was copied from except.c, I don't know if all this is
2446 necessary in this context or not. */
2447 fn = get_identifier ("memset");
2448 push_obstacks_nochange ();
2449 end_temporary_allocation ();
2450 fntype = build_pointer_type (void_type_node);
2451 fntype = build_function_type (fntype, NULL_TREE);
2452 fn = build_decl (FUNCTION_DECL, fn, fntype);
2453 DECL_EXTERNAL (fn) = 1;
2454 TREE_PUBLIC (fn) = 1;
2455 DECL_ARTIFICIAL (fn) = 1;
2456 make_decl_rtl (fn, NULL_PTR, 1);
2457 assemble_external (fn);
2461 /* We need to make an argument list for the function call.
2463 memset has three arguments, the first is a void * addresses, the
2464 second a integer with the initialization value, the last is a
2465 size_t byte count for the copy. */
2467 = build_tree_list (NULL_TREE,
2468 make_tree (build_pointer_type (void_type_node),
2470 TREE_CHAIN (arg_list)
2471 = build_tree_list (NULL_TREE,
2472 make_tree (integer_type_node, const0_rtx));
2473 TREE_CHAIN (TREE_CHAIN (arg_list))
2474 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2475 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2477 /* Now we have to build up the CALL_EXPR itself. */
2478 call_expr = build1 (ADDR_EXPR,
2479 build_pointer_type (TREE_TYPE (fn)), fn);
2480 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2481 call_expr, arg_list, NULL_TREE);
2482 TREE_SIDE_EFFECTS (call_expr) = 1;
2484 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2486 emit_library_call (bzero_libfunc, 0,
2487 VOIDmode, 2, object, Pmode, size,
2488 TYPE_MODE (integer_type_node));
2493 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2498 /* Generate code to copy Y into X.
2499 Both Y and X must have the same mode, except that
2500 Y can be a constant with VOIDmode.
2501 This mode cannot be BLKmode; use emit_block_move for that.
2503 Return the last instruction emitted. */
2506 emit_move_insn (x, y)
2509 enum machine_mode mode = GET_MODE (x);
2511 x = protect_from_queue (x, 1);
2512 y = protect_from_queue (y, 0);
2514 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2517 /* Never force constant_p_rtx to memory. */
2518 if (GET_CODE (y) == CONSTANT_P_RTX)
2520 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2521 y = force_const_mem (mode, y);
2523 /* If X or Y are memory references, verify that their addresses are valid
2525 if (GET_CODE (x) == MEM
2526 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2527 && ! push_operand (x, GET_MODE (x)))
2529 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2530 x = change_address (x, VOIDmode, XEXP (x, 0));
2532 if (GET_CODE (y) == MEM
2533 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2535 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2536 y = change_address (y, VOIDmode, XEXP (y, 0));
2538 if (mode == BLKmode)
2541 return emit_move_insn_1 (x, y);
2544 /* Low level part of emit_move_insn.
2545 Called just like emit_move_insn, but assumes X and Y
2546 are basically valid. */
2549 emit_move_insn_1 (x, y)
2552 enum machine_mode mode = GET_MODE (x);
2553 enum machine_mode submode;
2554 enum mode_class class = GET_MODE_CLASS (mode);
2557 if (mode >= MAX_MACHINE_MODE)
2560 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2562 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2564 /* Expand complex moves by moving real part and imag part, if possible. */
2565 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2566 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2568 (class == MODE_COMPLEX_INT
2569 ? MODE_INT : MODE_FLOAT),
2571 && (mov_optab->handlers[(int) submode].insn_code
2572 != CODE_FOR_nothing))
2574 /* Don't split destination if it is a stack push. */
2575 int stack = push_operand (x, GET_MODE (x));
2577 /* If this is a stack, push the highpart first, so it
2578 will be in the argument order.
2580 In that case, change_address is used only to convert
2581 the mode, not to change the address. */
2584 /* Note that the real part always precedes the imag part in memory
2585 regardless of machine's endianness. */
2586 #ifdef STACK_GROWS_DOWNWARD
2587 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2588 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2589 gen_imagpart (submode, y)));
2590 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2591 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2592 gen_realpart (submode, y)));
2594 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2595 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2596 gen_realpart (submode, y)));
2597 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2598 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2599 gen_imagpart (submode, y)));
2604 /* Show the output dies here. This is necessary for pseudos;
2605 hard regs shouldn't appear here except as return values.
2606 We never want to emit such a clobber after reload. */
2608 && ! (reload_in_progress || reload_completed))
2610 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2613 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2614 (gen_realpart (submode, x), gen_realpart (submode, y)));
2615 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2616 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2619 return get_last_insn ();
2622 /* This will handle any multi-word mode that lacks a move_insn pattern.
2623 However, you will get better code if you define such patterns,
2624 even if they must turn into multiple assembler instructions. */
2625 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2629 #ifdef PUSH_ROUNDING
2631 /* If X is a push on the stack, do the push now and replace
2632 X with a reference to the stack pointer. */
2633 if (push_operand (x, GET_MODE (x)))
2635 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2636 x = change_address (x, VOIDmode, stack_pointer_rtx);
2640 /* Show the output dies here. This is necessary for pseudos;
2641 hard regs shouldn't appear here except as return values.
2642 We never want to emit such a clobber after reload. */
2644 && ! (reload_in_progress || reload_completed))
2646 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2650 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2653 rtx xpart = operand_subword (x, i, 1, mode);
2654 rtx ypart = operand_subword (y, i, 1, mode);
2656 /* If we can't get a part of Y, put Y into memory if it is a
2657 constant. Otherwise, force it into a register. If we still
2658 can't get a part of Y, abort. */
2659 if (ypart == 0 && CONSTANT_P (y))
2661 y = force_const_mem (mode, y);
2662 ypart = operand_subword (y, i, 1, mode);
2664 else if (ypart == 0)
2665 ypart = operand_subword_force (y, i, mode);
2667 if (xpart == 0 || ypart == 0)
2670 last_insn = emit_move_insn (xpart, ypart);
2679 /* Pushing data onto the stack. */
2681 /* Push a block of length SIZE (perhaps variable)
2682 and return an rtx to address the beginning of the block.
2683 Note that it is not possible for the value returned to be a QUEUED.
2684 The value may be virtual_outgoing_args_rtx.
2686 EXTRA is the number of bytes of padding to push in addition to SIZE.
2687 BELOW nonzero means this padding comes at low addresses;
2688 otherwise, the padding comes at high addresses. */
2691 push_block (size, extra, below)
2697 size = convert_modes (Pmode, ptr_mode, size, 1);
2698 if (CONSTANT_P (size))
2699 anti_adjust_stack (plus_constant (size, extra));
2700 else if (GET_CODE (size) == REG && extra == 0)
2701 anti_adjust_stack (size);
2704 rtx temp = copy_to_mode_reg (Pmode, size);
2706 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2707 temp, 0, OPTAB_LIB_WIDEN);
2708 anti_adjust_stack (temp);
2711 #if defined (STACK_GROWS_DOWNWARD) \
2712 || (defined (ARGS_GROW_DOWNWARD) \
2713 && !defined (ACCUMULATE_OUTGOING_ARGS))
2715 /* Return the lowest stack address when STACK or ARGS grow downward and
2716 we are not aaccumulating outgoing arguments (the c4x port uses such
2718 temp = virtual_outgoing_args_rtx;
2719 if (extra != 0 && below)
2720 temp = plus_constant (temp, extra);
2722 if (GET_CODE (size) == CONST_INT)
2723 temp = plus_constant (virtual_outgoing_args_rtx,
2724 - INTVAL (size) - (below ? 0 : extra));
2725 else if (extra != 0 && !below)
2726 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2727 negate_rtx (Pmode, plus_constant (size, extra)));
2729 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2730 negate_rtx (Pmode, size));
2733 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2739 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2742 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2743 block of SIZE bytes. */
2746 get_push_address (size)
2751 if (STACK_PUSH_CODE == POST_DEC)
2752 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2753 else if (STACK_PUSH_CODE == POST_INC)
2754 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2756 temp = stack_pointer_rtx;
2758 return copy_to_reg (temp);
2761 /* Generate code to push X onto the stack, assuming it has mode MODE and
2763 MODE is redundant except when X is a CONST_INT (since they don't
2765 SIZE is an rtx for the size of data to be copied (in bytes),
2766 needed only if X is BLKmode.
2768 ALIGN (in bytes) is maximum alignment we can assume.
2770 If PARTIAL and REG are both nonzero, then copy that many of the first
2771 words of X into registers starting with REG, and push the rest of X.
2772 The amount of space pushed is decreased by PARTIAL words,
2773 rounded *down* to a multiple of PARM_BOUNDARY.
2774 REG must be a hard register in this case.
2775 If REG is zero but PARTIAL is not, take any all others actions for an
2776 argument partially in registers, but do not actually load any
2779 EXTRA is the amount in bytes of extra space to leave next to this arg.
2780 This is ignored if an argument block has already been allocated.
2782 On a machine that lacks real push insns, ARGS_ADDR is the address of
2783 the bottom of the argument block for this call. We use indexing off there
2784 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2785 argument block has not been preallocated.
2787 ARGS_SO_FAR is the size of args previously pushed for this call.
2789 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2790 for arguments passed in registers. If nonzero, it will be the number
2791 of bytes required. */
2794 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2795 args_addr, args_so_far, reg_parm_stack_space)
2797 enum machine_mode mode;
2806 int reg_parm_stack_space;
2809 enum direction stack_direction
2810 #ifdef STACK_GROWS_DOWNWARD
2816 /* Decide where to pad the argument: `downward' for below,
2817 `upward' for above, or `none' for don't pad it.
2818 Default is below for small data on big-endian machines; else above. */
2819 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2821 /* Invert direction if stack is post-update. */
2822 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2823 if (where_pad != none)
2824 where_pad = (where_pad == downward ? upward : downward);
2826 xinner = x = protect_from_queue (x, 0);
2828 if (mode == BLKmode)
2830 /* Copy a block into the stack, entirely or partially. */
2833 int used = partial * UNITS_PER_WORD;
2834 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2842 /* USED is now the # of bytes we need not copy to the stack
2843 because registers will take care of them. */
2846 xinner = change_address (xinner, BLKmode,
2847 plus_constant (XEXP (xinner, 0), used));
2849 /* If the partial register-part of the arg counts in its stack size,
2850 skip the part of stack space corresponding to the registers.
2851 Otherwise, start copying to the beginning of the stack space,
2852 by setting SKIP to 0. */
2853 skip = (reg_parm_stack_space == 0) ? 0 : used;
2855 #ifdef PUSH_ROUNDING
2856 /* Do it with several push insns if that doesn't take lots of insns
2857 and if there is no difficulty with push insns that skip bytes
2858 on the stack for alignment purposes. */
2860 && GET_CODE (size) == CONST_INT
2862 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2863 /* Here we avoid the case of a structure whose weak alignment
2864 forces many pushes of a small amount of data,
2865 and such small pushes do rounding that causes trouble. */
2866 && ((! SLOW_UNALIGNED_ACCESS)
2867 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2868 || PUSH_ROUNDING (align) == align)
2869 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2871 /* Push padding now if padding above and stack grows down,
2872 or if padding below and stack grows up.
2873 But if space already allocated, this has already been done. */
2874 if (extra && args_addr == 0
2875 && where_pad != none && where_pad != stack_direction)
2876 anti_adjust_stack (GEN_INT (extra));
2878 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2879 INTVAL (size) - used, align);
2881 if (current_function_check_memory_usage && ! in_check_memory_usage)
2885 in_check_memory_usage = 1;
2886 temp = get_push_address (INTVAL(size) - used);
2887 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2888 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2890 XEXP (xinner, 0), Pmode,
2891 GEN_INT (INTVAL(size) - used),
2892 TYPE_MODE (sizetype));
2894 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2896 GEN_INT (INTVAL(size) - used),
2897 TYPE_MODE (sizetype),
2898 GEN_INT (MEMORY_USE_RW),
2899 TYPE_MODE (integer_type_node));
2900 in_check_memory_usage = 0;
2904 #endif /* PUSH_ROUNDING */
2906 /* Otherwise make space on the stack and copy the data
2907 to the address of that space. */
2909 /* Deduct words put into registers from the size we must copy. */
2912 if (GET_CODE (size) == CONST_INT)
2913 size = GEN_INT (INTVAL (size) - used);
2915 size = expand_binop (GET_MODE (size), sub_optab, size,
2916 GEN_INT (used), NULL_RTX, 0,
2920 /* Get the address of the stack space.
2921 In this case, we do not deal with EXTRA separately.
2922 A single stack adjust will do. */
2925 temp = push_block (size, extra, where_pad == downward);
2928 else if (GET_CODE (args_so_far) == CONST_INT)
2929 temp = memory_address (BLKmode,
2930 plus_constant (args_addr,
2931 skip + INTVAL (args_so_far)));
2933 temp = memory_address (BLKmode,
2934 plus_constant (gen_rtx_PLUS (Pmode,
2938 if (current_function_check_memory_usage && ! in_check_memory_usage)
2942 in_check_memory_usage = 1;
2943 target = copy_to_reg (temp);
2944 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2945 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2947 XEXP (xinner, 0), Pmode,
2948 size, TYPE_MODE (sizetype));
2950 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2952 size, TYPE_MODE (sizetype),
2953 GEN_INT (MEMORY_USE_RW),
2954 TYPE_MODE (integer_type_node));
2955 in_check_memory_usage = 0;
2958 /* TEMP is the address of the block. Copy the data there. */
2959 if (GET_CODE (size) == CONST_INT
2960 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
2962 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2963 INTVAL (size), align);
2968 rtx opalign = GEN_INT (align);
2969 enum machine_mode mode;
2970 rtx target = gen_rtx_MEM (BLKmode, temp);
2972 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2974 mode = GET_MODE_WIDER_MODE (mode))
2976 enum insn_code code = movstr_optab[(int) mode];
2978 if (code != CODE_FOR_nothing
2979 && ((GET_CODE (size) == CONST_INT
2980 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2981 <= (GET_MODE_MASK (mode) >> 1)))
2982 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2983 && (insn_operand_predicate[(int) code][0] == 0
2984 || ((*insn_operand_predicate[(int) code][0])
2986 && (insn_operand_predicate[(int) code][1] == 0
2987 || ((*insn_operand_predicate[(int) code][1])
2989 && (insn_operand_predicate[(int) code][3] == 0
2990 || ((*insn_operand_predicate[(int) code][3])
2991 (opalign, VOIDmode))))
2993 rtx op2 = convert_to_mode (mode, size, 1);
2994 rtx last = get_last_insn ();
2997 if (insn_operand_predicate[(int) code][2] != 0
2998 && ! ((*insn_operand_predicate[(int) code][2])
3000 op2 = copy_to_mode_reg (mode, op2);
3002 pat = GEN_FCN ((int) code) (target, xinner,
3010 delete_insns_since (last);
3015 #ifndef ACCUMULATE_OUTGOING_ARGS
3016 /* If the source is referenced relative to the stack pointer,
3017 copy it to another register to stabilize it. We do not need
3018 to do this if we know that we won't be changing sp. */
3020 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3021 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3022 temp = copy_to_reg (temp);
3025 /* Make inhibit_defer_pop nonzero around the library call
3026 to force it to pop the bcopy-arguments right away. */
3028 #ifdef TARGET_MEM_FUNCTIONS
3029 emit_library_call (memcpy_libfunc, 0,
3030 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3031 convert_to_mode (TYPE_MODE (sizetype),
3032 size, TREE_UNSIGNED (sizetype)),
3033 TYPE_MODE (sizetype));
3035 emit_library_call (bcopy_libfunc, 0,
3036 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3037 convert_to_mode (TYPE_MODE (integer_type_node),
3039 TREE_UNSIGNED (integer_type_node)),
3040 TYPE_MODE (integer_type_node));
3045 else if (partial > 0)
3047 /* Scalar partly in registers. */
3049 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3052 /* # words of start of argument
3053 that we must make space for but need not store. */
3054 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3055 int args_offset = INTVAL (args_so_far);
3058 /* Push padding now if padding above and stack grows down,
3059 or if padding below and stack grows up.
3060 But if space already allocated, this has already been done. */
3061 if (extra && args_addr == 0
3062 && where_pad != none && where_pad != stack_direction)
3063 anti_adjust_stack (GEN_INT (extra));
3065 /* If we make space by pushing it, we might as well push
3066 the real data. Otherwise, we can leave OFFSET nonzero
3067 and leave the space uninitialized. */
3071 /* Now NOT_STACK gets the number of words that we don't need to
3072 allocate on the stack. */
3073 not_stack = partial - offset;
3075 /* If the partial register-part of the arg counts in its stack size,
3076 skip the part of stack space corresponding to the registers.
3077 Otherwise, start copying to the beginning of the stack space,
3078 by setting SKIP to 0. */
3079 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3081 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3082 x = validize_mem (force_const_mem (mode, x));
3084 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3085 SUBREGs of such registers are not allowed. */
3086 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3087 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3088 x = copy_to_reg (x);
3090 /* Loop over all the words allocated on the stack for this arg. */
3091 /* We can do it by words, because any scalar bigger than a word
3092 has a size a multiple of a word. */
3093 #ifndef PUSH_ARGS_REVERSED
3094 for (i = not_stack; i < size; i++)
3096 for (i = size - 1; i >= not_stack; i--)
3098 if (i >= not_stack + offset)
3099 emit_push_insn (operand_subword_force (x, i, mode),
3100 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3102 GEN_INT (args_offset + ((i - not_stack + skip)
3104 reg_parm_stack_space);
3109 rtx target = NULL_RTX;
3111 /* Push padding now if padding above and stack grows down,
3112 or if padding below and stack grows up.
3113 But if space already allocated, this has already been done. */
3114 if (extra && args_addr == 0
3115 && where_pad != none && where_pad != stack_direction)
3116 anti_adjust_stack (GEN_INT (extra));
3118 #ifdef PUSH_ROUNDING
3120 addr = gen_push_operand ();
3124 if (GET_CODE (args_so_far) == CONST_INT)
3126 = memory_address (mode,
3127 plus_constant (args_addr,
3128 INTVAL (args_so_far)));
3130 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3135 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3137 if (current_function_check_memory_usage && ! in_check_memory_usage)
3139 in_check_memory_usage = 1;
3141 target = get_push_address (GET_MODE_SIZE (mode));
3143 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3144 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3147 GEN_INT (GET_MODE_SIZE (mode)),
3148 TYPE_MODE (sizetype));
3150 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3152 GEN_INT (GET_MODE_SIZE (mode)),
3153 TYPE_MODE (sizetype),
3154 GEN_INT (MEMORY_USE_RW),
3155 TYPE_MODE (integer_type_node));
3156 in_check_memory_usage = 0;
3161 /* If part should go in registers, copy that part
3162 into the appropriate registers. Do this now, at the end,
3163 since mem-to-mem copies above may do function calls. */
3164 if (partial > 0 && reg != 0)
3166 /* Handle calls that pass values in multiple non-contiguous locations.
3167 The Irix 6 ABI has examples of this. */
3168 if (GET_CODE (reg) == PARALLEL)
3169 emit_group_load (reg, x, -1, align); /* ??? size? */
3171 move_block_to_reg (REGNO (reg), x, partial, mode);
3174 if (extra && args_addr == 0 && where_pad == stack_direction)
3175 anti_adjust_stack (GEN_INT (extra));
3178 /* Expand an assignment that stores the value of FROM into TO.
3179 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3180 (This may contain a QUEUED rtx;
3181 if the value is constant, this rtx is a constant.)
3182 Otherwise, the returned value is NULL_RTX.
3184 SUGGEST_REG is no longer actually used.
3185 It used to mean, copy the value through a register
3186 and return that register, if that is possible.
3187 We now use WANT_VALUE to decide whether to do this. */
3190 expand_assignment (to, from, want_value, suggest_reg)
3195 register rtx to_rtx = 0;
3198 /* Don't crash if the lhs of the assignment was erroneous. */
3200 if (TREE_CODE (to) == ERROR_MARK)
3202 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3203 return want_value ? result : NULL_RTX;
3206 /* Assignment of a structure component needs special treatment
3207 if the structure component's rtx is not simply a MEM.
3208 Assignment of an array element at a constant index, and assignment of
3209 an array element in an unaligned packed structure field, has the same
3212 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3213 || TREE_CODE (to) == ARRAY_REF)
3215 enum machine_mode mode1;
3225 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3226 &unsignedp, &volatilep, &alignment);
3228 /* If we are going to use store_bit_field and extract_bit_field,
3229 make sure to_rtx will be safe for multiple use. */
3231 if (mode1 == VOIDmode && want_value)
3232 tem = stabilize_reference (tem);
3234 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3237 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3239 if (GET_CODE (to_rtx) != MEM)
3242 if (GET_MODE (offset_rtx) != ptr_mode)
3244 #ifdef POINTERS_EXTEND_UNSIGNED
3245 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3247 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3251 /* A constant address in TO_RTX can have VOIDmode, we must not try
3252 to call force_reg for that case. Avoid that case. */
3253 if (GET_CODE (to_rtx) == MEM
3254 && GET_MODE (to_rtx) == BLKmode
3255 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3257 && (bitpos % bitsize) == 0
3258 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3259 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3261 rtx temp = change_address (to_rtx, mode1,
3262 plus_constant (XEXP (to_rtx, 0),
3265 if (GET_CODE (XEXP (temp, 0)) == REG)
3268 to_rtx = change_address (to_rtx, mode1,
3269 force_reg (GET_MODE (XEXP (temp, 0)),
3274 to_rtx = change_address (to_rtx, VOIDmode,
3275 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3276 force_reg (ptr_mode, offset_rtx)));
3280 if (GET_CODE (to_rtx) == MEM)
3282 /* When the offset is zero, to_rtx is the address of the
3283 structure we are storing into, and hence may be shared.
3284 We must make a new MEM before setting the volatile bit. */
3286 to_rtx = copy_rtx (to_rtx);
3288 MEM_VOLATILE_P (to_rtx) = 1;
3290 #if 0 /* This was turned off because, when a field is volatile
3291 in an object which is not volatile, the object may be in a register,
3292 and then we would abort over here. */
3298 if (TREE_CODE (to) == COMPONENT_REF
3299 && TREE_READONLY (TREE_OPERAND (to, 1)))
3302 to_rtx = copy_rtx (to_rtx);
3304 RTX_UNCHANGING_P (to_rtx) = 1;
3307 /* Check the access. */
3308 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3313 enum machine_mode best_mode;
3315 best_mode = get_best_mode (bitsize, bitpos,
3316 TYPE_ALIGN (TREE_TYPE (tem)),
3318 if (best_mode == VOIDmode)
3321 best_mode_size = GET_MODE_BITSIZE (best_mode);
3322 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3323 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3324 size *= GET_MODE_SIZE (best_mode);
3326 /* Check the access right of the pointer. */
3328 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3330 GEN_INT (size), TYPE_MODE (sizetype),
3331 GEN_INT (MEMORY_USE_WO),
3332 TYPE_MODE (integer_type_node));
3335 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3337 /* Spurious cast makes HPUX compiler happy. */
3338 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3341 /* Required alignment of containing datum. */
3343 int_size_in_bytes (TREE_TYPE (tem)),
3344 get_alias_set (to));
3345 preserve_temp_slots (result);
3349 /* If the value is meaningful, convert RESULT to the proper mode.
3350 Otherwise, return nothing. */
3351 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3352 TYPE_MODE (TREE_TYPE (from)),
3354 TREE_UNSIGNED (TREE_TYPE (to)))
3358 /* If the rhs is a function call and its value is not an aggregate,
3359 call the function before we start to compute the lhs.
3360 This is needed for correct code for cases such as
3361 val = setjmp (buf) on machines where reference to val
3362 requires loading up part of an address in a separate insn.
3364 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3365 a promoted variable where the zero- or sign- extension needs to be done.
3366 Handling this in the normal way is safe because no computation is done
3368 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3369 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3370 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3375 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3377 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3379 /* Handle calls that return values in multiple non-contiguous locations.
3380 The Irix 6 ABI has examples of this. */
3381 if (GET_CODE (to_rtx) == PARALLEL)
3382 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3383 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3384 else if (GET_MODE (to_rtx) == BLKmode)
3385 emit_block_move (to_rtx, value, expr_size (from),
3386 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3389 #ifdef POINTERS_EXTEND_UNSIGNED
3390 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3391 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3392 value = convert_memory_address (GET_MODE (to_rtx), value);
3394 emit_move_insn (to_rtx, value);
3396 preserve_temp_slots (to_rtx);
3399 return want_value ? to_rtx : NULL_RTX;
3402 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3403 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3407 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3408 if (GET_CODE (to_rtx) == MEM)
3409 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3412 /* Don't move directly into a return register. */
3413 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3418 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3419 emit_move_insn (to_rtx, temp);
3420 preserve_temp_slots (to_rtx);
3423 return want_value ? to_rtx : NULL_RTX;
3426 /* In case we are returning the contents of an object which overlaps
3427 the place the value is being stored, use a safe function when copying
3428 a value through a pointer into a structure value return block. */
3429 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3430 && current_function_returns_struct
3431 && !current_function_returns_pcc_struct)
3436 size = expr_size (from);
3437 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3438 EXPAND_MEMORY_USE_DONT);
3440 /* Copy the rights of the bitmap. */
3441 if (current_function_check_memory_usage)
3442 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3443 XEXP (to_rtx, 0), Pmode,
3444 XEXP (from_rtx, 0), Pmode,
3445 convert_to_mode (TYPE_MODE (sizetype),
3446 size, TREE_UNSIGNED (sizetype)),
3447 TYPE_MODE (sizetype));
3449 #ifdef TARGET_MEM_FUNCTIONS
3450 emit_library_call (memcpy_libfunc, 0,
3451 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3452 XEXP (from_rtx, 0), Pmode,
3453 convert_to_mode (TYPE_MODE (sizetype),
3454 size, TREE_UNSIGNED (sizetype)),
3455 TYPE_MODE (sizetype));
3457 emit_library_call (bcopy_libfunc, 0,
3458 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3459 XEXP (to_rtx, 0), Pmode,
3460 convert_to_mode (TYPE_MODE (integer_type_node),
3461 size, TREE_UNSIGNED (integer_type_node)),
3462 TYPE_MODE (integer_type_node));
3465 preserve_temp_slots (to_rtx);
3468 return want_value ? to_rtx : NULL_RTX;
3471 /* Compute FROM and store the value in the rtx we got. */
3474 result = store_expr (from, to_rtx, want_value);
3475 preserve_temp_slots (result);
3478 return want_value ? result : NULL_RTX;
3481 /* Generate code for computing expression EXP,
3482 and storing the value into TARGET.
3483 TARGET may contain a QUEUED rtx.
3485 If WANT_VALUE is nonzero, return a copy of the value
3486 not in TARGET, so that we can be sure to use the proper
3487 value in a containing expression even if TARGET has something
3488 else stored in it. If possible, we copy the value through a pseudo
3489 and return that pseudo. Or, if the value is constant, we try to
3490 return the constant. In some cases, we return a pseudo
3491 copied *from* TARGET.
3493 If the mode is BLKmode then we may return TARGET itself.
3494 It turns out that in BLKmode it doesn't cause a problem.
3495 because C has no operators that could combine two different
3496 assignments into the same BLKmode object with different values
3497 with no sequence point. Will other languages need this to
3500 If WANT_VALUE is 0, we return NULL, to make sure
3501 to catch quickly any cases where the caller uses the value
3502 and fails to set WANT_VALUE. */
3505 store_expr (exp, target, want_value)
3507 register rtx target;
3511 int dont_return_target = 0;
3513 if (TREE_CODE (exp) == COMPOUND_EXPR)
3515 /* Perform first part of compound expression, then assign from second
3517 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3519 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3521 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3523 /* For conditional expression, get safe form of the target. Then
3524 test the condition, doing the appropriate assignment on either
3525 side. This avoids the creation of unnecessary temporaries.
3526 For non-BLKmode, it is more efficient not to do this. */
3528 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3531 target = protect_from_queue (target, 1);
3533 do_pending_stack_adjust ();
3535 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3536 start_cleanup_deferral ();
3537 store_expr (TREE_OPERAND (exp, 1), target, 0);
3538 end_cleanup_deferral ();
3540 emit_jump_insn (gen_jump (lab2));
3543 start_cleanup_deferral ();
3544 store_expr (TREE_OPERAND (exp, 2), target, 0);
3545 end_cleanup_deferral ();
3550 return want_value ? target : NULL_RTX;
3552 else if (queued_subexp_p (target))
3553 /* If target contains a postincrement, let's not risk
3554 using it as the place to generate the rhs. */
3556 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3558 /* Expand EXP into a new pseudo. */
3559 temp = gen_reg_rtx (GET_MODE (target));
3560 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3563 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3565 /* If target is volatile, ANSI requires accessing the value
3566 *from* the target, if it is accessed. So make that happen.
3567 In no case return the target itself. */
3568 if (! MEM_VOLATILE_P (target) && want_value)
3569 dont_return_target = 1;
3571 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3572 && GET_MODE (target) != BLKmode)
3573 /* If target is in memory and caller wants value in a register instead,
3574 arrange that. Pass TARGET as target for expand_expr so that,
3575 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3576 We know expand_expr will not use the target in that case.
3577 Don't do this if TARGET is volatile because we are supposed
3578 to write it and then read it. */
3580 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3581 GET_MODE (target), 0);
3582 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3583 temp = copy_to_reg (temp);
3584 dont_return_target = 1;
3586 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3587 /* If this is an scalar in a register that is stored in a wider mode
3588 than the declared mode, compute the result into its declared mode
3589 and then convert to the wider mode. Our value is the computed
3592 /* If we don't want a value, we can do the conversion inside EXP,
3593 which will often result in some optimizations. Do the conversion
3594 in two steps: first change the signedness, if needed, then
3595 the extend. But don't do this if the type of EXP is a subtype
3596 of something else since then the conversion might involve
3597 more than just converting modes. */
3598 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3599 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3601 if (TREE_UNSIGNED (TREE_TYPE (exp))
3602 != SUBREG_PROMOTED_UNSIGNED_P (target))
3605 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3609 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3610 SUBREG_PROMOTED_UNSIGNED_P (target)),
3614 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3616 /* If TEMP is a volatile MEM and we want a result value, make
3617 the access now so it gets done only once. Likewise if
3618 it contains TARGET. */
3619 if (GET_CODE (temp) == MEM && want_value
3620 && (MEM_VOLATILE_P (temp)
3621 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3622 temp = copy_to_reg (temp);
3624 /* If TEMP is a VOIDmode constant, use convert_modes to make
3625 sure that we properly convert it. */
3626 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3627 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3628 TYPE_MODE (TREE_TYPE (exp)), temp,
3629 SUBREG_PROMOTED_UNSIGNED_P (target));
3631 convert_move (SUBREG_REG (target), temp,
3632 SUBREG_PROMOTED_UNSIGNED_P (target));
3634 /* If we promoted a constant, change the mode back down to match
3635 target. Otherwise, the caller might get confused by a result whose
3636 mode is larger than expected. */
3638 if (want_value && GET_MODE (temp) != GET_MODE (target)
3639 && GET_MODE (temp) != VOIDmode)
3641 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3642 SUBREG_PROMOTED_VAR_P (temp) = 1;
3643 SUBREG_PROMOTED_UNSIGNED_P (temp)
3644 = SUBREG_PROMOTED_UNSIGNED_P (target);
3647 return want_value ? temp : NULL_RTX;
3651 temp = expand_expr (exp, target, GET_MODE (target), 0);
3652 /* Return TARGET if it's a specified hardware register.
3653 If TARGET is a volatile mem ref, either return TARGET
3654 or return a reg copied *from* TARGET; ANSI requires this.
3656 Otherwise, if TEMP is not TARGET, return TEMP
3657 if it is constant (for efficiency),
3658 or if we really want the correct value. */
3659 if (!(target && GET_CODE (target) == REG
3660 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3661 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3662 && ! rtx_equal_p (temp, target)
3663 && (CONSTANT_P (temp) || want_value))
3664 dont_return_target = 1;
3667 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3668 the same as that of TARGET, adjust the constant. This is needed, for
3669 example, in case it is a CONST_DOUBLE and we want only a word-sized
3671 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3672 && TREE_CODE (exp) != ERROR_MARK
3673 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3674 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3675 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3677 if (current_function_check_memory_usage
3678 && GET_CODE (target) == MEM
3679 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3681 if (GET_CODE (temp) == MEM)
3682 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3683 XEXP (target, 0), Pmode,
3684 XEXP (temp, 0), Pmode,
3685 expr_size (exp), TYPE_MODE (sizetype));
3687 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3688 XEXP (target, 0), Pmode,
3689 expr_size (exp), TYPE_MODE (sizetype),
3690 GEN_INT (MEMORY_USE_WO),
3691 TYPE_MODE (integer_type_node));
3694 /* If value was not generated in the target, store it there.
3695 Convert the value to TARGET's type first if nec. */
3696 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3697 one or both of them are volatile memory refs, we have to distinguish
3699 - expand_expr has used TARGET. In this case, we must not generate
3700 another copy. This can be detected by TARGET being equal according
3702 - expand_expr has not used TARGET - that means that the source just
3703 happens to have the same RTX form. Since temp will have been created
3704 by expand_expr, it will compare unequal according to == .
3705 We must generate a copy in this case, to reach the correct number
3706 of volatile memory references. */
3708 if ((! rtx_equal_p (temp, target)
3709 || (temp != target && (side_effects_p (temp)
3710 || side_effects_p (target))))
3711 && TREE_CODE (exp) != ERROR_MARK)
3713 target = protect_from_queue (target, 1);
3714 if (GET_MODE (temp) != GET_MODE (target)
3715 && GET_MODE (temp) != VOIDmode)
3717 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3718 if (dont_return_target)
3720 /* In this case, we will return TEMP,
3721 so make sure it has the proper mode.
3722 But don't forget to store the value into TARGET. */
3723 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3724 emit_move_insn (target, temp);
3727 convert_move (target, temp, unsignedp);
3730 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3732 /* Handle copying a string constant into an array.
3733 The string constant may be shorter than the array.
3734 So copy just the string's actual length, and clear the rest. */
3738 /* Get the size of the data type of the string,
3739 which is actually the size of the target. */
3740 size = expr_size (exp);
3741 if (GET_CODE (size) == CONST_INT
3742 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3743 emit_block_move (target, temp, size,
3744 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3747 /* Compute the size of the data to copy from the string. */
3749 = size_binop (MIN_EXPR,
3750 make_tree (sizetype, size),
3752 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3753 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3757 /* Copy that much. */
3758 emit_block_move (target, temp, copy_size_rtx,
3759 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3761 /* Figure out how much is left in TARGET that we have to clear.
3762 Do all calculations in ptr_mode. */
3764 addr = XEXP (target, 0);
3765 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3767 if (GET_CODE (copy_size_rtx) == CONST_INT)
3769 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3770 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3774 addr = force_reg (ptr_mode, addr);
3775 addr = expand_binop (ptr_mode, add_optab, addr,
3776 copy_size_rtx, NULL_RTX, 0,
3779 size = expand_binop (ptr_mode, sub_optab, size,
3780 copy_size_rtx, NULL_RTX, 0,
3783 label = gen_label_rtx ();
3784 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3785 GET_MODE (size), 0, 0, label);
3788 if (size != const0_rtx)
3790 /* Be sure we can write on ADDR. */
3791 if (current_function_check_memory_usage)
3792 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3794 size, TYPE_MODE (sizetype),
3795 GEN_INT (MEMORY_USE_WO),
3796 TYPE_MODE (integer_type_node));
3797 #ifdef TARGET_MEM_FUNCTIONS
3798 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3800 const0_rtx, TYPE_MODE (integer_type_node),
3801 convert_to_mode (TYPE_MODE (sizetype),
3803 TREE_UNSIGNED (sizetype)),
3804 TYPE_MODE (sizetype));
3806 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3808 convert_to_mode (TYPE_MODE (integer_type_node),
3810 TREE_UNSIGNED (integer_type_node)),
3811 TYPE_MODE (integer_type_node));
3819 /* Handle calls that return values in multiple non-contiguous locations.
3820 The Irix 6 ABI has examples of this. */
3821 else if (GET_CODE (target) == PARALLEL)
3822 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3823 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3824 else if (GET_MODE (temp) == BLKmode)
3825 emit_block_move (target, temp, expr_size (exp),
3826 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3828 emit_move_insn (target, temp);
3831 /* If we don't want a value, return NULL_RTX. */
3835 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3836 ??? The latter test doesn't seem to make sense. */
3837 else if (dont_return_target && GET_CODE (temp) != MEM)
3840 /* Return TARGET itself if it is a hard register. */
3841 else if (want_value && GET_MODE (target) != BLKmode
3842 && ! (GET_CODE (target) == REG
3843 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3844 return copy_to_reg (target);
3850 /* Return 1 if EXP just contains zeros. */
3858 switch (TREE_CODE (exp))
3862 case NON_LVALUE_EXPR:
3863 return is_zeros_p (TREE_OPERAND (exp, 0));
3866 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3870 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3873 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3876 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3877 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3878 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3879 if (! is_zeros_p (TREE_VALUE (elt)))
3889 /* Return 1 if EXP contains mostly (3/4) zeros. */
3892 mostly_zeros_p (exp)
3895 if (TREE_CODE (exp) == CONSTRUCTOR)
3897 int elts = 0, zeros = 0;
3898 tree elt = CONSTRUCTOR_ELTS (exp);
3899 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3901 /* If there are no ranges of true bits, it is all zero. */
3902 return elt == NULL_TREE;
3904 for (; elt; elt = TREE_CHAIN (elt))
3906 /* We do not handle the case where the index is a RANGE_EXPR,
3907 so the statistic will be somewhat inaccurate.
3908 We do make a more accurate count in store_constructor itself,
3909 so since this function is only used for nested array elements,
3910 this should be close enough. */
3911 if (mostly_zeros_p (TREE_VALUE (elt)))
3916 return 4 * zeros >= 3 * elts;
3919 return is_zeros_p (exp);
3922 /* Helper function for store_constructor.
3923 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3924 TYPE is the type of the CONSTRUCTOR, not the element type.
3925 CLEARED is as for store_constructor.
3927 This provides a recursive shortcut back to store_constructor when it isn't
3928 necessary to go through store_field. This is so that we can pass through
3929 the cleared field to let store_constructor know that we may not have to
3930 clear a substructure if the outer structure has already been cleared. */
3933 store_constructor_field (target, bitsize, bitpos,
3934 mode, exp, type, cleared)
3936 int bitsize, bitpos;
3937 enum machine_mode mode;
3941 if (TREE_CODE (exp) == CONSTRUCTOR
3942 && bitpos % BITS_PER_UNIT == 0
3943 /* If we have a non-zero bitpos for a register target, then we just
3944 let store_field do the bitfield handling. This is unlikely to
3945 generate unnecessary clear instructions anyways. */
3946 && (bitpos == 0 || GET_CODE (target) == MEM))
3949 target = change_address (target, VOIDmode,
3950 plus_constant (XEXP (target, 0),
3951 bitpos / BITS_PER_UNIT));
3952 store_constructor (exp, target, cleared);
3955 store_field (target, bitsize, bitpos, mode, exp,
3956 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3957 int_size_in_bytes (type), 0);
3960 /* Store the value of constructor EXP into the rtx TARGET.
3961 TARGET is either a REG or a MEM.
3962 CLEARED is true if TARGET is known to have been zero'd. */
3965 store_constructor (exp, target, cleared)
3970 tree type = TREE_TYPE (exp);
3971 #ifdef WORD_REGISTER_OPERATIONS
3972 rtx exp_size = expr_size (exp);
3975 /* We know our target cannot conflict, since safe_from_p has been called. */
3977 /* Don't try copying piece by piece into a hard register
3978 since that is vulnerable to being clobbered by EXP.
3979 Instead, construct in a pseudo register and then copy it all. */
3980 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3982 rtx temp = gen_reg_rtx (GET_MODE (target));
3983 store_constructor (exp, temp, 0);
3984 emit_move_insn (target, temp);
3989 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3990 || TREE_CODE (type) == QUAL_UNION_TYPE)
3994 /* Inform later passes that the whole union value is dead. */
3995 if (TREE_CODE (type) == UNION_TYPE
3996 || TREE_CODE (type) == QUAL_UNION_TYPE)
3997 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3999 /* If we are building a static constructor into a register,
4000 set the initial value as zero so we can fold the value into
4001 a constant. But if more than one register is involved,
4002 this probably loses. */
4003 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4004 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4007 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4012 /* If the constructor has fewer fields than the structure
4013 or if we are initializing the structure to mostly zeros,
4014 clear the whole structure first. */
4015 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4016 != list_length (TYPE_FIELDS (type)))
4017 || mostly_zeros_p (exp))
4020 clear_storage (target, expr_size (exp),
4021 TYPE_ALIGN (type) / BITS_PER_UNIT);
4026 /* Inform later passes that the old value is dead. */
4027 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4029 /* Store each element of the constructor into
4030 the corresponding field of TARGET. */
4032 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4034 register tree field = TREE_PURPOSE (elt);
4035 tree value = TREE_VALUE (elt);
4036 register enum machine_mode mode;
4040 tree pos, constant = 0, offset = 0;
4041 rtx to_rtx = target;
4043 /* Just ignore missing fields.
4044 We cleared the whole structure, above,
4045 if any fields are missing. */
4049 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4052 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4053 unsignedp = TREE_UNSIGNED (field);
4054 mode = DECL_MODE (field);
4055 if (DECL_BIT_FIELD (field))
4058 pos = DECL_FIELD_BITPOS (field);
4059 if (TREE_CODE (pos) == INTEGER_CST)
4061 else if (TREE_CODE (pos) == PLUS_EXPR
4062 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4063 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4068 bitpos = TREE_INT_CST_LOW (constant);
4074 if (contains_placeholder_p (offset))
4075 offset = build (WITH_RECORD_EXPR, sizetype,
4076 offset, make_tree (TREE_TYPE (exp), target));
4078 offset = size_binop (FLOOR_DIV_EXPR, offset,
4079 size_int (BITS_PER_UNIT));
4081 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4082 if (GET_CODE (to_rtx) != MEM)
4085 if (GET_MODE (offset_rtx) != ptr_mode)
4087 #ifdef POINTERS_EXTEND_UNSIGNED
4088 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4090 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4095 = change_address (to_rtx, VOIDmode,
4096 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4097 force_reg (ptr_mode, offset_rtx)));
4099 if (TREE_READONLY (field))
4101 if (GET_CODE (to_rtx) == MEM)
4102 to_rtx = copy_rtx (to_rtx);
4104 RTX_UNCHANGING_P (to_rtx) = 1;
4107 #ifdef WORD_REGISTER_OPERATIONS
4108 /* If this initializes a field that is smaller than a word, at the
4109 start of a word, try to widen it to a full word.
4110 This special case allows us to output C++ member function
4111 initializations in a form that the optimizers can understand. */
4113 && GET_CODE (target) == REG
4114 && bitsize < BITS_PER_WORD
4115 && bitpos % BITS_PER_WORD == 0
4116 && GET_MODE_CLASS (mode) == MODE_INT
4117 && TREE_CODE (value) == INTEGER_CST
4118 && GET_CODE (exp_size) == CONST_INT
4119 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4121 tree type = TREE_TYPE (value);
4122 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4124 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4125 value = convert (type, value);
4127 if (BYTES_BIG_ENDIAN)
4129 = fold (build (LSHIFT_EXPR, type, value,
4130 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4131 bitsize = BITS_PER_WORD;
4135 store_constructor_field (to_rtx, bitsize, bitpos,
4136 mode, value, type, cleared);
4139 else if (TREE_CODE (type) == ARRAY_TYPE)
4144 tree domain = TYPE_DOMAIN (type);
4145 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4146 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4147 tree elttype = TREE_TYPE (type);
4149 /* If the constructor has fewer elements than the array,
4150 clear the whole array first. Similarly if this is
4151 static constructor of a non-BLKmode object. */
4152 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4156 HOST_WIDE_INT count = 0, zero_count = 0;
4158 /* This loop is a more accurate version of the loop in
4159 mostly_zeros_p (it handles RANGE_EXPR in an index).
4160 It is also needed to check for missing elements. */
4161 for (elt = CONSTRUCTOR_ELTS (exp);
4163 elt = TREE_CHAIN (elt))
4165 tree index = TREE_PURPOSE (elt);
4166 HOST_WIDE_INT this_node_count;
4167 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4169 tree lo_index = TREE_OPERAND (index, 0);
4170 tree hi_index = TREE_OPERAND (index, 1);
4171 if (TREE_CODE (lo_index) != INTEGER_CST
4172 || TREE_CODE (hi_index) != INTEGER_CST)
4177 this_node_count = TREE_INT_CST_LOW (hi_index)
4178 - TREE_INT_CST_LOW (lo_index) + 1;
4181 this_node_count = 1;
4182 count += this_node_count;
4183 if (mostly_zeros_p (TREE_VALUE (elt)))
4184 zero_count += this_node_count;
4186 /* Clear the entire array first if there are any missing elements,
4187 or if the incidence of zero elements is >= 75%. */
4188 if (count < maxelt - minelt + 1
4189 || 4 * zero_count >= 3 * count)
4195 clear_storage (target, expr_size (exp),
4196 TYPE_ALIGN (type) / BITS_PER_UNIT);
4200 /* Inform later passes that the old value is dead. */
4201 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4203 /* Store each element of the constructor into
4204 the corresponding element of TARGET, determined
4205 by counting the elements. */
4206 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4208 elt = TREE_CHAIN (elt), i++)
4210 register enum machine_mode mode;
4214 tree value = TREE_VALUE (elt);
4215 tree index = TREE_PURPOSE (elt);
4216 rtx xtarget = target;
4218 if (cleared && is_zeros_p (value))
4221 mode = TYPE_MODE (elttype);
4222 bitsize = GET_MODE_BITSIZE (mode);
4223 unsignedp = TREE_UNSIGNED (elttype);
4225 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4227 tree lo_index = TREE_OPERAND (index, 0);
4228 tree hi_index = TREE_OPERAND (index, 1);
4229 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4230 struct nesting *loop;
4231 HOST_WIDE_INT lo, hi, count;
4234 /* If the range is constant and "small", unroll the loop. */
4235 if (TREE_CODE (lo_index) == INTEGER_CST
4236 && TREE_CODE (hi_index) == INTEGER_CST
4237 && (lo = TREE_INT_CST_LOW (lo_index),
4238 hi = TREE_INT_CST_LOW (hi_index),
4239 count = hi - lo + 1,
4240 (GET_CODE (target) != MEM
4242 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4243 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4246 lo -= minelt; hi -= minelt;
4247 for (; lo <= hi; lo++)
4249 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4250 store_constructor_field (target, bitsize, bitpos,
4251 mode, value, type, cleared);
4256 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4257 loop_top = gen_label_rtx ();
4258 loop_end = gen_label_rtx ();
4260 unsignedp = TREE_UNSIGNED (domain);
4262 index = build_decl (VAR_DECL, NULL_TREE, domain);
4264 DECL_RTL (index) = index_r
4265 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4268 if (TREE_CODE (value) == SAVE_EXPR
4269 && SAVE_EXPR_RTL (value) == 0)
4271 /* Make sure value gets expanded once before the
4273 expand_expr (value, const0_rtx, VOIDmode, 0);
4276 store_expr (lo_index, index_r, 0);
4277 loop = expand_start_loop (0);
4279 /* Assign value to element index. */
4280 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4281 size_int (BITS_PER_UNIT));
4282 position = size_binop (MULT_EXPR,
4283 size_binop (MINUS_EXPR, index,
4284 TYPE_MIN_VALUE (domain)),
4286 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4287 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4288 xtarget = change_address (target, mode, addr);
4289 if (TREE_CODE (value) == CONSTRUCTOR)
4290 store_constructor (value, xtarget, cleared);
4292 store_expr (value, xtarget, 0);
4294 expand_exit_loop_if_false (loop,
4295 build (LT_EXPR, integer_type_node,
4298 expand_increment (build (PREINCREMENT_EXPR,
4300 index, integer_one_node), 0, 0);
4302 emit_label (loop_end);
4304 /* Needed by stupid register allocation. to extend the
4305 lifetime of pseudo-regs used by target past the end
4307 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4310 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4311 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4317 index = size_int (i);
4320 index = size_binop (MINUS_EXPR, index,
4321 TYPE_MIN_VALUE (domain));
4322 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4323 size_int (BITS_PER_UNIT));
4324 position = size_binop (MULT_EXPR, index, position);
4325 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4326 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4327 xtarget = change_address (target, mode, addr);
4328 store_expr (value, xtarget, 0);
4333 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4334 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4336 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4337 store_constructor_field (target, bitsize, bitpos,
4338 mode, value, type, cleared);
4342 /* set constructor assignments */
4343 else if (TREE_CODE (type) == SET_TYPE)
4345 tree elt = CONSTRUCTOR_ELTS (exp);
4346 int nbytes = int_size_in_bytes (type), nbits;
4347 tree domain = TYPE_DOMAIN (type);
4348 tree domain_min, domain_max, bitlength;
4350 /* The default implementation strategy is to extract the constant
4351 parts of the constructor, use that to initialize the target,
4352 and then "or" in whatever non-constant ranges we need in addition.
4354 If a large set is all zero or all ones, it is
4355 probably better to set it using memset (if available) or bzero.
4356 Also, if a large set has just a single range, it may also be
4357 better to first clear all the first clear the set (using
4358 bzero/memset), and set the bits we want. */
4360 /* Check for all zeros. */
4361 if (elt == NULL_TREE)
4364 clear_storage (target, expr_size (exp),
4365 TYPE_ALIGN (type) / BITS_PER_UNIT);
4369 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4370 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4371 bitlength = size_binop (PLUS_EXPR,
4372 size_binop (MINUS_EXPR, domain_max, domain_min),
4375 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4377 nbits = TREE_INT_CST_LOW (bitlength);
4379 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4380 are "complicated" (more than one range), initialize (the
4381 constant parts) by copying from a constant. */
4382 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4383 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4385 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4386 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4387 char *bit_buffer = (char *) alloca (nbits);
4388 HOST_WIDE_INT word = 0;
4391 int offset = 0; /* In bytes from beginning of set. */
4392 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4395 if (bit_buffer[ibit])
4397 if (BYTES_BIG_ENDIAN)
4398 word |= (1 << (set_word_size - 1 - bit_pos));
4400 word |= 1 << bit_pos;
4403 if (bit_pos >= set_word_size || ibit == nbits)
4405 if (word != 0 || ! cleared)
4407 rtx datum = GEN_INT (word);
4409 /* The assumption here is that it is safe to use
4410 XEXP if the set is multi-word, but not if
4411 it's single-word. */
4412 if (GET_CODE (target) == MEM)
4414 to_rtx = plus_constant (XEXP (target, 0), offset);
4415 to_rtx = change_address (target, mode, to_rtx);
4417 else if (offset == 0)
4421 emit_move_insn (to_rtx, datum);
4427 offset += set_word_size / BITS_PER_UNIT;
4433 /* Don't bother clearing storage if the set is all ones. */
4434 if (TREE_CHAIN (elt) != NULL_TREE
4435 || (TREE_PURPOSE (elt) == NULL_TREE
4437 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4438 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4439 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4440 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4442 clear_storage (target, expr_size (exp),
4443 TYPE_ALIGN (type) / BITS_PER_UNIT);
4446 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4448 /* start of range of element or NULL */
4449 tree startbit = TREE_PURPOSE (elt);
4450 /* end of range of element, or element value */
4451 tree endbit = TREE_VALUE (elt);
4452 #ifdef TARGET_MEM_FUNCTIONS
4453 HOST_WIDE_INT startb, endb;
4455 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4457 bitlength_rtx = expand_expr (bitlength,
4458 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4460 /* handle non-range tuple element like [ expr ] */
4461 if (startbit == NULL_TREE)
4463 startbit = save_expr (endbit);
4466 startbit = convert (sizetype, startbit);
4467 endbit = convert (sizetype, endbit);
4468 if (! integer_zerop (domain_min))
4470 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4471 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4473 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4474 EXPAND_CONST_ADDRESS);
4475 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4476 EXPAND_CONST_ADDRESS);
4480 targetx = assign_stack_temp (GET_MODE (target),
4481 GET_MODE_SIZE (GET_MODE (target)),
4483 emit_move_insn (targetx, target);
4485 else if (GET_CODE (target) == MEM)
4490 #ifdef TARGET_MEM_FUNCTIONS
4491 /* Optimization: If startbit and endbit are
4492 constants divisible by BITS_PER_UNIT,
4493 call memset instead. */
4494 if (TREE_CODE (startbit) == INTEGER_CST
4495 && TREE_CODE (endbit) == INTEGER_CST
4496 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4497 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4499 emit_library_call (memset_libfunc, 0,
4501 plus_constant (XEXP (targetx, 0),
4502 startb / BITS_PER_UNIT),
4504 constm1_rtx, TYPE_MODE (integer_type_node),
4505 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4506 TYPE_MODE (sizetype));
4511 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4512 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4513 bitlength_rtx, TYPE_MODE (sizetype),
4514 startbit_rtx, TYPE_MODE (sizetype),
4515 endbit_rtx, TYPE_MODE (sizetype));
4518 emit_move_insn (target, targetx);
4526 /* Store the value of EXP (an expression tree)
4527 into a subfield of TARGET which has mode MODE and occupies
4528 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4529 If MODE is VOIDmode, it means that we are storing into a bit-field.
4531 If VALUE_MODE is VOIDmode, return nothing in particular.
4532 UNSIGNEDP is not used in this case.
4534 Otherwise, return an rtx for the value stored. This rtx
4535 has mode VALUE_MODE if that is convenient to do.
4536 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4538 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4539 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4541 ALIAS_SET is the alias set for the destination. This value will
4542 (in general) be different from that for TARGET, since TARGET is a
4543 reference to the containing structure. */
4546 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4547 unsignedp, align, total_size, alias_set)
4549 int bitsize, bitpos;
4550 enum machine_mode mode;
4552 enum machine_mode value_mode;
4558 HOST_WIDE_INT width_mask = 0;
4560 if (TREE_CODE (exp) == ERROR_MARK)
4563 if (bitsize < HOST_BITS_PER_WIDE_INT)
4564 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4566 /* If we are storing into an unaligned field of an aligned union that is
4567 in a register, we may have the mode of TARGET being an integer mode but
4568 MODE == BLKmode. In that case, get an aligned object whose size and
4569 alignment are the same as TARGET and store TARGET into it (we can avoid
4570 the store if the field being stored is the entire width of TARGET). Then
4571 call ourselves recursively to store the field into a BLKmode version of
4572 that object. Finally, load from the object into TARGET. This is not
4573 very efficient in general, but should only be slightly more expensive
4574 than the otherwise-required unaligned accesses. Perhaps this can be
4575 cleaned up later. */
4578 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4580 rtx object = assign_stack_temp (GET_MODE (target),
4581 GET_MODE_SIZE (GET_MODE (target)), 0);
4582 rtx blk_object = copy_rtx (object);
4584 MEM_SET_IN_STRUCT_P (object, 1);
4585 MEM_SET_IN_STRUCT_P (blk_object, 1);
4586 PUT_MODE (blk_object, BLKmode);
4588 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4589 emit_move_insn (object, target);
4591 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4592 align, total_size, alias_set);
4594 /* Even though we aren't returning target, we need to
4595 give it the updated value. */
4596 emit_move_insn (target, object);
4601 /* If the structure is in a register or if the component
4602 is a bit field, we cannot use addressing to access it.
4603 Use bit-field techniques or SUBREG to store in it. */
4605 if (mode == VOIDmode
4606 || (mode != BLKmode && ! direct_store[(int) mode]
4607 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4608 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4609 || GET_CODE (target) == REG
4610 || GET_CODE (target) == SUBREG
4611 /* If the field isn't aligned enough to store as an ordinary memref,
4612 store it as a bit field. */
4613 || (SLOW_UNALIGNED_ACCESS
4614 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4615 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4617 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4619 /* If BITSIZE is narrower than the size of the type of EXP
4620 we will be narrowing TEMP. Normally, what's wanted are the
4621 low-order bits. However, if EXP's type is a record and this is
4622 big-endian machine, we want the upper BITSIZE bits. */
4623 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4624 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4625 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4626 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4627 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4631 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4633 if (mode != VOIDmode && mode != BLKmode
4634 && mode != TYPE_MODE (TREE_TYPE (exp)))
4635 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4637 /* If the modes of TARGET and TEMP are both BLKmode, both
4638 must be in memory and BITPOS must be aligned on a byte
4639 boundary. If so, we simply do a block copy. */
4640 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4642 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4643 || bitpos % BITS_PER_UNIT != 0)
4646 target = change_address (target, VOIDmode,
4647 plus_constant (XEXP (target, 0),
4648 bitpos / BITS_PER_UNIT));
4650 emit_block_move (target, temp,
4651 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4655 return value_mode == VOIDmode ? const0_rtx : target;
4658 /* Store the value in the bitfield. */
4659 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4660 if (value_mode != VOIDmode)
4662 /* The caller wants an rtx for the value. */
4663 /* If possible, avoid refetching from the bitfield itself. */
4665 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4668 enum machine_mode tmode;
4671 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4672 tmode = GET_MODE (temp);
4673 if (tmode == VOIDmode)
4675 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4676 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4677 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4679 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4680 NULL_RTX, value_mode, 0, align,
4687 rtx addr = XEXP (target, 0);
4690 /* If a value is wanted, it must be the lhs;
4691 so make the address stable for multiple use. */
4693 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4694 && ! CONSTANT_ADDRESS_P (addr)
4695 /* A frame-pointer reference is already stable. */
4696 && ! (GET_CODE (addr) == PLUS
4697 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4698 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4699 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4700 addr = copy_to_reg (addr);
4702 /* Now build a reference to just the desired component. */
4704 to_rtx = copy_rtx (change_address (target, mode,
4705 plus_constant (addr,
4707 / BITS_PER_UNIT))));
4708 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4709 MEM_ALIAS_SET (to_rtx) = alias_set;
4711 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4715 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4716 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4717 ARRAY_REFs and find the ultimate containing object, which we return.
4719 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4720 bit position, and *PUNSIGNEDP to the signedness of the field.
4721 If the position of the field is variable, we store a tree
4722 giving the variable offset (in units) in *POFFSET.
4723 This offset is in addition to the bit position.
4724 If the position is not variable, we store 0 in *POFFSET.
4725 We set *PALIGNMENT to the alignment in bytes of the address that will be
4726 computed. This is the alignment of the thing we return if *POFFSET
4727 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4729 If any of the extraction expressions is volatile,
4730 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4732 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4733 is a mode that can be used to access the field. In that case, *PBITSIZE
4736 If the field describes a variable-sized object, *PMODE is set to
4737 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4738 this case, but the address of the object can be found. */
4741 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4742 punsignedp, pvolatilep, palignment)
4747 enum machine_mode *pmode;
4752 tree orig_exp = exp;
4754 enum machine_mode mode = VOIDmode;
4755 tree offset = integer_zero_node;
4756 unsigned int alignment = BIGGEST_ALIGNMENT;
4758 if (TREE_CODE (exp) == COMPONENT_REF)
4760 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4761 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4762 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4763 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4765 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4767 size_tree = TREE_OPERAND (exp, 1);
4768 *punsignedp = TREE_UNSIGNED (exp);
4772 mode = TYPE_MODE (TREE_TYPE (exp));
4773 if (mode == BLKmode)
4774 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4776 *pbitsize = GET_MODE_BITSIZE (mode);
4777 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4782 if (TREE_CODE (size_tree) != INTEGER_CST)
4783 mode = BLKmode, *pbitsize = -1;
4785 *pbitsize = TREE_INT_CST_LOW (size_tree);
4788 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4789 and find the ultimate containing object. */
4795 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4797 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4798 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4799 : TREE_OPERAND (exp, 2));
4800 tree constant = integer_zero_node, var = pos;
4802 /* If this field hasn't been filled in yet, don't go
4803 past it. This should only happen when folding expressions
4804 made during type construction. */
4808 /* Assume here that the offset is a multiple of a unit.
4809 If not, there should be an explicitly added constant. */
4810 if (TREE_CODE (pos) == PLUS_EXPR
4811 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4812 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4813 else if (TREE_CODE (pos) == INTEGER_CST)
4814 constant = pos, var = integer_zero_node;
4816 *pbitpos += TREE_INT_CST_LOW (constant);
4817 offset = size_binop (PLUS_EXPR, offset,
4818 size_binop (EXACT_DIV_EXPR, var,
4819 size_int (BITS_PER_UNIT)));
4822 else if (TREE_CODE (exp) == ARRAY_REF)
4824 /* This code is based on the code in case ARRAY_REF in expand_expr
4825 below. We assume here that the size of an array element is
4826 always an integral multiple of BITS_PER_UNIT. */
4828 tree index = TREE_OPERAND (exp, 1);
4829 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4831 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4832 tree index_type = TREE_TYPE (index);
4835 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4837 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4839 index_type = TREE_TYPE (index);
4842 /* Optimize the special-case of a zero lower bound.
4844 We convert the low_bound to sizetype to avoid some problems
4845 with constant folding. (E.g. suppose the lower bound is 1,
4846 and its mode is QI. Without the conversion, (ARRAY
4847 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4848 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4850 But sizetype isn't quite right either (especially if
4851 the lowbound is negative). FIXME */
4853 if (! integer_zerop (low_bound))
4854 index = fold (build (MINUS_EXPR, index_type, index,
4855 convert (sizetype, low_bound)));
4857 if (TREE_CODE (index) == INTEGER_CST)
4859 index = convert (sbitsizetype, index);
4860 index_type = TREE_TYPE (index);
4863 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4864 convert (sbitsizetype,
4865 TYPE_SIZE (TREE_TYPE (exp)))));
4867 if (TREE_CODE (xindex) == INTEGER_CST
4868 && TREE_INT_CST_HIGH (xindex) == 0)
4869 *pbitpos += TREE_INT_CST_LOW (xindex);
4872 /* Either the bit offset calculated above is not constant, or
4873 it overflowed. In either case, redo the multiplication
4874 against the size in units. This is especially important
4875 in the non-constant case to avoid a division at runtime. */
4876 xindex = fold (build (MULT_EXPR, ssizetype, index,
4878 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4880 if (contains_placeholder_p (xindex))
4881 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4883 offset = size_binop (PLUS_EXPR, offset, xindex);
4886 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4887 && ! ((TREE_CODE (exp) == NOP_EXPR
4888 || TREE_CODE (exp) == CONVERT_EXPR)
4889 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4890 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4892 && (TYPE_MODE (TREE_TYPE (exp))
4893 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4896 /* If any reference in the chain is volatile, the effect is volatile. */
4897 if (TREE_THIS_VOLATILE (exp))
4900 /* If the offset is non-constant already, then we can't assume any
4901 alignment more than the alignment here. */
4902 if (! integer_zerop (offset))
4903 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4905 exp = TREE_OPERAND (exp, 0);
4908 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4909 alignment = MIN (alignment, DECL_ALIGN (exp));
4910 else if (TREE_TYPE (exp) != 0)
4911 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4913 if (integer_zerop (offset))
4916 if (offset != 0 && contains_placeholder_p (offset))
4917 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4921 *palignment = alignment / BITS_PER_UNIT;
4925 /* Subroutine of expand_exp: compute memory_usage from modifier. */
4926 static enum memory_use_mode
4927 get_memory_usage_from_modifier (modifier)
4928 enum expand_modifier modifier;
4934 return MEMORY_USE_RO;
4936 case EXPAND_MEMORY_USE_WO:
4937 return MEMORY_USE_WO;
4939 case EXPAND_MEMORY_USE_RW:
4940 return MEMORY_USE_RW;
4942 case EXPAND_MEMORY_USE_DONT:
4943 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4944 MEMORY_USE_DONT, because they are modifiers to a call of
4945 expand_expr in the ADDR_EXPR case of expand_expr. */
4946 case EXPAND_CONST_ADDRESS:
4947 case EXPAND_INITIALIZER:
4948 return MEMORY_USE_DONT;
4949 case EXPAND_MEMORY_USE_BAD:
4955 /* Given an rtx VALUE that may contain additions and multiplications,
4956 return an equivalent value that just refers to a register or memory.
4957 This is done by generating instructions to perform the arithmetic
4958 and returning a pseudo-register containing the value.
4960 The returned value may be a REG, SUBREG, MEM or constant. */
4963 force_operand (value, target)
4966 register optab binoptab = 0;
4967 /* Use a temporary to force order of execution of calls to
4971 /* Use subtarget as the target for operand 0 of a binary operation. */
4972 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4974 /* Check for a PIC address load. */
4976 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4977 && XEXP (value, 0) == pic_offset_table_rtx
4978 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4979 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4980 || GET_CODE (XEXP (value, 1)) == CONST))
4983 subtarget = gen_reg_rtx (GET_MODE (value));
4984 emit_move_insn (subtarget, value);
4988 if (GET_CODE (value) == PLUS)
4989 binoptab = add_optab;
4990 else if (GET_CODE (value) == MINUS)
4991 binoptab = sub_optab;
4992 else if (GET_CODE (value) == MULT)
4994 op2 = XEXP (value, 1);
4995 if (!CONSTANT_P (op2)
4996 && !(GET_CODE (op2) == REG && op2 != subtarget))
4998 tmp = force_operand (XEXP (value, 0), subtarget);
4999 return expand_mult (GET_MODE (value), tmp,
5000 force_operand (op2, NULL_RTX),
5006 op2 = XEXP (value, 1);
5007 if (!CONSTANT_P (op2)
5008 && !(GET_CODE (op2) == REG && op2 != subtarget))
5010 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5012 binoptab = add_optab;
5013 op2 = negate_rtx (GET_MODE (value), op2);
5016 /* Check for an addition with OP2 a constant integer and our first
5017 operand a PLUS of a virtual register and something else. In that
5018 case, we want to emit the sum of the virtual register and the
5019 constant first and then add the other value. This allows virtual
5020 register instantiation to simply modify the constant rather than
5021 creating another one around this addition. */
5022 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5023 && GET_CODE (XEXP (value, 0)) == PLUS
5024 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5025 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5026 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5028 rtx temp = expand_binop (GET_MODE (value), binoptab,
5029 XEXP (XEXP (value, 0), 0), op2,
5030 subtarget, 0, OPTAB_LIB_WIDEN);
5031 return expand_binop (GET_MODE (value), binoptab, temp,
5032 force_operand (XEXP (XEXP (value, 0), 1), 0),
5033 target, 0, OPTAB_LIB_WIDEN);
5036 tmp = force_operand (XEXP (value, 0), subtarget);
5037 return expand_binop (GET_MODE (value), binoptab, tmp,
5038 force_operand (op2, NULL_RTX),
5039 target, 0, OPTAB_LIB_WIDEN);
5040 /* We give UNSIGNEDP = 0 to expand_binop
5041 because the only operations we are expanding here are signed ones. */
5046 /* Subroutine of expand_expr:
5047 save the non-copied parts (LIST) of an expr (LHS), and return a list
5048 which can restore these values to their previous values,
5049 should something modify their storage. */
5052 save_noncopied_parts (lhs, list)
5059 for (tail = list; tail; tail = TREE_CHAIN (tail))
5060 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5061 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5064 tree part = TREE_VALUE (tail);
5065 tree part_type = TREE_TYPE (part);
5066 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5067 rtx target = assign_temp (part_type, 0, 1, 1);
5068 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5069 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5070 parts = tree_cons (to_be_saved,
5071 build (RTL_EXPR, part_type, NULL_TREE,
5074 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5079 /* Subroutine of expand_expr:
5080 record the non-copied parts (LIST) of an expr (LHS), and return a list
5081 which specifies the initial values of these parts. */
5084 init_noncopied_parts (lhs, list)
5091 for (tail = list; tail; tail = TREE_CHAIN (tail))
5092 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5093 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5094 else if (TREE_PURPOSE (tail))
5096 tree part = TREE_VALUE (tail);
5097 tree part_type = TREE_TYPE (part);
5098 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5099 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5104 /* Subroutine of expand_expr: return nonzero iff there is no way that
5105 EXP can reference X, which is being modified. TOP_P is nonzero if this
5106 call is going to be used to determine whether we need a temporary
5107 for EXP, as opposed to a recursive call to this function.
5109 It is always safe for this routine to return zero since it merely
5110 searches for optimization opportunities. */
5113 safe_from_p (x, exp, top_p)
5120 static int save_expr_count;
5121 static int save_expr_size = 0;
5122 static tree *save_expr_rewritten;
5123 static tree save_expr_trees[256];
5126 /* If EXP has varying size, we MUST use a target since we currently
5127 have no way of allocating temporaries of variable size
5128 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5129 So we assume here that something at a higher level has prevented a
5130 clash. This is somewhat bogus, but the best we can do. Only
5131 do this when X is BLKmode and when we are at the top level. */
5132 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5133 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5134 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5135 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5136 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5138 && GET_MODE (x) == BLKmode))
5141 if (top_p && save_expr_size == 0)
5145 save_expr_count = 0;
5146 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5147 save_expr_rewritten = &save_expr_trees[0];
5149 rtn = safe_from_p (x, exp, 1);
5151 for (i = 0; i < save_expr_count; ++i)
5153 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5155 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5163 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5164 find the underlying pseudo. */
5165 if (GET_CODE (x) == SUBREG)
5168 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5172 /* If X is a location in the outgoing argument area, it is always safe. */
5173 if (GET_CODE (x) == MEM
5174 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5175 || (GET_CODE (XEXP (x, 0)) == PLUS
5176 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5179 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5182 exp_rtl = DECL_RTL (exp);
5189 if (TREE_CODE (exp) == TREE_LIST)
5190 return ((TREE_VALUE (exp) == 0
5191 || safe_from_p (x, TREE_VALUE (exp), 0))
5192 && (TREE_CHAIN (exp) == 0
5193 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5194 else if (TREE_CODE (exp) == ERROR_MARK)
5195 return 1; /* An already-visited SAVE_EXPR? */
5200 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5204 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5205 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5209 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5210 the expression. If it is set, we conflict iff we are that rtx or
5211 both are in memory. Otherwise, we check all operands of the
5212 expression recursively. */
5214 switch (TREE_CODE (exp))
5217 return (staticp (TREE_OPERAND (exp, 0))
5218 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5219 || TREE_STATIC (exp));
5222 if (GET_CODE (x) == MEM)
5227 exp_rtl = CALL_EXPR_RTL (exp);
5230 /* Assume that the call will clobber all hard registers and
5232 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5233 || GET_CODE (x) == MEM)
5240 /* If a sequence exists, we would have to scan every instruction
5241 in the sequence to see if it was safe. This is probably not
5243 if (RTL_EXPR_SEQUENCE (exp))
5246 exp_rtl = RTL_EXPR_RTL (exp);
5249 case WITH_CLEANUP_EXPR:
5250 exp_rtl = RTL_EXPR_RTL (exp);
5253 case CLEANUP_POINT_EXPR:
5254 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5257 exp_rtl = SAVE_EXPR_RTL (exp);
5261 /* This SAVE_EXPR might appear many times in the top-level
5262 safe_from_p() expression, and if it has a complex
5263 subexpression, examining it multiple times could result
5264 in a combinatorial explosion. E.g. on an Alpha
5265 running at least 200MHz, a Fortran test case compiled with
5266 optimization took about 28 minutes to compile -- even though
5267 it was only a few lines long, and the complicated line causing
5268 so much time to be spent in the earlier version of safe_from_p()
5269 had only 293 or so unique nodes.
5271 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5272 where it is so we can turn it back in the top-level safe_from_p()
5275 /* For now, don't bother re-sizing the array. */
5276 if (save_expr_count >= save_expr_size)
5278 save_expr_rewritten[save_expr_count++] = exp;
5280 nops = tree_code_length[(int) SAVE_EXPR];
5281 for (i = 0; i < nops; i++)
5283 tree operand = TREE_OPERAND (exp, i);
5284 if (operand == NULL_TREE)
5286 TREE_SET_CODE (exp, ERROR_MARK);
5287 if (!safe_from_p (x, operand, 0))
5289 TREE_SET_CODE (exp, SAVE_EXPR);
5291 TREE_SET_CODE (exp, ERROR_MARK);
5295 /* The only operand we look at is operand 1. The rest aren't
5296 part of the expression. */
5297 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5299 case METHOD_CALL_EXPR:
5300 /* This takes a rtx argument, but shouldn't appear here. */
5307 /* If we have an rtx, we do not need to scan our operands. */
5311 nops = tree_code_length[(int) TREE_CODE (exp)];
5312 for (i = 0; i < nops; i++)
5313 if (TREE_OPERAND (exp, i) != 0
5314 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5318 /* If we have an rtl, find any enclosed object. Then see if we conflict
5322 if (GET_CODE (exp_rtl) == SUBREG)
5324 exp_rtl = SUBREG_REG (exp_rtl);
5325 if (GET_CODE (exp_rtl) == REG
5326 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5330 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5331 are memory and EXP is not readonly. */
5332 return ! (rtx_equal_p (x, exp_rtl)
5333 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5334 && ! TREE_READONLY (exp)));
5337 /* If we reach here, it is safe. */
5341 /* Subroutine of expand_expr: return nonzero iff EXP is an
5342 expression whose type is statically determinable. */
5348 if (TREE_CODE (exp) == PARM_DECL
5349 || TREE_CODE (exp) == VAR_DECL
5350 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5351 || TREE_CODE (exp) == COMPONENT_REF
5352 || TREE_CODE (exp) == ARRAY_REF)
5357 /* Subroutine of expand_expr: return rtx if EXP is a
5358 variable or parameter; else return 0. */
5365 switch (TREE_CODE (exp))
5369 return DECL_RTL (exp);
5375 #ifdef MAX_INTEGER_COMPUTATION_MODE
5377 check_max_integer_computation_mode (exp)
5380 enum tree_code code;
5381 enum machine_mode mode;
5383 /* Strip any NOPs that don't change the mode. */
5385 code = TREE_CODE (exp);
5387 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5388 if (code == NOP_EXPR
5389 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5392 /* First check the type of the overall operation. We need only look at
5393 unary, binary and relational operations. */
5394 if (TREE_CODE_CLASS (code) == '1'
5395 || TREE_CODE_CLASS (code) == '2'
5396 || TREE_CODE_CLASS (code) == '<')
5398 mode = TYPE_MODE (TREE_TYPE (exp));
5399 if (GET_MODE_CLASS (mode) == MODE_INT
5400 && mode > MAX_INTEGER_COMPUTATION_MODE)
5401 fatal ("unsupported wide integer operation");
5404 /* Check operand of a unary op. */
5405 if (TREE_CODE_CLASS (code) == '1')
5407 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5408 if (GET_MODE_CLASS (mode) == MODE_INT
5409 && mode > MAX_INTEGER_COMPUTATION_MODE)
5410 fatal ("unsupported wide integer operation");
5413 /* Check operands of a binary/comparison op. */
5414 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5416 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5417 if (GET_MODE_CLASS (mode) == MODE_INT
5418 && mode > MAX_INTEGER_COMPUTATION_MODE)
5419 fatal ("unsupported wide integer operation");
5421 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5422 if (GET_MODE_CLASS (mode) == MODE_INT
5423 && mode > MAX_INTEGER_COMPUTATION_MODE)
5424 fatal ("unsupported wide integer operation");
5430 /* expand_expr: generate code for computing expression EXP.
5431 An rtx for the computed value is returned. The value is never null.
5432 In the case of a void EXP, const0_rtx is returned.
5434 The value may be stored in TARGET if TARGET is nonzero.
5435 TARGET is just a suggestion; callers must assume that
5436 the rtx returned may not be the same as TARGET.
5438 If TARGET is CONST0_RTX, it means that the value will be ignored.
5440 If TMODE is not VOIDmode, it suggests generating the
5441 result in mode TMODE. But this is done only when convenient.
5442 Otherwise, TMODE is ignored and the value generated in its natural mode.
5443 TMODE is just a suggestion; callers must assume that
5444 the rtx returned may not have mode TMODE.
5446 Note that TARGET may have neither TMODE nor MODE. In that case, it
5447 probably will not be used.
5449 If MODIFIER is EXPAND_SUM then when EXP is an addition
5450 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5451 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5452 products as above, or REG or MEM, or constant.
5453 Ordinarily in such cases we would output mul or add instructions
5454 and then return a pseudo reg containing the sum.
5456 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5457 it also marks a label as absolutely required (it can't be dead).
5458 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5459 This is used for outputting expressions used in initializers.
5461 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5462 with a constant address even if that address is not normally legitimate.
5463 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5466 expand_expr (exp, target, tmode, modifier)
5469 enum machine_mode tmode;
5470 enum expand_modifier modifier;
5472 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5473 This is static so it will be accessible to our recursive callees. */
5474 static tree placeholder_list = 0;
5475 register rtx op0, op1, temp;
5476 tree type = TREE_TYPE (exp);
5477 int unsignedp = TREE_UNSIGNED (type);
5478 register enum machine_mode mode;
5479 register enum tree_code code = TREE_CODE (exp);
5481 rtx subtarget, original_target;
5484 /* Used by check-memory-usage to make modifier read only. */
5485 enum expand_modifier ro_modifier;
5487 /* Handle ERROR_MARK before anybody tries to access its type. */
5488 if (TREE_CODE (exp) == ERROR_MARK)
5490 op0 = CONST0_RTX (tmode);
5496 mode = TYPE_MODE (type);
5497 /* Use subtarget as the target for operand 0 of a binary operation. */
5498 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5499 original_target = target;
5500 ignore = (target == const0_rtx
5501 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5502 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5503 || code == COND_EXPR)
5504 && TREE_CODE (type) == VOID_TYPE));
5506 /* Make a read-only version of the modifier. */
5507 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5508 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5509 ro_modifier = modifier;
5511 ro_modifier = EXPAND_NORMAL;
5513 /* Don't use hard regs as subtargets, because the combiner
5514 can only handle pseudo regs. */
5515 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5517 /* Avoid subtargets inside loops,
5518 since they hide some invariant expressions. */
5519 if (preserve_subexpressions_p ())
5522 /* If we are going to ignore this result, we need only do something
5523 if there is a side-effect somewhere in the expression. If there
5524 is, short-circuit the most common cases here. Note that we must
5525 not call expand_expr with anything but const0_rtx in case this
5526 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5530 if (! TREE_SIDE_EFFECTS (exp))
5533 /* Ensure we reference a volatile object even if value is ignored. */
5534 if (TREE_THIS_VOLATILE (exp)
5535 && TREE_CODE (exp) != FUNCTION_DECL
5536 && mode != VOIDmode && mode != BLKmode)
5538 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5539 if (GET_CODE (temp) == MEM)
5540 temp = copy_to_reg (temp);
5544 if (TREE_CODE_CLASS (code) == '1')
5545 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5546 VOIDmode, ro_modifier);
5547 else if (TREE_CODE_CLASS (code) == '2'
5548 || TREE_CODE_CLASS (code) == '<')
5550 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5551 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5554 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5555 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5556 /* If the second operand has no side effects, just evaluate
5558 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5559 VOIDmode, ro_modifier);
5564 #ifdef MAX_INTEGER_COMPUTATION_MODE
5565 /* Only check stuff here if the mode we want is different from the mode
5566 of the expression; if it's the same, check_max_integer_computiation_mode
5567 will handle it. Do we really need to check this stuff at all? */
5570 && GET_MODE (target) != mode
5571 && TREE_CODE (exp) != INTEGER_CST
5572 && TREE_CODE (exp) != PARM_DECL
5573 && TREE_CODE (exp) != ARRAY_REF
5574 && TREE_CODE (exp) != COMPONENT_REF
5575 && TREE_CODE (exp) != BIT_FIELD_REF
5576 && TREE_CODE (exp) != INDIRECT_REF
5577 && TREE_CODE (exp) != CALL_EXPR
5578 && TREE_CODE (exp) != VAR_DECL
5579 && TREE_CODE (exp) != RTL_EXPR)
5581 enum machine_mode mode = GET_MODE (target);
5583 if (GET_MODE_CLASS (mode) == MODE_INT
5584 && mode > MAX_INTEGER_COMPUTATION_MODE)
5585 fatal ("unsupported wide integer operation");
5589 && TREE_CODE (exp) != INTEGER_CST
5590 && TREE_CODE (exp) != PARM_DECL
5591 && TREE_CODE (exp) != ARRAY_REF
5592 && TREE_CODE (exp) != COMPONENT_REF
5593 && TREE_CODE (exp) != BIT_FIELD_REF
5594 && TREE_CODE (exp) != INDIRECT_REF
5595 && TREE_CODE (exp) != VAR_DECL
5596 && TREE_CODE (exp) != CALL_EXPR
5597 && TREE_CODE (exp) != RTL_EXPR
5598 && GET_MODE_CLASS (tmode) == MODE_INT
5599 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5600 fatal ("unsupported wide integer operation");
5602 check_max_integer_computation_mode (exp);
5605 /* If will do cse, generate all results into pseudo registers
5606 since 1) that allows cse to find more things
5607 and 2) otherwise cse could produce an insn the machine
5610 if (! cse_not_expected && mode != BLKmode && target
5611 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5618 tree function = decl_function_context (exp);
5619 /* Handle using a label in a containing function. */
5620 if (function != current_function_decl
5621 && function != inline_function_decl && function != 0)
5623 struct function *p = find_function_data (function);
5624 /* Allocate in the memory associated with the function
5625 that the label is in. */
5626 push_obstacks (p->function_obstack,
5627 p->function_maybepermanent_obstack);
5629 p->expr->x_forced_labels
5630 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5631 p->expr->x_forced_labels);
5636 if (modifier == EXPAND_INITIALIZER)
5637 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5641 temp = gen_rtx_MEM (FUNCTION_MODE,
5642 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5643 if (function != current_function_decl
5644 && function != inline_function_decl && function != 0)
5645 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5650 if (DECL_RTL (exp) == 0)
5652 error_with_decl (exp, "prior parameter's size depends on `%s'");
5653 return CONST0_RTX (mode);
5656 /* ... fall through ... */
5659 /* If a static var's type was incomplete when the decl was written,
5660 but the type is complete now, lay out the decl now. */
5661 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5662 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5664 push_obstacks_nochange ();
5665 end_temporary_allocation ();
5666 layout_decl (exp, 0);
5667 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5671 /* Although static-storage variables start off initialized, according to
5672 ANSI C, a memcpy could overwrite them with uninitialized values. So
5673 we check them too. This also lets us check for read-only variables
5674 accessed via a non-const declaration, in case it won't be detected
5675 any other way (e.g., in an embedded system or OS kernel without
5678 Aggregates are not checked here; they're handled elsewhere. */
5679 if (current_function && current_function_check_memory_usage
5681 && GET_CODE (DECL_RTL (exp)) == MEM
5682 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5684 enum memory_use_mode memory_usage;
5685 memory_usage = get_memory_usage_from_modifier (modifier);
5687 if (memory_usage != MEMORY_USE_DONT)
5688 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5689 XEXP (DECL_RTL (exp), 0), Pmode,
5690 GEN_INT (int_size_in_bytes (type)),
5691 TYPE_MODE (sizetype),
5692 GEN_INT (memory_usage),
5693 TYPE_MODE (integer_type_node));
5696 /* ... fall through ... */
5700 if (DECL_RTL (exp) == 0)
5703 /* Ensure variable marked as used even if it doesn't go through
5704 a parser. If it hasn't be used yet, write out an external
5706 if (! TREE_USED (exp))
5708 assemble_external (exp);
5709 TREE_USED (exp) = 1;
5712 /* Show we haven't gotten RTL for this yet. */
5715 /* Handle variables inherited from containing functions. */
5716 context = decl_function_context (exp);
5718 /* We treat inline_function_decl as an alias for the current function
5719 because that is the inline function whose vars, types, etc.
5720 are being merged into the current function.
5721 See expand_inline_function. */
5723 if (context != 0 && context != current_function_decl
5724 && context != inline_function_decl
5725 /* If var is static, we don't need a static chain to access it. */
5726 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5727 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5731 /* Mark as non-local and addressable. */
5732 DECL_NONLOCAL (exp) = 1;
5733 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5735 mark_addressable (exp);
5736 if (GET_CODE (DECL_RTL (exp)) != MEM)
5738 addr = XEXP (DECL_RTL (exp), 0);
5739 if (GET_CODE (addr) == MEM)
5740 addr = gen_rtx_MEM (Pmode,
5741 fix_lexical_addr (XEXP (addr, 0), exp));
5743 addr = fix_lexical_addr (addr, exp);
5744 temp = change_address (DECL_RTL (exp), mode, addr);
5747 /* This is the case of an array whose size is to be determined
5748 from its initializer, while the initializer is still being parsed.
5751 else if (GET_CODE (DECL_RTL (exp)) == MEM
5752 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5753 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5754 XEXP (DECL_RTL (exp), 0));
5756 /* If DECL_RTL is memory, we are in the normal case and either
5757 the address is not valid or it is not a register and -fforce-addr
5758 is specified, get the address into a register. */
5760 else if (GET_CODE (DECL_RTL (exp)) == MEM
5761 && modifier != EXPAND_CONST_ADDRESS
5762 && modifier != EXPAND_SUM
5763 && modifier != EXPAND_INITIALIZER
5764 && (! memory_address_p (DECL_MODE (exp),
5765 XEXP (DECL_RTL (exp), 0))
5767 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5768 temp = change_address (DECL_RTL (exp), VOIDmode,
5769 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5771 /* If we got something, return it. But first, set the alignment
5772 the address is a register. */
5775 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5776 mark_reg_pointer (XEXP (temp, 0),
5777 DECL_ALIGN (exp) / BITS_PER_UNIT);
5782 /* If the mode of DECL_RTL does not match that of the decl, it
5783 must be a promoted value. We return a SUBREG of the wanted mode,
5784 but mark it so that we know that it was already extended. */
5786 if (GET_CODE (DECL_RTL (exp)) == REG
5787 && GET_MODE (DECL_RTL (exp)) != mode)
5789 /* Get the signedness used for this variable. Ensure we get the
5790 same mode we got when the variable was declared. */
5791 if (GET_MODE (DECL_RTL (exp))
5792 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5795 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5796 SUBREG_PROMOTED_VAR_P (temp) = 1;
5797 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5801 return DECL_RTL (exp);
5804 return immed_double_const (TREE_INT_CST_LOW (exp),
5805 TREE_INT_CST_HIGH (exp),
5809 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5810 EXPAND_MEMORY_USE_BAD);
5813 /* If optimized, generate immediate CONST_DOUBLE
5814 which will be turned into memory by reload if necessary.
5816 We used to force a register so that loop.c could see it. But
5817 this does not allow gen_* patterns to perform optimizations with
5818 the constants. It also produces two insns in cases like "x = 1.0;".
5819 On most machines, floating-point constants are not permitted in
5820 many insns, so we'd end up copying it to a register in any case.
5822 Now, we do the copying in expand_binop, if appropriate. */
5823 return immed_real_const (exp);
5827 if (! TREE_CST_RTL (exp))
5828 output_constant_def (exp);
5830 /* TREE_CST_RTL probably contains a constant address.
5831 On RISC machines where a constant address isn't valid,
5832 make some insns to get that address into a register. */
5833 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5834 && modifier != EXPAND_CONST_ADDRESS
5835 && modifier != EXPAND_INITIALIZER
5836 && modifier != EXPAND_SUM
5837 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5839 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5840 return change_address (TREE_CST_RTL (exp), VOIDmode,
5841 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5842 return TREE_CST_RTL (exp);
5844 case EXPR_WITH_FILE_LOCATION:
5847 char *saved_input_filename = input_filename;
5848 int saved_lineno = lineno;
5849 input_filename = EXPR_WFL_FILENAME (exp);
5850 lineno = EXPR_WFL_LINENO (exp);
5851 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5852 emit_line_note (input_filename, lineno);
5853 /* Possibly avoid switching back and force here */
5854 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5855 input_filename = saved_input_filename;
5856 lineno = saved_lineno;
5861 context = decl_function_context (exp);
5863 /* If this SAVE_EXPR was at global context, assume we are an
5864 initialization function and move it into our context. */
5866 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5868 /* We treat inline_function_decl as an alias for the current function
5869 because that is the inline function whose vars, types, etc.
5870 are being merged into the current function.
5871 See expand_inline_function. */
5872 if (context == current_function_decl || context == inline_function_decl)
5875 /* If this is non-local, handle it. */
5878 /* The following call just exists to abort if the context is
5879 not of a containing function. */
5880 find_function_data (context);
5882 temp = SAVE_EXPR_RTL (exp);
5883 if (temp && GET_CODE (temp) == REG)
5885 put_var_into_stack (exp);
5886 temp = SAVE_EXPR_RTL (exp);
5888 if (temp == 0 || GET_CODE (temp) != MEM)
5890 return change_address (temp, mode,
5891 fix_lexical_addr (XEXP (temp, 0), exp));
5893 if (SAVE_EXPR_RTL (exp) == 0)
5895 if (mode == VOIDmode)
5898 temp = assign_temp (type, 3, 0, 0);
5900 SAVE_EXPR_RTL (exp) = temp;
5901 if (!optimize && GET_CODE (temp) == REG)
5902 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5905 /* If the mode of TEMP does not match that of the expression, it
5906 must be a promoted value. We pass store_expr a SUBREG of the
5907 wanted mode but mark it so that we know that it was already
5908 extended. Note that `unsignedp' was modified above in
5911 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5913 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5914 SUBREG_PROMOTED_VAR_P (temp) = 1;
5915 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5918 if (temp == const0_rtx)
5919 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5920 EXPAND_MEMORY_USE_BAD);
5922 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5924 TREE_USED (exp) = 1;
5927 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5928 must be a promoted value. We return a SUBREG of the wanted mode,
5929 but mark it so that we know that it was already extended. */
5931 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5932 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5934 /* Compute the signedness and make the proper SUBREG. */
5935 promote_mode (type, mode, &unsignedp, 0);
5936 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5937 SUBREG_PROMOTED_VAR_P (temp) = 1;
5938 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5942 return SAVE_EXPR_RTL (exp);
5947 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5948 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5952 case PLACEHOLDER_EXPR:
5954 tree placeholder_expr;
5956 /* If there is an object on the head of the placeholder list,
5957 see if some object in it of type TYPE or a pointer to it. For
5958 further information, see tree.def. */
5959 for (placeholder_expr = placeholder_list;
5960 placeholder_expr != 0;
5961 placeholder_expr = TREE_CHAIN (placeholder_expr))
5963 tree need_type = TYPE_MAIN_VARIANT (type);
5965 tree old_list = placeholder_list;
5968 /* Find the outermost reference that is of the type we want.
5969 If none, see if any object has a type that is a pointer to
5970 the type we want. */
5971 for (elt = TREE_PURPOSE (placeholder_expr);
5972 elt != 0 && object == 0;
5974 = ((TREE_CODE (elt) == COMPOUND_EXPR
5975 || TREE_CODE (elt) == COND_EXPR)
5976 ? TREE_OPERAND (elt, 1)
5977 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5978 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5979 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5980 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5981 ? TREE_OPERAND (elt, 0) : 0))
5982 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5985 for (elt = TREE_PURPOSE (placeholder_expr);
5986 elt != 0 && object == 0;
5988 = ((TREE_CODE (elt) == COMPOUND_EXPR
5989 || TREE_CODE (elt) == COND_EXPR)
5990 ? TREE_OPERAND (elt, 1)
5991 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5992 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5993 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5994 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5995 ? TREE_OPERAND (elt, 0) : 0))
5996 if (POINTER_TYPE_P (TREE_TYPE (elt))
5997 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5999 object = build1 (INDIRECT_REF, need_type, elt);
6003 /* Expand this object skipping the list entries before
6004 it was found in case it is also a PLACEHOLDER_EXPR.
6005 In that case, we want to translate it using subsequent
6007 placeholder_list = TREE_CHAIN (placeholder_expr);
6008 temp = expand_expr (object, original_target, tmode,
6010 placeholder_list = old_list;
6016 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6019 case WITH_RECORD_EXPR:
6020 /* Put the object on the placeholder list, expand our first operand,
6021 and pop the list. */
6022 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6024 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6025 tmode, ro_modifier);
6026 placeholder_list = TREE_CHAIN (placeholder_list);
6030 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6031 expand_goto (TREE_OPERAND (exp, 0));
6033 expand_computed_goto (TREE_OPERAND (exp, 0));
6037 expand_exit_loop_if_false (NULL_PTR,
6038 invert_truthvalue (TREE_OPERAND (exp, 0)));
6041 case LABELED_BLOCK_EXPR:
6042 if (LABELED_BLOCK_BODY (exp))
6043 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6044 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6047 case EXIT_BLOCK_EXPR:
6048 if (EXIT_BLOCK_RETURN (exp))
6049 sorry ("returned value in block_exit_expr");
6050 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6055 expand_start_loop (1);
6056 expand_expr_stmt (TREE_OPERAND (exp, 0));
6064 tree vars = TREE_OPERAND (exp, 0);
6065 int vars_need_expansion = 0;
6067 /* Need to open a binding contour here because
6068 if there are any cleanups they must be contained here. */
6069 expand_start_bindings (0);
6071 /* Mark the corresponding BLOCK for output in its proper place. */
6072 if (TREE_OPERAND (exp, 2) != 0
6073 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6074 insert_block (TREE_OPERAND (exp, 2));
6076 /* If VARS have not yet been expanded, expand them now. */
6079 if (DECL_RTL (vars) == 0)
6081 vars_need_expansion = 1;
6084 expand_decl_init (vars);
6085 vars = TREE_CHAIN (vars);
6088 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6090 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6096 if (RTL_EXPR_SEQUENCE (exp))
6098 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6100 emit_insns (RTL_EXPR_SEQUENCE (exp));
6101 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6103 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6104 free_temps_for_rtl_expr (exp);
6105 return RTL_EXPR_RTL (exp);
6108 /* If we don't need the result, just ensure we evaluate any
6113 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6114 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6115 EXPAND_MEMORY_USE_BAD);
6119 /* All elts simple constants => refer to a constant in memory. But
6120 if this is a non-BLKmode mode, let it store a field at a time
6121 since that should make a CONST_INT or CONST_DOUBLE when we
6122 fold. Likewise, if we have a target we can use, it is best to
6123 store directly into the target unless the type is large enough
6124 that memcpy will be used. If we are making an initializer and
6125 all operands are constant, put it in memory as well. */
6126 else if ((TREE_STATIC (exp)
6127 && ((mode == BLKmode
6128 && ! (target != 0 && safe_from_p (target, exp, 1)))
6129 || TREE_ADDRESSABLE (exp)
6130 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6131 && (!MOVE_BY_PIECES_P
6132 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6133 TYPE_ALIGN (type) / BITS_PER_UNIT))
6134 && ! mostly_zeros_p (exp))))
6135 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6137 rtx constructor = output_constant_def (exp);
6138 if (modifier != EXPAND_CONST_ADDRESS
6139 && modifier != EXPAND_INITIALIZER
6140 && modifier != EXPAND_SUM
6141 && (! memory_address_p (GET_MODE (constructor),
6142 XEXP (constructor, 0))
6144 && GET_CODE (XEXP (constructor, 0)) != REG)))
6145 constructor = change_address (constructor, VOIDmode,
6146 XEXP (constructor, 0));
6152 /* Handle calls that pass values in multiple non-contiguous
6153 locations. The Irix 6 ABI has examples of this. */
6154 if (target == 0 || ! safe_from_p (target, exp, 1)
6155 || GET_CODE (target) == PARALLEL)
6157 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6158 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6160 target = assign_temp (type, 0, 1, 1);
6163 if (TREE_READONLY (exp))
6165 if (GET_CODE (target) == MEM)
6166 target = copy_rtx (target);
6168 RTX_UNCHANGING_P (target) = 1;
6171 store_constructor (exp, target, 0);
6177 tree exp1 = TREE_OPERAND (exp, 0);
6180 tree string = string_constant (exp1, &index);
6183 /* Try to optimize reads from const strings. */
6185 && TREE_CODE (string) == STRING_CST
6186 && TREE_CODE (index) == INTEGER_CST
6187 && !TREE_INT_CST_HIGH (index)
6188 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6189 && GET_MODE_CLASS (mode) == MODE_INT
6190 && GET_MODE_SIZE (mode) == 1
6191 && modifier != EXPAND_MEMORY_USE_WO)
6192 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6194 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6195 op0 = memory_address (mode, op0);
6197 if (current_function && current_function_check_memory_usage
6198 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6200 enum memory_use_mode memory_usage;
6201 memory_usage = get_memory_usage_from_modifier (modifier);
6203 if (memory_usage != MEMORY_USE_DONT)
6205 in_check_memory_usage = 1;
6206 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6208 GEN_INT (int_size_in_bytes (type)),
6209 TYPE_MODE (sizetype),
6210 GEN_INT (memory_usage),
6211 TYPE_MODE (integer_type_node));
6212 in_check_memory_usage = 0;
6216 temp = gen_rtx_MEM (mode, op0);
6217 /* If address was computed by addition,
6218 mark this as an element of an aggregate. */
6219 if (TREE_CODE (exp1) == PLUS_EXPR
6220 || (TREE_CODE (exp1) == SAVE_EXPR
6221 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6222 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6223 || (TREE_CODE (exp1) == ADDR_EXPR
6224 && (exp2 = TREE_OPERAND (exp1, 0))
6225 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6226 MEM_SET_IN_STRUCT_P (temp, 1);
6228 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6229 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6231 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6232 here, because, in C and C++, the fact that a location is accessed
6233 through a pointer to const does not mean that the value there can
6234 never change. Languages where it can never change should
6235 also set TREE_STATIC. */
6236 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6241 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6245 tree array = TREE_OPERAND (exp, 0);
6246 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6247 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6248 tree index = TREE_OPERAND (exp, 1);
6249 tree index_type = TREE_TYPE (index);
6252 /* Optimize the special-case of a zero lower bound.
6254 We convert the low_bound to sizetype to avoid some problems
6255 with constant folding. (E.g. suppose the lower bound is 1,
6256 and its mode is QI. Without the conversion, (ARRAY
6257 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6258 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6260 But sizetype isn't quite right either (especially if
6261 the lowbound is negative). FIXME */
6263 if (! integer_zerop (low_bound))
6264 index = fold (build (MINUS_EXPR, index_type, index,
6265 convert (sizetype, low_bound)));
6267 /* Fold an expression like: "foo"[2].
6268 This is not done in fold so it won't happen inside &.
6269 Don't fold if this is for wide characters since it's too
6270 difficult to do correctly and this is a very rare case. */
6272 if (TREE_CODE (array) == STRING_CST
6273 && TREE_CODE (index) == INTEGER_CST
6274 && !TREE_INT_CST_HIGH (index)
6275 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6276 && GET_MODE_CLASS (mode) == MODE_INT
6277 && GET_MODE_SIZE (mode) == 1)
6278 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6280 /* If this is a constant index into a constant array,
6281 just get the value from the array. Handle both the cases when
6282 we have an explicit constructor and when our operand is a variable
6283 that was declared const. */
6285 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6287 if (TREE_CODE (index) == INTEGER_CST
6288 && TREE_INT_CST_HIGH (index) == 0)
6290 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6292 i = TREE_INT_CST_LOW (index);
6294 elem = TREE_CHAIN (elem);
6296 return expand_expr (fold (TREE_VALUE (elem)), target,
6297 tmode, ro_modifier);
6301 else if (optimize >= 1
6302 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6303 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6304 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6306 if (TREE_CODE (index) == INTEGER_CST)
6308 tree init = DECL_INITIAL (array);
6310 i = TREE_INT_CST_LOW (index);
6311 if (TREE_CODE (init) == CONSTRUCTOR)
6313 tree elem = CONSTRUCTOR_ELTS (init);
6316 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6317 elem = TREE_CHAIN (elem);
6319 return expand_expr (fold (TREE_VALUE (elem)), target,
6320 tmode, ro_modifier);
6322 else if (TREE_CODE (init) == STRING_CST
6323 && TREE_INT_CST_HIGH (index) == 0
6324 && (TREE_INT_CST_LOW (index)
6325 < TREE_STRING_LENGTH (init)))
6327 (TREE_STRING_POINTER
6328 (init)[TREE_INT_CST_LOW (index)]));
6333 /* ... fall through ... */
6337 /* If the operand is a CONSTRUCTOR, we can just extract the
6338 appropriate field if it is present. Don't do this if we have
6339 already written the data since we want to refer to that copy
6340 and varasm.c assumes that's what we'll do. */
6341 if (code != ARRAY_REF
6342 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6343 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6347 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6348 elt = TREE_CHAIN (elt))
6349 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6350 /* We can normally use the value of the field in the
6351 CONSTRUCTOR. However, if this is a bitfield in
6352 an integral mode that we can fit in a HOST_WIDE_INT,
6353 we must mask only the number of bits in the bitfield,
6354 since this is done implicitly by the constructor. If
6355 the bitfield does not meet either of those conditions,
6356 we can't do this optimization. */
6357 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6358 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6360 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6361 <= HOST_BITS_PER_WIDE_INT))))
6363 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6364 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6366 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6368 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6370 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6371 op0 = expand_and (op0, op1, target);
6375 enum machine_mode imode
6376 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6378 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6381 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6383 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6393 enum machine_mode mode1;
6399 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6400 &mode1, &unsignedp, &volatilep,
6403 /* If we got back the original object, something is wrong. Perhaps
6404 we are evaluating an expression too early. In any event, don't
6405 infinitely recurse. */
6409 /* If TEM's type is a union of variable size, pass TARGET to the inner
6410 computation, since it will need a temporary and TARGET is known
6411 to have to do. This occurs in unchecked conversion in Ada. */
6413 op0 = expand_expr (tem,
6414 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6415 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6417 ? target : NULL_RTX),
6419 modifier == EXPAND_INITIALIZER
6420 ? modifier : EXPAND_NORMAL);
6422 /* If this is a constant, put it into a register if it is a
6423 legitimate constant and memory if it isn't. */
6424 if (CONSTANT_P (op0))
6426 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6427 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6428 op0 = force_reg (mode, op0);
6430 op0 = validize_mem (force_const_mem (mode, op0));
6435 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6437 if (GET_CODE (op0) != MEM)
6440 if (GET_MODE (offset_rtx) != ptr_mode)
6442 #ifdef POINTERS_EXTEND_UNSIGNED
6443 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6445 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6449 /* A constant address in TO_RTX can have VOIDmode, we must not try
6450 to call force_reg for that case. Avoid that case. */
6451 if (GET_CODE (op0) == MEM
6452 && GET_MODE (op0) == BLKmode
6453 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6455 && (bitpos % bitsize) == 0
6456 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6457 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6459 rtx temp = change_address (op0, mode1,
6460 plus_constant (XEXP (op0, 0),
6463 if (GET_CODE (XEXP (temp, 0)) == REG)
6466 op0 = change_address (op0, mode1,
6467 force_reg (GET_MODE (XEXP (temp, 0)),
6473 op0 = change_address (op0, VOIDmode,
6474 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6475 force_reg (ptr_mode, offset_rtx)));
6478 /* Don't forget about volatility even if this is a bitfield. */
6479 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6481 op0 = copy_rtx (op0);
6482 MEM_VOLATILE_P (op0) = 1;
6485 /* Check the access. */
6486 if (current_function && current_function_check_memory_usage
6487 && GET_CODE (op0) == MEM)
6489 enum memory_use_mode memory_usage;
6490 memory_usage = get_memory_usage_from_modifier (modifier);
6492 if (memory_usage != MEMORY_USE_DONT)
6497 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6498 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6500 /* Check the access right of the pointer. */
6501 if (size > BITS_PER_UNIT)
6502 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6504 GEN_INT (size / BITS_PER_UNIT),
6505 TYPE_MODE (sizetype),
6506 GEN_INT (memory_usage),
6507 TYPE_MODE (integer_type_node));
6511 /* In cases where an aligned union has an unaligned object
6512 as a field, we might be extracting a BLKmode value from
6513 an integer-mode (e.g., SImode) object. Handle this case
6514 by doing the extract into an object as wide as the field
6515 (which we know to be the width of a basic mode), then
6516 storing into memory, and changing the mode to BLKmode.
6517 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6518 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6519 if (mode1 == VOIDmode
6520 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6521 || (modifier != EXPAND_CONST_ADDRESS
6522 && modifier != EXPAND_INITIALIZER
6523 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6524 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6525 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6526 /* If the field isn't aligned enough to fetch as a memref,
6527 fetch it as a bit field. */
6528 || (SLOW_UNALIGNED_ACCESS
6529 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6530 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6532 enum machine_mode ext_mode = mode;
6534 if (ext_mode == BLKmode)
6535 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6537 if (ext_mode == BLKmode)
6539 /* In this case, BITPOS must start at a byte boundary and
6540 TARGET, if specified, must be a MEM. */
6541 if (GET_CODE (op0) != MEM
6542 || (target != 0 && GET_CODE (target) != MEM)
6543 || bitpos % BITS_PER_UNIT != 0)
6546 op0 = change_address (op0, VOIDmode,
6547 plus_constant (XEXP (op0, 0),
6548 bitpos / BITS_PER_UNIT));
6550 target = assign_temp (type, 0, 1, 1);
6552 emit_block_move (target, op0,
6553 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6560 op0 = validize_mem (op0);
6562 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6563 mark_reg_pointer (XEXP (op0, 0), alignment);
6565 op0 = extract_bit_field (op0, bitsize, bitpos,
6566 unsignedp, target, ext_mode, ext_mode,
6568 int_size_in_bytes (TREE_TYPE (tem)));
6570 /* If the result is a record type and BITSIZE is narrower than
6571 the mode of OP0, an integral mode, and this is a big endian
6572 machine, we must put the field into the high-order bits. */
6573 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6574 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6575 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6576 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6577 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6581 if (mode == BLKmode)
6583 rtx new = assign_stack_temp (ext_mode,
6584 bitsize / BITS_PER_UNIT, 0);
6586 emit_move_insn (new, op0);
6587 op0 = copy_rtx (new);
6588 PUT_MODE (op0, BLKmode);
6589 MEM_SET_IN_STRUCT_P (op0, 1);
6595 /* If the result is BLKmode, use that to access the object
6597 if (mode == BLKmode)
6600 /* Get a reference to just this component. */
6601 if (modifier == EXPAND_CONST_ADDRESS
6602 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6603 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6604 (bitpos / BITS_PER_UNIT)));
6606 op0 = change_address (op0, mode1,
6607 plus_constant (XEXP (op0, 0),
6608 (bitpos / BITS_PER_UNIT)));
6610 if (GET_CODE (op0) == MEM)
6611 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6613 if (GET_CODE (XEXP (op0, 0)) == REG)
6614 mark_reg_pointer (XEXP (op0, 0), alignment);
6616 MEM_SET_IN_STRUCT_P (op0, 1);
6617 MEM_VOLATILE_P (op0) |= volatilep;
6618 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6619 || modifier == EXPAND_CONST_ADDRESS
6620 || modifier == EXPAND_INITIALIZER)
6622 else if (target == 0)
6623 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6625 convert_move (target, op0, unsignedp);
6629 /* Intended for a reference to a buffer of a file-object in Pascal.
6630 But it's not certain that a special tree code will really be
6631 necessary for these. INDIRECT_REF might work for them. */
6637 /* Pascal set IN expression.
6640 rlo = set_low - (set_low%bits_per_word);
6641 the_word = set [ (index - rlo)/bits_per_word ];
6642 bit_index = index % bits_per_word;
6643 bitmask = 1 << bit_index;
6644 return !!(the_word & bitmask); */
6646 tree set = TREE_OPERAND (exp, 0);
6647 tree index = TREE_OPERAND (exp, 1);
6648 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6649 tree set_type = TREE_TYPE (set);
6650 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6651 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6652 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6653 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6654 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6655 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6656 rtx setaddr = XEXP (setval, 0);
6657 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6659 rtx diff, quo, rem, addr, bit, result;
6661 preexpand_calls (exp);
6663 /* If domain is empty, answer is no. Likewise if index is constant
6664 and out of bounds. */
6665 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6666 && TREE_CODE (set_low_bound) == INTEGER_CST
6667 && tree_int_cst_lt (set_high_bound, set_low_bound))
6668 || (TREE_CODE (index) == INTEGER_CST
6669 && TREE_CODE (set_low_bound) == INTEGER_CST
6670 && tree_int_cst_lt (index, set_low_bound))
6671 || (TREE_CODE (set_high_bound) == INTEGER_CST
6672 && TREE_CODE (index) == INTEGER_CST
6673 && tree_int_cst_lt (set_high_bound, index))))
6677 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6679 /* If we get here, we have to generate the code for both cases
6680 (in range and out of range). */
6682 op0 = gen_label_rtx ();
6683 op1 = gen_label_rtx ();
6685 if (! (GET_CODE (index_val) == CONST_INT
6686 && GET_CODE (lo_r) == CONST_INT))
6688 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6689 GET_MODE (index_val), iunsignedp, 0, op1);
6692 if (! (GET_CODE (index_val) == CONST_INT
6693 && GET_CODE (hi_r) == CONST_INT))
6695 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6696 GET_MODE (index_val), iunsignedp, 0, op1);
6699 /* Calculate the element number of bit zero in the first word
6701 if (GET_CODE (lo_r) == CONST_INT)
6702 rlow = GEN_INT (INTVAL (lo_r)
6703 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6705 rlow = expand_binop (index_mode, and_optab, lo_r,
6706 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6707 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6709 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6710 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6712 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6713 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6714 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6715 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6717 addr = memory_address (byte_mode,
6718 expand_binop (index_mode, add_optab, diff,
6719 setaddr, NULL_RTX, iunsignedp,
6722 /* Extract the bit we want to examine */
6723 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6724 gen_rtx_MEM (byte_mode, addr),
6725 make_tree (TREE_TYPE (index), rem),
6727 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6728 GET_MODE (target) == byte_mode ? target : 0,
6729 1, OPTAB_LIB_WIDEN);
6731 if (result != target)
6732 convert_move (target, result, 1);
6734 /* Output the code to handle the out-of-range case. */
6737 emit_move_insn (target, const0_rtx);
6742 case WITH_CLEANUP_EXPR:
6743 if (RTL_EXPR_RTL (exp) == 0)
6746 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6747 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6749 /* That's it for this cleanup. */
6750 TREE_OPERAND (exp, 2) = 0;
6752 return RTL_EXPR_RTL (exp);
6754 case CLEANUP_POINT_EXPR:
6756 /* Start a new binding layer that will keep track of all cleanup
6757 actions to be performed. */
6758 expand_start_bindings (0);
6760 target_temp_slot_level = temp_slot_level;
6762 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6763 /* If we're going to use this value, load it up now. */
6765 op0 = force_not_mem (op0);
6766 preserve_temp_slots (op0);
6767 expand_end_bindings (NULL_TREE, 0, 0);
6772 /* Check for a built-in function. */
6773 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6774 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6776 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6777 return expand_builtin (exp, target, subtarget, tmode, ignore);
6779 /* If this call was expanded already by preexpand_calls,
6780 just return the result we got. */
6781 if (CALL_EXPR_RTL (exp) != 0)
6782 return CALL_EXPR_RTL (exp);
6784 return expand_call (exp, target, ignore);
6786 case NON_LVALUE_EXPR:
6789 case REFERENCE_EXPR:
6790 if (TREE_CODE (type) == UNION_TYPE)
6792 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6795 if (mode != BLKmode)
6796 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6798 target = assign_temp (type, 0, 1, 1);
6801 if (GET_CODE (target) == MEM)
6802 /* Store data into beginning of memory target. */
6803 store_expr (TREE_OPERAND (exp, 0),
6804 change_address (target, TYPE_MODE (valtype), 0), 0);
6806 else if (GET_CODE (target) == REG)
6807 /* Store this field into a union of the proper type. */
6808 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6809 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6811 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6816 /* Return the entire union. */
6820 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6822 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6825 /* If the signedness of the conversion differs and OP0 is
6826 a promoted SUBREG, clear that indication since we now
6827 have to do the proper extension. */
6828 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6829 && GET_CODE (op0) == SUBREG)
6830 SUBREG_PROMOTED_VAR_P (op0) = 0;
6835 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6836 if (GET_MODE (op0) == mode)
6839 /* If OP0 is a constant, just convert it into the proper mode. */
6840 if (CONSTANT_P (op0))
6842 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6843 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6845 if (modifier == EXPAND_INITIALIZER)
6846 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6850 convert_to_mode (mode, op0,
6851 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6853 convert_move (target, op0,
6854 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6858 /* We come here from MINUS_EXPR when the second operand is a
6861 this_optab = add_optab;
6863 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6864 something else, make sure we add the register to the constant and
6865 then to the other thing. This case can occur during strength
6866 reduction and doing it this way will produce better code if the
6867 frame pointer or argument pointer is eliminated.
6869 fold-const.c will ensure that the constant is always in the inner
6870 PLUS_EXPR, so the only case we need to do anything about is if
6871 sp, ap, or fp is our second argument, in which case we must swap
6872 the innermost first argument and our second argument. */
6874 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6875 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6876 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6877 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6878 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6879 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6881 tree t = TREE_OPERAND (exp, 1);
6883 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6884 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6887 /* If the result is to be ptr_mode and we are adding an integer to
6888 something, we might be forming a constant. So try to use
6889 plus_constant. If it produces a sum and we can't accept it,
6890 use force_operand. This allows P = &ARR[const] to generate
6891 efficient code on machines where a SYMBOL_REF is not a valid
6894 If this is an EXPAND_SUM call, always return the sum. */
6895 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6896 || mode == ptr_mode)
6898 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6899 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6900 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6904 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6906 /* Use immed_double_const to ensure that the constant is
6907 truncated according to the mode of OP1, then sign extended
6908 to a HOST_WIDE_INT. Using the constant directly can result
6909 in non-canonical RTL in a 64x32 cross compile. */
6911 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
6913 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
6914 op1 = plus_constant (op1, INTVAL (constant_part));
6915 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6916 op1 = force_operand (op1, target);
6920 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6921 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6922 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6926 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6928 if (! CONSTANT_P (op0))
6930 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6931 VOIDmode, modifier);
6932 /* Don't go to both_summands if modifier
6933 says it's not right to return a PLUS. */
6934 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6938 /* Use immed_double_const to ensure that the constant is
6939 truncated according to the mode of OP1, then sign extended
6940 to a HOST_WIDE_INT. Using the constant directly can result
6941 in non-canonical RTL in a 64x32 cross compile. */
6943 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
6945 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6946 op0 = plus_constant (op0, INTVAL (constant_part));
6947 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6948 op0 = force_operand (op0, target);
6953 /* No sense saving up arithmetic to be done
6954 if it's all in the wrong mode to form part of an address.
6955 And force_operand won't know whether to sign-extend or
6957 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6958 || mode != ptr_mode)
6961 preexpand_calls (exp);
6962 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6965 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6966 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6969 /* Make sure any term that's a sum with a constant comes last. */
6970 if (GET_CODE (op0) == PLUS
6971 && CONSTANT_P (XEXP (op0, 1)))
6977 /* If adding to a sum including a constant,
6978 associate it to put the constant outside. */
6979 if (GET_CODE (op1) == PLUS
6980 && CONSTANT_P (XEXP (op1, 1)))
6982 rtx constant_term = const0_rtx;
6984 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6987 /* Ensure that MULT comes first if there is one. */
6988 else if (GET_CODE (op0) == MULT)
6989 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6991 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6993 /* Let's also eliminate constants from op0 if possible. */
6994 op0 = eliminate_constant_term (op0, &constant_term);
6996 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6997 their sum should be a constant. Form it into OP1, since the
6998 result we want will then be OP0 + OP1. */
7000 temp = simplify_binary_operation (PLUS, mode, constant_term,
7005 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7008 /* Put a constant term last and put a multiplication first. */
7009 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7010 temp = op1, op1 = op0, op0 = temp;
7012 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7013 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7016 /* For initializers, we are allowed to return a MINUS of two
7017 symbolic constants. Here we handle all cases when both operands
7019 /* Handle difference of two symbolic constants,
7020 for the sake of an initializer. */
7021 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7022 && really_constant_p (TREE_OPERAND (exp, 0))
7023 && really_constant_p (TREE_OPERAND (exp, 1)))
7025 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7026 VOIDmode, ro_modifier);
7027 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7028 VOIDmode, ro_modifier);
7030 /* If the last operand is a CONST_INT, use plus_constant of
7031 the negated constant. Else make the MINUS. */
7032 if (GET_CODE (op1) == CONST_INT)
7033 return plus_constant (op0, - INTVAL (op1));
7035 return gen_rtx_MINUS (mode, op0, op1);
7037 /* Convert A - const to A + (-const). */
7038 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7040 tree negated = fold (build1 (NEGATE_EXPR, type,
7041 TREE_OPERAND (exp, 1)));
7043 /* Deal with the case where we can't negate the constant
7045 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7047 tree newtype = signed_type (type);
7048 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7049 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7050 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7052 if (! TREE_OVERFLOW (newneg))
7053 return expand_expr (convert (type,
7054 build (PLUS_EXPR, newtype,
7056 target, tmode, ro_modifier);
7060 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7064 this_optab = sub_optab;
7068 preexpand_calls (exp);
7069 /* If first operand is constant, swap them.
7070 Thus the following special case checks need only
7071 check the second operand. */
7072 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7074 register tree t1 = TREE_OPERAND (exp, 0);
7075 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7076 TREE_OPERAND (exp, 1) = t1;
7079 /* Attempt to return something suitable for generating an
7080 indexed address, for machines that support that. */
7082 if (modifier == EXPAND_SUM && mode == ptr_mode
7083 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7084 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7086 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7089 /* Apply distributive law if OP0 is x+c. */
7090 if (GET_CODE (op0) == PLUS
7091 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7092 return gen_rtx_PLUS (mode,
7093 gen_rtx_MULT (mode, XEXP (op0, 0),
7094 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7095 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7096 * INTVAL (XEXP (op0, 1))));
7098 if (GET_CODE (op0) != REG)
7099 op0 = force_operand (op0, NULL_RTX);
7100 if (GET_CODE (op0) != REG)
7101 op0 = copy_to_mode_reg (mode, op0);
7103 return gen_rtx_MULT (mode, op0,
7104 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7107 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7110 /* Check for multiplying things that have been extended
7111 from a narrower type. If this machine supports multiplying
7112 in that narrower type with a result in the desired type,
7113 do it that way, and avoid the explicit type-conversion. */
7114 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7115 && TREE_CODE (type) == INTEGER_TYPE
7116 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7117 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7118 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7119 && int_fits_type_p (TREE_OPERAND (exp, 1),
7120 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7121 /* Don't use a widening multiply if a shift will do. */
7122 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7123 > HOST_BITS_PER_WIDE_INT)
7124 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7126 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7127 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7129 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7130 /* If both operands are extended, they must either both
7131 be zero-extended or both be sign-extended. */
7132 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7134 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7136 enum machine_mode innermode
7137 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7138 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7139 ? smul_widen_optab : umul_widen_optab);
7140 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7141 ? umul_widen_optab : smul_widen_optab);
7142 if (mode == GET_MODE_WIDER_MODE (innermode))
7144 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7146 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7147 NULL_RTX, VOIDmode, 0);
7148 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7149 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7152 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7153 NULL_RTX, VOIDmode, 0);
7156 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7157 && innermode == word_mode)
7160 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7161 NULL_RTX, VOIDmode, 0);
7162 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7163 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7166 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7167 NULL_RTX, VOIDmode, 0);
7168 temp = expand_binop (mode, other_optab, op0, op1, target,
7169 unsignedp, OPTAB_LIB_WIDEN);
7170 htem = expand_mult_highpart_adjust (innermode,
7171 gen_highpart (innermode, temp),
7173 gen_highpart (innermode, temp),
7175 emit_move_insn (gen_highpart (innermode, temp), htem);
7180 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7181 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7182 return expand_mult (mode, op0, op1, target, unsignedp);
7184 case TRUNC_DIV_EXPR:
7185 case FLOOR_DIV_EXPR:
7187 case ROUND_DIV_EXPR:
7188 case EXACT_DIV_EXPR:
7189 preexpand_calls (exp);
7190 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7192 /* Possible optimization: compute the dividend with EXPAND_SUM
7193 then if the divisor is constant can optimize the case
7194 where some terms of the dividend have coeffs divisible by it. */
7195 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7196 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7197 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7200 this_optab = flodiv_optab;
7203 case TRUNC_MOD_EXPR:
7204 case FLOOR_MOD_EXPR:
7206 case ROUND_MOD_EXPR:
7207 preexpand_calls (exp);
7208 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7210 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7211 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7212 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7214 case FIX_ROUND_EXPR:
7215 case FIX_FLOOR_EXPR:
7217 abort (); /* Not used for C. */
7219 case FIX_TRUNC_EXPR:
7220 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7222 target = gen_reg_rtx (mode);
7223 expand_fix (target, op0, unsignedp);
7227 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7229 target = gen_reg_rtx (mode);
7230 /* expand_float can't figure out what to do if FROM has VOIDmode.
7231 So give it the correct mode. With -O, cse will optimize this. */
7232 if (GET_MODE (op0) == VOIDmode)
7233 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7235 expand_float (target, op0,
7236 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7240 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7241 temp = expand_unop (mode, neg_optab, op0, target, 0);
7247 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7249 /* Handle complex values specially. */
7250 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7251 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7252 return expand_complex_abs (mode, op0, target, unsignedp);
7254 /* Unsigned abs is simply the operand. Testing here means we don't
7255 risk generating incorrect code below. */
7256 if (TREE_UNSIGNED (type))
7259 return expand_abs (mode, op0, target,
7260 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7264 target = original_target;
7265 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7266 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7267 || GET_MODE (target) != mode
7268 || (GET_CODE (target) == REG
7269 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7270 target = gen_reg_rtx (mode);
7271 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7272 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7274 /* First try to do it with a special MIN or MAX instruction.
7275 If that does not win, use a conditional jump to select the proper
7277 this_optab = (TREE_UNSIGNED (type)
7278 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7279 : (code == MIN_EXPR ? smin_optab : smax_optab));
7281 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7286 /* At this point, a MEM target is no longer useful; we will get better
7289 if (GET_CODE (target) == MEM)
7290 target = gen_reg_rtx (mode);
7293 emit_move_insn (target, op0);
7295 op0 = gen_label_rtx ();
7297 /* If this mode is an integer too wide to compare properly,
7298 compare word by word. Rely on cse to optimize constant cases. */
7299 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode))
7301 if (code == MAX_EXPR)
7302 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7303 target, op1, NULL_RTX, op0);
7305 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7306 op1, target, NULL_RTX, op0);
7310 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7311 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7312 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7315 emit_move_insn (target, op1);
7320 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7321 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7327 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7328 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7333 /* ??? Can optimize bitwise operations with one arg constant.
7334 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7335 and (a bitwise1 b) bitwise2 b (etc)
7336 but that is probably not worth while. */
7338 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7339 boolean values when we want in all cases to compute both of them. In
7340 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7341 as actual zero-or-1 values and then bitwise anding. In cases where
7342 there cannot be any side effects, better code would be made by
7343 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7344 how to recognize those cases. */
7346 case TRUTH_AND_EXPR:
7348 this_optab = and_optab;
7353 this_optab = ior_optab;
7356 case TRUTH_XOR_EXPR:
7358 this_optab = xor_optab;
7365 preexpand_calls (exp);
7366 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7368 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7369 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7372 /* Could determine the answer when only additive constants differ. Also,
7373 the addition of one can be handled by changing the condition. */
7380 preexpand_calls (exp);
7381 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7385 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7386 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7388 && GET_CODE (original_target) == REG
7389 && (GET_MODE (original_target)
7390 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7392 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7395 if (temp != original_target)
7396 temp = copy_to_reg (temp);
7398 op1 = gen_label_rtx ();
7399 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7400 GET_MODE (temp), unsignedp, 0, op1);
7401 emit_move_insn (temp, const1_rtx);
7406 /* If no set-flag instruction, must generate a conditional
7407 store into a temporary variable. Drop through
7408 and handle this like && and ||. */
7410 case TRUTH_ANDIF_EXPR:
7411 case TRUTH_ORIF_EXPR:
7413 && (target == 0 || ! safe_from_p (target, exp, 1)
7414 /* Make sure we don't have a hard reg (such as function's return
7415 value) live across basic blocks, if not optimizing. */
7416 || (!optimize && GET_CODE (target) == REG
7417 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7418 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7421 emit_clr_insn (target);
7423 op1 = gen_label_rtx ();
7424 jumpifnot (exp, op1);
7427 emit_0_to_1_insn (target);
7430 return ignore ? const0_rtx : target;
7432 case TRUTH_NOT_EXPR:
7433 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7434 /* The parser is careful to generate TRUTH_NOT_EXPR
7435 only with operands that are always zero or one. */
7436 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7437 target, 1, OPTAB_LIB_WIDEN);
7443 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7445 return expand_expr (TREE_OPERAND (exp, 1),
7446 (ignore ? const0_rtx : target),
7450 /* If we would have a "singleton" (see below) were it not for a
7451 conversion in each arm, bring that conversion back out. */
7452 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7453 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7454 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7455 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7457 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7458 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7460 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7461 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7462 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7463 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7464 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7465 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7466 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7467 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7468 return expand_expr (build1 (NOP_EXPR, type,
7469 build (COND_EXPR, TREE_TYPE (true),
7470 TREE_OPERAND (exp, 0),
7472 target, tmode, modifier);
7476 /* Note that COND_EXPRs whose type is a structure or union
7477 are required to be constructed to contain assignments of
7478 a temporary variable, so that we can evaluate them here
7479 for side effect only. If type is void, we must do likewise. */
7481 /* If an arm of the branch requires a cleanup,
7482 only that cleanup is performed. */
7485 tree binary_op = 0, unary_op = 0;
7487 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7488 convert it to our mode, if necessary. */
7489 if (integer_onep (TREE_OPERAND (exp, 1))
7490 && integer_zerop (TREE_OPERAND (exp, 2))
7491 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7495 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7500 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7501 if (GET_MODE (op0) == mode)
7505 target = gen_reg_rtx (mode);
7506 convert_move (target, op0, unsignedp);
7510 /* Check for X ? A + B : A. If we have this, we can copy A to the
7511 output and conditionally add B. Similarly for unary operations.
7512 Don't do this if X has side-effects because those side effects
7513 might affect A or B and the "?" operation is a sequence point in
7514 ANSI. (operand_equal_p tests for side effects.) */
7516 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7517 && operand_equal_p (TREE_OPERAND (exp, 2),
7518 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7519 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7520 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7521 && operand_equal_p (TREE_OPERAND (exp, 1),
7522 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7523 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7524 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7525 && operand_equal_p (TREE_OPERAND (exp, 2),
7526 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7527 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7528 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7529 && operand_equal_p (TREE_OPERAND (exp, 1),
7530 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7531 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7533 /* If we are not to produce a result, we have no target. Otherwise,
7534 if a target was specified use it; it will not be used as an
7535 intermediate target unless it is safe. If no target, use a
7540 else if (original_target
7541 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7542 || (singleton && GET_CODE (original_target) == REG
7543 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7544 && original_target == var_rtx (singleton)))
7545 && GET_MODE (original_target) == mode
7546 #ifdef HAVE_conditional_move
7547 && (! can_conditionally_move_p (mode)
7548 || GET_CODE (original_target) == REG
7549 || TREE_ADDRESSABLE (type))
7551 && ! (GET_CODE (original_target) == MEM
7552 && MEM_VOLATILE_P (original_target)))
7553 temp = original_target;
7554 else if (TREE_ADDRESSABLE (type))
7557 temp = assign_temp (type, 0, 0, 1);
7559 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7560 do the test of X as a store-flag operation, do this as
7561 A + ((X != 0) << log C). Similarly for other simple binary
7562 operators. Only do for C == 1 if BRANCH_COST is low. */
7563 if (temp && singleton && binary_op
7564 && (TREE_CODE (binary_op) == PLUS_EXPR
7565 || TREE_CODE (binary_op) == MINUS_EXPR
7566 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7567 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7568 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7569 : integer_onep (TREE_OPERAND (binary_op, 1)))
7570 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7573 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7574 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7575 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7578 /* If we had X ? A : A + 1, do this as A + (X == 0).
7580 We have to invert the truth value here and then put it
7581 back later if do_store_flag fails. We cannot simply copy
7582 TREE_OPERAND (exp, 0) to another variable and modify that
7583 because invert_truthvalue can modify the tree pointed to
7585 if (singleton == TREE_OPERAND (exp, 1))
7586 TREE_OPERAND (exp, 0)
7587 = invert_truthvalue (TREE_OPERAND (exp, 0));
7589 result = do_store_flag (TREE_OPERAND (exp, 0),
7590 (safe_from_p (temp, singleton, 1)
7592 mode, BRANCH_COST <= 1);
7594 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7595 result = expand_shift (LSHIFT_EXPR, mode, result,
7596 build_int_2 (tree_log2
7600 (safe_from_p (temp, singleton, 1)
7601 ? temp : NULL_RTX), 0);
7605 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7606 return expand_binop (mode, boptab, op1, result, temp,
7607 unsignedp, OPTAB_LIB_WIDEN);
7609 else if (singleton == TREE_OPERAND (exp, 1))
7610 TREE_OPERAND (exp, 0)
7611 = invert_truthvalue (TREE_OPERAND (exp, 0));
7614 do_pending_stack_adjust ();
7616 op0 = gen_label_rtx ();
7618 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7622 /* If the target conflicts with the other operand of the
7623 binary op, we can't use it. Also, we can't use the target
7624 if it is a hard register, because evaluating the condition
7625 might clobber it. */
7627 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7628 || (GET_CODE (temp) == REG
7629 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7630 temp = gen_reg_rtx (mode);
7631 store_expr (singleton, temp, 0);
7634 expand_expr (singleton,
7635 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7636 if (singleton == TREE_OPERAND (exp, 1))
7637 jumpif (TREE_OPERAND (exp, 0), op0);
7639 jumpifnot (TREE_OPERAND (exp, 0), op0);
7641 start_cleanup_deferral ();
7642 if (binary_op && temp == 0)
7643 /* Just touch the other operand. */
7644 expand_expr (TREE_OPERAND (binary_op, 1),
7645 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7647 store_expr (build (TREE_CODE (binary_op), type,
7648 make_tree (type, temp),
7649 TREE_OPERAND (binary_op, 1)),
7652 store_expr (build1 (TREE_CODE (unary_op), type,
7653 make_tree (type, temp)),
7657 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7658 comparison operator. If we have one of these cases, set the
7659 output to A, branch on A (cse will merge these two references),
7660 then set the output to FOO. */
7662 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7663 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7664 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7665 TREE_OPERAND (exp, 1), 0)
7666 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7667 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7668 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7670 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7671 temp = gen_reg_rtx (mode);
7672 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7673 jumpif (TREE_OPERAND (exp, 0), op0);
7675 start_cleanup_deferral ();
7676 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7680 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7681 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7682 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7683 TREE_OPERAND (exp, 2), 0)
7684 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7685 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7686 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7688 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7689 temp = gen_reg_rtx (mode);
7690 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7691 jumpifnot (TREE_OPERAND (exp, 0), op0);
7693 start_cleanup_deferral ();
7694 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7699 op1 = gen_label_rtx ();
7700 jumpifnot (TREE_OPERAND (exp, 0), op0);
7702 start_cleanup_deferral ();
7704 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7706 expand_expr (TREE_OPERAND (exp, 1),
7707 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7708 end_cleanup_deferral ();
7710 emit_jump_insn (gen_jump (op1));
7713 start_cleanup_deferral ();
7715 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7717 expand_expr (TREE_OPERAND (exp, 2),
7718 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7721 end_cleanup_deferral ();
7732 /* Something needs to be initialized, but we didn't know
7733 where that thing was when building the tree. For example,
7734 it could be the return value of a function, or a parameter
7735 to a function which lays down in the stack, or a temporary
7736 variable which must be passed by reference.
7738 We guarantee that the expression will either be constructed
7739 or copied into our original target. */
7741 tree slot = TREE_OPERAND (exp, 0);
7742 tree cleanups = NULL_TREE;
7745 if (TREE_CODE (slot) != VAR_DECL)
7749 target = original_target;
7753 if (DECL_RTL (slot) != 0)
7755 target = DECL_RTL (slot);
7756 /* If we have already expanded the slot, so don't do
7758 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7763 target = assign_temp (type, 2, 0, 1);
7764 /* All temp slots at this level must not conflict. */
7765 preserve_temp_slots (target);
7766 DECL_RTL (slot) = target;
7767 if (TREE_ADDRESSABLE (slot))
7769 TREE_ADDRESSABLE (slot) = 0;
7770 mark_addressable (slot);
7773 /* Since SLOT is not known to the called function
7774 to belong to its stack frame, we must build an explicit
7775 cleanup. This case occurs when we must build up a reference
7776 to pass the reference as an argument. In this case,
7777 it is very likely that such a reference need not be
7780 if (TREE_OPERAND (exp, 2) == 0)
7781 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7782 cleanups = TREE_OPERAND (exp, 2);
7787 /* This case does occur, when expanding a parameter which
7788 needs to be constructed on the stack. The target
7789 is the actual stack address that we want to initialize.
7790 The function we call will perform the cleanup in this case. */
7792 /* If we have already assigned it space, use that space,
7793 not target that we were passed in, as our target
7794 parameter is only a hint. */
7795 if (DECL_RTL (slot) != 0)
7797 target = DECL_RTL (slot);
7798 /* If we have already expanded the slot, so don't do
7800 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7805 DECL_RTL (slot) = target;
7806 /* If we must have an addressable slot, then make sure that
7807 the RTL that we just stored in slot is OK. */
7808 if (TREE_ADDRESSABLE (slot))
7810 TREE_ADDRESSABLE (slot) = 0;
7811 mark_addressable (slot);
7816 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7817 /* Mark it as expanded. */
7818 TREE_OPERAND (exp, 1) = NULL_TREE;
7820 TREE_USED (slot) = 1;
7821 store_expr (exp1, target, 0);
7823 expand_decl_cleanup (NULL_TREE, cleanups);
7830 tree lhs = TREE_OPERAND (exp, 0);
7831 tree rhs = TREE_OPERAND (exp, 1);
7832 tree noncopied_parts = 0;
7833 tree lhs_type = TREE_TYPE (lhs);
7835 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7836 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7837 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7838 TYPE_NONCOPIED_PARTS (lhs_type));
7839 while (noncopied_parts != 0)
7841 expand_assignment (TREE_VALUE (noncopied_parts),
7842 TREE_PURPOSE (noncopied_parts), 0, 0);
7843 noncopied_parts = TREE_CHAIN (noncopied_parts);
7850 /* If lhs is complex, expand calls in rhs before computing it.
7851 That's so we don't compute a pointer and save it over a call.
7852 If lhs is simple, compute it first so we can give it as a
7853 target if the rhs is just a call. This avoids an extra temp and copy
7854 and that prevents a partial-subsumption which makes bad code.
7855 Actually we could treat component_ref's of vars like vars. */
7857 tree lhs = TREE_OPERAND (exp, 0);
7858 tree rhs = TREE_OPERAND (exp, 1);
7859 tree noncopied_parts = 0;
7860 tree lhs_type = TREE_TYPE (lhs);
7864 if (TREE_CODE (lhs) != VAR_DECL
7865 && TREE_CODE (lhs) != RESULT_DECL
7866 && TREE_CODE (lhs) != PARM_DECL
7867 && ! (TREE_CODE (lhs) == INDIRECT_REF
7868 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7869 preexpand_calls (exp);
7871 /* Check for |= or &= of a bitfield of size one into another bitfield
7872 of size 1. In this case, (unless we need the result of the
7873 assignment) we can do this more efficiently with a
7874 test followed by an assignment, if necessary.
7876 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7877 things change so we do, this code should be enhanced to
7880 && TREE_CODE (lhs) == COMPONENT_REF
7881 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7882 || TREE_CODE (rhs) == BIT_AND_EXPR)
7883 && TREE_OPERAND (rhs, 0) == lhs
7884 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7885 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7886 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7888 rtx label = gen_label_rtx ();
7890 do_jump (TREE_OPERAND (rhs, 1),
7891 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7892 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7893 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7894 (TREE_CODE (rhs) == BIT_IOR_EXPR
7896 : integer_zero_node)),
7898 do_pending_stack_adjust ();
7903 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7904 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7905 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7906 TYPE_NONCOPIED_PARTS (lhs_type));
7908 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7909 while (noncopied_parts != 0)
7911 expand_assignment (TREE_PURPOSE (noncopied_parts),
7912 TREE_VALUE (noncopied_parts), 0, 0);
7913 noncopied_parts = TREE_CHAIN (noncopied_parts);
7919 if (!TREE_OPERAND (exp, 0))
7920 expand_null_return ();
7922 expand_return (TREE_OPERAND (exp, 0));
7925 case PREINCREMENT_EXPR:
7926 case PREDECREMENT_EXPR:
7927 return expand_increment (exp, 0, ignore);
7929 case POSTINCREMENT_EXPR:
7930 case POSTDECREMENT_EXPR:
7931 /* Faster to treat as pre-increment if result is not used. */
7932 return expand_increment (exp, ! ignore, ignore);
7935 /* If nonzero, TEMP will be set to the address of something that might
7936 be a MEM corresponding to a stack slot. */
7939 /* Are we taking the address of a nested function? */
7940 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7941 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7942 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7943 && ! TREE_STATIC (exp))
7945 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7946 op0 = force_operand (op0, target);
7948 /* If we are taking the address of something erroneous, just
7950 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7954 /* We make sure to pass const0_rtx down if we came in with
7955 ignore set, to avoid doing the cleanups twice for something. */
7956 op0 = expand_expr (TREE_OPERAND (exp, 0),
7957 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7958 (modifier == EXPAND_INITIALIZER
7959 ? modifier : EXPAND_CONST_ADDRESS));
7961 /* If we are going to ignore the result, OP0 will have been set
7962 to const0_rtx, so just return it. Don't get confused and
7963 think we are taking the address of the constant. */
7967 op0 = protect_from_queue (op0, 0);
7969 /* We would like the object in memory. If it is a constant,
7970 we can have it be statically allocated into memory. For
7971 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7972 memory and store the value into it. */
7974 if (CONSTANT_P (op0))
7975 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7977 else if (GET_CODE (op0) == MEM)
7979 mark_temp_addr_taken (op0);
7980 temp = XEXP (op0, 0);
7983 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7984 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7986 /* If this object is in a register, it must be not
7988 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7989 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7991 mark_temp_addr_taken (memloc);
7992 emit_move_insn (memloc, op0);
7996 if (GET_CODE (op0) != MEM)
7999 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8001 temp = XEXP (op0, 0);
8002 #ifdef POINTERS_EXTEND_UNSIGNED
8003 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8004 && mode == ptr_mode)
8005 temp = convert_memory_address (ptr_mode, temp);
8010 op0 = force_operand (XEXP (op0, 0), target);
8013 if (flag_force_addr && GET_CODE (op0) != REG)
8014 op0 = force_reg (Pmode, op0);
8016 if (GET_CODE (op0) == REG
8017 && ! REG_USERVAR_P (op0))
8018 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8020 /* If we might have had a temp slot, add an equivalent address
8023 update_temp_slot_address (temp, op0);
8025 #ifdef POINTERS_EXTEND_UNSIGNED
8026 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8027 && mode == ptr_mode)
8028 op0 = convert_memory_address (ptr_mode, op0);
8033 case ENTRY_VALUE_EXPR:
8036 /* COMPLEX type for Extended Pascal & Fortran */
8039 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8042 /* Get the rtx code of the operands. */
8043 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8044 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8047 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8051 /* Move the real (op0) and imaginary (op1) parts to their location. */
8052 emit_move_insn (gen_realpart (mode, target), op0);
8053 emit_move_insn (gen_imagpart (mode, target), op1);
8055 insns = get_insns ();
8058 /* Complex construction should appear as a single unit. */
8059 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8060 each with a separate pseudo as destination.
8061 It's not correct for flow to treat them as a unit. */
8062 if (GET_CODE (target) != CONCAT)
8063 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8071 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8072 return gen_realpart (mode, op0);
8075 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8076 return gen_imagpart (mode, op0);
8080 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8084 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8087 target = gen_reg_rtx (mode);
8091 /* Store the realpart and the negated imagpart to target. */
8092 emit_move_insn (gen_realpart (partmode, target),
8093 gen_realpart (partmode, op0));
8095 imag_t = gen_imagpart (partmode, target);
8096 temp = expand_unop (partmode, neg_optab,
8097 gen_imagpart (partmode, op0), imag_t, 0);
8099 emit_move_insn (imag_t, temp);
8101 insns = get_insns ();
8104 /* Conjugate should appear as a single unit
8105 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8106 each with a separate pseudo as destination.
8107 It's not correct for flow to treat them as a unit. */
8108 if (GET_CODE (target) != CONCAT)
8109 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8116 case TRY_CATCH_EXPR:
8118 tree handler = TREE_OPERAND (exp, 1);
8120 expand_eh_region_start ();
8122 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8124 expand_eh_region_end (handler);
8129 case TRY_FINALLY_EXPR:
8131 tree try_block = TREE_OPERAND (exp, 0);
8132 tree finally_block = TREE_OPERAND (exp, 1);
8133 rtx finally_label = gen_label_rtx ();
8134 rtx done_label = gen_label_rtx ();
8135 rtx return_link = gen_reg_rtx (Pmode);
8136 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8137 (tree) finally_label, (tree) return_link);
8138 TREE_SIDE_EFFECTS (cleanup) = 1;
8140 /* Start a new binding layer that will keep track of all cleanup
8141 actions to be performed. */
8142 expand_start_bindings (0);
8144 target_temp_slot_level = temp_slot_level;
8146 expand_decl_cleanup (NULL_TREE, cleanup);
8147 op0 = expand_expr (try_block, target, tmode, modifier);
8149 preserve_temp_slots (op0);
8150 expand_end_bindings (NULL_TREE, 0, 0);
8151 emit_jump (done_label);
8152 emit_label (finally_label);
8153 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8154 emit_indirect_jump (return_link);
8155 emit_label (done_label);
8159 case GOTO_SUBROUTINE_EXPR:
8161 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8162 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8163 rtx return_address = gen_label_rtx ();
8164 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8166 emit_label (return_address);
8172 rtx dcc = get_dynamic_cleanup_chain ();
8173 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8179 rtx dhc = get_dynamic_handler_chain ();
8180 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8185 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8188 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8191 /* Here to do an ordinary binary operator, generating an instruction
8192 from the optab already placed in `this_optab'. */
8194 preexpand_calls (exp);
8195 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8197 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8198 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8200 temp = expand_binop (mode, this_optab, op0, op1, target,
8201 unsignedp, OPTAB_LIB_WIDEN);
8207 /* Return the tree node and offset if a given argument corresponds to
8208 a string constant. */
8211 string_constant (arg, ptr_offset)
8217 if (TREE_CODE (arg) == ADDR_EXPR
8218 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8220 *ptr_offset = integer_zero_node;
8221 return TREE_OPERAND (arg, 0);
8223 else if (TREE_CODE (arg) == PLUS_EXPR)
8225 tree arg0 = TREE_OPERAND (arg, 0);
8226 tree arg1 = TREE_OPERAND (arg, 1);
8231 if (TREE_CODE (arg0) == ADDR_EXPR
8232 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8235 return TREE_OPERAND (arg0, 0);
8237 else if (TREE_CODE (arg1) == ADDR_EXPR
8238 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8241 return TREE_OPERAND (arg1, 0);
8248 /* Expand code for a post- or pre- increment or decrement
8249 and return the RTX for the result.
8250 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8253 expand_increment (exp, post, ignore)
8257 register rtx op0, op1;
8258 register rtx temp, value;
8259 register tree incremented = TREE_OPERAND (exp, 0);
8260 optab this_optab = add_optab;
8262 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8263 int op0_is_copy = 0;
8264 int single_insn = 0;
8265 /* 1 means we can't store into OP0 directly,
8266 because it is a subreg narrower than a word,
8267 and we don't dare clobber the rest of the word. */
8270 /* Stabilize any component ref that might need to be
8271 evaluated more than once below. */
8273 || TREE_CODE (incremented) == BIT_FIELD_REF
8274 || (TREE_CODE (incremented) == COMPONENT_REF
8275 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8276 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8277 incremented = stabilize_reference (incremented);
8278 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8279 ones into save exprs so that they don't accidentally get evaluated
8280 more than once by the code below. */
8281 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8282 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8283 incremented = save_expr (incremented);
8285 /* Compute the operands as RTX.
8286 Note whether OP0 is the actual lvalue or a copy of it:
8287 I believe it is a copy iff it is a register or subreg
8288 and insns were generated in computing it. */
8290 temp = get_last_insn ();
8291 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8293 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8294 in place but instead must do sign- or zero-extension during assignment,
8295 so we copy it into a new register and let the code below use it as
8298 Note that we can safely modify this SUBREG since it is know not to be
8299 shared (it was made by the expand_expr call above). */
8301 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8304 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8308 else if (GET_CODE (op0) == SUBREG
8309 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8311 /* We cannot increment this SUBREG in place. If we are
8312 post-incrementing, get a copy of the old value. Otherwise,
8313 just mark that we cannot increment in place. */
8315 op0 = copy_to_reg (op0);
8320 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8321 && temp != get_last_insn ());
8322 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8323 EXPAND_MEMORY_USE_BAD);
8325 /* Decide whether incrementing or decrementing. */
8326 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8327 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8328 this_optab = sub_optab;
8330 /* Convert decrement by a constant into a negative increment. */
8331 if (this_optab == sub_optab
8332 && GET_CODE (op1) == CONST_INT)
8334 op1 = GEN_INT (- INTVAL (op1));
8335 this_optab = add_optab;
8338 /* For a preincrement, see if we can do this with a single instruction. */
8341 icode = (int) this_optab->handlers[(int) mode].insn_code;
8342 if (icode != (int) CODE_FOR_nothing
8343 /* Make sure that OP0 is valid for operands 0 and 1
8344 of the insn we want to queue. */
8345 && (*insn_operand_predicate[icode][0]) (op0, mode)
8346 && (*insn_operand_predicate[icode][1]) (op0, mode)
8347 && (*insn_operand_predicate[icode][2]) (op1, mode))
8351 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8352 then we cannot just increment OP0. We must therefore contrive to
8353 increment the original value. Then, for postincrement, we can return
8354 OP0 since it is a copy of the old value. For preincrement, expand here
8355 unless we can do it with a single insn.
8357 Likewise if storing directly into OP0 would clobber high bits
8358 we need to preserve (bad_subreg). */
8359 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8361 /* This is the easiest way to increment the value wherever it is.
8362 Problems with multiple evaluation of INCREMENTED are prevented
8363 because either (1) it is a component_ref or preincrement,
8364 in which case it was stabilized above, or (2) it is an array_ref
8365 with constant index in an array in a register, which is
8366 safe to reevaluate. */
8367 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8368 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8369 ? MINUS_EXPR : PLUS_EXPR),
8372 TREE_OPERAND (exp, 1));
8374 while (TREE_CODE (incremented) == NOP_EXPR
8375 || TREE_CODE (incremented) == CONVERT_EXPR)
8377 newexp = convert (TREE_TYPE (incremented), newexp);
8378 incremented = TREE_OPERAND (incremented, 0);
8381 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8382 return post ? op0 : temp;
8387 /* We have a true reference to the value in OP0.
8388 If there is an insn to add or subtract in this mode, queue it.
8389 Queueing the increment insn avoids the register shuffling
8390 that often results if we must increment now and first save
8391 the old value for subsequent use. */
8393 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8394 op0 = stabilize (op0);
8397 icode = (int) this_optab->handlers[(int) mode].insn_code;
8398 if (icode != (int) CODE_FOR_nothing
8399 /* Make sure that OP0 is valid for operands 0 and 1
8400 of the insn we want to queue. */
8401 && (*insn_operand_predicate[icode][0]) (op0, mode)
8402 && (*insn_operand_predicate[icode][1]) (op0, mode))
8404 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8405 op1 = force_reg (mode, op1);
8407 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8409 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8411 rtx addr = (general_operand (XEXP (op0, 0), mode)
8412 ? force_reg (Pmode, XEXP (op0, 0))
8413 : copy_to_reg (XEXP (op0, 0)));
8416 op0 = change_address (op0, VOIDmode, addr);
8417 temp = force_reg (GET_MODE (op0), op0);
8418 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8419 op1 = force_reg (mode, op1);
8421 /* The increment queue is LIFO, thus we have to `queue'
8422 the instructions in reverse order. */
8423 enqueue_insn (op0, gen_move_insn (op0, temp));
8424 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8429 /* Preincrement, or we can't increment with one simple insn. */
8431 /* Save a copy of the value before inc or dec, to return it later. */
8432 temp = value = copy_to_reg (op0);
8434 /* Arrange to return the incremented value. */
8435 /* Copy the rtx because expand_binop will protect from the queue,
8436 and the results of that would be invalid for us to return
8437 if our caller does emit_queue before using our result. */
8438 temp = copy_rtx (value = op0);
8440 /* Increment however we can. */
8441 op1 = expand_binop (mode, this_optab, value, op1,
8442 current_function_check_memory_usage ? NULL_RTX : op0,
8443 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8444 /* Make sure the value is stored into OP0. */
8446 emit_move_insn (op0, op1);
8451 /* Expand all function calls contained within EXP, innermost ones first.
8452 But don't look within expressions that have sequence points.
8453 For each CALL_EXPR, record the rtx for its value
8454 in the CALL_EXPR_RTL field. */
8457 preexpand_calls (exp)
8460 register int nops, i;
8461 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8463 if (! do_preexpand_calls)
8466 /* Only expressions and references can contain calls. */
8468 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8471 switch (TREE_CODE (exp))
8474 /* Do nothing if already expanded. */
8475 if (CALL_EXPR_RTL (exp) != 0
8476 /* Do nothing if the call returns a variable-sized object. */
8477 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
8478 /* Do nothing to built-in functions. */
8479 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
8480 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8482 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8485 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8490 case TRUTH_ANDIF_EXPR:
8491 case TRUTH_ORIF_EXPR:
8492 /* If we find one of these, then we can be sure
8493 the adjust will be done for it (since it makes jumps).
8494 Do it now, so that if this is inside an argument
8495 of a function, we don't get the stack adjustment
8496 after some other args have already been pushed. */
8497 do_pending_stack_adjust ();
8502 case WITH_CLEANUP_EXPR:
8503 case CLEANUP_POINT_EXPR:
8504 case TRY_CATCH_EXPR:
8508 if (SAVE_EXPR_RTL (exp) != 0)
8515 nops = tree_code_length[(int) TREE_CODE (exp)];
8516 for (i = 0; i < nops; i++)
8517 if (TREE_OPERAND (exp, i) != 0)
8519 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8520 if (type == 'e' || type == '<' || type == '1' || type == '2'
8522 preexpand_calls (TREE_OPERAND (exp, i));
8526 /* At the start of a function, record that we have no previously-pushed
8527 arguments waiting to be popped. */
8530 init_pending_stack_adjust ()
8532 pending_stack_adjust = 0;
8535 /* When exiting from function, if safe, clear out any pending stack adjust
8536 so the adjustment won't get done.
8538 Note, if the current function calls alloca, then it must have a
8539 frame pointer regardless of the value of flag_omit_frame_pointer. */
8542 clear_pending_stack_adjust ()
8544 #ifdef EXIT_IGNORE_STACK
8546 && (! flag_omit_frame_pointer || current_function_calls_alloca)
8547 && EXIT_IGNORE_STACK
8548 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8549 && ! flag_inline_functions)
8550 pending_stack_adjust = 0;
8554 /* Pop any previously-pushed arguments that have not been popped yet. */
8557 do_pending_stack_adjust ()
8559 if (inhibit_defer_pop == 0)
8561 if (pending_stack_adjust != 0)
8562 adjust_stack (GEN_INT (pending_stack_adjust));
8563 pending_stack_adjust = 0;
8567 /* Expand conditional expressions. */
8569 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8570 LABEL is an rtx of code CODE_LABEL, in this function and all the
8574 jumpifnot (exp, label)
8578 do_jump (exp, label, NULL_RTX);
8581 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8588 do_jump (exp, NULL_RTX, label);
8591 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8592 the result is zero, or IF_TRUE_LABEL if the result is one.
8593 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8594 meaning fall through in that case.
8596 do_jump always does any pending stack adjust except when it does not
8597 actually perform a jump. An example where there is no jump
8598 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8600 This function is responsible for optimizing cases such as
8601 &&, || and comparison operators in EXP. */
8604 do_jump (exp, if_false_label, if_true_label)
8606 rtx if_false_label, if_true_label;
8608 register enum tree_code code = TREE_CODE (exp);
8609 /* Some cases need to create a label to jump to
8610 in order to properly fall through.
8611 These cases set DROP_THROUGH_LABEL nonzero. */
8612 rtx drop_through_label = 0;
8616 enum machine_mode mode;
8618 #ifdef MAX_INTEGER_COMPUTATION_MODE
8619 check_max_integer_computation_mode (exp);
8630 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8636 /* This is not true with #pragma weak */
8638 /* The address of something can never be zero. */
8640 emit_jump (if_true_label);
8645 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8646 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8647 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8650 /* If we are narrowing the operand, we have to do the compare in the
8652 if ((TYPE_PRECISION (TREE_TYPE (exp))
8653 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8655 case NON_LVALUE_EXPR:
8656 case REFERENCE_EXPR:
8661 /* These cannot change zero->non-zero or vice versa. */
8662 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8666 /* This is never less insns than evaluating the PLUS_EXPR followed by
8667 a test and can be longer if the test is eliminated. */
8669 /* Reduce to minus. */
8670 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8671 TREE_OPERAND (exp, 0),
8672 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8673 TREE_OPERAND (exp, 1))));
8674 /* Process as MINUS. */
8678 /* Non-zero iff operands of minus differ. */
8679 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
8680 TREE_OPERAND (exp, 0),
8681 TREE_OPERAND (exp, 1)),
8682 NE, NE, if_false_label, if_true_label);
8686 /* If we are AND'ing with a small constant, do this comparison in the
8687 smallest type that fits. If the machine doesn't have comparisons
8688 that small, it will be converted back to the wider comparison.
8689 This helps if we are testing the sign bit of a narrower object.
8690 combine can't do this for us because it can't know whether a
8691 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8693 if (! SLOW_BYTE_ACCESS
8694 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8695 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8696 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8697 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8698 && (type = type_for_mode (mode, 1)) != 0
8699 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8700 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8701 != CODE_FOR_nothing))
8703 do_jump (convert (type, exp), if_false_label, if_true_label);
8708 case TRUTH_NOT_EXPR:
8709 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8712 case TRUTH_ANDIF_EXPR:
8713 if (if_false_label == 0)
8714 if_false_label = drop_through_label = gen_label_rtx ();
8715 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8716 start_cleanup_deferral ();
8717 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8718 end_cleanup_deferral ();
8721 case TRUTH_ORIF_EXPR:
8722 if (if_true_label == 0)
8723 if_true_label = drop_through_label = gen_label_rtx ();
8724 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8725 start_cleanup_deferral ();
8726 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8727 end_cleanup_deferral ();
8732 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8733 preserve_temp_slots (NULL_RTX);
8737 do_pending_stack_adjust ();
8738 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8745 int bitsize, bitpos, unsignedp;
8746 enum machine_mode mode;
8752 /* Get description of this reference. We don't actually care
8753 about the underlying object here. */
8754 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8755 &mode, &unsignedp, &volatilep,
8758 type = type_for_size (bitsize, unsignedp);
8759 if (! SLOW_BYTE_ACCESS
8760 && type != 0 && bitsize >= 0
8761 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8762 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8763 != CODE_FOR_nothing))
8765 do_jump (convert (type, exp), if_false_label, if_true_label);
8772 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8773 if (integer_onep (TREE_OPERAND (exp, 1))
8774 && integer_zerop (TREE_OPERAND (exp, 2)))
8775 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8777 else if (integer_zerop (TREE_OPERAND (exp, 1))
8778 && integer_onep (TREE_OPERAND (exp, 2)))
8779 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8783 register rtx label1 = gen_label_rtx ();
8784 drop_through_label = gen_label_rtx ();
8786 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8788 start_cleanup_deferral ();
8789 /* Now the THEN-expression. */
8790 do_jump (TREE_OPERAND (exp, 1),
8791 if_false_label ? if_false_label : drop_through_label,
8792 if_true_label ? if_true_label : drop_through_label);
8793 /* In case the do_jump just above never jumps. */
8794 do_pending_stack_adjust ();
8795 emit_label (label1);
8797 /* Now the ELSE-expression. */
8798 do_jump (TREE_OPERAND (exp, 2),
8799 if_false_label ? if_false_label : drop_through_label,
8800 if_true_label ? if_true_label : drop_through_label);
8801 end_cleanup_deferral ();
8807 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8809 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8810 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8812 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8813 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8816 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
8817 fold (build (EQ_EXPR, TREE_TYPE (exp),
8818 fold (build1 (REALPART_EXPR,
8819 TREE_TYPE (inner_type),
8821 fold (build1 (REALPART_EXPR,
8822 TREE_TYPE (inner_type),
8824 fold (build (EQ_EXPR, TREE_TYPE (exp),
8825 fold (build1 (IMAGPART_EXPR,
8826 TREE_TYPE (inner_type),
8828 fold (build1 (IMAGPART_EXPR,
8829 TREE_TYPE (inner_type),
8831 if_false_label, if_true_label);
8834 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8835 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8837 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8838 && !can_compare_p (TYPE_MODE (inner_type)))
8839 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8841 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
8847 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8849 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8850 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8852 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8853 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8856 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
8857 fold (build (NE_EXPR, TREE_TYPE (exp),
8858 fold (build1 (REALPART_EXPR,
8859 TREE_TYPE (inner_type),
8861 fold (build1 (REALPART_EXPR,
8862 TREE_TYPE (inner_type),
8864 fold (build (NE_EXPR, TREE_TYPE (exp),
8865 fold (build1 (IMAGPART_EXPR,
8866 TREE_TYPE (inner_type),
8868 fold (build1 (IMAGPART_EXPR,
8869 TREE_TYPE (inner_type),
8871 if_false_label, if_true_label);
8874 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8875 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8877 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8878 && !can_compare_p (TYPE_MODE (inner_type)))
8879 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8881 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
8886 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8888 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8889 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8891 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
8895 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8897 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8898 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8900 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
8904 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8906 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8907 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8909 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
8913 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8915 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8916 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8918 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
8923 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8925 /* This is not needed any more and causes poor code since it causes
8926 comparisons and tests from non-SI objects to have different code
8928 /* Copy to register to avoid generating bad insns by cse
8929 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8930 if (!cse_not_expected && GET_CODE (temp) == MEM)
8931 temp = copy_to_reg (temp);
8933 do_pending_stack_adjust ();
8934 /* Do any postincrements in the expression that was tested. */
8937 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
8939 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
8943 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8944 && ! can_compare_p (GET_MODE (temp)))
8945 /* Note swapping the labels gives us not-equal. */
8946 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8947 else if (GET_MODE (temp) != VOIDmode)
8948 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
8949 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8950 GET_MODE (temp), NULL_RTX, 0,
8951 if_false_label, if_true_label);
8956 if (drop_through_label)
8958 /* If do_jump produces code that might be jumped around,
8959 do any stack adjusts from that code, before the place
8960 where control merges in. */
8961 do_pending_stack_adjust ();
8962 emit_label (drop_through_label);
8966 /* Given a comparison expression EXP for values too wide to be compared
8967 with one insn, test the comparison and jump to the appropriate label.
8968 The code of EXP is ignored; we always test GT if SWAP is 0,
8969 and LT if SWAP is 1. */
8972 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8975 rtx if_false_label, if_true_label;
8977 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8978 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8979 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8980 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8982 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
8985 /* Compare OP0 with OP1, word at a time, in mode MODE.
8986 UNSIGNEDP says to do unsigned comparison.
8987 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
8990 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8991 enum machine_mode mode;
8994 rtx if_false_label, if_true_label;
8996 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8997 rtx drop_through_label = 0;
9000 if (! if_true_label || ! if_false_label)
9001 drop_through_label = gen_label_rtx ();
9002 if (! if_true_label)
9003 if_true_label = drop_through_label;
9004 if (! if_false_label)
9005 if_false_label = drop_through_label;
9007 /* Compare a word at a time, high order first. */
9008 for (i = 0; i < nwords; i++)
9011 rtx op0_word, op1_word;
9013 if (WORDS_BIG_ENDIAN)
9015 op0_word = operand_subword_force (op0, i, mode);
9016 op1_word = operand_subword_force (op1, i, mode);
9020 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9021 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9024 /* All but high-order word must be compared as unsigned. */
9025 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9026 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9027 NULL_RTX, if_true_label);
9029 /* Consider lower words only if these are equal. */
9030 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9031 NULL_RTX, 0, NULL_RTX, if_false_label);
9035 emit_jump (if_false_label);
9036 if (drop_through_label)
9037 emit_label (drop_through_label);
9040 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9041 with one insn, test the comparison and jump to the appropriate label. */
9044 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9046 rtx if_false_label, if_true_label;
9048 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9049 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9050 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9051 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9053 rtx drop_through_label = 0;
9055 if (! if_false_label)
9056 drop_through_label = if_false_label = gen_label_rtx ();
9058 for (i = 0; i < nwords; i++)
9059 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9060 operand_subword_force (op1, i, mode),
9061 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9062 word_mode, NULL_RTX, 0, if_false_label,
9066 emit_jump (if_true_label);
9067 if (drop_through_label)
9068 emit_label (drop_through_label);
9071 /* Jump according to whether OP0 is 0.
9072 We assume that OP0 has an integer mode that is too wide
9073 for the available compare insns. */
9076 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9078 rtx if_false_label, if_true_label;
9080 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9083 rtx drop_through_label = 0;
9085 /* The fastest way of doing this comparison on almost any machine is to
9086 "or" all the words and compare the result. If all have to be loaded
9087 from memory and this is a very wide item, it's possible this may
9088 be slower, but that's highly unlikely. */
9090 part = gen_reg_rtx (word_mode);
9091 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9092 for (i = 1; i < nwords && part != 0; i++)
9093 part = expand_binop (word_mode, ior_optab, part,
9094 operand_subword_force (op0, i, GET_MODE (op0)),
9095 part, 1, OPTAB_WIDEN);
9099 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9100 NULL_RTX, 0, if_false_label, if_true_label);
9105 /* If we couldn't do the "or" simply, do this with a series of compares. */
9106 if (! if_false_label)
9107 drop_through_label = if_false_label = gen_label_rtx ();
9109 for (i = 0; i < nwords; i++)
9110 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9111 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9112 if_false_label, NULL_RTX);
9115 emit_jump (if_true_label);
9117 if (drop_through_label)
9118 emit_label (drop_through_label);
9121 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9122 (including code to compute the values to be compared)
9123 and set (CC0) according to the result.
9124 The decision as to signed or unsigned comparison must be made by the caller.
9126 We force a stack adjustment unless there are currently
9127 things pushed on the stack that aren't yet used.
9129 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9132 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9133 size of MODE should be used. */
9136 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9137 register rtx op0, op1;
9140 enum machine_mode mode;
9146 /* If one operand is constant, make it the second one. Only do this
9147 if the other operand is not constant as well. */
9149 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9150 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9155 code = swap_condition (code);
9160 op0 = force_not_mem (op0);
9161 op1 = force_not_mem (op1);
9164 do_pending_stack_adjust ();
9166 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9167 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9171 /* There's no need to do this now that combine.c can eliminate lots of
9172 sign extensions. This can be less efficient in certain cases on other
9175 /* If this is a signed equality comparison, we can do it as an
9176 unsigned comparison since zero-extension is cheaper than sign
9177 extension and comparisons with zero are done as unsigned. This is
9178 the case even on machines that can do fast sign extension, since
9179 zero-extension is easier to combine with other operations than
9180 sign-extension is. If we are comparing against a constant, we must
9181 convert it to what it would look like unsigned. */
9182 if ((code == EQ || code == NE) && ! unsignedp
9183 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9185 if (GET_CODE (op1) == CONST_INT
9186 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9187 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9192 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9194 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9197 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9198 The decision as to signed or unsigned comparison must be made by the caller.
9200 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9203 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9204 size of MODE should be used. */
9207 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9208 if_false_label, if_true_label)
9209 register rtx op0, op1;
9212 enum machine_mode mode;
9215 rtx if_false_label, if_true_label;
9218 int dummy_true_label = 0;
9220 /* Reverse the comparison if that is safe and we want to jump if it is
9222 if (! if_true_label && ! FLOAT_MODE_P (mode))
9224 if_true_label = if_false_label;
9226 code = reverse_condition (code);
9229 /* If one operand is constant, make it the second one. Only do this
9230 if the other operand is not constant as well. */
9232 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9233 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9238 code = swap_condition (code);
9243 op0 = force_not_mem (op0);
9244 op1 = force_not_mem (op1);
9247 do_pending_stack_adjust ();
9249 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9250 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9252 if (tem == const_true_rtx)
9255 emit_jump (if_true_label);
9260 emit_jump (if_false_label);
9266 /* There's no need to do this now that combine.c can eliminate lots of
9267 sign extensions. This can be less efficient in certain cases on other
9270 /* If this is a signed equality comparison, we can do it as an
9271 unsigned comparison since zero-extension is cheaper than sign
9272 extension and comparisons with zero are done as unsigned. This is
9273 the case even on machines that can do fast sign extension, since
9274 zero-extension is easier to combine with other operations than
9275 sign-extension is. If we are comparing against a constant, we must
9276 convert it to what it would look like unsigned. */
9277 if ((code == EQ || code == NE) && ! unsignedp
9278 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9280 if (GET_CODE (op1) == CONST_INT
9281 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9282 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9287 if (! if_true_label)
9289 dummy_true_label = 1;
9290 if_true_label = gen_label_rtx ();
9293 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9297 emit_jump (if_false_label);
9298 if (dummy_true_label)
9299 emit_label (if_true_label);
9302 /* Generate code for a comparison expression EXP (including code to compute
9303 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9304 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9305 generated code will drop through.
9306 SIGNED_CODE should be the rtx operation for this comparison for
9307 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9309 We force a stack adjustment unless there are currently
9310 things pushed on the stack that aren't yet used. */
9313 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9316 enum rtx_code signed_code, unsigned_code;
9317 rtx if_false_label, if_true_label;
9319 register rtx op0, op1;
9321 register enum machine_mode mode;
9325 /* Don't crash if the comparison was erroneous. */
9326 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9327 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9330 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9331 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9332 mode = TYPE_MODE (type);
9333 unsignedp = TREE_UNSIGNED (type);
9334 code = unsignedp ? unsigned_code : signed_code;
9336 #ifdef HAVE_canonicalize_funcptr_for_compare
9337 /* If function pointers need to be "canonicalized" before they can
9338 be reliably compared, then canonicalize them. */
9339 if (HAVE_canonicalize_funcptr_for_compare
9340 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9341 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9344 rtx new_op0 = gen_reg_rtx (mode);
9346 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9350 if (HAVE_canonicalize_funcptr_for_compare
9351 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9352 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9355 rtx new_op1 = gen_reg_rtx (mode);
9357 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9362 /* Do any postincrements in the expression that was tested. */
9365 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9367 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9368 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
9369 if_false_label, if_true_label);
9372 /* Generate code to calculate EXP using a store-flag instruction
9373 and return an rtx for the result. EXP is either a comparison
9374 or a TRUTH_NOT_EXPR whose operand is a comparison.
9376 If TARGET is nonzero, store the result there if convenient.
9378 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9381 Return zero if there is no suitable set-flag instruction
9382 available on this machine.
9384 Once expand_expr has been called on the arguments of the comparison,
9385 we are committed to doing the store flag, since it is not safe to
9386 re-evaluate the expression. We emit the store-flag insn by calling
9387 emit_store_flag, but only expand the arguments if we have a reason
9388 to believe that emit_store_flag will be successful. If we think that
9389 it will, but it isn't, we have to simulate the store-flag with a
9390 set/jump/set sequence. */
9393 do_store_flag (exp, target, mode, only_cheap)
9396 enum machine_mode mode;
9400 tree arg0, arg1, type;
9402 enum machine_mode operand_mode;
9406 enum insn_code icode;
9407 rtx subtarget = target;
9410 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9411 result at the end. We can't simply invert the test since it would
9412 have already been inverted if it were valid. This case occurs for
9413 some floating-point comparisons. */
9415 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9416 invert = 1, exp = TREE_OPERAND (exp, 0);
9418 arg0 = TREE_OPERAND (exp, 0);
9419 arg1 = TREE_OPERAND (exp, 1);
9420 type = TREE_TYPE (arg0);
9421 operand_mode = TYPE_MODE (type);
9422 unsignedp = TREE_UNSIGNED (type);
9424 /* We won't bother with BLKmode store-flag operations because it would mean
9425 passing a lot of information to emit_store_flag. */
9426 if (operand_mode == BLKmode)
9429 /* We won't bother with store-flag operations involving function pointers
9430 when function pointers must be canonicalized before comparisons. */
9431 #ifdef HAVE_canonicalize_funcptr_for_compare
9432 if (HAVE_canonicalize_funcptr_for_compare
9433 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9434 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9436 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9437 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9438 == FUNCTION_TYPE))))
9445 /* Get the rtx comparison code to use. We know that EXP is a comparison
9446 operation of some type. Some comparisons against 1 and -1 can be
9447 converted to comparisons with zero. Do so here so that the tests
9448 below will be aware that we have a comparison with zero. These
9449 tests will not catch constants in the first operand, but constants
9450 are rarely passed as the first operand. */
9452 switch (TREE_CODE (exp))
9461 if (integer_onep (arg1))
9462 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9464 code = unsignedp ? LTU : LT;
9467 if (! unsignedp && integer_all_onesp (arg1))
9468 arg1 = integer_zero_node, code = LT;
9470 code = unsignedp ? LEU : LE;
9473 if (! unsignedp && integer_all_onesp (arg1))
9474 arg1 = integer_zero_node, code = GE;
9476 code = unsignedp ? GTU : GT;
9479 if (integer_onep (arg1))
9480 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9482 code = unsignedp ? GEU : GE;
9488 /* Put a constant second. */
9489 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9491 tem = arg0; arg0 = arg1; arg1 = tem;
9492 code = swap_condition (code);
9495 /* If this is an equality or inequality test of a single bit, we can
9496 do this by shifting the bit being tested to the low-order bit and
9497 masking the result with the constant 1. If the condition was EQ,
9498 we xor it with 1. This does not require an scc insn and is faster
9499 than an scc insn even if we have it. */
9501 if ((code == NE || code == EQ)
9502 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9503 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9505 tree inner = TREE_OPERAND (arg0, 0);
9506 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9509 /* If INNER is a right shift of a constant and it plus BITNUM does
9510 not overflow, adjust BITNUM and INNER. */
9512 if (TREE_CODE (inner) == RSHIFT_EXPR
9513 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9514 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9515 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9516 < TYPE_PRECISION (type)))
9518 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9519 inner = TREE_OPERAND (inner, 0);
9522 /* If we are going to be able to omit the AND below, we must do our
9523 operations as unsigned. If we must use the AND, we have a choice.
9524 Normally unsigned is faster, but for some machines signed is. */
9525 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9526 #ifdef LOAD_EXTEND_OP
9527 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9533 if (subtarget == 0 || GET_CODE (subtarget) != REG
9534 || GET_MODE (subtarget) != operand_mode
9535 || ! safe_from_p (subtarget, inner, 1))
9538 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9541 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9542 size_int (bitnum), subtarget, ops_unsignedp);
9544 if (GET_MODE (op0) != mode)
9545 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9547 if ((code == EQ && ! invert) || (code == NE && invert))
9548 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9549 ops_unsignedp, OPTAB_LIB_WIDEN);
9551 /* Put the AND last so it can combine with more things. */
9552 if (bitnum != TYPE_PRECISION (type) - 1)
9553 op0 = expand_and (op0, const1_rtx, subtarget);
9558 /* Now see if we are likely to be able to do this. Return if not. */
9559 if (! can_compare_p (operand_mode))
9561 icode = setcc_gen_code[(int) code];
9562 if (icode == CODE_FOR_nothing
9563 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9565 /* We can only do this if it is one of the special cases that
9566 can be handled without an scc insn. */
9567 if ((code == LT && integer_zerop (arg1))
9568 || (! only_cheap && code == GE && integer_zerop (arg1)))
9570 else if (BRANCH_COST >= 0
9571 && ! only_cheap && (code == NE || code == EQ)
9572 && TREE_CODE (type) != REAL_TYPE
9573 && ((abs_optab->handlers[(int) operand_mode].insn_code
9574 != CODE_FOR_nothing)
9575 || (ffs_optab->handlers[(int) operand_mode].insn_code
9576 != CODE_FOR_nothing)))
9582 preexpand_calls (exp);
9583 if (subtarget == 0 || GET_CODE (subtarget) != REG
9584 || GET_MODE (subtarget) != operand_mode
9585 || ! safe_from_p (subtarget, arg1, 1))
9588 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9589 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9592 target = gen_reg_rtx (mode);
9594 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9595 because, if the emit_store_flag does anything it will succeed and
9596 OP0 and OP1 will not be used subsequently. */
9598 result = emit_store_flag (target, code,
9599 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9600 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9601 operand_mode, unsignedp, 1);
9606 result = expand_binop (mode, xor_optab, result, const1_rtx,
9607 result, 0, OPTAB_LIB_WIDEN);
9611 /* If this failed, we have to do this with set/compare/jump/set code. */
9612 if (GET_CODE (target) != REG
9613 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9614 target = gen_reg_rtx (GET_MODE (target));
9616 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9617 result = compare_from_rtx (op0, op1, code, unsignedp,
9618 operand_mode, NULL_RTX, 0);
9619 if (GET_CODE (result) == CONST_INT)
9620 return (((result == const0_rtx && ! invert)
9621 || (result != const0_rtx && invert))
9622 ? const0_rtx : const1_rtx);
9624 label = gen_label_rtx ();
9625 if (bcc_gen_fctn[(int) code] == 0)
9628 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9629 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9635 /* Generate a tablejump instruction (used for switch statements). */
9637 #ifdef HAVE_tablejump
9639 /* INDEX is the value being switched on, with the lowest value
9640 in the table already subtracted.
9641 MODE is its expected mode (needed if INDEX is constant).
9642 RANGE is the length of the jump table.
9643 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9645 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9646 index value is out of range. */
9649 do_tablejump (index, mode, range, table_label, default_label)
9650 rtx index, range, table_label, default_label;
9651 enum machine_mode mode;
9653 register rtx temp, vector;
9655 /* Do an unsigned comparison (in the proper mode) between the index
9656 expression and the value which represents the length of the range.
9657 Since we just finished subtracting the lower bound of the range
9658 from the index expression, this comparison allows us to simultaneously
9659 check that the original index expression value is both greater than
9660 or equal to the minimum value of the range and less than or equal to
9661 the maximum value of the range. */
9663 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9666 /* If index is in range, it must fit in Pmode.
9667 Convert to Pmode so we can index with it. */
9669 index = convert_to_mode (Pmode, index, 1);
9671 /* Don't let a MEM slip thru, because then INDEX that comes
9672 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9673 and break_out_memory_refs will go to work on it and mess it up. */
9674 #ifdef PIC_CASE_VECTOR_ADDRESS
9675 if (flag_pic && GET_CODE (index) != REG)
9676 index = copy_to_mode_reg (Pmode, index);
9679 /* If flag_force_addr were to affect this address
9680 it could interfere with the tricky assumptions made
9681 about addresses that contain label-refs,
9682 which may be valid only very near the tablejump itself. */
9683 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9684 GET_MODE_SIZE, because this indicates how large insns are. The other
9685 uses should all be Pmode, because they are addresses. This code
9686 could fail if addresses and insns are not the same size. */
9687 index = gen_rtx_PLUS (Pmode,
9688 gen_rtx_MULT (Pmode, index,
9689 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9690 gen_rtx_LABEL_REF (Pmode, table_label));
9691 #ifdef PIC_CASE_VECTOR_ADDRESS
9693 index = PIC_CASE_VECTOR_ADDRESS (index);
9696 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9697 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9698 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9699 RTX_UNCHANGING_P (vector) = 1;
9700 convert_move (temp, vector, 0);
9702 emit_jump_insn (gen_tablejump (temp, table_label));
9704 /* If we are generating PIC code or if the table is PC-relative, the
9705 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9706 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9710 #endif /* HAVE_tablejump */