/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
42 #include "typeclass.h"
45 #include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to, to_addr, from, from_addr;
  int autinc_to, explicit_inc_to;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
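/* An editors' sketch (illustrative, not GCC code): the usage rule from
   the comment before protect_from_queue, shown as code.  X and TARGET
   are hypothetical rtx values; the calls are the ones defined in this
   file.  */
#if 0
  rtx safe = protect_from_queue (x, 0);
  emit_move_insn (target, safe);	/* OK: used immediately.  */

  rtx held = protect_from_queue (x, 0);
  emit_queue ();			/* Queue flushed in between...  */
  emit_move_insn (target, held);	/* ...so HELD may be stale here.  */
#endif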
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
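/* Editors' sketch (stand-alone C, not GCC code): the pending queue is a
   singly linked chain built by prepending and drained from the head, as
   in emit_queue above.  All names here are hypothetical.  */
#if 0
struct pending { void (*fn) (void *); void *arg; struct pending *next; };
static struct pending *pending_head;

static void
enqueue (struct pending *p)
{
  p->next = pending_head;	/* Prepend, as gen_rtx_QUEUED does.  */
  pending_head = p;
}

static void
flush_queue (void)
{
  struct pending *p;
  while ((p = pending_head))
    {
      p->fn (p->arg);		/* Run the queued body...  */
      pending_head = p->next;	/* ...then advance the chain.  */
    }
}
#endif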
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;
      rtx libcall;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{ emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{ emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{ emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{ emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{ emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{ emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN); return; }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{ emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{ emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{ emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{ emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{ emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN); return; }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{ emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{ emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{ emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{ emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN); return; }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{ emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{ emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{ emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{ emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{ emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN); return; }
#endif
      /* No direct insn; convert via a library call.  */
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode: libcall = extendsfdf2_libfunc; break;
	    case XFmode: libcall = extendsfxf2_libfunc; break;
	    case TFmode: libcall = extendsftf2_libfunc; break;
	    default: break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode: libcall = truncdfsf2_libfunc; break;
	    case XFmode: libcall = extenddfxf2_libfunc; break;
	    case TFmode: libcall = extenddftf2_libfunc; break;
	    default: break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode: libcall = truncxfsf2_libfunc; break;
	    case DFmode: libcall = truncxfdf2_libfunc; break;
	    default: break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode: libcall = trunctfsf2_libfunc; break;
	    case DFmode: libcall = trunctfdf2_libfunc; break;
	    default: break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
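/* Editors' sketch (stand-alone C, not GCC code): the word-by-word
   widening above, for a 32-bit value widened into a 4-word, 128-bit
   little-endian array.  The fill value is 0 for unsigned sources and
   the smeared sign bit otherwise; `from >> 31' relies on the usual
   arithmetic right shift of two's-complement targets.  */
#if 0
#include <stdint.h>
#include <stdio.h>

static void
widen_to_words (int32_t from, uint32_t to[4])
{
  int i;
  uint32_t fill = (uint32_t) (from >> 31);	/* 0 or 0xffffffff */

  to[0] = (uint32_t) from;			/* lowpart first */
  for (i = 1; i < 4; i++)
    to[i] = fill;				/* then the fill words */
}

int
main (void)
{
  uint32_t w[4];
  widen_to_words (-2, w);
  printf ("%08x %08x %08x %08x\n", w[3], w[2], w[1], w[0]);
  /* Prints: ffffffff ffffffff ffffffff fffffffe */
  return 0;
}
#endif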
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{ emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN); return; }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    { emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN); return; }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{ emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN); return; }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    { emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN); return; }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    { emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN); return; }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{ emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN); return; }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    { emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN); return; }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
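/* Editors' sketch (stand-alone C, not GCC code): the shift trick used
   above when no extend insn exists.  Shift left so the top bit of the
   narrow value becomes the sign bit, then shift right: a logical
   (unsigned) right shift zero-extends, an arithmetic (signed) right
   shift sign-extends.  The signed cases rely on the usual behavior of
   two's-complement targets.  */
#if 0
#include <stdint.h>
#include <assert.h>

static int32_t
sign_extend_8_to_32 (uint32_t low)	/* value lives in bits 0..7 */
{
  return ((int32_t) (low << 24)) >> 24;	/* arithmetic right shift */
}

static uint32_t
zero_extend_8_to_32 (uint32_t low)
{
  return (low << 24) >> 24;		/* logical right shift */
}

int
main (void)
{
  assert (sign_extend_8_to_32 (0x80) == -128);
  assert (zero_extend_8_to_32 (0x80) == 128);
  return 0;
}
#endif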
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{ emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{ emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{ emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{ emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{ emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{ emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{ emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{ emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{ emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{ emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
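/* Editors' note (illustrative C, not GCC code): UNSIGNEDP describes the
   source, so the same bit pattern widens differently depending on it;
   that is the whole reason convert_move needs the flag.  */
#if 0
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint16_t bits = 0xfff0;
  int32_t as_signed = (int16_t) bits;	/* unsignedp == 0: -16 */
  int32_t as_unsigned = bits;		/* unsignedp != 0: 65520 */
  printf ("%d %d\n", as_signed, as_unsigned);
  return 0;
}
#endif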
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
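/* Editors' sketch (stand-alone C, not GCC code): the mask-then-sign-extend
   arithmetic used above, with width = 8 and `long long' standing in for
   HOST_WIDE_INT.  Shifting -1 left is formally implementation-defined,
   as in the code above; it behaves as expected on two's-complement
   targets.  */
#if 0
#include <stdio.h>

int
main (void)
{
  long long val = -6;			/* bit pattern ...fffffffa */
  int width = 8;

  val &= ((long long) 1 << width) - 1;	/* zero-extend: 0xfa == 250 */
  printf ("%lld\n", val);

  if (val & ((long long) 1 << (width - 1)))
    val |= (long long) -1 << width;	/* sign-extend: back to -6 */
  printf ("%lld\n", val);
  return 0;
}
#endif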
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
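/* Editors' sketch (stand-alone C, not GCC code): the "largest pieces
   first" strategy above, using plain memcpy for each piece instead of a
   move insn per mode.  */
#if 0
#include <string.h>
#include <stddef.h>

static void
copy_by_pieces (char *to, const char *from, size_t len)
{
  size_t piece;

  /* Try 8-, 4-, 2-, then 1-byte pieces, as the mode loop above does.  */
  for (piece = 8; piece >= 1; piece >>= 1)
    while (len >= piece)
      {
	memcpy (to, from, piece);
	to += piece;
	from += piece;
	len -= piece;
      }
}
#endif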
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
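/* Editors' note: a worked example of the count above.  With 8-byte
   MOVE_MAX, full alignment, and l == 11, the loop charges 11/8 = 1
   DImode move (3 bytes left), 3/4 = 0 SImode moves, 3/2 = 1 HImode move
   (1 byte left), and 1/1 = 1 QImode move: 3 insns in all, so
   MOVE_BY_PIECES_P holds whenever MOVE_RATIO > 3.  */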
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size)
     rtx x, y;
     rtx size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;
      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      /* If we are initializing a readonly value, show the above call
	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
	 from a loop.  */
      if (RTX_UNCHANGING_P (x))
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }
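/* Editors' sketch (stand-alone C, not GCC code): the left-justification
   above for SIZE == 3 and a 32-bit big-endian word.  The three payload
   bytes are shifted into the most significant positions so that storing
   the whole word puts them at the lowest addresses.  */
#if 0
#include <stdint.h>
#include <assert.h>

int
main (void)
{
  uint32_t reg = 0x00123456;	/* 3-byte value, right-justified */
  unsigned size = 3, units_per_word = 4;
  uint32_t word = reg << ((units_per_word - size) * 8);

  assert (word == 0x12345600);
  return 0;
}
#endif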
  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
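/* Editors' sketch (stand-alone C, not GCC code): what a PARALLEL
   destination describes.  Each element is a (register, byte offset)
   pair; here a 12-byte block is split into an 8-byte and a 4-byte
   "register", mirroring pieces (reg:DI) at bytepos 0 and (reg:SI) at
   bytepos 8.  */
#if 0
#include <string.h>
#include <stdint.h>

struct twelve { char b[12]; };

static void
group_load (const struct twelve *src, uint64_t *r0, uint32_t *r1)
{
  memcpy (r0, src->b, 8);	/* piece at bytepos 0, 8 bytes wide */
  memcpy (r1, src->b + 8, 4);	/* piece at bytepos 8, 4 bytes wide */
}
#endif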
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  BITS_PER_WORD),
		       BITS_PER_WORD);
    }

  return tgtblk;
}
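/* Editors' sketch (stand-alone C, not GCC code): the big-endian
   correction above for a 5-byte struct and a 32-bit word.  The value is
   right-justified in the register pair, so the first source word has
   3 empty high-order bytes: extraction must start 24 bits in.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned bytes = 5, bits_per_word = 32, bits_per_unit = 8;
  unsigned units_per_word = bits_per_word / bits_per_unit;
  unsigned correction = 0;

  if (bytes % units_per_word)
    correction = bits_per_word - (bytes % units_per_word) * bits_per_unit;
  assert (correction == 24);
  return 0;
}
#endif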
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
2290 can_store_by_pieces (len, constfun, constfundata, align)
2291 unsigned HOST_WIDE_INT len;
2292 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2296 unsigned HOST_WIDE_INT max_size, l;
2297 HOST_WIDE_INT offset = 0;
2298 enum machine_mode mode, tmode;
2299 enum insn_code icode;
2303 if (! MOVE_BY_PIECES_P (len, align))
2306 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2307 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2308 align = MOVE_MAX * BITS_PER_UNIT;
2310 /* We would first store what we can in the largest integer mode, then go to
2311 successively smaller modes. */
2314 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2319 max_size = MOVE_MAX_PIECES + 1;
2320 while (max_size > 1)
2322 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2323 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2324 if (GET_MODE_SIZE (tmode) < max_size)
2327 if (mode == VOIDmode)
2330 icode = mov_optab->handlers[(int) mode].insn_code;
2331 if (icode != CODE_FOR_nothing
2332 && align >= GET_MODE_ALIGNMENT (mode))
2334 unsigned int size = GET_MODE_SIZE (mode);
2341 cst = (*constfun) (constfundata, offset, mode);
2342 if (!LEGITIMATE_CONSTANT_P (cst))
2352 max_size = GET_MODE_SIZE (mode);
2355 /* The code above should have handled everything. */
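/* Illustrative sketch (plain C, not GCC internals): counting the moves
   a by-pieces store needs, widest chunks first, in the spirit of
   move_by_pieces_ninsns.  MAX_CHUNK is a hypothetical stand-in for what
   the real code derives from MOVE_MAX_PIECES and the available move
   patterns; it is assumed to be a power of two.  */

static unsigned long
by_pieces_ninsns_demo (unsigned long len, unsigned int max_chunk)
{
  unsigned long ninsns = 0;
  unsigned int chunk;

  /* E.g. len == 7, max_chunk == 4 gives 4 + 2 + 1: three moves.  */
  for (chunk = max_chunk; chunk >= 1; chunk /= 2)
    {
      ninsns += len / chunk;	/* moves done at this width */
      len %= chunk;		/* remainder for narrower modes */
    }
  return ninsns;
}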
2363 /* Generate several move instructions to store LEN bytes generated by
2364 CONSTFUN to block TO (a MEM rtx with BLKmode). CONSTFUNDATA is a
2365 pointer which will be passed as argument in every CONSTFUN call.
2366 ALIGN is maximum alignment we can assume. */
2369 store_by_pieces (to, len, constfun, constfundata, align)
2371 unsigned HOST_WIDE_INT len;
2372 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2376 struct store_by_pieces data;
2378 if (! MOVE_BY_PIECES_P (len, align))
2380 to = protect_from_queue (to, 1);
2381 data.constfun = constfun;
2382 data.constfundata = constfundata;
2385 store_by_pieces_1 (&data, align);
2388 /* Generate several move instructions to clear LEN bytes of block TO (a MEM
2389 rtx with BLKmode). The caller must pass TO through protect_from_queue
2390 before calling. ALIGN is maximum alignment we can assume. */
2393 clear_by_pieces (to, len, align)
2395 unsigned HOST_WIDE_INT len;
2398 struct store_by_pieces data;
2400 data.constfun = clear_by_pieces_1;
2401 data.constfundata = NULL;
2404 store_by_pieces_1 (&data, align);
2407 /* Callback routine for clear_by_pieces.
2408 Return const0_rtx unconditionally. */
2411 clear_by_pieces_1 (data, offset, mode)
2412 PTR data ATTRIBUTE_UNUSED;
2413 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2414 enum machine_mode mode ATTRIBUTE_UNUSED;
2419 /* Subroutine of clear_by_pieces and store_by_pieces.
2420 Generate several move instructions to store LEN bytes of block TO (a MEM
2421 rtx with BLKmode). The caller must pass TO through protect_from_queue
2422 before calling. ALIGN is maximum alignment we can assume. */
2425 store_by_pieces_1 (data, align)
2426 struct store_by_pieces *data;
2429 rtx to_addr = XEXP (data->to, 0);
2430 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2431 enum machine_mode mode = VOIDmode, tmode;
2432 enum insn_code icode;
2435 data->to_addr = to_addr;
2437 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2438 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2440 data->explicit_inc_to = 0;
2442 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2444 data->offset = data->len;
2446 /* If storing requires more than two move insns,
2447 copy addresses to registers (to make displacements shorter)
2448 and use post-increment if available. */
2449 if (!data->autinc_to
2450 && move_by_pieces_ninsns (data->len, align) > 2)
2452 /* Determine the main mode we'll be using. */
2453 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2454 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2455 if (GET_MODE_SIZE (tmode) < max_size)
2458 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2460 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = -1;
2465 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2466 && ! data->autinc_to)
2468 data->to_addr = copy_addr_to_reg (to_addr);
2469 data->autinc_to = 1;
2470 data->explicit_inc_to = 1;
2473 if (!data->autinc_to && CONSTANT_P (to_addr))
2474 data->to_addr = copy_addr_to_reg (to_addr);
2477 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2478 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2479 align = MOVE_MAX * BITS_PER_UNIT;
2481 /* First store what we can in the largest integer mode, then go to
2482 successively smaller modes. */
2484 while (max_size > 1)
2486 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2487 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2488 if (GET_MODE_SIZE (tmode) < max_size)
2491 if (mode == VOIDmode)
2494 icode = mov_optab->handlers[(int) mode].insn_code;
2495 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2496 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2498 max_size = GET_MODE_SIZE (mode);
2501 /* The code above should have handled everything. */
2506 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2507 with move instructions for mode MODE. GENFUN is the gen_... function
2508 to make a move insn for that mode. DATA has all the other info. */
2511 store_by_pieces_2 (genfun, mode, data)
2512 rtx (*genfun) PARAMS ((rtx, ...));
2513 enum machine_mode mode;
2514 struct store_by_pieces *data;
2516 unsigned int size = GET_MODE_SIZE (mode);
2519 while (data->len >= size)
2522 data->offset -= size;
2524 if (data->autinc_to)
2525 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2528 to1 = adjust_address (data->to, mode, data->offset);
2530 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2531 emit_insn (gen_add2_insn (data->to_addr,
2532 GEN_INT (-(HOST_WIDE_INT) size)));
2534 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2535 emit_insn ((*genfun) (to1, cst));
2537 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2538 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2540 if (! data->reverse)
2541 data->offset += size;
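/* Illustrative sketch (plain C, not GCC code): the loop above is the
   RTL-level analogue of storing a block in word-sized chunks and then
   finishing with narrower leftovers.  Hypothetical helper; assumes
   <string.h>.  */

#include <string.h>

static void
store_by_pieces_demo (unsigned char *to, size_t len, unsigned char val)
{
  unsigned long word;

  memset (&word, val, sizeof word);	/* the constant for word mode */
  while (len >= sizeof word)
    {
      memcpy (to, &word, sizeof word);	/* stands in for one move insn */
      to += sizeof word;
      len -= sizeof word;
    }
  while (len > 0)			/* narrower-mode leftovers */
    {
      *to++ = val;
      len--;
    }
}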
2547 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2548 its length in bytes. */
2551 clear_storage (object, size)
2555 #ifdef TARGET_MEM_FUNCTIONS
2557 tree call_expr, arg_list;
2560 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2561 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2563 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2564 just move a zero. Otherwise, do this a piece at a time. */
2565 if (GET_MODE (object) != BLKmode
2566 && GET_CODE (size) == CONST_INT
2567 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2568 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2571 object = protect_from_queue (object, 1);
2572 size = protect_from_queue (size, 0);
2574 if (GET_CODE (size) == CONST_INT
2575 && MOVE_BY_PIECES_P (INTVAL (size), align))
2576 clear_by_pieces (object, INTVAL (size), align);
2579 /* Try the most limited insn first, because there's no point
2580 including more than one in the machine description unless
2581 the more limited one has some advantage. */
2583 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2584 enum machine_mode mode;
2586 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2587 mode = GET_MODE_WIDER_MODE (mode))
2589 enum insn_code code = clrstr_optab[(int) mode];
2590 insn_operand_predicate_fn pred;
2592 if (code != CODE_FOR_nothing
2593 /* We don't need MODE to be narrower than
2594 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2595 the mode mask, as it is returned by the macro, it will
2596 definitely be less than the actual mode mask. */
2597 && ((GET_CODE (size) == CONST_INT
2598 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2599 <= (GET_MODE_MASK (mode) >> 1)))
2600 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2601 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2602 || (*pred) (object, BLKmode))
2603 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2604 || (*pred) (opalign, VOIDmode)))
2607 rtx last = get_last_insn ();
2610 op1 = convert_to_mode (mode, size, 1);
2611 pred = insn_data[(int) code].operand[1].predicate;
2612 if (pred != 0 && ! (*pred) (op1, mode))
2613 op1 = copy_to_mode_reg (mode, op1);
2615 pat = GEN_FCN ((int) code) (object, op1, opalign);
2622 delete_insns_since (last);
2626 /* OBJECT or SIZE may have been passed through protect_from_queue.
2628 It is unsafe to save the value generated by protect_from_queue
2629 and reuse it later. Consider what happens if emit_queue is
2630 called before the return value from protect_from_queue is used.
2632 Expansion of the CALL_EXPR below will call emit_queue before
2633 we are finished emitting RTL for argument setup. So if we are
2634 not careful we could get the wrong value for an argument.
2636 To avoid this problem we go ahead and emit code to copy OBJECT
2637 and SIZE into new pseudos. We can then place those new pseudos
2638 into an RTL_EXPR and use them later, even after a call to
2641 Note this is not strictly needed for library calls since they
2642 do not call emit_queue before loading their arguments. However,
2643 we may need to have library calls call emit_queue in the future
2644 since failing to do so could cause problems for targets which
2645 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2646 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2648 #ifdef TARGET_MEM_FUNCTIONS
2649 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2651 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2652 TREE_UNSIGNED (integer_type_node));
2653 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2656 #ifdef TARGET_MEM_FUNCTIONS
2657 /* It is incorrect to use the libcall calling conventions to call
2658 memset in this context.
2660 This could be a user call to memset and the user may wish to
2661 examine the return value from memset.
2663 For targets where libcalls and normal calls have different
2664 conventions for returning pointers, we could end up generating incorrect code.
2667 So instead of using a libcall sequence we build up a suitable
2668 CALL_EXPR and expand the call in the normal fashion. */
2669 if (fn == NULL_TREE)
2673 /* This was copied from except.c; I don't know whether all of it is
2674 necessary in this context. */
2675 fn = get_identifier ("memset");
2676 fntype = build_pointer_type (void_type_node);
2677 fntype = build_function_type (fntype, NULL_TREE);
2678 fn = build_decl (FUNCTION_DECL, fn, fntype);
2679 ggc_add_tree_root (&fn, 1);
2680 DECL_EXTERNAL (fn) = 1;
2681 TREE_PUBLIC (fn) = 1;
2682 DECL_ARTIFICIAL (fn) = 1;
2683 TREE_NOTHROW (fn) = 1;
2684 make_decl_rtl (fn, NULL);
2685 assemble_external (fn);
2688 /* We need to make an argument list for the function call.
2690 memset has three arguments: the first is a void * address, the
2691 second an integer with the initialization value, and the last a
2692 size_t byte count for the copy. */
2694 = build_tree_list (NULL_TREE,
2695 make_tree (build_pointer_type (void_type_node),
2697 TREE_CHAIN (arg_list)
2698 = build_tree_list (NULL_TREE,
2699 make_tree (integer_type_node, const0_rtx));
2700 TREE_CHAIN (TREE_CHAIN (arg_list))
2701 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2702 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2704 /* Now we have to build up the CALL_EXPR itself. */
2705 call_expr = build1 (ADDR_EXPR,
2706 build_pointer_type (TREE_TYPE (fn)), fn);
2707 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708 call_expr, arg_list, NULL_TREE);
2709 TREE_SIDE_EFFECTS (call_expr) = 1;
2711 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2713 emit_library_call (bzero_libfunc, LCT_NORMAL,
2714 VOIDmode, 2, object, Pmode, size,
2715 TYPE_MODE (integer_type_node));
2718 /* If we are initializing a readonly value, show the above call
2719 clobbered it. Otherwise, a load from it may erroneously be
2720 hoisted from a loop. */
2721 if (RTX_UNCHANGING_P (object))
2722 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2729 /* Generate code to copy Y into X.
2730 Both Y and X must have the same mode, except that
2731 Y can be a constant with VOIDmode.
2732 This mode cannot be BLKmode; use emit_block_move for that.
2734 Return the last instruction emitted. */
2737 emit_move_insn (x, y)
2740 enum machine_mode mode = GET_MODE (x);
2741 rtx y_cst = NULL_RTX;
2744 x = protect_from_queue (x, 1);
2745 y = protect_from_queue (y, 0);
2747 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2750 /* Never force constant_p_rtx to memory. */
2751 if (GET_CODE (y) == CONSTANT_P_RTX)
2753 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2756 y = force_const_mem (mode, y);
2759 /* If X or Y are memory references, verify that their addresses are valid
2761 if (GET_CODE (x) == MEM
2762 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2763 && ! push_operand (x, GET_MODE (x)))
2765 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2766 x = validize_mem (x);
2768 if (GET_CODE (y) == MEM
2769 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2771 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2772 y = validize_mem (y);
2774 if (mode == BLKmode)
2777 last_insn = emit_move_insn_1 (x, y);
2779 if (y_cst && GET_CODE (x) == REG)
2780 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
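/* Illustrative example (not from the original source): a typical use of
   emit_move_insn is simply zeroing a fresh pseudo; SImode here is just
   an example mode.

       rtx tmp = gen_reg_rtx (SImode);
       emit_move_insn (tmp, const0_rtx);

   const0_rtx has VOIDmode, which the interface above explicitly permits
   for constants.  */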
2785 /* Low level part of emit_move_insn.
2786 Called just like emit_move_insn, but assumes X and Y
2787 are basically valid. */
2790 emit_move_insn_1 (x, y)
2793 enum machine_mode mode = GET_MODE (x);
2794 enum machine_mode submode;
2795 enum mode_class class = GET_MODE_CLASS (mode);
2797 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2800 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2802 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2804 /* Expand complex moves by moving real part and imag part, if possible. */
2805 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2806 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2808 (class == MODE_COMPLEX_INT
2809 ? MODE_INT : MODE_FLOAT),
2811 && (mov_optab->handlers[(int) submode].insn_code
2812 != CODE_FOR_nothing))
2814 /* Don't split destination if it is a stack push. */
2815 int stack = push_operand (x, GET_MODE (x));
2817 #ifdef PUSH_ROUNDING
2818 /* In case we output to the stack, but the size is smaller than the machine
2819 can push exactly, we need to use move instructions. */
2821 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2822 != GET_MODE_SIZE (submode)))
2825 HOST_WIDE_INT offset1, offset2;
2827 /* Do not use anti_adjust_stack, since we don't want to update
2828 stack_pointer_delta. */
2829 temp = expand_binop (Pmode,
2830 #ifdef STACK_GROWS_DOWNWARD
2838 (GET_MODE_SIZE (GET_MODE (x)))),
2839 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2841 if (temp != stack_pointer_rtx)
2842 emit_move_insn (stack_pointer_rtx, temp);
2844 #ifdef STACK_GROWS_DOWNWARD
2846 offset2 = GET_MODE_SIZE (submode);
2848 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2849 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2850 + GET_MODE_SIZE (submode));
2853 emit_move_insn (change_address (x, submode,
2854 gen_rtx_PLUS (Pmode,
2856 GEN_INT (offset1))),
2857 gen_realpart (submode, y));
2858 emit_move_insn (change_address (x, submode,
2859 gen_rtx_PLUS (Pmode,
2861 GEN_INT (offset2))),
2862 gen_imagpart (submode, y));
2866 /* If this is a stack push, push the highpart first, so it
2867 will be in the argument order.
2869 In that case, change_address is used only to convert
2870 the mode, not to change the address. */
2873 /* Note that the real part always precedes the imag part in memory
2874 regardless of the machine's endianness. */
2875 #ifdef STACK_GROWS_DOWNWARD
2876 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2877 (gen_rtx_MEM (submode, XEXP (x, 0)),
2878 gen_imagpart (submode, y)));
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (gen_rtx_MEM (submode, XEXP (x, 0)),
2881 gen_realpart (submode, y)));
2883 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2884 (gen_rtx_MEM (submode, XEXP (x, 0)),
2885 gen_realpart (submode, y)));
2886 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2887 (gen_rtx_MEM (submode, XEXP (x, 0)),
2888 gen_imagpart (submode, y)));
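/* Illustrative note (not from the original source): the layout assumed
   above is just two consecutive SUBMODE values with the real part at
   the lower address.  For a DCmode value, for example:

       offset 0:			real part (DFmode)
       offset GET_MODE_SIZE (DFmode):	imaginary part (DFmode)  */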
2893 rtx realpart_x, realpart_y;
2894 rtx imagpart_x, imagpart_y;
2896 /* If this is a complex value with each part being smaller than a
2897 word, the usual calling sequence will likely pack the pieces into
2898 a single register. Unfortunately, SUBREG of hard registers only
2899 deals in terms of words, so we have a problem converting input
2900 arguments to the CONCAT of two registers that is used elsewhere
2901 for complex values. If this is before reload, we can copy it into
2902 memory and reload. FIXME, we should see about using extract and
2903 insert on integer registers, but complex short and complex char
2904 variables should be rarely used. */
2905 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2906 && (reload_in_progress | reload_completed) == 0)
2909 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2911 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2913 if (packed_dest_p || packed_src_p)
2915 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2916 ? MODE_FLOAT : MODE_INT);
2918 enum machine_mode reg_mode
2919 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2921 if (reg_mode != BLKmode)
2923 rtx mem = assign_stack_temp (reg_mode,
2924 GET_MODE_SIZE (mode), 0);
2925 rtx cmem = adjust_address (mem, mode, 0);
2928 = N_("function using short complex types cannot be inline");
2932 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2934 emit_move_insn_1 (cmem, y);
2935 return emit_move_insn_1 (sreg, mem);
2939 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2941 emit_move_insn_1 (mem, sreg);
2942 return emit_move_insn_1 (x, cmem);
2948 realpart_x = gen_realpart (submode, x);
2949 realpart_y = gen_realpart (submode, y);
2950 imagpart_x = gen_imagpart (submode, x);
2951 imagpart_y = gen_imagpart (submode, y);
2953 /* Show the output dies here. This is necessary for SUBREGs
2954 of pseudos since we cannot track their lifetimes correctly;
2955 hard regs shouldn't appear here except as return values.
2956 We never want to emit such a clobber after reload. */
2958 && ! (reload_in_progress || reload_completed)
2959 && (GET_CODE (realpart_x) == SUBREG
2960 || GET_CODE (imagpart_x) == SUBREG))
2961 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2963 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2964 (realpart_x, realpart_y));
2965 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2966 (imagpart_x, imagpart_y));
2969 return get_last_insn ();
2972 /* This will handle any multi-word mode that lacks a move_insn pattern.
2973 However, you will get better code if you define such patterns,
2974 even if they must turn into multiple assembler instructions. */
2975 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2982 #ifdef PUSH_ROUNDING
2984 /* If X is a push on the stack, do the push now and replace
2985 X with a reference to the stack pointer. */
2986 if (push_operand (x, GET_MODE (x)))
2991 /* Do not use anti_adjust_stack, since we don't want to update
2992 stack_pointer_delta. */
2993 temp = expand_binop (Pmode,
2994 #ifdef STACK_GROWS_DOWNWARD
3002 (GET_MODE_SIZE (GET_MODE (x)))),
3003 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3005 if (temp != stack_pointer_rtx)
3006 emit_move_insn (stack_pointer_rtx, temp);
3008 code = GET_CODE (XEXP (x, 0));
3010 /* Just hope that small offsets off SP are OK. */
3011 if (code == POST_INC)
3012 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3013 GEN_INT (-((HOST_WIDE_INT)
3014 GET_MODE_SIZE (GET_MODE (x)))));
3015 else if (code == POST_DEC)
3016 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3017 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3019 temp = stack_pointer_rtx;
3021 x = change_address (x, VOIDmode, temp);
3025 /* If we are in reload, see if either operand is a MEM whose address
3026 is scheduled for replacement. */
3027 if (reload_in_progress && GET_CODE (x) == MEM
3028 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3029 x = replace_equiv_address_nv (x, inner);
3030 if (reload_in_progress && GET_CODE (y) == MEM
3031 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3032 y = replace_equiv_address_nv (y, inner);
3038 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3041 rtx xpart = operand_subword (x, i, 1, mode);
3042 rtx ypart = operand_subword (y, i, 1, mode);
3044 /* If we can't get a part of Y, put Y into memory if it is a
3045 constant. Otherwise, force it into a register. If we still
3046 can't get a part of Y, abort. */
3047 if (ypart == 0 && CONSTANT_P (y))
3049 y = force_const_mem (mode, y);
3050 ypart = operand_subword (y, i, 1, mode);
3052 else if (ypart == 0)
3053 ypart = operand_subword_force (y, i, mode);
3055 if (xpart == 0 || ypart == 0)
3058 need_clobber |= (GET_CODE (xpart) == SUBREG);
3060 last_insn = emit_move_insn (xpart, ypart);
3063 seq = gen_sequence ();
3066 /* Show the output dies here. This is necessary for SUBREGs
3067 of pseudos since we cannot track their lifetimes correctly;
3068 hard regs shouldn't appear here except as return values.
3069 We never want to emit such a clobber after reload. */
3071 && ! (reload_in_progress || reload_completed)
3072 && need_clobber != 0)
3073 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
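/* Illustrative sketch (plain C, not GCC code): the word-by-word
   fallback above behaves like copying an object one machine word at a
   time.  Hypothetical helper; like the RTL code, it assumes the object
   occupies a whole number of words.  */

#include <string.h>

static void
move_multiword_demo (void *x, const void *y, size_t nwords)
{
  size_t i;

  /* Each iteration stands in for one word_mode move insn.  */
  for (i = 0; i < nwords; i++)
    memcpy ((char *) x + i * sizeof (long),
	    (const char *) y + i * sizeof (long), sizeof (long));
}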
3083 /* Pushing data onto the stack. */
3085 /* Push a block of length SIZE (perhaps variable)
3086 and return an rtx to address the beginning of the block.
3087 Note that it is not possible for the value returned to be a QUEUED.
3088 The value may be virtual_outgoing_args_rtx.
3090 EXTRA is the number of bytes of padding to push in addition to SIZE.
3091 BELOW nonzero means this padding comes at low addresses;
3092 otherwise, the padding comes at high addresses. */
3095 push_block (size, extra, below)
3101 size = convert_modes (Pmode, ptr_mode, size, 1);
3102 if (CONSTANT_P (size))
3103 anti_adjust_stack (plus_constant (size, extra));
3104 else if (GET_CODE (size) == REG && extra == 0)
3105 anti_adjust_stack (size);
3108 temp = copy_to_mode_reg (Pmode, size);
3110 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3111 temp, 0, OPTAB_LIB_WIDEN);
3112 anti_adjust_stack (temp);
3115 #ifndef STACK_GROWS_DOWNWARD
3121 temp = virtual_outgoing_args_rtx;
3122 if (extra != 0 && below)
3123 temp = plus_constant (temp, extra);
3127 if (GET_CODE (size) == CONST_INT)
3128 temp = plus_constant (virtual_outgoing_args_rtx,
3129 -INTVAL (size) - (below ? 0 : extra));
3130 else if (extra != 0 && !below)
3131 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3132 negate_rtx (Pmode, plus_constant (size, extra)));
3134 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3135 negate_rtx (Pmode, size));
3138 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
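/* Worked example (illustrative, not from the original source): on a
   downward-growing stack with SIZE == 16 and EXTRA == 4, the stack is
   adjusted by 20 bytes.  With BELOW == 0 the block starts at
   outgoing-args - 20, with the padding at the high-address end; with
   BELOW nonzero it starts at outgoing-args - 16, with the padding
   below it.  */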
3141 #ifdef PUSH_ROUNDING
3143 /* Emit single push insn. */
3146 emit_single_push_insn (mode, x, type)
3148 enum machine_mode mode;
3152 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3154 enum insn_code icode;
3155 insn_operand_predicate_fn pred;
3157 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3158 /* If there is a push pattern, use it. Otherwise, fall back to the old way
3159 of handing a MEM representing the push operation to the move expander. */
3160 icode = push_optab->handlers[(int) mode].insn_code;
3161 if (icode != CODE_FOR_nothing)
3163 if (((pred = insn_data[(int) icode].operand[0].predicate)
3164 && !((*pred) (x, mode))))
3165 x = force_reg (mode, x);
3166 emit_insn (GEN_FCN (icode) (x));
3169 if (GET_MODE_SIZE (mode) == rounded_size)
3170 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3173 #ifdef STACK_GROWS_DOWNWARD
3174 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3175 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3177 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3178 GEN_INT (rounded_size));
3180 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3183 dest = gen_rtx_MEM (mode, dest_addr);
3187 set_mem_attributes (dest, type, 1);
3189 if (flag_optimize_sibling_calls)
3190 /* Function incoming arguments may overlap with sibling call
3191 outgoing arguments and we cannot allow reordering of reads
3192 from function arguments with stores to outgoing arguments
3193 of sibling calls. */
3194 set_mem_alias_set (dest, 0);
3196 emit_move_insn (dest, x);
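/* Worked example (illustrative, not from the original source): pushing
   an HImode value (2 bytes) when PUSH_ROUNDING rounds pushes up to 4
   bytes on a downward-growing stack.  Since rounded_size !=
   GET_MODE_SIZE, the destination address becomes

       (pre_modify sp (plus sp (const_int -4)))

   so SP drops by the rounded amount while the store itself is still
   only 2 bytes wide.  */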
3200 /* Generate code to push X onto the stack, assuming it has mode MODE and
3202 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3204 SIZE is an rtx for the size of data to be copied (in bytes),
3205 needed only if X is BLKmode.
3207 ALIGN (in bits) is maximum alignment we can assume.
3209 If PARTIAL and REG are both nonzero, then copy that many of the first
3210 words of X into registers starting with REG, and push the rest of X.
3211 The amount of space pushed is decreased by PARTIAL words,
3212 rounded *down* to a multiple of PARM_BOUNDARY.
3213 REG must be a hard register in this case.
3214 If REG is zero but PARTIAL is not, take all other actions for an
3215 argument partially in registers, but do not actually load any registers.
3218 EXTRA is the amount in bytes of extra space to leave next to this arg.
3219 This is ignored if an argument block has already been allocated.
3221 On a machine that lacks real push insns, ARGS_ADDR is the address of
3222 the bottom of the argument block for this call. We use indexing off there
3223 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3224 argument block has not been preallocated.
3226 ARGS_SO_FAR is the size of args previously pushed for this call.
3228 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3229 for arguments passed in registers. If nonzero, it will be the number
3230 of bytes required. */
3233 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3234 args_addr, args_so_far, reg_parm_stack_space,
3237 enum machine_mode mode;
3246 int reg_parm_stack_space;
3250 enum direction stack_direction
3251 #ifdef STACK_GROWS_DOWNWARD
3257 /* Decide where to pad the argument: `downward' for below,
3258 `upward' for above, or `none' for don't pad it.
3259 Default is below for small data on big-endian machines; else above. */
3260 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3262 /* Invert direction if stack is post-decrement.  */
3264 if (STACK_PUSH_CODE == POST_DEC)
3265 if (where_pad != none)
3266 where_pad = (where_pad == downward ? upward : downward);
3268 xinner = x = protect_from_queue (x, 0);
3270 if (mode == BLKmode)
3272 /* Copy a block into the stack, entirely or partially. */
3275 int used = partial * UNITS_PER_WORD;
3276 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3284 /* USED is now the # of bytes we need not copy to the stack
3285 because registers will take care of them. */
3288 xinner = adjust_address (xinner, BLKmode, used);
3290 /* If the partial register-part of the arg counts in its stack size,
3291 skip the part of stack space corresponding to the registers.
3292 Otherwise, start copying to the beginning of the stack space,
3293 by setting SKIP to 0. */
3294 skip = (reg_parm_stack_space == 0) ? 0 : used;
3296 #ifdef PUSH_ROUNDING
3297 /* Do it with several push insns if that doesn't take lots of insns
3298 and if there is no difficulty with push insns that skip bytes
3299 on the stack for alignment purposes. */
3302 && GET_CODE (size) == CONST_INT
3304 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3305 /* Here we avoid the case of a structure whose weak alignment
3306 forces many pushes of a small amount of data,
3307 and such small pushes do rounding that causes trouble. */
3308 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3309 || align >= BIGGEST_ALIGNMENT
3310 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3311 == (align / BITS_PER_UNIT)))
3312 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3314 /* Push padding now if padding above and stack grows down,
3315 or if padding below and stack grows up.
3316 But if space is already allocated, this has already been done. */
3317 if (extra && args_addr == 0
3318 && where_pad != none && where_pad != stack_direction)
3319 anti_adjust_stack (GEN_INT (extra));
3321 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3324 #endif /* PUSH_ROUNDING */
3328 /* Otherwise make space on the stack and copy the data
3329 to the address of that space. */
3331 /* Deduct words put into registers from the size we must copy. */
3334 if (GET_CODE (size) == CONST_INT)
3335 size = GEN_INT (INTVAL (size) - used);
3337 size = expand_binop (GET_MODE (size), sub_optab, size,
3338 GEN_INT (used), NULL_RTX, 0,
3342 /* Get the address of the stack space.
3343 In this case, we do not deal with EXTRA separately.
3344 A single stack adjust will do. */
3347 temp = push_block (size, extra, where_pad == downward);
3350 else if (GET_CODE (args_so_far) == CONST_INT)
3351 temp = memory_address (BLKmode,
3352 plus_constant (args_addr,
3353 skip + INTVAL (args_so_far)));
3355 temp = memory_address (BLKmode,
3356 plus_constant (gen_rtx_PLUS (Pmode,
3360 target = gen_rtx_MEM (BLKmode, temp);
3364 set_mem_attributes (target, type, 1);
3365 /* Function incoming arguments may overlap with sibling call
3366 outgoing arguments and we cannot allow reordering of reads
3367 from function arguments with stores to outgoing arguments
3368 of sibling calls. */
3369 set_mem_alias_set (target, 0);
3372 set_mem_align (target, align);
3374 /* TEMP is the address of the block. Copy the data there. */
3375 if (GET_CODE (size) == CONST_INT
3376 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3378 move_by_pieces (target, xinner, INTVAL (size), align);
3383 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3384 enum machine_mode mode;
3386 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3388 mode = GET_MODE_WIDER_MODE (mode))
3390 enum insn_code code = movstr_optab[(int) mode];
3391 insn_operand_predicate_fn pred;
3393 if (code != CODE_FOR_nothing
3394 && ((GET_CODE (size) == CONST_INT
3395 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3396 <= (GET_MODE_MASK (mode) >> 1)))
3397 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3398 && (!(pred = insn_data[(int) code].operand[0].predicate)
3399 || ((*pred) (target, BLKmode)))
3400 && (!(pred = insn_data[(int) code].operand[1].predicate)
3401 || ((*pred) (xinner, BLKmode)))
3402 && (!(pred = insn_data[(int) code].operand[3].predicate)
3403 || ((*pred) (opalign, VOIDmode))))
3405 rtx op2 = convert_to_mode (mode, size, 1);
3406 rtx last = get_last_insn ();
3409 pred = insn_data[(int) code].operand[2].predicate;
3410 if (pred != 0 && ! (*pred) (op2, mode))
3411 op2 = copy_to_mode_reg (mode, op2);
3413 pat = GEN_FCN ((int) code) (target, xinner,
3421 delete_insns_since (last);
3426 if (!ACCUMULATE_OUTGOING_ARGS)
3428 /* If the source is referenced relative to the stack pointer,
3429 copy it to another register to stabilize it. We do not need
3430 to do this if we know that we won't be changing sp. */
3432 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3433 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3434 temp = copy_to_reg (temp);
3437 /* Make inhibit_defer_pop nonzero around the library call
3438 to force it to pop the bcopy-arguments right away. */
3440 #ifdef TARGET_MEM_FUNCTIONS
3441 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3442 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3443 convert_to_mode (TYPE_MODE (sizetype),
3444 size, TREE_UNSIGNED (sizetype)),
3445 TYPE_MODE (sizetype));
3447 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3448 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3449 convert_to_mode (TYPE_MODE (integer_type_node),
3451 TREE_UNSIGNED (integer_type_node)),
3452 TYPE_MODE (integer_type_node));
3457 else if (partial > 0)
3459 /* Scalar partly in registers. */
3461 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3464 /* # words of start of argument
3465 that we must make space for but need not store. */
3466 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3467 int args_offset = INTVAL (args_so_far);
3470 /* Push padding now if padding above and stack grows down,
3471 or if padding below and stack grows up.
3472 But if space is already allocated, this has already been done. */
3473 if (extra && args_addr == 0
3474 && where_pad != none && where_pad != stack_direction)
3475 anti_adjust_stack (GEN_INT (extra));
3477 /* If we make space by pushing it, we might as well push
3478 the real data. Otherwise, we can leave OFFSET nonzero
3479 and leave the space uninitialized. */
3483 /* Now NOT_STACK gets the number of words that we don't need to
3484 allocate on the stack. */
3485 not_stack = partial - offset;
3487 /* If the partial register-part of the arg counts in its stack size,
3488 skip the part of stack space corresponding to the registers.
3489 Otherwise, start copying to the beginning of the stack space,
3490 by setting SKIP to 0. */
3491 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3493 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3494 x = validize_mem (force_const_mem (mode, x));
3496 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3497 SUBREGs of such registers are not allowed. */
3498 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3499 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3500 x = copy_to_reg (x);
3502 /* Loop over all the words allocated on the stack for this arg. */
3503 /* We can do it by words, because any scalar bigger than a word
3504 has a size that is a multiple of a word. */
3505 #ifndef PUSH_ARGS_REVERSED
3506 for (i = not_stack; i < size; i++)
3508 for (i = size - 1; i >= not_stack; i--)
3510 if (i >= not_stack + offset)
3511 emit_push_insn (operand_subword_force (x, i, mode),
3512 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3514 GEN_INT (args_offset + ((i - not_stack + skip)
3516 reg_parm_stack_space, alignment_pad);
3521 rtx target = NULL_RTX;
3524 /* Push padding now if padding above and stack grows down,
3525 or if padding below and stack grows up.
3526 But if space is already allocated, this has already been done. */
3527 if (extra && args_addr == 0
3528 && where_pad != none && where_pad != stack_direction)
3529 anti_adjust_stack (GEN_INT (extra));
3531 #ifdef PUSH_ROUNDING
3532 if (args_addr == 0 && PUSH_ARGS)
3533 emit_single_push_insn (mode, x, type);
3537 if (GET_CODE (args_so_far) == CONST_INT)
3539 = memory_address (mode,
3540 plus_constant (args_addr,
3541 INTVAL (args_so_far)));
3543 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3546 dest = gen_rtx_MEM (mode, addr);
3549 set_mem_attributes (dest, type, 1);
3550 /* Function incoming arguments may overlap with sibling call
3551 outgoing arguments and we cannot allow reordering of reads
3552 from function arguments with stores to outgoing arguments
3553 of sibling calls. */
3554 set_mem_alias_set (dest, 0);
3557 emit_move_insn (dest, x);
3563 /* If part should go in registers, copy that part
3564 into the appropriate registers. Do this now, at the end,
3565 since mem-to-mem copies above may do function calls. */
3566 if (partial > 0 && reg != 0)
3568 /* Handle calls that pass values in multiple non-contiguous locations.
3569 The Irix 6 ABI has examples of this. */
3570 if (GET_CODE (reg) == PARALLEL)
3571 emit_group_load (reg, x, -1); /* ??? size? */
3573 move_block_to_reg (REGNO (reg), x, partial, mode);
3576 if (extra && args_addr == 0 && where_pad == stack_direction)
3577 anti_adjust_stack (GEN_INT (extra));
3579 if (alignment_pad && args_addr == 0)
3580 anti_adjust_stack (alignment_pad);
3583 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations.  */
3591 /* Only registers can be subtargets. */
3592 || GET_CODE (x) != REG
3593 /* If the register is readonly, it can't be set more than once. */
3594 || RTX_UNCHANGING_P (x)
3595 /* Don't use hard regs to avoid extending their life. */
3596 || REGNO (x) < FIRST_PSEUDO_REGISTER
3597 /* Avoid subtargets inside loops,
3598 since they hide some invariant expressions. */
3599 || preserve_subexpressions_p ())
3603 /* Expand an assignment that stores the value of FROM into TO.
3604 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3605 (This may contain a QUEUED rtx;
3606 if the value is constant, this rtx is a constant.)
3607 Otherwise, the returned value is NULL_RTX.
3609 SUGGEST_REG is no longer actually used.
3610 It used to mean: copy the value through a register
3611 and return that register, if that is possible.
3612 We now use WANT_VALUE to decide whether to do this. */
3615 expand_assignment (to, from, want_value, suggest_reg)
3618 int suggest_reg ATTRIBUTE_UNUSED;
3623 /* Don't crash if the lhs of the assignment was erroneous. */
3625 if (TREE_CODE (to) == ERROR_MARK)
3627 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3628 return want_value ? result : NULL_RTX;
3631 /* Assignment of a structure component needs special treatment
3632 if the structure component's rtx is not simply a MEM.
3633 Assignment of an array element at a constant index, and assignment of
3634 an array element in an unaligned packed structure field, have the same problem.  */
3637 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3638 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3640 enum machine_mode mode1;
3641 HOST_WIDE_INT bitsize, bitpos;
3649 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3650 &unsignedp, &volatilep);
3652 /* If we are going to use store_bit_field and extract_bit_field,
3653 make sure to_rtx will be safe for multiple use. */
3655 if (mode1 == VOIDmode && want_value)
3656 tem = stabilize_reference (tem);
3658 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3662 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3664 if (GET_CODE (to_rtx) != MEM)
3667 if (GET_MODE (offset_rtx) != ptr_mode)
3668 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3670 #ifdef POINTERS_EXTEND_UNSIGNED
3671 if (GET_MODE (offset_rtx) != Pmode)
3672 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3675 /* A constant address in TO_RTX can have VOIDmode, we must not try
3676 to call force_reg for that case. Avoid that case. */
3677 if (GET_CODE (to_rtx) == MEM
3678 && GET_MODE (to_rtx) == BLKmode
3679 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3681 && (bitpos % bitsize) == 0
3682 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3683 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3685 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3689 to_rtx = offset_address (to_rtx, offset_rtx,
3690 highest_pow2_factor (offset));
3693 if (GET_CODE (to_rtx) == MEM)
3695 tree old_expr = MEM_EXPR (to_rtx);
3697 /* If the field is at offset zero, we could have been given the
3698 DECL_RTX of the parent struct. Don't munge it. */
3699 to_rtx = shallow_copy_rtx (to_rtx);
3701 set_mem_attributes (to_rtx, to, 0);
3703 /* If we changed MEM_EXPR, that means we're now referencing
3704 the COMPONENT_REF, which means that MEM_OFFSET must be
3705 relative to that field. But we've not yet reflected BITPOS
3706 in TO_RTX. This will be done in store_field. Adjust for
3707 that by biasing MEM_OFFSET by -bitpos. */
3708 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3709 && (bitpos / BITS_PER_UNIT) != 0)
3710 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3711 - (bitpos / BITS_PER_UNIT)));
3714 /* Deal with volatile and readonly fields. The former is only done
3715 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3716 if (volatilep && GET_CODE (to_rtx) == MEM)
3718 if (to_rtx == orig_to_rtx)
3719 to_rtx = copy_rtx (to_rtx);
3720 MEM_VOLATILE_P (to_rtx) = 1;
3723 if (TREE_CODE (to) == COMPONENT_REF
3724 && TREE_READONLY (TREE_OPERAND (to, 1)))
3726 if (to_rtx == orig_to_rtx)
3727 to_rtx = copy_rtx (to_rtx);
3728 RTX_UNCHANGING_P (to_rtx) = 1;
3731 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3733 if (to_rtx == orig_to_rtx)
3734 to_rtx = copy_rtx (to_rtx);
3735 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3738 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3740 /* Spurious cast for HPUX compiler. */
3741 ? ((enum machine_mode)
3742 TYPE_MODE (TREE_TYPE (to)))
3744 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3746 preserve_temp_slots (result);
3750 /* If the value is meaningful, convert RESULT to the proper mode.
3751 Otherwise, return nothing. */
3752 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3753 TYPE_MODE (TREE_TYPE (from)),
3755 TREE_UNSIGNED (TREE_TYPE (to)))
3759 /* If the rhs is a function call and its value is not an aggregate,
3760 call the function before we start to compute the lhs.
3761 This is needed for correct code for cases such as
3762 val = setjmp (buf) on machines where reference to val
3763 requires loading up part of an address in a separate insn.
3765 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3766 since it might be a promoted variable where the zero- or sign- extension
3767 needs to be done. Handling this in the normal way is safe because no
3768 computation is done before the call. */
3769 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3770 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3771 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3772 && GET_CODE (DECL_RTL (to)) == REG))
3777 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3779 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3781 /* Handle calls that return values in multiple non-contiguous locations.
3782 The Irix 6 ABI has examples of this. */
3783 if (GET_CODE (to_rtx) == PARALLEL)
3784 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3785 else if (GET_MODE (to_rtx) == BLKmode)
3786 emit_block_move (to_rtx, value, expr_size (from));
3789 #ifdef POINTERS_EXTEND_UNSIGNED
3790 if (POINTER_TYPE_P (TREE_TYPE (to))
3791 && GET_MODE (to_rtx) != GET_MODE (value))
3792 value = convert_memory_address (GET_MODE (to_rtx), value);
3794 emit_move_insn (to_rtx, value);
3796 preserve_temp_slots (to_rtx);
3799 return want_value ? to_rtx : NULL_RTX;
3802 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3803 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3806 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3808 /* Don't move directly into a return register. */
3809 if (TREE_CODE (to) == RESULT_DECL
3810 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3815 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3817 if (GET_CODE (to_rtx) == PARALLEL)
3818 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3820 emit_move_insn (to_rtx, temp);
3822 preserve_temp_slots (to_rtx);
3825 return want_value ? to_rtx : NULL_RTX;
3828 /* In case we are returning the contents of an object which overlaps
3829 the place the value is being stored, use a safe function when copying
3830 a value through a pointer into a structure value return block. */
3831 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3832 && current_function_returns_struct
3833 && !current_function_returns_pcc_struct)
3838 size = expr_size (from);
3839 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3841 #ifdef TARGET_MEM_FUNCTIONS
3842 emit_library_call (memmove_libfunc, LCT_NORMAL,
3843 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3844 XEXP (from_rtx, 0), Pmode,
3845 convert_to_mode (TYPE_MODE (sizetype),
3846 size, TREE_UNSIGNED (sizetype)),
3847 TYPE_MODE (sizetype));
3849 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3850 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3851 XEXP (to_rtx, 0), Pmode,
3852 convert_to_mode (TYPE_MODE (integer_type_node),
3853 size, TREE_UNSIGNED (integer_type_node)),
3854 TYPE_MODE (integer_type_node));
3857 preserve_temp_slots (to_rtx);
3860 return want_value ? to_rtx : NULL_RTX;
3863 /* Compute FROM and store the value in the rtx we got. */
3866 result = store_expr (from, to_rtx, want_value);
3867 preserve_temp_slots (result);
3870 return want_value ? result : NULL_RTX;
3873 /* Generate code for computing expression EXP,
3874 and storing the value into TARGET.
3875 TARGET may contain a QUEUED rtx.
3877 If WANT_VALUE is nonzero, return a copy of the value
3878 not in TARGET, so that we can be sure to use the proper
3879 value in a containing expression even if TARGET has something
3880 else stored in it. If possible, we copy the value through a pseudo
3881 and return that pseudo. Or, if the value is constant, we try to
3882 return the constant. In some cases, we return a pseudo
3883 copied *from* TARGET.
3885 If the mode is BLKmode then we may return TARGET itself.
3886 It turns out that in BLKmode it doesn't cause a problem,
3887 because C has no operators that could combine two different
3888 assignments into the same BLKmode object with different values
3889 with no sequence point. Will other languages need this to be fixed?
3892 If WANT_VALUE is 0, we return NULL, to make sure
3893 to catch quickly any cases where the caller uses the value
3894 and fails to set WANT_VALUE. */
3897 store_expr (exp, target, want_value)
3903 int dont_return_target = 0;
3904 int dont_store_target = 0;
3906 if (TREE_CODE (exp) == COMPOUND_EXPR)
3908 /* Perform first part of compound expression, then assign from second
3910 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3912 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3914 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3916 /* For a conditional expression, get a safe form of the target. Then
3917 test the condition, doing the appropriate assignment on either
3918 side. This avoids the creation of unnecessary temporaries.
3919 For non-BLKmode, it is more efficient not to do this. */
3921 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3924 target = protect_from_queue (target, 1);
3926 do_pending_stack_adjust ();
3928 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3929 start_cleanup_deferral ();
3930 store_expr (TREE_OPERAND (exp, 1), target, 0);
3931 end_cleanup_deferral ();
3933 emit_jump_insn (gen_jump (lab2));
3936 start_cleanup_deferral ();
3937 store_expr (TREE_OPERAND (exp, 2), target, 0);
3938 end_cleanup_deferral ();
3943 return want_value ? target : NULL_RTX;
3945 else if (queued_subexp_p (target))
3946 /* If target contains a postincrement, let's not risk
3947 using it as the place to generate the rhs. */
3949 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3951 /* Expand EXP into a new pseudo. */
3952 temp = gen_reg_rtx (GET_MODE (target));
3953 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3956 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3958 /* If target is volatile, ANSI requires accessing the value
3959 *from* the target, if it is accessed. So make that happen.
3960 In no case return the target itself. */
3961 if (! MEM_VOLATILE_P (target) && want_value)
3962 dont_return_target = 1;
3964 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3965 && GET_MODE (target) != BLKmode)
3966 /* If target is in memory and caller wants value in a register instead,
3967 arrange that. Pass TARGET as target for expand_expr so that,
3968 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3969 We know expand_expr will not use the target in that case.
3970 Don't do this if TARGET is volatile because we are supposed
3971 to write it and then read it. */
3973 temp = expand_expr (exp, target, GET_MODE (target), 0);
3974 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3976 /* If TEMP is already in the desired TARGET, only copy it from
3977 memory and don't store it there again. */
3979 || (rtx_equal_p (temp, target)
3980 && ! side_effects_p (temp) && ! side_effects_p (target)))
3981 dont_store_target = 1;
3982 temp = copy_to_reg (temp);
3984 dont_return_target = 1;
3986 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3987 /* If this is a scalar in a register that is stored in a wider mode
3988 than the declared mode, compute the result into its declared mode
3989 and then convert to the wider mode. Our value is the computed value.  */
3992 rtx inner_target = 0;
3994 /* If we don't want a value, we can do the conversion inside EXP,
3995 which will often result in some optimizations. Do the conversion
3996 in two steps: first change the signedness, if needed, then
3997 the extend. But don't do this if the type of EXP is a subtype
3998 of something else since then the conversion might involve
3999 more than just converting modes. */
4000 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4001 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4003 if (TREE_UNSIGNED (TREE_TYPE (exp))
4004 != SUBREG_PROMOTED_UNSIGNED_P (target))
4007 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4011 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4012 SUBREG_PROMOTED_UNSIGNED_P (target)),
4015 inner_target = SUBREG_REG (target);
4018 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4020 /* If TEMP is a volatile MEM and we want a result value, make
4021 the access now so it gets done only once. Likewise if
4022 it contains TARGET. */
4023 if (GET_CODE (temp) == MEM && want_value
4024 && (MEM_VOLATILE_P (temp)
4025 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4026 temp = copy_to_reg (temp);
4028 /* If TEMP is a VOIDmode constant, use convert_modes to make
4029 sure that we properly convert it. */
4030 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4032 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4033 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4034 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4035 GET_MODE (target), temp,
4036 SUBREG_PROMOTED_UNSIGNED_P (target));
4039 convert_move (SUBREG_REG (target), temp,
4040 SUBREG_PROMOTED_UNSIGNED_P (target));
4042 /* If we promoted a constant, change the mode back down to match
4043 target. Otherwise, the caller might get confused by a result whose
4044 mode is larger than expected. */
4046 if (want_value && GET_MODE (temp) != GET_MODE (target))
4048 if (GET_MODE (temp) != VOIDmode)
4050 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4051 SUBREG_PROMOTED_VAR_P (temp) = 1;
4052 SUBREG_PROMOTED_UNSIGNED_P (temp)
4053 = SUBREG_PROMOTED_UNSIGNED_P (target);
4056 temp = convert_modes (GET_MODE (target),
4057 GET_MODE (SUBREG_REG (target)),
4058 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4061 return want_value ? temp : NULL_RTX;
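/* Illustrative example (not from the original source): for a `signed
   char' variable promoted into an SImode register, the branch above
   computes the right-hand side in the declared narrow mode and
   convert_move then extends the result into the wider register, using
   the signedness recorded by SUBREG_PROMOTED_UNSIGNED_P.  */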
4065 temp = expand_expr (exp, target, GET_MODE (target), 0);
4066 /* Return TARGET if it's a specified hardware register.
4067 If TARGET is a volatile mem ref, either return TARGET
4068 or return a reg copied *from* TARGET; ANSI requires this.
4070 Otherwise, if TEMP is not TARGET, return TEMP
4071 if it is constant (for efficiency),
4072 or if we really want the correct value. */
4073 if (!(target && GET_CODE (target) == REG
4074 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4075 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4076 && ! rtx_equal_p (temp, target)
4077 && (CONSTANT_P (temp) || want_value))
4078 dont_return_target = 1;
4081 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4082 the same as that of TARGET, adjust the constant. This is needed, for
4083 example, in case it is a CONST_DOUBLE and we want only a word-sized value.  */
4085 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4086 && TREE_CODE (exp) != ERROR_MARK
4087 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4088 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4089 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4091 /* If value was not generated in the target, store it there.
4092 Convert the value to TARGET's type first if necessary.
4093 If TEMP and TARGET compare equal according to rtx_equal_p, but
4094 one or both of them are volatile memory refs, we have to distinguish two cases:
4096 - expand_expr has used TARGET. In this case, we must not generate
4097 another copy. This can be detected by TARGET being equal according to ==.
4099 - expand_expr has not used TARGET - that means that the source just
4100 happens to have the same RTX form. Since temp will have been created
4101 by expand_expr, it will compare unequal according to ==.
4102 We must generate a copy in this case, to reach the correct number
4103 of volatile memory references. */
4105 if ((! rtx_equal_p (temp, target)
4106 || (temp != target && (side_effects_p (temp)
4107 || side_effects_p (target))))
4108 && TREE_CODE (exp) != ERROR_MARK
4109 && ! dont_store_target)
4111 target = protect_from_queue (target, 1);
4112 if (GET_MODE (temp) != GET_MODE (target)
4113 && GET_MODE (temp) != VOIDmode)
4115 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4116 if (dont_return_target)
4118 /* In this case, we will return TEMP,
4119 so make sure it has the proper mode.
4120 But don't forget to store the value into TARGET. */
4121 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4122 emit_move_insn (target, temp);
4125 convert_move (target, temp, unsignedp);
4128 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4130 /* Handle copying a string constant into an array. The string
4131 constant may be shorter than the array. So copy just the string's
4132 actual length, and clear the rest. First get the size of the data
4133 type of the string, which is actually the size of the target. */
4134 rtx size = expr_size (exp);
4136 if (GET_CODE (size) == CONST_INT
4137 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4138 emit_block_move (target, temp, size);
4141 /* Compute the size of the data to copy from the string. */
4143 = size_binop (MIN_EXPR,
4144 make_tree (sizetype, size),
4145 size_int (TREE_STRING_LENGTH (exp)));
4146 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4150 /* Copy that much. */
4151 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4152 emit_block_move (target, temp, copy_size_rtx);
4154 /* Figure out how much is left in TARGET that we have to clear.
4155 Do all calculations in ptr_mode. */
4156 if (GET_CODE (copy_size_rtx) == CONST_INT)
4158 size = plus_constant (size, -INTVAL (copy_size_rtx));
4159 target = adjust_address (target, BLKmode,
4160 INTVAL (copy_size_rtx));
4164 size = expand_binop (ptr_mode, sub_optab, size,
4165 copy_size_rtx, NULL_RTX, 0,
4168 #ifdef POINTERS_EXTEND_UNSIGNED
4169 if (GET_MODE (copy_size_rtx) != Pmode)
4170 copy_size_rtx = convert_memory_address (Pmode,
4174 target = offset_address (target, copy_size_rtx,
4175 highest_pow2_factor (copy_size));
4176 label = gen_label_rtx ();
4177 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4178 GET_MODE (size), 0, label);
4181 if (size != const0_rtx)
4182 clear_storage (target, size);
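/* Illustrative sketch (plain C, not GCC code): the copy-then-clear
   above is the block-level analogue of initializing a char array from a
   shorter string constant.  Hypothetical helper; assumes <string.h>.  */

#include <string.h>

static void
init_array_from_string_demo (char *target, size_t target_size,
			     const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);		/* the emit_block_move part */
  if (copy < target_size)		/* the clear_storage part */
    memset (target + copy, 0, target_size - copy);
}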
4188 /* Handle calls that return values in multiple non-contiguous locations.
4189 The Irix 6 ABI has examples of this. */
4190 else if (GET_CODE (target) == PARALLEL)
4191 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4192 else if (GET_MODE (temp) == BLKmode)
4193 emit_block_move (target, temp, expr_size (exp));
4195 emit_move_insn (target, temp);
4198 /* If we don't want a value, return NULL_RTX. */
4202 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4203 ??? The latter test doesn't seem to make sense. */
4204 else if (dont_return_target && GET_CODE (temp) != MEM)
4207 /* Return TARGET itself if it is a hard register. */
4208 else if (want_value && GET_MODE (target) != BLKmode
4209 && ! (GET_CODE (target) == REG
4210 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4211 return copy_to_reg (target);
4217 /* Return 1 if EXP just contains zeros. */
4225 switch (TREE_CODE (exp))
4229 case NON_LVALUE_EXPR:
4230 case VIEW_CONVERT_EXPR:
4231 return is_zeros_p (TREE_OPERAND (exp, 0));
4234 return integer_zerop (exp);
4238 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4241 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4244 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4245 elt = TREE_CHAIN (elt))
4246 if (!is_zeros_p (TREE_VALUE (elt)))
4252 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4253 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4254 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4255 if (! is_zeros_p (TREE_VALUE (elt)))
4265 /* Return 1 if EXP contains mostly (3/4) zeros. */
4268 mostly_zeros_p (exp)
4271 if (TREE_CODE (exp) == CONSTRUCTOR)
4273 int elts = 0, zeros = 0;
4274 tree elt = CONSTRUCTOR_ELTS (exp);
4275 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4277 /* If there are no ranges of true bits, it is all zero. */
4278 return elt == NULL_TREE;
4280 for (; elt; elt = TREE_CHAIN (elt))
4282 /* We do not handle the case where the index is a RANGE_EXPR,
4283 so the statistic will be somewhat inaccurate.
4284 We do make a more accurate count in store_constructor itself,
4285 and since this function is only used for nested array elements,
4286 this should be close enough. */
4287 if (mostly_zeros_p (TREE_VALUE (elt)))
4292 return 4 * zeros >= 3 * elts;
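/* Worked example (illustrative only): a constructor with elts == 8 of
   which zeros == 6 satisfies 4 * 6 == 24 >= 3 * 8 == 24 and so counts
   as mostly zero; with zeros == 5, 20 < 24 and it does not.  */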
4295 return is_zeros_p (exp);
4298 /* Helper function for store_constructor.
4299 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4300 TYPE is the type of the CONSTRUCTOR, not the element type.
4301 CLEARED is as for store_constructor.
4302 ALIAS_SET is the alias set to use for any stores.
4304 This provides a recursive shortcut back to store_constructor when it isn't
4305 necessary to go through store_field. This is so that we can pass through
4306 the cleared field to let store_constructor know that we may not have to
4307 clear a substructure if the outer structure has already been cleared. */
4310 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4313 unsigned HOST_WIDE_INT bitsize;
4314 HOST_WIDE_INT bitpos;
4315 enum machine_mode mode;
4320 if (TREE_CODE (exp) == CONSTRUCTOR
4321 && bitpos % BITS_PER_UNIT == 0
4322 /* If we have a non-zero bitpos for a register target, then we just
4323 let store_field do the bitfield handling. This is unlikely to
4324 generate unnecessary clear instructions anyway.  */
4325 && (bitpos == 0 || GET_CODE (target) == MEM))
4327 if (GET_CODE (target) == MEM)
4329 = adjust_address (target,
4330 GET_MODE (target) == BLKmode
4332 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4333 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4336 /* Update the alias set, if required. */
4337 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4338 && MEM_ALIAS_SET (target) != 0)
4340 target = copy_rtx (target);
4341 set_mem_alias_set (target, alias_set);
4344 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4347 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4351 /* Store the value of constructor EXP into the rtx TARGET.
4352 TARGET is either a REG or a MEM; we know it cannot conflict, since
4353 safe_from_p has been called.
4354 CLEARED is true if TARGET is known to have been zero'd.
4355 SIZE is the number of bytes of TARGET we are allowed to modify: this
4356 may not be the same as the size of EXP if we are assigning to a field
4357 which has been packed to exclude padding bits. */
4360 store_constructor (exp, target, cleared, size)
4366 tree type = TREE_TYPE (exp);
4367 #ifdef WORD_REGISTER_OPERATIONS
4368 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4371 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4372 || TREE_CODE (type) == QUAL_UNION_TYPE)
4376 /* We either clear the aggregate or indicate the value is dead. */
4377 if ((TREE_CODE (type) == UNION_TYPE
4378 || TREE_CODE (type) == QUAL_UNION_TYPE)
4380 && ! CONSTRUCTOR_ELTS (exp))
4381 /* If the constructor is empty, clear the union. */
4383 clear_storage (target, expr_size (exp));
4387 /* If we are building a static constructor into a register,
4388 set the initial value as zero so we can fold the value into
4389 a constant. But if more than one register is involved,
4390 this probably loses. */
4391 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4392 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4394 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4398 /* If the constructor has fewer fields than the structure
4399 or if we are initializing the structure to mostly zeros,
4400 clear the whole structure first. Don't do this if TARGET is a
4401 register whose mode size isn't equal to SIZE since clear_storage
4402 can't handle this case. */
4403 else if (! cleared && size > 0
4404 && ((list_length (CONSTRUCTOR_ELTS (exp))
4405 != fields_length (type))
4406 || mostly_zeros_p (exp))
4407 && (GET_CODE (target) != REG
4408 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4411 clear_storage (target, GEN_INT (size));
4416 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4418 /* Store each element of the constructor into
4419 the corresponding field of TARGET. */
4421 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4423 tree field = TREE_PURPOSE (elt);
4424 tree value = TREE_VALUE (elt);
4425 enum machine_mode mode;
4426 HOST_WIDE_INT bitsize;
4427 HOST_WIDE_INT bitpos = 0;
4430 rtx to_rtx = target;
4432 /* Just ignore missing fields.
4433 We cleared the whole structure, above,
4434 if any fields are missing. */
4438 if (cleared && is_zeros_p (value))
4441 if (host_integerp (DECL_SIZE (field), 1))
4442 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4446 unsignedp = TREE_UNSIGNED (field);
4447 mode = DECL_MODE (field);
4448 if (DECL_BIT_FIELD (field))
4451 offset = DECL_FIELD_OFFSET (field);
4452 if (host_integerp (offset, 0)
4453 && host_integerp (bit_position (field), 0))
4455 bitpos = int_bit_position (field);
4459 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4465 if (contains_placeholder_p (offset))
4466 offset = build (WITH_RECORD_EXPR, sizetype,
4467 offset, make_tree (TREE_TYPE (exp), target));
4469 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4470 if (GET_CODE (to_rtx) != MEM)
4473 if (GET_MODE (offset_rtx) != ptr_mode)
4474 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4476 #ifdef POINTERS_EXTEND_UNSIGNED
4477 if (GET_MODE (offset_rtx) != Pmode)
4478 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4481 to_rtx = offset_address (to_rtx, offset_rtx,
4482 highest_pow2_factor (offset));
4485 if (TREE_READONLY (field))
4487 if (GET_CODE (to_rtx) == MEM)
4488 to_rtx = copy_rtx (to_rtx);
4490 RTX_UNCHANGING_P (to_rtx) = 1;
4493 #ifdef WORD_REGISTER_OPERATIONS
4494 /* If this initializes a field that is smaller than a word, at the
4495 start of a word, try to widen it to a full word.
4496 This special case allows us to output C++ member function
4497 initializations in a form that the optimizers can understand. */
4498 if (GET_CODE (target) == REG
4499 && bitsize < BITS_PER_WORD
4500 && bitpos % BITS_PER_WORD == 0
4501 && GET_MODE_CLASS (mode) == MODE_INT
4502 && TREE_CODE (value) == INTEGER_CST
4504 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4506 tree type = TREE_TYPE (value);
4508 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4510 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4511 value = convert (type, value);
4514 if (BYTES_BIG_ENDIAN)
4516 = fold (build (LSHIFT_EXPR, type, value,
4517 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4518 bitsize = BITS_PER_WORD;
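/* Hedged example of the widening above, assuming BITS_PER_WORD == 32:
   an 8-bit constant 0x12 stored at bitpos 0 has its type widened to a
   32-bit type, and on a big-endian target it is shifted left by
   32 - 8 == 24 to become 0x12000000; either way bitsize becomes 32
   and a plain word move replaces a bit-field insertion.  */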
4523 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4524 && DECL_NONADDRESSABLE_P (field))
4526 to_rtx = copy_rtx (to_rtx);
4527 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4530 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4531 value, type, cleared,
4532 get_alias_set (TREE_TYPE (field)));
4535 else if (TREE_CODE (type) == ARRAY_TYPE
4536 || TREE_CODE (type) == VECTOR_TYPE)
4541 tree domain = TYPE_DOMAIN (type);
4542 tree elttype = TREE_TYPE (type);
4544 HOST_WIDE_INT minelt = 0;
4545 HOST_WIDE_INT maxelt = 0;
4547 /* Vectors are like arrays, but the domain is stored via an array type indirectly.  */
4549 if (TREE_CODE (type) == VECTOR_TYPE)
4551 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4552 the same field as TYPE_DOMAIN, we are not guaranteed that it always agrees.  */
4554 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4555 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4558 const_bounds_p = (TYPE_MIN_VALUE (domain)
4559 && TYPE_MAX_VALUE (domain)
4560 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4561 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4563 /* If we have constant bounds for the range of the type, get them. */
4566 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4567 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4570 /* If the constructor has fewer elements than the array,
4571 clear the whole array first. Similarly if this is
4572 a static constructor of a non-BLKmode object.  */
4573 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4577 HOST_WIDE_INT count = 0, zero_count = 0;
4578 need_to_clear = ! const_bounds_p;
4580 /* This loop is a more accurate version of the loop in
4581 mostly_zeros_p (it handles RANGE_EXPR in an index).
4582 It is also needed to check for missing elements. */
4583 for (elt = CONSTRUCTOR_ELTS (exp);
4584 elt != NULL_TREE && ! need_to_clear;
4585 elt = TREE_CHAIN (elt))
4587 tree index = TREE_PURPOSE (elt);
4588 HOST_WIDE_INT this_node_count;
4590 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4592 tree lo_index = TREE_OPERAND (index, 0);
4593 tree hi_index = TREE_OPERAND (index, 1);
4595 if (! host_integerp (lo_index, 1)
4596 || ! host_integerp (hi_index, 1))
4602 this_node_count = (tree_low_cst (hi_index, 1)
4603 - tree_low_cst (lo_index, 1) + 1);
4606 this_node_count = 1;
4608 count += this_node_count;
4609 if (mostly_zeros_p (TREE_VALUE (elt)))
4610 zero_count += this_node_count;
4613 /* Clear the entire array first if there are any missing elements,
4614 or if the incidence of zero elements is >= 75%. */
4616 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4620 if (need_to_clear && size > 0)
4625 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4627 clear_storage (target, GEN_INT (size));
4631 else if (REG_P (target))
4632 /* Inform later passes that the old value is dead. */
4633 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4635 /* Store each element of the constructor into
4636 the corresponding element of TARGET, determined
4637 by counting the elements. */
4638 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4640 elt = TREE_CHAIN (elt), i++)
4642 enum machine_mode mode;
4643 HOST_WIDE_INT bitsize;
4644 HOST_WIDE_INT bitpos;
4646 tree value = TREE_VALUE (elt);
4647 tree index = TREE_PURPOSE (elt);
4648 rtx xtarget = target;
4650 if (cleared && is_zeros_p (value))
4653 unsignedp = TREE_UNSIGNED (elttype);
4654 mode = TYPE_MODE (elttype);
4655 if (mode == BLKmode)
4656 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4657 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4660 bitsize = GET_MODE_BITSIZE (mode);
4662 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4664 tree lo_index = TREE_OPERAND (index, 0);
4665 tree hi_index = TREE_OPERAND (index, 1);
4666 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4667 struct nesting *loop;
4668 HOST_WIDE_INT lo, hi, count;
4671 /* If the range is constant and "small", unroll the loop. */
4673 && host_integerp (lo_index, 0)
4674 && host_integerp (hi_index, 0)
4675 && (lo = tree_low_cst (lo_index, 0),
4676 hi = tree_low_cst (hi_index, 0),
4677 count = hi - lo + 1,
4678 (GET_CODE (target) != MEM
4680 || (host_integerp (TYPE_SIZE (elttype), 1)
4681 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4684 lo -= minelt; hi -= minelt;
4685 for (; lo <= hi; lo++)
4687 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4689 if (GET_CODE (target) == MEM
4690 && !MEM_KEEP_ALIAS_SET_P (target)
4691 && TREE_CODE (type) == ARRAY_TYPE
4692 && TYPE_NONALIASED_COMPONENT (type))
4694 target = copy_rtx (target);
4695 MEM_KEEP_ALIAS_SET_P (target) = 1;
4698 store_constructor_field
4699 (target, bitsize, bitpos, mode, value, type, cleared,
4700 get_alias_set (elttype));
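/* Illustrative sketch (hypothetical GNU C input): for
   "int a[4] = { [0 ... 3] = 7 };" with constant bounds and 32-bit
   ints, the range is unrolled into four stores at bitpos 0, 32, 64
   and 96 instead of the run-time loop emitted below.  */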
4705 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4706 loop_top = gen_label_rtx ();
4707 loop_end = gen_label_rtx ();
4709 unsignedp = TREE_UNSIGNED (domain);
4711 index = build_decl (VAR_DECL, NULL_TREE, domain);
4714 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4716 SET_DECL_RTL (index, index_r);
4717 if (TREE_CODE (value) == SAVE_EXPR
4718 && SAVE_EXPR_RTL (value) == 0)
4720 /* Make sure value gets expanded once before the loop.  */
4722 expand_expr (value, const0_rtx, VOIDmode, 0);
4725 store_expr (lo_index, index_r, 0);
4726 loop = expand_start_loop (0);
4728 /* Assign value to element index. */
4730 = convert (ssizetype,
4731 fold (build (MINUS_EXPR, TREE_TYPE (index),
4732 index, TYPE_MIN_VALUE (domain))));
4733 position = size_binop (MULT_EXPR, position,
4735 TYPE_SIZE_UNIT (elttype)));
4737 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4738 xtarget = offset_address (target, pos_rtx,
4739 highest_pow2_factor (position));
4740 xtarget = adjust_address (xtarget, mode, 0);
4741 if (TREE_CODE (value) == CONSTRUCTOR)
4742 store_constructor (value, xtarget, cleared,
4743 bitsize / BITS_PER_UNIT);
4745 store_expr (value, xtarget, 0);
4747 expand_exit_loop_if_false (loop,
4748 build (LT_EXPR, integer_type_node,
4751 expand_increment (build (PREINCREMENT_EXPR,
4753 index, integer_one_node), 0, 0);
4755 emit_label (loop_end);
4758 else if ((index != 0 && ! host_integerp (index, 0))
4759 || ! host_integerp (TYPE_SIZE (elttype), 1))
4764 index = ssize_int (1);
4767 index = convert (ssizetype,
4768 fold (build (MINUS_EXPR, index,
4769 TYPE_MIN_VALUE (domain))));
4771 position = size_binop (MULT_EXPR, index,
4773 TYPE_SIZE_UNIT (elttype)));
4774 xtarget = offset_address (target,
4775 expand_expr (position, 0, VOIDmode, 0),
4776 highest_pow2_factor (position));
4777 xtarget = adjust_address (xtarget, mode, 0);
4778 store_expr (value, xtarget, 0);
4783 bitpos = ((tree_low_cst (index, 0) - minelt)
4784 * tree_low_cst (TYPE_SIZE (elttype), 1));
4786 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4788 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4789 && TREE_CODE (type) == ARRAY_TYPE
4790 && TYPE_NONALIASED_COMPONENT (type))
4792 target = copy_rtx (target);
4793 MEM_KEEP_ALIAS_SET_P (target) = 1;
4796 store_constructor_field (target, bitsize, bitpos, mode, value,
4797 type, cleared, get_alias_set (elttype));
4803 /* Set constructor assignments. */
4804 else if (TREE_CODE (type) == SET_TYPE)
4806 tree elt = CONSTRUCTOR_ELTS (exp);
4807 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4808 tree domain = TYPE_DOMAIN (type);
4809 tree domain_min, domain_max, bitlength;
4811 /* The default implementation strategy is to extract the constant
4812 parts of the constructor, use that to initialize the target,
4813 and then "or" in whatever non-constant ranges we need in addition.
4815 If a large set is all zero or all ones, it is
4816 probably better to set it using memset (if available) or bzero.
4817 Also, if a large set has just a single range, it may also be
4818 better to first clear the whole set (using
4819 bzero/memset) and then set the bits we want.  */
4821 /* Check for all zeros. */
4822 if (elt == NULL_TREE && size > 0)
4825 clear_storage (target, GEN_INT (size));
4829 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4830 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4831 bitlength = size_binop (PLUS_EXPR,
4832 size_diffop (domain_max, domain_min),
4835 nbits = tree_low_cst (bitlength, 1);
4837 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4838 are "complicated" (more than one range), initialize (the
4839 constant parts) by copying from a constant. */
4840 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4841 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4843 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4844 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4845 char *bit_buffer = (char *) alloca (nbits);
4846 HOST_WIDE_INT word = 0;
4847 unsigned int bit_pos = 0;
4848 unsigned int ibit = 0;
4849 unsigned int offset = 0; /* In bytes from beginning of set. */
4851 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4854 if (bit_buffer[ibit])
4856 if (BYTES_BIG_ENDIAN)
4857 word |= (1 << (set_word_size - 1 - bit_pos));
4859 word |= 1 << bit_pos;
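/* Worked example (illustrative): with set_word_size == 8 and bit 2
   of the set live, the little-endian branch ORs in 1 << 2 (0x04),
   while the big-endian branch ORs in 1 << (8 - 1 - 2) (0x20).  */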
4863 if (bit_pos >= set_word_size || ibit == nbits)
4865 if (word != 0 || ! cleared)
4867 rtx datum = GEN_INT (word);
4870 /* The assumption here is that it is safe to use
4871 XEXP if the set is multi-word, but not if
4872 it's single-word. */
4873 if (GET_CODE (target) == MEM)
4874 to_rtx = adjust_address (target, mode, offset);
4875 else if (offset == 0)
4879 emit_move_insn (to_rtx, datum);
4886 offset += set_word_size / BITS_PER_UNIT;
4891 /* Don't bother clearing storage if the set is all ones. */
4892 if (TREE_CHAIN (elt) != NULL_TREE
4893 || (TREE_PURPOSE (elt) == NULL_TREE
4895 : ( ! host_integerp (TREE_VALUE (elt), 0)
4896 || ! host_integerp (TREE_PURPOSE (elt), 0)
4897 || (tree_low_cst (TREE_VALUE (elt), 0)
4898 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4899 != (HOST_WIDE_INT) nbits))))
4900 clear_storage (target, expr_size (exp));
4902 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4904 /* Start of range of element or NULL. */
4905 tree startbit = TREE_PURPOSE (elt);
4906 /* End of range of element, or element value. */
4907 tree endbit = TREE_VALUE (elt);
4908 #ifdef TARGET_MEM_FUNCTIONS
4909 HOST_WIDE_INT startb, endb;
4911 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4913 bitlength_rtx = expand_expr (bitlength,
4914 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4916 /* Handle non-range tuple element like [ expr ]. */
4917 if (startbit == NULL_TREE)
4919 startbit = save_expr (endbit);
4923 startbit = convert (sizetype, startbit);
4924 endbit = convert (sizetype, endbit);
4925 if (! integer_zerop (domain_min))
4927 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4928 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4930 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4931 EXPAND_CONST_ADDRESS);
4932 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4933 EXPAND_CONST_ADDRESS);
4939 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4942 emit_move_insn (targetx, target);
4945 else if (GET_CODE (target) == MEM)
4950 #ifdef TARGET_MEM_FUNCTIONS
4951 /* Optimization: If startbit and endbit are
4952 constants divisible by BITS_PER_UNIT,
4953 call memset instead. */
4954 if (TREE_CODE (startbit) == INTEGER_CST
4955 && TREE_CODE (endbit) == INTEGER_CST
4956 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4957 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4959 emit_library_call (memset_libfunc, LCT_NORMAL,
4961 plus_constant (XEXP (targetx, 0),
4962 startb / BITS_PER_UNIT),
4964 constm1_rtx, TYPE_MODE (integer_type_node),
4965 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4966 TYPE_MODE (sizetype));
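/* Hedged example of the memset shortcut above: with startbit == 8 and
   endbit == 23, startb is 8 and endb is 24, both multiples of an
   8-bit BITS_PER_UNIT, so the library call amounts to
   memset (ptr + 1, -1, 2) rather than a __setbits call.  */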
4970 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4971 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4972 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4973 startbit_rtx, TYPE_MODE (sizetype),
4974 endbit_rtx, TYPE_MODE (sizetype));
4977 emit_move_insn (target, targetx);
4985 /* Store the value of EXP (an expression tree)
4986 into a subfield of TARGET which has mode MODE and occupies
4987 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4988 If MODE is VOIDmode, it means that we are storing into a bit-field.
4990 If VALUE_MODE is VOIDmode, return nothing in particular.
4991 UNSIGNEDP is not used in this case.
4993 Otherwise, return an rtx for the value stored. This rtx
4994 has mode VALUE_MODE if that is convenient to do.
4995 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4997 TYPE is the type of the underlying object,
4999 ALIAS_SET is the alias set for the destination. This value will
5000 (in general) be different from that for TARGET, since TARGET is a
5001 reference to the containing structure. */
5004 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5007 HOST_WIDE_INT bitsize;
5008 HOST_WIDE_INT bitpos;
5009 enum machine_mode mode;
5011 enum machine_mode value_mode;
5016 HOST_WIDE_INT width_mask = 0;
5018 if (TREE_CODE (exp) == ERROR_MARK)
5021 /* If we have nothing to store, do nothing unless the expression has side-effects.  */
5024 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5025 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5026 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
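/* Example (illustrative only): for bitsize == 5 this computes
   width_mask == ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f, the mask used
   further down to strip any bits above the field.  */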
5028 /* If we are storing into an unaligned field of an aligned union that is
5029 in a register, we may have the mode of TARGET being an integer mode but
5030 MODE == BLKmode. In that case, get an aligned object whose size and
5031 alignment are the same as TARGET and store TARGET into it (we can avoid
5032 the store if the field being stored is the entire width of TARGET). Then
5033 call ourselves recursively to store the field into a BLKmode version of
5034 that object. Finally, load from the object into TARGET. This is not
5035 very efficient in general, but should only be slightly more expensive
5036 than the otherwise-required unaligned accesses. Perhaps this can be
5037 cleaned up later. */
5040 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5044 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5046 rtx blk_object = adjust_address (object, BLKmode, 0);
5048 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5049 emit_move_insn (object, target);
5051 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5054 emit_move_insn (target, object);
5056 /* We want to return the BLKmode version of the data. */
5060 if (GET_CODE (target) == CONCAT)
5062 /* We're storing into a struct containing a single __complex. */
5066 return store_expr (exp, target, 0);
5069 /* If the structure is in a register or if the component
5070 is a bit field, we cannot use addressing to access it.
5071 Use bit-field techniques or SUBREG to store in it. */
5073 if (mode == VOIDmode
5074 || (mode != BLKmode && ! direct_store[(int) mode]
5075 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5076 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5077 || GET_CODE (target) == REG
5078 || GET_CODE (target) == SUBREG
5079 /* If the field isn't aligned enough to store as an ordinary memref,
5080 store it as a bit field. */
5081 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5082 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5083 || bitpos % GET_MODE_ALIGNMENT (mode)))
5084 /* If the RHS and field are a constant size and the size of the
5085 RHS isn't the same size as the bitfield, we must use bitfield techniques.  */
5088 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5089 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5091 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5093 /* If BITSIZE is narrower than the size of the type of EXP
5094 we will be narrowing TEMP. Normally, what's wanted are the
5095 low-order bits. However, if EXP's type is a record and this is
5096 a big-endian machine, we want the upper BITSIZE bits.  */
5097 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5098 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5099 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5100 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5101 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5105 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
5107 if (mode != VOIDmode && mode != BLKmode
5108 && mode != TYPE_MODE (TREE_TYPE (exp)))
5109 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5111 /* If the modes of TARGET and TEMP are both BLKmode, both
5112 must be in memory and BITPOS must be aligned on a byte
5113 boundary. If so, we simply do a block copy. */
5114 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5116 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5117 || bitpos % BITS_PER_UNIT != 0)
5120 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5121 emit_block_move (target, temp,
5122 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5125 return value_mode == VOIDmode ? const0_rtx : target;
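/* Example (illustrative): a 24-bit BLKmode field starting on a byte
   boundary is handled by the block copy above, which moves
   (24 + 7) / 8 == 3 bytes; fields that do not start on a byte
   boundary cannot take this path.  */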
5128 /* Store the value in the bitfield. */
5129 store_bit_field (target, bitsize, bitpos, mode, temp,
5130 int_size_in_bytes (type));
5132 if (value_mode != VOIDmode)
5134 /* The caller wants an rtx for the value.
5135 If possible, avoid refetching from the bitfield itself. */
5137 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5140 enum machine_mode tmode;
5142 tmode = GET_MODE (temp);
5143 if (tmode == VOIDmode)
5147 return expand_and (tmode, temp,
5148 GEN_INT (trunc_int_for_mode (width_mask,
5152 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5153 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5154 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
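/* Worked example: with tmode 32 bits wide and bitsize == 5, count is
   32 - 5 == 27; shifting left by 27 and then arithmetically right by
   27 sign-extends the 5-bit field into the full word, the signed
   counterpart of the expand_and masking above.  */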
5157 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5158 NULL_RTX, value_mode, VOIDmode,
5159 int_size_in_bytes (type));
5165 rtx addr = XEXP (target, 0);
5166 rtx to_rtx = target;
5168 /* If a value is wanted, it must be the lhs;
5169 so make the address stable for multiple use. */
5171 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5172 && ! CONSTANT_ADDRESS_P (addr)
5173 /* A frame-pointer reference is already stable. */
5174 && ! (GET_CODE (addr) == PLUS
5175 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5176 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5177 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5178 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5180 /* Now build a reference to just the desired component. */
5182 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5184 if (to_rtx == target)
5185 to_rtx = copy_rtx (to_rtx);
5187 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5188 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5189 set_mem_alias_set (to_rtx, alias_set);
5191 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5195 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5196 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5197 codes and find the ultimate containing object, which we return.
5199 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5200 bit position, and *PUNSIGNEDP to the signedness of the field.
5201 If the position of the field is variable, we store a tree
5202 giving the variable offset (in units) in *POFFSET.
5203 This offset is in addition to the bit position.
5204 If the position is not variable, we store 0 in *POFFSET.
5206 If any of the extraction expressions is volatile,
5207 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5209 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5210 is a mode that can be used to access the field.  In that case, *PBITSIZE is redundant.
5213 If the field describes a variable-sized object, *PMODE is set to
5214 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5215 this case, but the address of the object can be found. */
5218 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5219 punsignedp, pvolatilep)
5221 HOST_WIDE_INT *pbitsize;
5222 HOST_WIDE_INT *pbitpos;
5224 enum machine_mode *pmode;
5229 enum machine_mode mode = VOIDmode;
5230 tree offset = size_zero_node;
5231 tree bit_offset = bitsize_zero_node;
5232 tree placeholder_ptr = 0;
5235 /* First get the mode, signedness, and size. We do this from just the
5236 outermost expression. */
5237 if (TREE_CODE (exp) == COMPONENT_REF)
5239 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5240 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5241 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5243 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5245 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5247 size_tree = TREE_OPERAND (exp, 1);
5248 *punsignedp = TREE_UNSIGNED (exp);
5252 mode = TYPE_MODE (TREE_TYPE (exp));
5253 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5255 if (mode == BLKmode)
5256 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5258 *pbitsize = GET_MODE_BITSIZE (mode);
5263 if (! host_integerp (size_tree, 1))
5264 mode = BLKmode, *pbitsize = -1;
5266 *pbitsize = tree_low_cst (size_tree, 1);
5269 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5270 and find the ultimate containing object. */
5273 if (TREE_CODE (exp) == BIT_FIELD_REF)
5274 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5275 else if (TREE_CODE (exp) == COMPONENT_REF)
5277 tree field = TREE_OPERAND (exp, 1);
5278 tree this_offset = DECL_FIELD_OFFSET (field);
5280 /* If this field hasn't been filled in yet, don't go
5281 past it. This should only happen when folding expressions
5282 made during type construction. */
5283 if (this_offset == 0)
5285 else if (! TREE_CONSTANT (this_offset)
5286 && contains_placeholder_p (this_offset))
5287 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5289 offset = size_binop (PLUS_EXPR, offset, this_offset);
5290 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5291 DECL_FIELD_BIT_OFFSET (field));
5293 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5296 else if (TREE_CODE (exp) == ARRAY_REF
5297 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5299 tree index = TREE_OPERAND (exp, 1);
5300 tree array = TREE_OPERAND (exp, 0);
5301 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5302 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5303 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5305 /* We assume all arrays have sizes that are a multiple of a byte.
5306 First subtract the lower bound, if any, in the type of the
5307 index, then convert to sizetype and multiply by the size of the array element.  */
5309 if (low_bound != 0 && ! integer_zerop (low_bound))
5310 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5313 /* If the index has a self-referential type, pass it to a
5314 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5315 component to one. */
5316 if (! TREE_CONSTANT (index)
5317 && contains_placeholder_p (index))
5318 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5319 if (! TREE_CONSTANT (unit_size)
5320 && contains_placeholder_p (unit_size))
5321 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5323 offset = size_binop (PLUS_EXPR, offset,
5324 size_binop (MULT_EXPR,
5325 convert (sizetype, index),
5329 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5331 tree new = find_placeholder (exp, &placeholder_ptr);
5333 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5334 We might have been called from tree optimization where we
5335 haven't set up an object yet. */
5343 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5344 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5345 && ! ((TREE_CODE (exp) == NOP_EXPR
5346 || TREE_CODE (exp) == CONVERT_EXPR)
5347 && (TYPE_MODE (TREE_TYPE (exp))
5348 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5351 /* If any reference in the chain is volatile, the effect is volatile. */
5352 if (TREE_THIS_VOLATILE (exp))
5355 exp = TREE_OPERAND (exp, 0);
5358 /* If OFFSET is constant, see if we can return the whole thing as a
5359 constant bit position. Otherwise, split it up. */
5360 if (host_integerp (offset, 0)
5361 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5363 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5364 && host_integerp (tem, 0))
5365 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5367 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
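/* Illustrative sketch (hypothetical layout): for a COMPONENT_REF
   whose field sits 4 units plus 3 bits into the record, the loop
   above accumulates offset == 4 and bit_offset == 3, so with 8-bit
   units the constant case yields *pbitpos == 4 * 8 + 3 == 35 and
   *poffset == 0; a variable byte offset would instead be returned as
   a tree in *poffset.  */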
5373 /* Return 1 if T is an expression that get_inner_reference handles. */
5376 handled_component_p (t)
5379 switch (TREE_CODE (t))
5384 case ARRAY_RANGE_REF:
5385 case NON_LVALUE_EXPR:
5386 case VIEW_CONVERT_EXPR:
5391 return (TYPE_MODE (TREE_TYPE (t))
5392 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5399 /* Given an rtx VALUE that may contain additions and multiplications, return
5400 an equivalent value that just refers to a register, memory, or constant.
5401 This is done by generating instructions to perform the arithmetic and
5402 returning a pseudo-register containing the value.
5404 The returned value may be a REG, SUBREG, MEM or constant. */
5407 force_operand (value, target)
5411 /* Use a temporary to force order of execution of calls to `force_operand'.  */
5415 /* Use subtarget as the target for operand 0 of a binary operation. */
5416 rtx subtarget = get_subtarget (target);
5418 /* Check for a PIC address load. */
5419 if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5420 && XEXP (value, 0) == pic_offset_table_rtx
5421 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5422 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5423 || GET_CODE (XEXP (value, 1)) == CONST))
5426 subtarget = gen_reg_rtx (GET_MODE (value));
5427 emit_move_insn (subtarget, value);
5431 if (GET_CODE (value) == PLUS)
5432 binoptab = add_optab;
5433 else if (GET_CODE (value) == MINUS)
5434 binoptab = sub_optab;
5435 else if (GET_CODE (value) == MULT)
5437 op2 = XEXP (value, 1);
5438 if (!CONSTANT_P (op2)
5439 && !(GET_CODE (op2) == REG && op2 != subtarget))
5441 tmp = force_operand (XEXP (value, 0), subtarget);
5442 return expand_mult (GET_MODE (value), tmp,
5443 force_operand (op2, NULL_RTX),
5449 op2 = XEXP (value, 1);
5450 if (!CONSTANT_P (op2)
5451 && !(GET_CODE (op2) == REG && op2 != subtarget))
5453 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5455 binoptab = add_optab;
5456 op2 = negate_rtx (GET_MODE (value), op2);
5459 /* Check for an addition with OP2 a constant integer and our first
5460 operand a PLUS of a virtual register and something else. In that
5461 case, we want to emit the sum of the virtual register and the
5462 constant first and then add the other value. This allows virtual
5463 register instantiation to simply modify the constant rather than
5464 creating another one around this addition. */
5465 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5466 && GET_CODE (XEXP (value, 0)) == PLUS
5467 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5468 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5469 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5471 rtx temp = expand_binop (GET_MODE (value), binoptab,
5472 XEXP (XEXP (value, 0), 0), op2,
5473 subtarget, 0, OPTAB_LIB_WIDEN);
5474 return expand_binop (GET_MODE (value), binoptab, temp,
5475 force_operand (XEXP (XEXP (value, 0), 1), 0),
5476 target, 0, OPTAB_LIB_WIDEN);
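/* Illustrative sketch (register numbers hypothetical): given
   (plus (plus (reg virtual-stack-vars) (reg 69)) (const_int 4)),
   the code above first forms
   (plus (reg virtual-stack-vars) (const_int 4)), letting virtual
   register instantiation fold the 4 into the frame offset, and only
   then adds (reg 69).  */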
5479 tmp = force_operand (XEXP (value, 0), subtarget);
5480 return expand_binop (GET_MODE (value), binoptab, tmp,
5481 force_operand (op2, NULL_RTX),
5482 target, 0, OPTAB_LIB_WIDEN);
5483 /* We give UNSIGNEDP = 0 to expand_binop
5484 because the only operations we are expanding here are signed ones. */
5487 #ifdef INSN_SCHEDULING
5488 /* On machines that have insn scheduling, we want all memory references to be
5489 explicit, so we need to deal with such paradoxical SUBREGs. */
5490 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5491 && (GET_MODE_SIZE (GET_MODE (value))
5492 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5494 = simplify_gen_subreg (GET_MODE (value),
5495 force_reg (GET_MODE (SUBREG_REG (value)),
5496 force_operand (SUBREG_REG (value),
5498 GET_MODE (SUBREG_REG (value)),
5499 SUBREG_BYTE (value));
5505 /* Subroutine of expand_expr: return nonzero iff there is no way that
5506 EXP can reference X, which is being modified. TOP_P is nonzero if this
5507 call is going to be used to determine whether we need a temporary
5508 for EXP, as opposed to a recursive call to this function.
5510 It is always safe for this routine to return zero since it merely
5511 searches for optimization opportunities. */
5514 safe_from_p (x, exp, top_p)
5521 static tree save_expr_list;
5524 /* If EXP has varying size, we MUST use a target since we currently
5525 have no way of allocating temporaries of variable size
5526 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5527 So we assume here that something at a higher level has prevented a
5528 clash. This is somewhat bogus, but the best we can do. Only
5529 do this when X is BLKmode and when we are at the top level. */
5530 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5531 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5532 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5533 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5534 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5536 && GET_MODE (x) == BLKmode)
5537 /* If X is in the outgoing argument area, it is always safe. */
5538 || (GET_CODE (x) == MEM
5539 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5540 || (GET_CODE (XEXP (x, 0)) == PLUS
5541 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5544 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5545 find the underlying pseudo. */
5546 if (GET_CODE (x) == SUBREG)
5549 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5553 /* A SAVE_EXPR might appear many times in the expression passed to the
5554 top-level safe_from_p call, and if it has a complex subexpression,
5555 examining it multiple times could result in a combinatorial explosion.
5556 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5557 with optimization took about 28 minutes to compile -- even though it was
5558 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5559 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5560 we have processed. Note that the only test of top_p was above. */
5569 rtn = safe_from_p (x, exp, 0);
5571 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5572 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5577 /* Now look at our tree code and possibly recurse. */
5578 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5581 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5588 if (TREE_CODE (exp) == TREE_LIST)
5589 return ((TREE_VALUE (exp) == 0
5590 || safe_from_p (x, TREE_VALUE (exp), 0))
5591 && (TREE_CHAIN (exp) == 0
5592 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5593 else if (TREE_CODE (exp) == ERROR_MARK)
5594 return 1; /* An already-visited SAVE_EXPR? */
5599 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5603 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5604 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5608 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5609 the expression. If it is set, we conflict iff we are that rtx or
5610 both are in memory. Otherwise, we check all operands of the
5611 expression recursively. */
5613 switch (TREE_CODE (exp))
5616 /* If the operand is static or we are static, we can't conflict.
5617 Likewise if we don't conflict with the operand at all. */
5618 if (staticp (TREE_OPERAND (exp, 0))
5619 || TREE_STATIC (exp)
5620 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5623 /* Otherwise, the only way this can conflict is if we are taking
5624 the address of a DECL and that address is part of X, which is
5626 exp = TREE_OPERAND (exp, 0);
5629 if (!DECL_RTL_SET_P (exp)
5630 || GET_CODE (DECL_RTL (exp)) != MEM)
5633 exp_rtl = XEXP (DECL_RTL (exp), 0);
5638 if (GET_CODE (x) == MEM
5639 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5640 get_alias_set (exp)))
5645 /* Assume that the call will clobber all hard registers and all of memory.  */
5647 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5648 || GET_CODE (x) == MEM)
5653 /* If a sequence exists, we would have to scan every instruction
5654 in the sequence to see if it was safe.  This is probably not worthwhile.  */
5656 if (RTL_EXPR_SEQUENCE (exp))
5659 exp_rtl = RTL_EXPR_RTL (exp);
5662 case WITH_CLEANUP_EXPR:
5663 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5666 case CLEANUP_POINT_EXPR:
5667 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5670 exp_rtl = SAVE_EXPR_RTL (exp);
5674 /* If we've already scanned this, don't do it again. Otherwise,
5675 show we've scanned it and record for clearing the flag if we're going on.  */
5677 if (TREE_PRIVATE (exp))
5680 TREE_PRIVATE (exp) = 1;
5681 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5683 TREE_PRIVATE (exp) = 0;
5687 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5691 /* The only operand we look at is operand 1. The rest aren't
5692 part of the expression. */
5693 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5695 case METHOD_CALL_EXPR:
5696 /* This takes an rtx argument, but shouldn't appear here. */
5703 /* If we have an rtx, we do not need to scan our operands. */
5707 nops = first_rtl_op (TREE_CODE (exp));
5708 for (i = 0; i < nops; i++)
5709 if (TREE_OPERAND (exp, i) != 0
5710 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5713 /* If this is a language-specific tree code, it may require
5714 special handling. */
5715 if ((unsigned int) TREE_CODE (exp)
5716 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5717 && !(*lang_hooks.safe_from_p) (x, exp))
5721 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
5725 if (GET_CODE (exp_rtl) == SUBREG)
5727 exp_rtl = SUBREG_REG (exp_rtl);
5728 if (GET_CODE (exp_rtl) == REG
5729 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5733 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5734 are memory and they conflict. */
5735 return ! (rtx_equal_p (x, exp_rtl)
5736 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5737 && true_dependence (exp_rtl, VOIDmode, x,
5738 rtx_addr_varies_p)));
5741 /* If we reach here, it is safe. */
5745 /* Subroutine of expand_expr: return rtx if EXP is a
5746 variable or parameter; else return 0. */
5753 switch (TREE_CODE (exp))
5757 return DECL_RTL (exp);
5763 #ifdef MAX_INTEGER_COMPUTATION_MODE
5766 check_max_integer_computation_mode (exp)
5769 enum tree_code code;
5770 enum machine_mode mode;
5772 /* Strip any NOPs that don't change the mode. */
5774 code = TREE_CODE (exp);
5776 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5777 if (code == NOP_EXPR
5778 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5781 /* First check the type of the overall operation. We need only look at
5782 unary, binary and relational operations. */
5783 if (TREE_CODE_CLASS (code) == '1'
5784 || TREE_CODE_CLASS (code) == '2'
5785 || TREE_CODE_CLASS (code) == '<')
5787 mode = TYPE_MODE (TREE_TYPE (exp));
5788 if (GET_MODE_CLASS (mode) == MODE_INT
5789 && mode > MAX_INTEGER_COMPUTATION_MODE)
5790 internal_error ("unsupported wide integer operation");
5793 /* Check operand of a unary op. */
5794 if (TREE_CODE_CLASS (code) == '1')
5796 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5797 if (GET_MODE_CLASS (mode) == MODE_INT
5798 && mode > MAX_INTEGER_COMPUTATION_MODE)
5799 internal_error ("unsupported wide integer operation");
5802 /* Check operands of a binary/comparison op. */
5803 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5805 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5806 if (GET_MODE_CLASS (mode) == MODE_INT
5807 && mode > MAX_INTEGER_COMPUTATION_MODE)
5808 internal_error ("unsupported wide integer operation");
5810 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5811 if (GET_MODE_CLASS (mode) == MODE_INT
5812 && mode > MAX_INTEGER_COMPUTATION_MODE)
5813 internal_error ("unsupported wide integer operation");
5818 /* Return the highest power of two that EXP is known to be a multiple of.
5819 This is used in updating alignment of MEMs in array references. */
5821 static HOST_WIDE_INT
5822 highest_pow2_factor (exp)
5825 HOST_WIDE_INT c0, c1;
5827 switch (TREE_CODE (exp))
5830 /* If the integer is expressible in a HOST_WIDE_INT, we can find the
5831 lowest bit that's a one. If the result is zero, return
5832 BIGGEST_ALIGNMENT. We need to handle this case since we can find it
5833 in a COND_EXPR, a MIN_EXPR, or a MAX_EXPR. If the constant overflows,
5834 we have an erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5836 if (TREE_CONSTANT_OVERFLOW (exp)
5837 || integer_zerop (exp))
5838 return BIGGEST_ALIGNMENT;
5839 else if (host_integerp (exp, 0))
5841 c0 = tree_low_cst (exp, 0);
5842 c0 = c0 < 0 ? - c0 : c0;
5847 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5848 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5849 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5850 return MIN (c0, c1);
5853 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5854 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5857 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5859 if (integer_pow2p (TREE_OPERAND (exp, 1))
5860 && host_integerp (TREE_OPERAND (exp, 1), 1))
5862 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5863 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5864 return MAX (1, c0 / c1);
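/* Worked examples (illustrative): for i * 12 the factor is 4, the
   lowest set bit of 12; for (i * 8) + (j * 4) the result is
   MIN (8, 4) == 4; and for the division case just above,
   (i * 32) / 8 yields MAX (1, 32 / 8) == 4.  */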
5868 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5869 case SAVE_EXPR: case WITH_RECORD_EXPR:
5870 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5873 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5876 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5877 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5878 return MIN (c0, c1);
5887 /* Return an object on the placeholder list that matches EXP, a
5888 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5889 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5890 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5891 is a location which initially points to a starting location in the
5892 placeholder list (zero means start of the list) and where a pointer into
5893 the placeholder list at which the object is found is placed. */
5896 find_placeholder (exp, plist)
5900 tree type = TREE_TYPE (exp);
5901 tree placeholder_expr;
5903 for (placeholder_expr
5904 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5905 placeholder_expr != 0;
5906 placeholder_expr = TREE_CHAIN (placeholder_expr))
5908 tree need_type = TYPE_MAIN_VARIANT (type);
5911 /* Find the outermost reference that is of the type we want. If none,
5912 see if any object has a type that is a pointer to the type we want.  */
5914 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5915 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5916 || TREE_CODE (elt) == COND_EXPR)
5917 ? TREE_OPERAND (elt, 1)
5918 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5919 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5920 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5921 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5922 ? TREE_OPERAND (elt, 0) : 0))
5923 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5926 *plist = placeholder_expr;
5930 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5932 = ((TREE_CODE (elt) == COMPOUND_EXPR
5933 || TREE_CODE (elt) == COND_EXPR)
5934 ? TREE_OPERAND (elt, 1)
5935 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5936 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5937 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5938 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5939 ? TREE_OPERAND (elt, 0) : 0))
5940 if (POINTER_TYPE_P (TREE_TYPE (elt))
5941 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5945 *plist = placeholder_expr;
5946 return build1 (INDIRECT_REF, need_type, elt);
5953 /* expand_expr: generate code for computing expression EXP.
5954 An rtx for the computed value is returned. The value is never null.
5955 In the case of a void EXP, const0_rtx is returned.
5957 The value may be stored in TARGET if TARGET is nonzero.
5958 TARGET is just a suggestion; callers must assume that
5959 the rtx returned may not be the same as TARGET.
5961 If TARGET is CONST0_RTX, it means that the value will be ignored.
5963 If TMODE is not VOIDmode, it suggests generating the
5964 result in mode TMODE. But this is done only when convenient.
5965 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5966 TMODE is just a suggestion; callers must assume that
5967 the rtx returned may not have mode TMODE.
5969 Note that TARGET may have neither TMODE nor MODE. In that case, it
5970 probably will not be used.
5972 If MODIFIER is EXPAND_SUM then when EXP is an addition
5973 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5974 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5975 products as above, or REG or MEM, or constant.
5976 Ordinarily in such cases we would output mul or add instructions
5977 and then return a pseudo reg containing the sum.
5979 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5980 it also marks a label as absolutely required (it can't be dead).
5981 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5982 This is used for outputting expressions used in initializers.
5984 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5985 with a constant address even if that address is not normally legitimate.
5986 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5989 expand_expr (exp, target, tmode, modifier)
5992 enum machine_mode tmode;
5993 enum expand_modifier modifier;
5996 tree type = TREE_TYPE (exp);
5997 int unsignedp = TREE_UNSIGNED (type);
5998 enum machine_mode mode;
5999 enum tree_code code = TREE_CODE (exp);
6001 rtx subtarget, original_target;
6005 /* Handle ERROR_MARK before anybody tries to access its type. */
6006 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6008 op0 = CONST0_RTX (tmode);
6014 mode = TYPE_MODE (type);
6015 /* Use subtarget as the target for operand 0 of a binary operation. */
6016 subtarget = get_subtarget (target);
6017 original_target = target;
6018 ignore = (target == const0_rtx
6019 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6020 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6021 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6022 && TREE_CODE (type) == VOID_TYPE));
6024 /* If we are going to ignore this result, we need only do something
6025 if there is a side-effect somewhere in the expression. If there
6026 is, short-circuit the most common cases here. Note that we must
6027 not call expand_expr with anything but const0_rtx in case this
6028 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6032 if (! TREE_SIDE_EFFECTS (exp))
6035 /* Ensure we reference a volatile object even if value is ignored, but
6036 don't do this if all we are doing is taking its address. */
6037 if (TREE_THIS_VOLATILE (exp)
6038 && TREE_CODE (exp) != FUNCTION_DECL
6039 && mode != VOIDmode && mode != BLKmode
6040 && modifier != EXPAND_CONST_ADDRESS)
6042 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6043 if (GET_CODE (temp) == MEM)
6044 temp = copy_to_reg (temp);
6048 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6049 || code == INDIRECT_REF || code == BUFFER_REF)
6050 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6053 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6054 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6056 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6057 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6060 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6061 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6062 /* If the second operand has no side effects, just evaluate the first.  */
6064 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6066 else if (code == BIT_FIELD_REF)
6068 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6069 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6070 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6077 #ifdef MAX_INTEGER_COMPUTATION_MODE
6078 /* Only check stuff here if the mode we want is different from the mode
6079 of the expression; if it's the same, check_max_integer_computation_mode
6080 will handle it. Do we really need to check this stuff at all? */
6083 && GET_MODE (target) != mode
6084 && TREE_CODE (exp) != INTEGER_CST
6085 && TREE_CODE (exp) != PARM_DECL
6086 && TREE_CODE (exp) != ARRAY_REF
6087 && TREE_CODE (exp) != ARRAY_RANGE_REF
6088 && TREE_CODE (exp) != COMPONENT_REF
6089 && TREE_CODE (exp) != BIT_FIELD_REF
6090 && TREE_CODE (exp) != INDIRECT_REF
6091 && TREE_CODE (exp) != CALL_EXPR
6092 && TREE_CODE (exp) != VAR_DECL
6093 && TREE_CODE (exp) != RTL_EXPR)
6095 enum machine_mode mode = GET_MODE (target);
6097 if (GET_MODE_CLASS (mode) == MODE_INT
6098 && mode > MAX_INTEGER_COMPUTATION_MODE)
6099 internal_error ("unsupported wide integer operation");
6103 && TREE_CODE (exp) != INTEGER_CST
6104 && TREE_CODE (exp) != PARM_DECL
6105 && TREE_CODE (exp) != ARRAY_REF
6106 && TREE_CODE (exp) != ARRAY_RANGE_REF
6107 && TREE_CODE (exp) != COMPONENT_REF
6108 && TREE_CODE (exp) != BIT_FIELD_REF
6109 && TREE_CODE (exp) != INDIRECT_REF
6110 && TREE_CODE (exp) != VAR_DECL
6111 && TREE_CODE (exp) != CALL_EXPR
6112 && TREE_CODE (exp) != RTL_EXPR
6113 && GET_MODE_CLASS (tmode) == MODE_INT
6114 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6115 internal_error ("unsupported wide integer operation");
6117 check_max_integer_computation_mode (exp);
6120 /* If we will do cse, generate all results into pseudo registers
6121 since 1) that allows cse to find more things
6122 and 2) otherwise cse could produce an insn the machine
6123 cannot support. An exception is a CONSTRUCTOR into a multi-word
6124 MEM: that's much more likely to be most efficient into the MEM. */
6126 if (! cse_not_expected && mode != BLKmode && target
6127 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6128 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6135 tree function = decl_function_context (exp);
6136 /* Handle using a label in a containing function. */
6137 if (function != current_function_decl
6138 && function != inline_function_decl && function != 0)
6140 struct function *p = find_function_data (function);
6141 p->expr->x_forced_labels
6142 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6143 p->expr->x_forced_labels);
6147 if (modifier == EXPAND_INITIALIZER)
6148 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6153 temp = gen_rtx_MEM (FUNCTION_MODE,
6154 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6155 if (function != current_function_decl
6156 && function != inline_function_decl && function != 0)
6157 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6162 if (DECL_RTL (exp) == 0)
6164 error_with_decl (exp, "prior parameter's size depends on `%s'");
6165 return CONST0_RTX (mode);
6168 /* ... fall through ... */
6171 /* If a static var's type was incomplete when the decl was written,
6172 but the type is complete now, lay out the decl now. */
6173 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6174 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6176 rtx value = DECL_RTL_IF_SET (exp);
6178 layout_decl (exp, 0);
6180 /* If the RTL was already set, update its mode and memory attributes.  */
6184 PUT_MODE (value, DECL_MODE (exp));
6185 SET_DECL_RTL (exp, 0);
6186 set_mem_attributes (value, exp, 1);
6187 SET_DECL_RTL (exp, value);
6191 /* ... fall through ... */
6195 if (DECL_RTL (exp) == 0)
6198 /* Ensure variable marked as used even if it doesn't go through
6199 a parser. If it hasn't been used yet, write out an external definition.  */
6201 if (! TREE_USED (exp))
6203 assemble_external (exp);
6204 TREE_USED (exp) = 1;
6207 /* Show we haven't gotten RTL for this yet. */
6210 /* Handle variables inherited from containing functions. */
6211 context = decl_function_context (exp);
6213 /* We treat inline_function_decl as an alias for the current function
6214 because that is the inline function whose vars, types, etc.
6215 are being merged into the current function.
6216 See expand_inline_function. */
6218 if (context != 0 && context != current_function_decl
6219 && context != inline_function_decl
6220 /* If var is static, we don't need a static chain to access it. */
6221 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6222 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6226 /* Mark as non-local and addressable. */
6227 DECL_NONLOCAL (exp) = 1;
6228 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6230 mark_addressable (exp);
6231 if (GET_CODE (DECL_RTL (exp)) != MEM)
6233 addr = XEXP (DECL_RTL (exp), 0);
6234 if (GET_CODE (addr) == MEM)
6236 = replace_equiv_address (addr,
6237 fix_lexical_addr (XEXP (addr, 0), exp));
6239 addr = fix_lexical_addr (addr, exp);
6241 temp = replace_equiv_address (DECL_RTL (exp), addr);
6244 /* This is the case of an array whose size is to be determined
6245 from its initializer, while the initializer is still being parsed. See expand_decl.  */
6248 else if (GET_CODE (DECL_RTL (exp)) == MEM
6249 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6250 temp = validize_mem (DECL_RTL (exp));
6252 /* If DECL_RTL is memory, we are in the normal case and either
6253 the address is not valid or it is not a register and -fforce-addr
6254 is specified, get the address into a register. */
6256 else if (GET_CODE (DECL_RTL (exp)) == MEM
6257 && modifier != EXPAND_CONST_ADDRESS
6258 && modifier != EXPAND_SUM
6259 && modifier != EXPAND_INITIALIZER
6260 && (! memory_address_p (DECL_MODE (exp),
6261 XEXP (DECL_RTL (exp), 0))
6263 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6264 temp = replace_equiv_address (DECL_RTL (exp),
6265 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6267 /* If we got something, return it. But first, set the alignment
6268 if the address is a register. */
6271 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6272 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6277 /* If the mode of DECL_RTL does not match that of the decl, it
6278 must be a promoted value. We return a SUBREG of the wanted mode,
6279 but mark it so that we know that it was already extended. */
6281 if (GET_CODE (DECL_RTL (exp)) == REG
6282 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6284 /* Get the signedness used for this variable. Ensure we get the
6285 same mode we got when the variable was declared. */
6286 if (GET_MODE (DECL_RTL (exp))
6287 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6290 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6291 SUBREG_PROMOTED_VAR_P (temp) = 1;
6292 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6296 return DECL_RTL (exp);
6299 return immed_double_const (TREE_INT_CST_LOW (exp),
6300 TREE_INT_CST_HIGH (exp), mode);
6303 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6306 /* If optimized, generate immediate CONST_DOUBLE
6307 which will be turned into memory by reload if necessary.
6309 We used to force a register so that loop.c could see it. But
6310 this does not allow gen_* patterns to perform optimizations with
6311 the constants. It also produces two insns in cases like "x = 1.0;".
6312 On most machines, floating-point constants are not permitted in
6313 many insns, so we'd end up copying it to a register in any case.
6315 Now, we do the copying in expand_binop, if appropriate. */
6316 return immed_real_const (exp);
6320 if (! TREE_CST_RTL (exp))
6321 output_constant_def (exp, 1);
6323 /* TREE_CST_RTL probably contains a constant address.
6324 On RISC machines where a constant address isn't valid,
6325 make some insns to get that address into a register. */
6326 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6327 && modifier != EXPAND_CONST_ADDRESS
6328 && modifier != EXPAND_INITIALIZER
6329 && modifier != EXPAND_SUM
6330 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6332 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6333 return replace_equiv_address (TREE_CST_RTL (exp),
6334 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6335 return TREE_CST_RTL (exp);
6337 case EXPR_WITH_FILE_LOCATION:
6340 const char *saved_input_filename = input_filename;
6341 int saved_lineno = lineno;
6342 input_filename = EXPR_WFL_FILENAME (exp);
6343 lineno = EXPR_WFL_LINENO (exp);
6344 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6345 emit_line_note (input_filename, lineno);
6346 /* Possibly avoid switching back and forth here. */
6347 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6348 input_filename = saved_input_filename;
6349 lineno = saved_lineno;
6354 context = decl_function_context (exp);
6356 /* If this SAVE_EXPR was at global context, assume we are an
6357 initialization function and move it into our context. */
6359 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6361 /* We treat inline_function_decl as an alias for the current function
6362 because that is the inline function whose vars, types, etc.
6363 are being merged into the current function.
6364 See expand_inline_function. */
6365 if (context == current_function_decl || context == inline_function_decl)
6368 /* If this is non-local, handle it. */
6371 /* The following call just exists to abort if the context is
6372 not of a containing function. */
6373 find_function_data (context);
6375 temp = SAVE_EXPR_RTL (exp);
6376 if (temp && GET_CODE (temp) == REG)
6378 put_var_into_stack (exp);
6379 temp = SAVE_EXPR_RTL (exp);
6381 if (temp == 0 || GET_CODE (temp) != MEM)
6384 replace_equiv_address (temp,
6385 fix_lexical_addr (XEXP (temp, 0), exp));
6387 if (SAVE_EXPR_RTL (exp) == 0)
6389 if (mode == VOIDmode)
6392 temp = assign_temp (build_qualified_type (type,
6394 | TYPE_QUAL_CONST)),
6397 SAVE_EXPR_RTL (exp) = temp;
6398 if (!optimize && GET_CODE (temp) == REG)
6399 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6402 /* If the mode of TEMP does not match that of the expression, it
6403 must be a promoted value. We pass store_expr a SUBREG of the
6404 wanted mode but mark it so that we know that it was already
6405 extended. Note that `unsignedp' was modified above in this case. */
6408 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6410 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6411 SUBREG_PROMOTED_VAR_P (temp) = 1;
6412 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6415 if (temp == const0_rtx)
6416 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6418 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6420 TREE_USED (exp) = 1;
6423 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6424 must be a promoted value. We return a SUBREG of the wanted mode,
6425 but mark it so that we know that it was already extended. */
6427 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6428 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6430 /* Compute the signedness and make the proper SUBREG. */
6431 promote_mode (type, mode, &unsignedp, 0);
6432 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6433 SUBREG_PROMOTED_VAR_P (temp) = 1;
6434 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6438 return SAVE_EXPR_RTL (exp);
6443 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6444 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6448 case PLACEHOLDER_EXPR:
6450 tree old_list = placeholder_list;
6451 tree placeholder_expr = 0;
6453 exp = find_placeholder (exp, &placeholder_expr);
6457 placeholder_list = TREE_CHAIN (placeholder_expr);
6458 temp = expand_expr (exp, original_target, tmode, modifier);
6459 placeholder_list = old_list;
6463 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6466 case WITH_RECORD_EXPR:
6467 /* Put the object on the placeholder list, expand our first operand,
6468 and pop the list. */
6469 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6471 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6473 placeholder_list = TREE_CHAIN (placeholder_list);
6477 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6478 expand_goto (TREE_OPERAND (exp, 0));
6480 expand_computed_goto (TREE_OPERAND (exp, 0));
6484 expand_exit_loop_if_false (NULL,
6485 invert_truthvalue (TREE_OPERAND (exp, 0)));
6488 case LABELED_BLOCK_EXPR:
6489 if (LABELED_BLOCK_BODY (exp))
6490 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6491 /* Should perhaps use expand_label, but this is simpler and safer. */
6492 do_pending_stack_adjust ();
6493 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6496 case EXIT_BLOCK_EXPR:
6497 if (EXIT_BLOCK_RETURN (exp))
6498 sorry ("returned value in block_exit_expr");
6499 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6504 expand_start_loop (1);
6505 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6513 tree vars = TREE_OPERAND (exp, 0);
6514 int vars_need_expansion = 0;
6516 /* Need to open a binding contour here because
6517 if there are any cleanups they must be contained here. */
6518 expand_start_bindings (2);
6520 /* Mark the corresponding BLOCK for output in its proper place. */
6521 if (TREE_OPERAND (exp, 2) != 0
6522 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6523 insert_block (TREE_OPERAND (exp, 2));
6525 /* If VARS have not yet been expanded, expand them now. */
6528 if (!DECL_RTL_SET_P (vars))
6530 vars_need_expansion = 1;
6533 expand_decl_init (vars);
6534 vars = TREE_CHAIN (vars);
6537 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6539 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6545 if (RTL_EXPR_SEQUENCE (exp))
6547 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6549 emit_insns (RTL_EXPR_SEQUENCE (exp));
6550 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6552 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6553 free_temps_for_rtl_expr (exp);
6554 return RTL_EXPR_RTL (exp);
6557 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6563 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6564 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6569 /* All elts simple constants => refer to a constant in memory. But
6570 if this is a non-BLKmode mode, let it store a field at a time
6571 since that should make a CONST_INT or CONST_DOUBLE when we
6572 fold. Likewise, if we have a target we can use, it is best to
6573 store directly into the target unless the type is large enough
6574 that memcpy will be used. If we are making an initializer and
6575 all operands are constant, put it in memory as well. */
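/* For illustration (a hypothetical fragment): a file-scope initializer such as

       static const struct pt { int x, y; } origin = { 3, 4 };

   is TREE_STATIC and constant, so it takes the output_constant_def
   branch below and is emitted as initialized data; a small non-static
   aggregate with a scalar mode is instead stored field by field so
   the result can fold to a CONST_INT or CONST_DOUBLE.  */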
6576 else if ((TREE_STATIC (exp)
6577 && ((mode == BLKmode
6578 && ! (target != 0 && safe_from_p (target, exp, 1)))
6579 || TREE_ADDRESSABLE (exp)
6580 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6581 && (! MOVE_BY_PIECES_P
6582 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6584 && ! mostly_zeros_p (exp))))
6585 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6587 rtx constructor = output_constant_def (exp, 1);
6589 if (modifier != EXPAND_CONST_ADDRESS
6590 && modifier != EXPAND_INITIALIZER
6591 && modifier != EXPAND_SUM)
6592 constructor = validize_mem (constructor);
6598 /* Handle calls that pass values in multiple non-contiguous
6599 locations. The Irix 6 ABI has examples of this. */
6600 if (target == 0 || ! safe_from_p (target, exp, 1)
6601 || GET_CODE (target) == PARALLEL)
6603 = assign_temp (build_qualified_type (type,
6605 | (TREE_READONLY (exp)
6606 * TYPE_QUAL_CONST))),
6607 0, TREE_ADDRESSABLE (exp), 1);
6609 store_constructor (exp, target, 0,
6610 int_size_in_bytes (TREE_TYPE (exp)));
6616 tree exp1 = TREE_OPERAND (exp, 0);
6618 tree string = string_constant (exp1, &index);
6620 /* Try to optimize reads from const strings. */
6622 && TREE_CODE (string) == STRING_CST
6623 && TREE_CODE (index) == INTEGER_CST
6624 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6625 && GET_MODE_CLASS (mode) == MODE_INT
6626 && GET_MODE_SIZE (mode) == 1
6627 && modifier != EXPAND_WRITE)
6629 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6631 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6632 op0 = memory_address (mode, op0);
6633 temp = gen_rtx_MEM (mode, op0);
6634 set_mem_attributes (temp, exp, 0);
6636 /* If we are writing to this object and its type is a record with
6637 readonly fields, we must mark it as readonly so it will
6638 conflict with readonly references to those fields. */
6639 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6640 RTX_UNCHANGING_P (temp) = 1;
6646 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6650 tree array = TREE_OPERAND (exp, 0);
6651 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6652 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6653 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6656 /* Optimize the special-case of a zero lower bound.
6658 We convert the low_bound to sizetype to avoid some problems
6659 with constant folding. (E.g. suppose the lower bound is 1,
6660 and its mode is QI. Without the conversion, (ARRAY
6661 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6662 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6664 if (! integer_zerop (low_bound))
6665 index = size_diffop (index, convert (sizetype, low_bound));
6667 /* Fold an expression like: "foo"[2].
6668 This is not done in fold so it won't happen inside &.
6669 Don't fold if this is for wide characters since it's too
6670 difficult to do correctly and this is a very rare case. */
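/* For example, the hypothetical reference "foo"[2] is reduced here
   directly to the character constant 'o' (as a CONST_INT), so no
   load from the string's memory image is ever emitted.  */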
6672 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6673 && TREE_CODE (array) == STRING_CST
6674 && TREE_CODE (index) == INTEGER_CST
6675 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6676 && GET_MODE_CLASS (mode) == MODE_INT
6677 && GET_MODE_SIZE (mode) == 1)
6679 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6681 /* If this is a constant index into a constant array,
6682 just get the value from the array. Handle both the cases when
6683 we have an explicit constructor and when our operand is a variable
6684 that was declared const. */
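/* For illustration, assume a hypothetical declaration

       static const int tbl[3] = { 10, 20, 30 };

   When optimizing, a reference tbl[1] is matched by the TREE_READONLY
   branch below and expanded straight from DECL_INITIAL as the
   constant 20, with no memory access.  */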
6686 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6687 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6688 && TREE_CODE (index) == INTEGER_CST
6689 && 0 > compare_tree_int (index,
6690 list_length (CONSTRUCTOR_ELTS
6691 (TREE_OPERAND (exp, 0)))))
6695 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6696 i = TREE_INT_CST_LOW (index);
6697 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6701 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6705 else if (optimize >= 1
6706 && modifier != EXPAND_CONST_ADDRESS
6707 && modifier != EXPAND_INITIALIZER
6708 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6709 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6710 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6712 if (TREE_CODE (index) == INTEGER_CST)
6714 tree init = DECL_INITIAL (array);
6716 if (TREE_CODE (init) == CONSTRUCTOR)
6720 for (elem = CONSTRUCTOR_ELTS (init);
6722 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6723 elem = TREE_CHAIN (elem))
6726 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6727 return expand_expr (fold (TREE_VALUE (elem)), target,
6730 else if (TREE_CODE (init) == STRING_CST
6731 && 0 > compare_tree_int (index,
6732 TREE_STRING_LENGTH (init)))
6734 tree type = TREE_TYPE (TREE_TYPE (init));
6735 enum machine_mode mode = TYPE_MODE (type);
6737 if (GET_MODE_CLASS (mode) == MODE_INT
6738 && GET_MODE_SIZE (mode) == 1)
6740 (TREE_STRING_POINTER
6741 (init)[TREE_INT_CST_LOW (index)]));
6750 case ARRAY_RANGE_REF:
6751 /* If the operand is a CONSTRUCTOR, we can just extract the
6752 appropriate field if it is present. Don't do this if we have
6753 already written the data since we want to refer to that copy
6754 and varasm.c assumes that's what we'll do. */
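/* A rough illustration with a hypothetical signed bitfield:

       struct s { int f : 3; };

   When the value 5 is fetched for f out of a CONSTRUCTOR, the code
   below masks it to bitsize bits for an unsigned field
   (5 & ((1 << 3) - 1) == 5), or sign-extends it with the pair of
   shifts for a signed one (a 3-bit signed field holding the bits 101
   reads back as -3), just as storing and re-reading the field would
   have done.  */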
6755 if (code == COMPONENT_REF
6756 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6757 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6761 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6762 elt = TREE_CHAIN (elt))
6763 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6764 /* We can normally use the value of the field in the
6765 CONSTRUCTOR. However, if this is a bitfield in
6766 an integral mode that we can fit in a HOST_WIDE_INT,
6767 we must mask only the number of bits in the bitfield,
6768 since this is done implicitly by the constructor. If
6769 the bitfield does not meet either of those conditions,
6770 we can't do this optimization. */
6771 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6772 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6774 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6775 <= HOST_BITS_PER_WIDE_INT))))
6777 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6778 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6780 HOST_WIDE_INT bitsize
6781 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6782 enum machine_mode imode
6783 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6785 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6787 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6788 op0 = expand_and (imode, op0, op1, target);
6793 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6796 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6798 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6808 enum machine_mode mode1;
6809 HOST_WIDE_INT bitsize, bitpos;
6812 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6813 &mode1, &unsignedp, &volatilep);
6816 /* If we got back the original object, something is wrong. Perhaps
6817 we are evaluating an expression too early. In any event, don't
6818 infinitely recurse. */
6822 /* If TEM's type is a union of variable size, pass TARGET to the inner
6823 computation, since it will need a temporary and TARGET is known
6824 to suffice. This occurs in unchecked conversion in Ada. */
6828 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6829 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6831 ? target : NULL_RTX),
6833 (modifier == EXPAND_INITIALIZER
6834 || modifier == EXPAND_CONST_ADDRESS)
6835 ? modifier : EXPAND_NORMAL);
6837 /* If this is a constant, put it into a register if it is a
6838 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6839 if (CONSTANT_P (op0))
6841 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6842 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6844 op0 = force_reg (mode, op0);
6846 op0 = validize_mem (force_const_mem (mode, op0));
6851 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
6853 /* If this object is in a register, put it into memory.
6854 This case can't occur in C, but can in Ada if we have
6855 unchecked conversion of an expression from a scalar type to
6856 an array or record type. */
6857 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6858 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6860 /* If the operand is a SAVE_EXPR, we can deal with this by
6861 forcing the SAVE_EXPR into memory. */
6862 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6864 put_var_into_stack (TREE_OPERAND (exp, 0));
6865 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6870 = build_qualified_type (TREE_TYPE (tem),
6871 (TYPE_QUALS (TREE_TYPE (tem))
6872 | TYPE_QUAL_CONST));
6873 rtx memloc = assign_temp (nt, 1, 1, 1);
6875 emit_move_insn (memloc, op0);
6880 if (GET_CODE (op0) != MEM)
6883 if (GET_MODE (offset_rtx) != ptr_mode)
6884 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6886 #ifdef POINTERS_EXTEND_UNSIGNED
6887 if (GET_MODE (offset_rtx) != Pmode)
6888 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6891 /* A constant address in OP0 can have VOIDmode; we must not try
6892 to call force_reg for that case, so avoid it here. */
6893 if (GET_CODE (op0) == MEM
6894 && GET_MODE (op0) == BLKmode
6895 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6897 && (bitpos % bitsize) == 0
6898 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6899 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6901 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6905 op0 = offset_address (op0, offset_rtx,
6906 highest_pow2_factor (offset));
6909 /* Don't forget about volatility even if this is a bitfield. */
6910 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6912 if (op0 == orig_op0)
6913 op0 = copy_rtx (op0);
6915 MEM_VOLATILE_P (op0) = 1;
6918 /* In cases where an aligned union has an unaligned object
6919 as a field, we might be extracting a BLKmode value from
6920 an integer-mode (e.g., SImode) object. Handle this case
6921 by doing the extract into an object as wide as the field
6922 (which we know to be the width of a basic mode), then
6923 storing into memory, and changing the mode to BLKmode. */
6924 if (mode1 == VOIDmode
6925 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6926 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6927 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6928 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6929 && modifier != EXPAND_CONST_ADDRESS
6930 && modifier != EXPAND_INITIALIZER)
6931 /* If the field isn't aligned enough to fetch as a memref,
6932 fetch it as a bit field. */
6933 || (mode1 != BLKmode
6934 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6935 && ((TYPE_ALIGN (TREE_TYPE (tem))
6936 < GET_MODE_ALIGNMENT (mode))
6937 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6938 /* If the type and the field are a constant size and the
6939 size of the type isn't the same size as the bitfield,
6940 we must use bitfield operations. */
6942 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6944 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6947 enum machine_mode ext_mode = mode;
6949 if (ext_mode == BLKmode
6950 && ! (target != 0 && GET_CODE (op0) == MEM
6951 && GET_CODE (target) == MEM
6952 && bitpos % BITS_PER_UNIT == 0))
6953 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6955 if (ext_mode == BLKmode)
6957 /* In this case, BITPOS must start at a byte boundary and
6958 TARGET, if specified, must be a MEM. */
6959 if (GET_CODE (op0) != MEM
6960 || (target != 0 && GET_CODE (target) != MEM)
6961 || bitpos % BITS_PER_UNIT != 0)
6964 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6966 target = assign_temp (type, 0, 1, 1);
6968 emit_block_move (target, op0,
6969 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6975 op0 = validize_mem (op0);
6977 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6978 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6980 op0 = extract_bit_field (op0, bitsize, bitpos,
6981 unsignedp, target, ext_mode, ext_mode,
6982 int_size_in_bytes (TREE_TYPE (tem)));
6984 /* If the result is a record type and BITSIZE is narrower than
6985 the mode of OP0, an integral mode, and this is a big endian
6986 machine, we must put the field into the high-order bits. */
6987 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6988 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6989 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6990 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6991 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6995 if (mode == BLKmode)
6997 rtx new = assign_temp (build_qualified_type
6998 (type_for_mode (ext_mode, 0),
6999 TYPE_QUAL_CONST), 0, 1, 1);
7001 emit_move_insn (new, op0);
7002 op0 = copy_rtx (new);
7003 PUT_MODE (op0, BLKmode);
7004 set_mem_attributes (op0, exp, 1);
7010 /* If the result is BLKmode, use that to access the object now as well. */
7012 if (mode == BLKmode)
7015 /* Get a reference to just this component. */
7016 if (modifier == EXPAND_CONST_ADDRESS
7017 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7018 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7020 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7022 if (op0 == orig_op0)
7023 op0 = copy_rtx (op0);
7025 set_mem_attributes (op0, exp, 0);
7026 if (GET_CODE (XEXP (op0, 0)) == REG)
7027 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7029 MEM_VOLATILE_P (op0) |= volatilep;
7030 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7031 || modifier == EXPAND_CONST_ADDRESS
7032 || modifier == EXPAND_INITIALIZER)
7034 else if (target == 0)
7035 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7037 convert_move (target, op0, unsignedp);
7043 rtx insn, before = get_last_insn (), vtbl_ref;
7045 /* Evaluate the interior expression. */
7046 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7049 /* Get or create an instruction off which to hang a note. */
7050 if (REG_P (subtarget))
7053 insn = get_last_insn ();
7056 if (! INSN_P (insn))
7057 insn = prev_nonnote_insn (insn);
7061 target = gen_reg_rtx (GET_MODE (subtarget));
7062 insn = emit_move_insn (target, subtarget);
7065 /* Collect the data for the note. */
7066 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7067 vtbl_ref = plus_constant (vtbl_ref,
7068 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7069 /* Discard the initial CONST that was added. */
7070 vtbl_ref = XEXP (vtbl_ref, 0);
7073 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7078 /* Intended for a reference to a buffer of a file-object in Pascal.
7079 But it's not certain that a special tree code will really be
7080 necessary for these. INDIRECT_REF might work for them. */
7086 /* Pascal set IN expression.
7089 rlo = set_low - (set_low%bits_per_word);
7090 the_word = set [ (index - rlo)/bits_per_word ];
7091 bit_index = index % bits_per_word;
7092 bitmask = 1 << bit_index;
7093 return !!(the_word & bitmask); */
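/* A worked instance of the scheme above, with the hypothetical
   values bits_per_word == 8, set_low == 3 and index == 10:

       rlo       = 3 - (3 % 8)       = 0
       the_word  = set[(10 - 0) / 8] = set[1]
       bit_index = 10 % 8            = 2
       bitmask   = 1 << 2            = 4

   so the answer is bit 2 of the second byte of the set.  */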
7095 tree set = TREE_OPERAND (exp, 0);
7096 tree index = TREE_OPERAND (exp, 1);
7097 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7098 tree set_type = TREE_TYPE (set);
7099 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7100 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7101 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7102 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7103 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7104 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7105 rtx setaddr = XEXP (setval, 0);
7106 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7108 rtx diff, quo, rem, addr, bit, result;
7110 /* If domain is empty, answer is no. Likewise if index is constant
7111 and out of bounds. */
7112 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7113 && TREE_CODE (set_low_bound) == INTEGER_CST
7114 && tree_int_cst_lt (set_high_bound, set_low_bound))
7115 || (TREE_CODE (index) == INTEGER_CST
7116 && TREE_CODE (set_low_bound) == INTEGER_CST
7117 && tree_int_cst_lt (index, set_low_bound))
7118 || (TREE_CODE (set_high_bound) == INTEGER_CST
7119 && TREE_CODE (index) == INTEGER_CST
7120 && tree_int_cst_lt (set_high_bound, index))))
7124 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7126 /* If we get here, we have to generate the code for both cases
7127 (in range and out of range). */
7129 op0 = gen_label_rtx ();
7130 op1 = gen_label_rtx ();
7132 if (! (GET_CODE (index_val) == CONST_INT
7133 && GET_CODE (lo_r) == CONST_INT))
7134 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7135 GET_MODE (index_val), iunsignedp, op1);
7137 if (! (GET_CODE (index_val) == CONST_INT
7138 && GET_CODE (hi_r) == CONST_INT))
7139 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7140 GET_MODE (index_val), iunsignedp, op1);
7142 /* Calculate the element number of bit zero in the first word of the set. */
7144 if (GET_CODE (lo_r) == CONST_INT)
7145 rlow = GEN_INT (INTVAL (lo_r)
7146 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7148 rlow = expand_binop (index_mode, and_optab, lo_r,
7149 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7150 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7152 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7153 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7155 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7156 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7157 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7158 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7160 addr = memory_address (byte_mode,
7161 expand_binop (index_mode, add_optab, diff,
7162 setaddr, NULL_RTX, iunsignedp,
7165 /* Extract the bit we want to examine. */
7166 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7167 gen_rtx_MEM (byte_mode, addr),
7168 make_tree (TREE_TYPE (index), rem),
7170 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7171 GET_MODE (target) == byte_mode ? target : 0,
7172 1, OPTAB_LIB_WIDEN);
7174 if (result != target)
7175 convert_move (target, result, 1);
7177 /* Output the code to handle the out-of-range case. */
7180 emit_move_insn (target, const0_rtx);
7185 case WITH_CLEANUP_EXPR:
7186 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7188 WITH_CLEANUP_EXPR_RTL (exp)
7189 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7190 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7192 /* That's it for this cleanup. */
7193 TREE_OPERAND (exp, 1) = 0;
7195 return WITH_CLEANUP_EXPR_RTL (exp);
7197 case CLEANUP_POINT_EXPR:
7199 /* Start a new binding layer that will keep track of all cleanup
7200 actions to be performed. */
7201 expand_start_bindings (2);
7203 target_temp_slot_level = temp_slot_level;
7205 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7206 /* If we're going to use this value, load it up now. */
7208 op0 = force_not_mem (op0);
7209 preserve_temp_slots (op0);
7210 expand_end_bindings (NULL_TREE, 0, 0);
7215 /* Check for a built-in function. */
7216 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7217 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7219 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7221 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7222 == BUILT_IN_FRONTEND)
7223 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7225 return expand_builtin (exp, target, subtarget, tmode, ignore);
7228 return expand_call (exp, target, ignore);
7230 case NON_LVALUE_EXPR:
7233 case REFERENCE_EXPR:
7234 if (TREE_OPERAND (exp, 0) == error_mark_node)
7237 if (TREE_CODE (type) == UNION_TYPE)
7239 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7241 /* If both input and output are BLKmode, this conversion isn't doing
7242 anything except possibly changing memory attributes. */
7243 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7245 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7248 result = copy_rtx (result);
7249 set_mem_attributes (result, exp, 0);
7254 target = assign_temp (type, 0, 1, 1);
7256 if (GET_CODE (target) == MEM)
7257 /* Store data into beginning of memory target. */
7258 store_expr (TREE_OPERAND (exp, 0),
7259 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7261 else if (GET_CODE (target) == REG)
7262 /* Store this field into a union of the proper type. */
7263 store_field (target,
7264 MIN ((int_size_in_bytes (TREE_TYPE
7265 (TREE_OPERAND (exp, 0)))
7267 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7268 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7269 VOIDmode, 0, type, 0);
7273 /* Return the entire union. */
7277 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7279 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7282 /* If the signedness of the conversion differs and OP0 is
7283 a promoted SUBREG, clear that indication since we now
7284 have to do the proper extension. */
7285 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7286 && GET_CODE (op0) == SUBREG)
7287 SUBREG_PROMOTED_VAR_P (op0) = 0;
7292 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7293 if (GET_MODE (op0) == mode)
7296 /* If OP0 is a constant, just convert it into the proper mode. */
7297 if (CONSTANT_P (op0))
7299 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7300 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7302 if (modifier == EXPAND_INITIALIZER)
7303 return simplify_gen_subreg (mode, op0, inner_mode,
7304 subreg_lowpart_offset (mode,
7307 return convert_modes (mode, inner_mode, op0,
7308 TREE_UNSIGNED (inner_type));
7311 if (modifier == EXPAND_INITIALIZER)
7312 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7316 convert_to_mode (mode, op0,
7317 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7319 convert_move (target, op0,
7320 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7323 case VIEW_CONVERT_EXPR:
7324 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7326 /* If the input and output modes are both the same, we are done.
7327 Otherwise, if neither mode is BLKmode and both are within a word, we
7328 can use gen_lowpart. If neither is true, make sure the operand is
7329 in memory and convert the MEM to the new mode. */
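/* For illustration: a hypothetical VIEW_CONVERT_EXPR that reinterprets
   a float as a 32-bit int (assuming SFmode and SImode are the same
   size and fit in a word) takes the gen_lowpart path below, while a
   reinterpretation involving a BLKmode aggregate falls through to the
   MEM-based conversion.  */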
7330 if (TYPE_MODE (type) == GET_MODE (op0))
7332 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7333 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7334 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7335 op0 = gen_lowpart (TYPE_MODE (type), op0);
7336 else if (GET_CODE (op0) != MEM)
7338 /* If the operand is not a MEM, force it into memory. Since we
7339 are going to be changing the mode of the MEM, don't call
7340 force_const_mem for constants because we don't allow pool
7341 constants to change mode. */
7342 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7344 if (TREE_ADDRESSABLE (exp))
7347 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7349 = assign_stack_temp_for_type
7350 (TYPE_MODE (inner_type),
7351 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7353 emit_move_insn (target, op0);
7357 /* At this point, OP0 is in the correct mode. If the output type is such
7358 that the operand is known to be aligned, indicate that it is.
7359 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7361 if (GET_CODE (op0) == MEM)
7363 op0 = copy_rtx (op0);
7365 if (TYPE_ALIGN_OK (type))
7366 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7367 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7368 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7370 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7371 HOST_WIDE_INT temp_size
7372 = MAX (int_size_in_bytes (inner_type),
7373 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7374 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7375 temp_size, 0, type);
7376 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7378 if (TREE_ADDRESSABLE (exp))
7381 if (GET_MODE (op0) == BLKmode)
7382 emit_block_move (new_with_op0_mode, op0,
7383 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7385 emit_move_insn (new_with_op0_mode, op0);
7390 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7396 /* We come here from MINUS_EXPR when the second operand is a constant. */
7399 this_optab = ! unsignedp && flag_trapv
7400 && (GET_MODE_CLASS (mode) == MODE_INT)
7401 ? addv_optab : add_optab;
7403 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7404 something else, make sure we add the register to the constant and
7405 then to the other thing. This case can occur during strength
7406 reduction and doing it this way will produce better code if the
7407 frame pointer or argument pointer is eliminated.
7409 fold-const.c will ensure that the constant is always in the inner
7410 PLUS_EXPR, so the only case we need to do anything about is if
7411 sp, ap, or fp is our second argument, in which case we must swap
7412 the innermost first argument and our second argument. */
7414 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7415 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7416 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7417 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7418 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7419 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7421 tree t = TREE_OPERAND (exp, 1);
7423 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7424 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7427 /* If the result is to be ptr_mode and we are adding an integer to
7428 something, we might be forming a constant. So try to use
7429 plus_constant. If it produces a sum and we can't accept it,
7430 use force_operand. This allows P = &ARR[const] to generate
7431 efficient code on machines where a SYMBOL_REF is not a valid address.
7434 If this is an EXPAND_SUM call, always return the sum. */
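/* For illustration: for a hypothetical "p = &arr[5]", with arr a
   global array of 4-byte ints, the address expands to

       (plus (symbol_ref "arr") (const_int 20))

   plus_constant folds the offset into the address; force_operand is
   needed only when the target cannot accept that sum directly.  */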
7435 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7436 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7438 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7439 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7440 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7444 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7446 /* Use immed_double_const to ensure that the constant is
7447 truncated according to the mode of OP1, then sign extended
7448 to a HOST_WIDE_INT. Using the constant directly can result
7449 in non-canonical RTL in a 64x32 cross compile. */
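/* A concrete instance of the problem: on a 64-bit host targeting a
   32-bit machine, the constant 0xffffffff used in SImode must become
   (const_int -1) -- truncated to 32 bits, then sign-extended to a
   HOST_WIDE_INT.  Using the low word directly would produce the
   non-canonical (const_int 0xffffffff).  */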
7451 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7453 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7454 op1 = plus_constant (op1, INTVAL (constant_part));
7455 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7456 op1 = force_operand (op1, target);
7460 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7461 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7462 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7466 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7468 if (! CONSTANT_P (op0))
7470 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7471 VOIDmode, modifier);
7472 /* Don't go to both_summands if modifier
7473 says it's not right to return a PLUS. */
7474 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7478 /* Use immed_double_const to ensure that the constant is
7479 truncated according to the mode of OP1, then sign extended
7480 to a HOST_WIDE_INT. Using the constant directly can result
7481 in non-canonical RTL in a 64x32 cross compile. */
7483 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7485 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7486 op0 = plus_constant (op0, INTVAL (constant_part));
7487 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7488 op0 = force_operand (op0, target);
7493 /* No sense saving up arithmetic to be done
7494 if it's all in the wrong mode to form part of an address.
7495 And force_operand won't know whether to sign-extend or zero-extend. */
7497 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7498 || mode != ptr_mode)
7501 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7504 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7505 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7508 /* Make sure any term that's a sum with a constant comes last. */
7509 if (GET_CODE (op0) == PLUS
7510 && CONSTANT_P (XEXP (op0, 1)))
7516 /* If adding to a sum including a constant,
7517 associate it to put the constant outside. */
7518 if (GET_CODE (op1) == PLUS
7519 && CONSTANT_P (XEXP (op1, 1)))
7521 rtx constant_term = const0_rtx;
7523 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7526 /* Ensure that MULT comes first if there is one. */
7527 else if (GET_CODE (op0) == MULT)
7528 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7530 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7532 /* Let's also eliminate constants from op0 if possible. */
7533 op0 = eliminate_constant_term (op0, &constant_term);
7535 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7536 their sum should be a constant. Form it into OP1, since the
7537 result we want will then be OP0 + OP1. */
7539 temp = simplify_binary_operation (PLUS, mode, constant_term,
7544 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7547 /* Put a constant term last and put a multiplication first. */
7548 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7549 temp = op1, op1 = op0, op0 = temp;
7551 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7552 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7555 /* For initializers, we are allowed to return a MINUS of two
7556 symbolic constants. Here we handle all cases when both operands are constant. */
7558 /* Handle difference of two symbolic constants,
7559 for the sake of an initializer. */
7560 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7561 && really_constant_p (TREE_OPERAND (exp, 0))
7562 && really_constant_p (TREE_OPERAND (exp, 1)))
7564 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7566 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7569 /* If the last operand is a CONST_INT, use plus_constant of
7570 the negated constant. Else make the MINUS. */
7571 if (GET_CODE (op1) == CONST_INT)
7572 return plus_constant (op0, - INTVAL (op1));
7574 return gen_rtx_MINUS (mode, op0, op1);
7576 /* Convert A - const to A + (-const). */
7577 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7579 tree negated = fold (build1 (NEGATE_EXPR, type,
7580 TREE_OPERAND (exp, 1)));
7582 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7583 /* If we can't negate the constant in TYPE, leave it alone and
7584 expand_binop will negate it for us. We used to try to do it
7585 here in the signed version of TYPE, but that doesn't work
7586 on POINTER_TYPEs. */;
7589 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7593 this_optab = ! unsignedp && flag_trapv
7594 && (GET_MODE_CLASS(mode) == MODE_INT)
7595 ? subv_optab : sub_optab;
7599 /* If first operand is constant, swap them.
7600 Thus the following special case checks need only
7601 check the second operand. */
7602 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7604 tree t1 = TREE_OPERAND (exp, 0);
7605 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7606 TREE_OPERAND (exp, 1) = t1;
7609 /* Attempt to return something suitable for generating an
7610 indexed address, for machines that support that. */
7612 if (modifier == EXPAND_SUM && mode == ptr_mode
7613 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7614 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7616 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7619 /* Apply distributive law if OP0 is x+c. */
7620 if (GET_CODE (op0) == PLUS
7621 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7626 (mode, XEXP (op0, 0),
7627 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7628 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7629 * INTVAL (XEXP (op0, 1))));
7631 if (GET_CODE (op0) != REG)
7632 op0 = force_operand (op0, NULL_RTX);
7633 if (GET_CODE (op0) != REG)
7634 op0 = copy_to_mode_reg (mode, op0);
7637 gen_rtx_MULT (mode, op0,
7638 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7641 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7644 /* Check for multiplying things that have been extended
7645 from a narrower type. If this machine supports multiplying
7646 in that narrower type with a result in the desired type,
7647 do it that way, and avoid the explicit type-conversion. */
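/* For illustration, a hypothetical source fragment this test catches:

       short a, b;
       int prod = (int) a * (int) b;

   Both operands are widened from the same narrower mode, so on a
   machine with a widening-multiply pattern (e.g. mulhisi3) the
   product is computed directly from the HImode inputs, and the two
   explicit extensions are never emitted.  */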
7648 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7649 && TREE_CODE (type) == INTEGER_TYPE
7650 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7651 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7652 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7653 && int_fits_type_p (TREE_OPERAND (exp, 1),
7654 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7655 /* Don't use a widening multiply if a shift will do. */
7656 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7657 > HOST_BITS_PER_WIDE_INT)
7658 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7660 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7661 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7663 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7664 /* If both operands are extended, they must either both
7665 be zero-extended or both be sign-extended. */
7666 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7668 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7670 enum machine_mode innermode
7671 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7672 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7673 ? smul_widen_optab : umul_widen_optab);
7674 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7675 ? umul_widen_optab : smul_widen_optab);
7676 if (mode == GET_MODE_WIDER_MODE (innermode))
7678 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7680 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7681 NULL_RTX, VOIDmode, 0);
7682 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7683 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7686 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7687 NULL_RTX, VOIDmode, 0);
7690 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7691 && innermode == word_mode)
7694 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7695 NULL_RTX, VOIDmode, 0);
7696 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7697 op1 = convert_modes (innermode, mode,
7698 expand_expr (TREE_OPERAND (exp, 1),
7699 NULL_RTX, VOIDmode, 0),
7702 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7703 NULL_RTX, VOIDmode, 0);
7704 temp = expand_binop (mode, other_optab, op0, op1, target,
7705 unsignedp, OPTAB_LIB_WIDEN);
7706 htem = expand_mult_highpart_adjust (innermode,
7707 gen_highpart (innermode, temp),
7709 gen_highpart (innermode, temp),
7711 emit_move_insn (gen_highpart (innermode, temp), htem);
7716 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7717 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7718 return expand_mult (mode, op0, op1, target, unsignedp);
7720 case TRUNC_DIV_EXPR:
7721 case FLOOR_DIV_EXPR:
7723 case ROUND_DIV_EXPR:
7724 case EXACT_DIV_EXPR:
7725 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7727 /* Possible optimization: compute the dividend with EXPAND_SUM;
7728 then, if the divisor is constant, we can optimize the case
7729 where some terms of the dividend have coefficients divisible by it. */
7730 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7731 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7732 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7735 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7736 saving an expensive divide. If not, combine will rebuild the original computation. */
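/* For example, in a hypothetical loop computing x[i] / d with d
   invariant, rewriting each division as x[i] * (1.0 / d) lets CSE
   share the single reciprocal and turn every divide into a multiply.
   This is done only under -funsafe-math-optimizations, since the
   rewrite can change rounding.  */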
7738 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7739 && !real_onep (TREE_OPERAND (exp, 0)))
7740 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7741 build (RDIV_EXPR, type,
7742 build_real (type, dconst1),
7743 TREE_OPERAND (exp, 1))),
7744 target, tmode, unsignedp);
7745 this_optab = sdiv_optab;
7748 case TRUNC_MOD_EXPR:
7749 case FLOOR_MOD_EXPR:
7751 case ROUND_MOD_EXPR:
7752 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7754 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7755 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7756 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7758 case FIX_ROUND_EXPR:
7759 case FIX_FLOOR_EXPR:
7761 abort (); /* Not used for C. */
7763 case FIX_TRUNC_EXPR:
7764 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7766 target = gen_reg_rtx (mode);
7767 expand_fix (target, op0, unsignedp);
7771 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7773 target = gen_reg_rtx (mode);
7774 /* expand_float can't figure out what to do if FROM has VOIDmode.
7775 So give it the correct mode. With -O, cse will optimize this. */
7776 if (GET_MODE (op0) == VOIDmode)
7777 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7779 expand_float (target, op0,
7780 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7784 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7785 temp = expand_unop (mode,
7786 ! unsignedp && flag_trapv
7787 && (GET_MODE_CLASS(mode) == MODE_INT)
7788 ? negv_optab : neg_optab, op0, target, 0);
7794 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7796 /* Handle complex values specially. */
7797 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7798 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7799 return expand_complex_abs (mode, op0, target, unsignedp);
7801 /* Unsigned abs is simply the operand. Testing here means we don't
7802 risk generating incorrect code below. */
7803 if (TREE_UNSIGNED (type))
7806 return expand_abs (mode, op0, target, unsignedp,
7807 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7811 target = original_target;
7812 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7813 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7814 || GET_MODE (target) != mode
7815 || (GET_CODE (target) == REG
7816 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7817 target = gen_reg_rtx (mode);
7818 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7819 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7821 /* First try to do it with a special MIN or MAX instruction.
7822 If that does not win, use a conditional jump to select the proper value. */
7824 this_optab = (TREE_UNSIGNED (type)
7825 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7826 : (code == MIN_EXPR ? smin_optab : smax_optab));
7828 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7833 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7836 if (GET_CODE (target) == MEM)
7837 target = gen_reg_rtx (mode);
7840 emit_move_insn (target, op0);
7842 op0 = gen_label_rtx ();
7844 /* If this mode is an integer too wide to compare properly,
7845 compare word by word. Rely on cse to optimize constant cases. */
7846 if (GET_MODE_CLASS (mode) == MODE_INT
7847 && ! can_compare_p (GE, mode, ccp_jump))
7849 if (code == MAX_EXPR)
7850 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7851 target, op1, NULL_RTX, op0);
7853 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7854 op1, target, NULL_RTX, op0);
7858 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7859 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7860 unsignedp, mode, NULL_RTX, NULL_RTX,
7863 emit_move_insn (target, op1);
7868 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7869 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7875 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7876 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7881 /* ??? Can optimize bitwise operations with one arg constant.
7882 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7883 and (a bitwise1 b) bitwise2 b (etc)
7884 but that is probably not worth while. */
7886 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7887 boolean values when we want in all cases to compute both of them. In
7888 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7889 as actual zero-or-1 values and then bitwise anding. In cases where
7890 there cannot be any side effects, better code would be made by
7891 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7892 how to recognize those cases. */
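/* For illustration (the exact trees are frontend-dependent): a test
   such as "r = p && q" expressed as TRUTH_AND_EXPR evaluates both P
   and Q to 0-or-1 values and bitwise-ANDs them, whereas the
   TRUTH_ANDIF_EXPR form branches around the evaluation of Q whenever
   P is false.  */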
7894 case TRUTH_AND_EXPR:
7896 this_optab = and_optab;
7901 this_optab = ior_optab;
7904 case TRUTH_XOR_EXPR:
7906 this_optab = xor_optab;
7913 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7915 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7916 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7919 /* Could determine the answer when only additive constants differ. Also,
7920 the addition of one can be handled by changing the condition. */
7927 case UNORDERED_EXPR:
7934 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7938 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7939 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7941 && GET_CODE (original_target) == REG
7942 && (GET_MODE (original_target)
7943 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7945 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7948 if (temp != original_target)
7949 temp = copy_to_reg (temp);
7951 op1 = gen_label_rtx ();
7952 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7953 GET_MODE (temp), unsignedp, op1);
7954 emit_move_insn (temp, const1_rtx);
7959 /* If no set-flag instruction, must generate a conditional
7960 store into a temporary variable. Drop through
7961 and handle this like && and ||. */
7963 case TRUTH_ANDIF_EXPR:
7964 case TRUTH_ORIF_EXPR:
7966 && (target == 0 || ! safe_from_p (target, exp, 1)
7967 /* Make sure we don't have a hard reg (such as function's return
7968 value) live across basic blocks, if not optimizing. */
7969 || (!optimize && GET_CODE (target) == REG
7970 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7971 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7974 emit_clr_insn (target);
7976 op1 = gen_label_rtx ();
7977 jumpifnot (exp, op1);
7980 emit_0_to_1_insn (target);
7983 return ignore ? const0_rtx : target;
7985 case TRUTH_NOT_EXPR:
7986 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7987 /* The parser is careful to generate TRUTH_NOT_EXPR
7988 only with operands that are always zero or one. */
7989 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7990 target, 1, OPTAB_LIB_WIDEN);
7996 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7998 return expand_expr (TREE_OPERAND (exp, 1),
7999 (ignore ? const0_rtx : target),
8003 /* If we would have a "singleton" (see below) were it not for a
8004 conversion in each arm, bring that conversion back out. */
8005 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8006 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8007 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8008 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8010 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8011 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8013 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8014 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8015 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8016 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8017 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8018 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8019 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8020 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8021 return expand_expr (build1 (NOP_EXPR, type,
8022 build (COND_EXPR, TREE_TYPE (iftrue),
8023 TREE_OPERAND (exp, 0),
8025 target, tmode, modifier);
8029 /* Note that COND_EXPRs whose type is a structure or union
8030 are required to be constructed to contain assignments of
8031 a temporary variable, so that we can evaluate them here
8032 for side effect only. If type is void, we must do likewise. */
8034 /* If an arm of the branch requires a cleanup,
8035 only that cleanup is performed. */
8038 tree binary_op = 0, unary_op = 0;
8040 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8041 convert it to our mode, if necessary. */
8042 if (integer_onep (TREE_OPERAND (exp, 1))
8043 && integer_zerop (TREE_OPERAND (exp, 2))
8044 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8048 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8053 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8054 if (GET_MODE (op0) == mode)
8058 target = gen_reg_rtx (mode);
8059 convert_move (target, op0, unsignedp);
8063 /* Check for X ? A + B : A. If we have this, we can copy A to the
8064 output and conditionally add B. Similarly for unary operations.
8065 Don't do this if X has side-effects because those side effects
8066 might affect A or B and the "?" operation is a sequence point in
8067 ANSI. (operand_equal_p tests for side effects.) */
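/* E.g. a hypothetical "y = cond ? a + b : a" can be compiled as

       y = a;
       if (cond)
         y += b;

   provided evaluating COND cannot clobber A or B; the singleton
   detection below identifies A as the arm shared by both branches.  */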
8069 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8070 && operand_equal_p (TREE_OPERAND (exp, 2),
8071 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8072 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8073 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8074 && operand_equal_p (TREE_OPERAND (exp, 1),
8075 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8076 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8077 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8078 && operand_equal_p (TREE_OPERAND (exp, 2),
8079 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8080 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8081 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8082 && operand_equal_p (TREE_OPERAND (exp, 1),
8083 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8084 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8086 /* If we are not to produce a result, we have no target. Otherwise,
8087 if a target was specified use it; it will not be used as an
8088 intermediate target unless it is safe. If no target, use a temporary. */
8093 else if (original_target
8094 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8095 || (singleton && GET_CODE (original_target) == REG
8096 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8097 && original_target == var_rtx (singleton)))
8098 && GET_MODE (original_target) == mode
8099 #ifdef HAVE_conditional_move
8100 && (! can_conditionally_move_p (mode)
8101 || GET_CODE (original_target) == REG
8102 || TREE_ADDRESSABLE (type))
8104 && (GET_CODE (original_target) != MEM
8105 || TREE_ADDRESSABLE (type)))
8106 temp = original_target;
8107 else if (TREE_ADDRESSABLE (type))
8110 temp = assign_temp (type, 0, 0, 1);
8112 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8113 do the test of X as a store-flag operation, do this as
8114 A + ((X != 0) << log C). Similarly for other simple binary
8115 operators. Only do for C == 1 if BRANCH_COST is low. */
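/* A worked instance: for a hypothetical "y = x ? a + 4 : a", C == 4
   is a power of 2, so with a store-flag instruction this becomes

       y = a + ((x != 0) << 2);

   (x != 0) is 0 or 1, so the shifted term is exactly 4 or 0.  */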
8116 if (temp && singleton && binary_op
8117 && (TREE_CODE (binary_op) == PLUS_EXPR
8118 || TREE_CODE (binary_op) == MINUS_EXPR
8119 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8120 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8121 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8122 : integer_onep (TREE_OPERAND (binary_op, 1)))
8123 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8126 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8127 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8128 ? addv_optab : add_optab)
8129 : TREE_CODE (binary_op) == MINUS_EXPR
8130 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8131 ? subv_optab : sub_optab)
8132 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8135 /* If we had X ? A : A + 1, do this as A + (X == 0).
8137 We have to invert the truth value here and then put it
8138 back later if do_store_flag fails. We cannot simply copy
8139 TREE_OPERAND (exp, 0) to another variable and modify that
8140 because invert_truthvalue can modify the tree pointed to by its argument. */
8142 if (singleton == TREE_OPERAND (exp, 1))
8143 TREE_OPERAND (exp, 0)
8144 = invert_truthvalue (TREE_OPERAND (exp, 0));
8146 result = do_store_flag (TREE_OPERAND (exp, 0),
8147 (safe_from_p (temp, singleton, 1)
8149 mode, BRANCH_COST <= 1);
8151 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8152 result = expand_shift (LSHIFT_EXPR, mode, result,
8153 build_int_2 (tree_log2
8157 (safe_from_p (temp, singleton, 1)
8158 ? temp : NULL_RTX), 0);
8162 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8163 return expand_binop (mode, boptab, op1, result, temp,
8164 unsignedp, OPTAB_LIB_WIDEN);
8166 else if (singleton == TREE_OPERAND (exp, 1))
8167 TREE_OPERAND (exp, 0)
8168 = invert_truthvalue (TREE_OPERAND (exp, 0));
8171 do_pending_stack_adjust ();
8173 op0 = gen_label_rtx ();
8175 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8179 /* If the target conflicts with the other operand of the
8180 binary op, we can't use it. Also, we can't use the target
8181 if it is a hard register, because evaluating the condition
8182 might clobber it. */
8184 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8185 || (GET_CODE (temp) == REG
8186 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8187 temp = gen_reg_rtx (mode);
8188 store_expr (singleton, temp, 0);
8191 expand_expr (singleton,
8192 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8193 if (singleton == TREE_OPERAND (exp, 1))
8194 jumpif (TREE_OPERAND (exp, 0), op0);
8196 jumpifnot (TREE_OPERAND (exp, 0), op0);
8198 start_cleanup_deferral ();
8199 if (binary_op && temp == 0)
8200 /* Just touch the other operand. */
8201 expand_expr (TREE_OPERAND (binary_op, 1),
8202 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8204 store_expr (build (TREE_CODE (binary_op), type,
8205 make_tree (type, temp),
8206 TREE_OPERAND (binary_op, 1)),
8209 store_expr (build1 (TREE_CODE (unary_op), type,
8210 make_tree (type, temp)),
8214 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8215 comparison operator. If we have one of these cases, set the
8216 output to A, branch on A (cse will merge these two references),
8217 then set the output to FOO. */
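/* For instance, "x > 0 ? x : -x": store X in the output, branch past the else-arm when X > 0, and otherwise store -X. */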
8219 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8220 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8221 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8222 TREE_OPERAND (exp, 1), 0)
8223 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8224 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8225 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8227 if (GET_CODE (temp) == REG
8228 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8229 temp = gen_reg_rtx (mode);
8230 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8231 jumpif (TREE_OPERAND (exp, 0), op0);
8233 start_cleanup_deferral ();
8234 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8238 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8239 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8240 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8241 TREE_OPERAND (exp, 2), 0)
8242 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8243 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8244 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8246 if (GET_CODE (temp) == REG
8247 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8248 temp = gen_reg_rtx (mode);
8249 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8250 jumpifnot (TREE_OPERAND (exp, 0), op0);
8252 start_cleanup_deferral ();
8253 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8258 op1 = gen_label_rtx ();
8259 jumpifnot (TREE_OPERAND (exp, 0), op0);
8261 start_cleanup_deferral ();
8263 /* One branch of the cond can be void, if it never returns. For
8264 example, A ? throw : E. */
8266 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8267 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8269 expand_expr (TREE_OPERAND (exp, 1),
8270 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8271 end_cleanup_deferral ();
8273 emit_jump_insn (gen_jump (op1));
8276 start_cleanup_deferral ();
8278 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8279 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8281 expand_expr (TREE_OPERAND (exp, 2),
8282 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8285 end_cleanup_deferral ();
8296 /* Something needs to be initialized, but we didn't know
8297 where that thing was when building the tree. For example,
8298 it could be the return value of a function, or a parameter
8299 to a function which is laid out on the stack, or a temporary
8300 variable which must be passed by reference.
8302 We guarantee that the expression will either be constructed
8303 or copied into our original target. */
8305 tree slot = TREE_OPERAND (exp, 0);
8306 tree cleanups = NULL_TREE;
8309 if (TREE_CODE (slot) != VAR_DECL)
8313 target = original_target;
8315 /* Set this here so that if we get a target that refers to a
8316 register variable that's already been used, put_reg_into_stack
8317 knows that it should fix up those uses. */
8318 TREE_USED (slot) = 1;
8322 if (DECL_RTL_SET_P (slot))
8324 target = DECL_RTL (slot);
8325 /* We have already expanded the slot, so don't do anything else. */
8327 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8332 target = assign_temp (type, 2, 0, 1);
8333 /* All temp slots at this level must not conflict. */
8334 preserve_temp_slots (target);
8335 SET_DECL_RTL (slot, target);
8336 if (TREE_ADDRESSABLE (slot))
8337 put_var_into_stack (slot);
8339 /* Since SLOT is not known to the called function
8340 to belong to its stack frame, we must build an explicit
8341 cleanup. This case occurs when we must build up a reference
8342 to pass the reference as an argument. In this case,
8343 it is very likely that such a reference need not be built here. */
8346 if (TREE_OPERAND (exp, 2) == 0)
8347 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8348 cleanups = TREE_OPERAND (exp, 2);
8353 /* This case does occur, when expanding a parameter which
8354 needs to be constructed on the stack. The target
8355 is the actual stack address that we want to initialize.
8356 The function we call will perform the cleanup in this case. */
8358 /* If we have already assigned it space, use that space,
8359 not the target that we were passed in, as our target
8360 parameter is only a hint. */
8361 if (DECL_RTL_SET_P (slot))
8363 target = DECL_RTL (slot);
8364 /* We have already expanded the slot, so don't do anything else. */
8366 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8371 SET_DECL_RTL (slot, target);
8372 /* If we must have an addressable slot, then make sure that
8373 the RTL that we just stored in slot is OK. */
8374 if (TREE_ADDRESSABLE (slot))
8375 put_var_into_stack (slot);
8379 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8380 /* Mark it as expanded. */
8381 TREE_OPERAND (exp, 1) = NULL_TREE;
8383 store_expr (exp1, target, 0);
8385 expand_decl_cleanup (NULL_TREE, cleanups);
8392 tree lhs = TREE_OPERAND (exp, 0);
8393 tree rhs = TREE_OPERAND (exp, 1);
8395 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8401 /* If lhs is complex, expand calls in rhs before computing it.
8402 That's so we don't compute a pointer and save it over a
8403 call. If lhs is simple, compute it first so we can give it
8404 as a target if the rhs is just a call. This avoids an
8405 extra temp and copy and that prevents a partial-subsumption
8406 which makes bad code. Actually we could treat
8407 component_ref's of vars like vars. */
8409 tree lhs = TREE_OPERAND (exp, 0);
8410 tree rhs = TREE_OPERAND (exp, 1);
8414 /* Check for |= or &= of a bitfield of size one into another bitfield
8415 of size 1. In this case, (unless we need the result of the
8416 assignment) we can do this more efficiently with a
8417 test followed by an assignment, if necessary.
8419 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8420 things change so we do, this code should be enhanced to support it. */
8423 && TREE_CODE (lhs) == COMPONENT_REF
8424 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8425 || TREE_CODE (rhs) == BIT_AND_EXPR)
8426 && TREE_OPERAND (rhs, 0) == lhs
8427 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8428 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8429 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8431 rtx label = gen_label_rtx ();
8433 do_jump (TREE_OPERAND (rhs, 1),
8434 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8435 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8436 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8437 (TREE_CODE (rhs) == BIT_IOR_EXPR
8439 : integer_zero_node)),
8441 do_pending_stack_adjust ();
8446 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8452 if (!TREE_OPERAND (exp, 0))
8453 expand_null_return ();
8455 expand_return (TREE_OPERAND (exp, 0));
8458 case PREINCREMENT_EXPR:
8459 case PREDECREMENT_EXPR:
8460 return expand_increment (exp, 0, ignore);
8462 case POSTINCREMENT_EXPR:
8463 case POSTDECREMENT_EXPR:
8464 /* Faster to treat as pre-increment if result is not used. */
8465 return expand_increment (exp, ! ignore, ignore);
8468 /* Are we taking the address of a nested function? */
8469 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8470 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8471 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8472 && ! TREE_STATIC (exp))
8474 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8475 op0 = force_operand (op0, target);
8477 /* If we are taking the address of something erroneous, just return a zero. */
8479 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8481 /* If we are taking the address of a constant and are at the
8482 top level, we have to use output_constant_def since we can't
8483 call force_const_mem at top level. */
8485 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8486 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8488 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8491 /* We make sure to pass const0_rtx down if we came in with
8492 ignore set, to avoid doing the cleanups twice for something. */
8493 op0 = expand_expr (TREE_OPERAND (exp, 0),
8494 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8495 (modifier == EXPAND_INITIALIZER
8496 ? modifier : EXPAND_CONST_ADDRESS));
8498 /* If we are going to ignore the result, OP0 will have been set
8499 to const0_rtx, so just return it. Don't get confused and
8500 think we are taking the address of the constant. */
8504 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8505 clever and return a REG when given a MEM. */
8506 op0 = protect_from_queue (op0, 1);
8508 /* We would like the object in memory. If it is a constant, we can
8509 have it be statically allocated into memory. For a non-constant,
8510 we need to allocate some memory and store the value into it. */
8512 if (CONSTANT_P (op0))
8513 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8515 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8516 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8517 || GET_CODE (op0) == PARALLEL)
8519 /* If the operand is a SAVE_EXPR, we can deal with this by
8520 forcing the SAVE_EXPR into memory. */
8521 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8523 put_var_into_stack (TREE_OPERAND (exp, 0));
8524 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8528 /* If this object is in a register, it can't be BLKmode. */
8529 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8530 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8532 if (GET_CODE (op0) == PARALLEL)
8533 /* Handle calls that pass values in multiple
8534 non-contiguous locations. The Irix 6 ABI has examples of this. */
8536 emit_group_store (memloc, op0,
8537 int_size_in_bytes (inner_type));
8539 emit_move_insn (memloc, op0);
8545 if (GET_CODE (op0) != MEM)
8548 mark_temp_addr_taken (op0);
8549 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8551 op0 = XEXP (op0, 0);
8552 #ifdef POINTERS_EXTEND_UNSIGNED
8553 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8554 && mode == ptr_mode)
8555 op0 = convert_memory_address (ptr_mode, op0);
8560 /* If OP0 is not aligned as least as much as the type requires, we
8561 need to make a temporary, copy OP0 to it, and take the address of
8562 the temporary. We want to use the alignment of the type, not of
8563 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8564 the test for BLKmode means that can't happen. The test for
8565 BLKmode is because we never make mis-aligned MEMs with non-BLKmode.
8568 We don't need to do this at all if the machine doesn't have
8569 strict alignment. */
8570 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8571 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8573 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8575 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8577 = assign_stack_temp_for_type
8578 (TYPE_MODE (inner_type),
8579 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8580 : int_size_in_bytes (inner_type),
8581 1, build_qualified_type (inner_type,
8582 (TYPE_QUALS (inner_type)
8583 | TYPE_QUAL_CONST)));
8585 if (TYPE_ALIGN_OK (inner_type))
8588 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8592 op0 = force_operand (XEXP (op0, 0), target);
8596 && GET_CODE (op0) != REG
8597 && modifier != EXPAND_CONST_ADDRESS
8598 && modifier != EXPAND_INITIALIZER
8599 && modifier != EXPAND_SUM)
8600 op0 = force_reg (Pmode, op0);
8602 if (GET_CODE (op0) == REG
8603 && ! REG_USERVAR_P (op0))
8604 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8606 #ifdef POINTERS_EXTEND_UNSIGNED
8607 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8608 && mode == ptr_mode)
8609 op0 = convert_memory_address (ptr_mode, op0);
8614 case ENTRY_VALUE_EXPR:
8617 /* COMPLEX type for Extended Pascal & Fortran */
8620 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8623 /* Get the rtx code of the operands. */
8624 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8625 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8628 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8632 /* Move the real (op0) and imaginary (op1) parts to their location. */
8633 emit_move_insn (gen_realpart (mode, target), op0);
8634 emit_move_insn (gen_imagpart (mode, target), op1);
8636 insns = get_insns ();
8639 /* Complex construction should appear as a single unit. */
8640 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8641 each with a separate pseudo as destination.
8642 It's not correct for flow to treat them as a unit. */
8643 if (GET_CODE (target) != CONCAT)
8644 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8652 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8653 return gen_realpart (mode, op0);
8656 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8657 return gen_imagpart (mode, op0);
8661 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8665 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8668 target = gen_reg_rtx (mode);
8672 /* Store the realpart and the negated imagpart to target. */
8673 emit_move_insn (gen_realpart (partmode, target),
8674 gen_realpart (partmode, op0));
8676 imag_t = gen_imagpart (partmode, target);
8677 temp = expand_unop (partmode,
8678 ! unsignedp && flag_trapv
8679 && (GET_MODE_CLASS(partmode) == MODE_INT)
8680 ? negv_optab : neg_optab,
8681 gen_imagpart (partmode, op0), imag_t, 0);
8683 emit_move_insn (imag_t, temp);
8685 insns = get_insns ();
8688 /* Conjugate should appear as a single unit.
8689 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8690 each with a separate pseudo as destination.
8691 It's not correct for flow to treat them as a unit. */
8692 if (GET_CODE (target) != CONCAT)
8693 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8700 case TRY_CATCH_EXPR:
8702 tree handler = TREE_OPERAND (exp, 1);
8704 expand_eh_region_start ();
8706 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8708 expand_eh_region_end_cleanup (handler);
8713 case TRY_FINALLY_EXPR:
8715 tree try_block = TREE_OPERAND (exp, 0);
8716 tree finally_block = TREE_OPERAND (exp, 1);
8717 rtx finally_label = gen_label_rtx ();
8718 rtx done_label = gen_label_rtx ();
8719 rtx return_link = gen_reg_rtx (Pmode);
8720 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8721 (tree) finally_label, (tree) return_link);
8722 TREE_SIDE_EFFECTS (cleanup) = 1;
8724 /* Start a new binding layer that will keep track of all cleanup
8725 actions to be performed. */
8726 expand_start_bindings (2);
8728 target_temp_slot_level = temp_slot_level;
8730 expand_decl_cleanup (NULL_TREE, cleanup);
8731 op0 = expand_expr (try_block, target, tmode, modifier);
8733 preserve_temp_slots (op0);
8734 expand_end_bindings (NULL_TREE, 0, 0);
8735 emit_jump (done_label);
8736 emit_label (finally_label);
8737 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8738 emit_indirect_jump (return_link);
8739 emit_label (done_label);
8743 case GOTO_SUBROUTINE_EXPR:
8745 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8746 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8747 rtx return_address = gen_label_rtx ();
8748 emit_move_insn (return_link,
8749 gen_rtx_LABEL_REF (Pmode, return_address));
8751 emit_label (return_address);
8756 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8759 return get_exception_pointer (cfun);
8762 /* Function descriptors are not valid except as
8763 initialization constants, and should not be expanded. */
8767 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8770 /* Here to do an ordinary binary operator, generating an instruction
8771 from the optab already placed in `this_optab'. */
8773 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8775 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8776 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8778 temp = expand_binop (mode, this_optab, op0, op1, target,
8779 unsignedp, OPTAB_LIB_WIDEN);
8785 /* Return the tree node if ARG corresponds to a string constant or zero
8786 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8787 in bytes within the string that ARG is accessing. The type of the
8788 offset will be `sizetype'. */
8791 string_constant (arg, ptr_offset)
8797 if (TREE_CODE (arg) == ADDR_EXPR
8798 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8800 *ptr_offset = size_zero_node;
8801 return TREE_OPERAND (arg, 0);
8803 else if (TREE_CODE (arg) == PLUS_EXPR)
8805 tree arg0 = TREE_OPERAND (arg, 0);
8806 tree arg1 = TREE_OPERAND (arg, 1);
8811 if (TREE_CODE (arg0) == ADDR_EXPR
8812 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8814 *ptr_offset = convert (sizetype, arg1);
8815 return TREE_OPERAND (arg0, 0);
8817 else if (TREE_CODE (arg1) == ADDR_EXPR
8818 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8820 *ptr_offset = convert (sizetype, arg0);
8821 return TREE_OPERAND (arg1, 0);
8828 /* Expand code for a post- or pre- increment or decrement
8829 and return the RTX for the result.
8830 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
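/* For instance, "i++" used for its value is expanded with POST == 1 and yields the old value of I, while "++i", or an "i++" whose value is ignored, is expanded as a pre-increment. */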
8833 expand_increment (exp, post, ignore)
8839 tree incremented = TREE_OPERAND (exp, 0);
8840 optab this_optab = add_optab;
8842 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8843 int op0_is_copy = 0;
8844 int single_insn = 0;
8845 /* 1 means we can't store into OP0 directly,
8846 because it is a subreg narrower than a word,
8847 and we don't dare clobber the rest of the word. */
8850 /* Stabilize any component ref that might need to be
8851 evaluated more than once below. */
8853 || TREE_CODE (incremented) == BIT_FIELD_REF
8854 || (TREE_CODE (incremented) == COMPONENT_REF
8855 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8856 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8857 incremented = stabilize_reference (incremented);
8858 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8859 ones into save exprs so that they don't accidentally get evaluated
8860 more than once by the code below. */
8861 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8862 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8863 incremented = save_expr (incremented);
8865 /* Compute the operands as RTX.
8866 Note whether OP0 is the actual lvalue or a copy of it:
8867 I believe it is a copy iff it is a register or subreg
8868 and insns were generated in computing it. */
8870 temp = get_last_insn ();
8871 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8873 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8874 in place but instead must do sign- or zero-extension during assignment,
8875 so we copy it into a new register and let the code below use it as a copy.
8878 Note that we can safely modify this SUBREG since it is known not to be
8879 shared (it was made by the expand_expr call above). */
8881 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8884 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8888 else if (GET_CODE (op0) == SUBREG
8889 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8891 /* We cannot increment this SUBREG in place. If we are
8892 post-incrementing, get a copy of the old value. Otherwise,
8893 just mark that we cannot increment in place. */
8895 op0 = copy_to_reg (op0);
8900 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8901 && temp != get_last_insn ());
8902 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8904 /* Decide whether incrementing or decrementing. */
8905 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8906 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8907 this_optab = sub_optab;
8909 /* Convert decrement by a constant into a negative increment. */
8910 if (this_optab == sub_optab
8911 && GET_CODE (op1) == CONST_INT)
8913 op1 = GEN_INT (-INTVAL (op1));
8914 this_optab = add_optab;
8917 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8918 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8920 /* For a preincrement, see if we can do this with a single instruction. */
8923 icode = (int) this_optab->handlers[(int) mode].insn_code;
8924 if (icode != (int) CODE_FOR_nothing
8925 /* Make sure that OP0 is valid for operands 0 and 1
8926 of the insn we want to queue. */
8927 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8928 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8929 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8933 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8934 then we cannot just increment OP0. We must therefore contrive to
8935 increment the original value. Then, for postincrement, we can return
8936 OP0 since it is a copy of the old value. For preincrement, expand here
8937 unless we can do it with a single insn.
8939 Likewise if storing directly into OP0 would clobber high bits
8940 we need to preserve (bad_subreg). */
8941 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8943 /* This is the easiest way to increment the value wherever it is.
8944 Problems with multiple evaluation of INCREMENTED are prevented
8945 because either (1) it is a component_ref or preincrement,
8946 in which case it was stabilized above, or (2) it is an array_ref
8947 with constant index in an array in a register, which is
8948 safe to reevaluate. */
8949 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8950 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8951 ? MINUS_EXPR : PLUS_EXPR),
8954 TREE_OPERAND (exp, 1));
8956 while (TREE_CODE (incremented) == NOP_EXPR
8957 || TREE_CODE (incremented) == CONVERT_EXPR)
8959 newexp = convert (TREE_TYPE (incremented), newexp);
8960 incremented = TREE_OPERAND (incremented, 0);
8963 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8964 return post ? op0 : temp;
8969 /* We have a true reference to the value in OP0.
8970 If there is an insn to add or subtract in this mode, queue it.
8971 Queueing the increment insn avoids the register shuffling
8972 that often results if we must increment now and first save
8973 the old value for subsequent use. */
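/* For instance, in "a[i++] = 0" the add to I can be queued until after the old value of I has been used to address the array element. */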
8975 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8976 op0 = stabilize (op0);
8979 icode = (int) this_optab->handlers[(int) mode].insn_code;
8980 if (icode != (int) CODE_FOR_nothing
8981 /* Make sure that OP0 is valid for operands 0 and 1
8982 of the insn we want to queue. */
8983 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8984 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8986 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8987 op1 = force_reg (mode, op1);
8989 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8991 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8993 rtx addr = (general_operand (XEXP (op0, 0), mode)
8994 ? force_reg (Pmode, XEXP (op0, 0))
8995 : copy_to_reg (XEXP (op0, 0)));
8998 op0 = replace_equiv_address (op0, addr);
8999 temp = force_reg (GET_MODE (op0), op0);
9000 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9001 op1 = force_reg (mode, op1);
9003 /* The increment queue is LIFO, thus we have to `queue'
9004 the instructions in reverse order. */
9005 enqueue_insn (op0, gen_move_insn (op0, temp));
9006 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9011 /* Preincrement, or we can't increment with one simple insn. */
9013 /* Save a copy of the value before inc or dec, to return it later. */
9014 temp = value = copy_to_reg (op0);
9016 /* Arrange to return the incremented value. */
9017 /* Copy the rtx because expand_binop will protect from the queue,
9018 and the results of that would be invalid for us to return
9019 if our caller does emit_queue before using our result. */
9020 temp = copy_rtx (value = op0);
9022 /* Increment however we can. */
9023 op1 = expand_binop (mode, this_optab, value, op1, op0,
9024 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9026 /* Make sure the value is stored into OP0. */
9028 emit_move_insn (op0, op1);
9033 /* At the start of a function, record that we have no previously-pushed
9034 arguments waiting to be popped. */
9037 init_pending_stack_adjust ()
9039 pending_stack_adjust = 0;
9042 /* When exiting from a function, if safe, clear out any pending stack adjust
9043 so the adjustment won't get done.
9045 Note, if the current function calls alloca, then it must have a
9046 frame pointer regardless of the value of flag_omit_frame_pointer. */
9049 clear_pending_stack_adjust ()
9051 #ifdef EXIT_IGNORE_STACK
9053 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9054 && EXIT_IGNORE_STACK
9055 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9056 && ! flag_inline_functions)
9058 stack_pointer_delta -= pending_stack_adjust,
9059 pending_stack_adjust = 0;
9064 /* Pop any previously-pushed arguments that have not been popped yet. */
9067 do_pending_stack_adjust ()
9069 if (inhibit_defer_pop == 0)
9071 if (pending_stack_adjust != 0)
9072 adjust_stack (GEN_INT (pending_stack_adjust));
9073 pending_stack_adjust = 0;
9077 /* Expand conditional expressions. */
9079 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9080 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9084 jumpifnot (exp, label)
9088 do_jump (exp, label, NULL_RTX);
9091 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9098 do_jump (exp, NULL_RTX, label);
9101 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9102 the result is zero, or IF_TRUE_LABEL if the result is one.
9103 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9104 meaning fall through in that case.
9106 do_jump always does any pending stack adjust except when it does not
9107 actually perform a jump. An example where there is no jump
9108 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9110 This function is responsible for optimizing cases such as
9111 &&, || and comparison operators in EXP. */
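/* For instance, for "a && b" we do not compute a boolean value; we jump to IF_FALSE_LABEL as soon as A is known to be zero and only then test B. */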
9114 do_jump (exp, if_false_label, if_true_label)
9116 rtx if_false_label, if_true_label;
9118 enum tree_code code = TREE_CODE (exp);
9119 /* Some cases need to create a label to jump to
9120 in order to properly fall through.
9121 These cases set DROP_THROUGH_LABEL nonzero. */
9122 rtx drop_through_label = 0;
9126 enum machine_mode mode;
9128 #ifdef MAX_INTEGER_COMPUTATION_MODE
9129 check_max_integer_computation_mode (exp);
9140 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9146 /* This is not true with #pragma weak */
9148 /* The address of something can never be zero. */
9150 emit_jump (if_true_label);
9155 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9156 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9157 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9158 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9161 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9163 if ((TYPE_PRECISION (TREE_TYPE (exp))
9164 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9166 case NON_LVALUE_EXPR:
9167 case REFERENCE_EXPR:
9172 /* These cannot change zero->non-zero or vice versa. */
9173 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9176 case WITH_RECORD_EXPR:
9177 /* Put the object on the placeholder list, recurse through our first
9178 operand, and pop the list. */
9179 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9181 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9182 placeholder_list = TREE_CHAIN (placeholder_list);
9186 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9187 a test and can be longer if the test is eliminated. */
9189 /* Reduce to minus. */
9190 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9191 TREE_OPERAND (exp, 0),
9192 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9193 TREE_OPERAND (exp, 1))));
9194 /* Process as MINUS. */
9198 /* Non-zero iff operands of minus differ. */
9199 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9200 TREE_OPERAND (exp, 0),
9201 TREE_OPERAND (exp, 1)),
9202 NE, NE, if_false_label, if_true_label);
9206 /* If we are AND'ing with a small constant, do this comparison in the
9207 smallest type that fits. If the machine doesn't have comparisons
9208 that small, it will be converted back to the wider comparison.
9209 This helps if we are testing the sign bit of a narrower object.
9210 combine can't do this for us because it can't know whether a
9211 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
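/* For instance, a test of "x & 0x80" only needs the low-order byte, so on a target with QImode compares it can be done in QImode. */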
9213 if (! SLOW_BYTE_ACCESS
9214 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9215 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9216 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9217 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9218 && (type = type_for_mode (mode, 1)) != 0
9219 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9220 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9221 != CODE_FOR_nothing))
9223 do_jump (convert (type, exp), if_false_label, if_true_label);
9228 case TRUTH_NOT_EXPR:
9229 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9232 case TRUTH_ANDIF_EXPR:
9233 if (if_false_label == 0)
9234 if_false_label = drop_through_label = gen_label_rtx ();
9235 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9236 start_cleanup_deferral ();
9237 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9238 end_cleanup_deferral ();
9241 case TRUTH_ORIF_EXPR:
9242 if (if_true_label == 0)
9243 if_true_label = drop_through_label = gen_label_rtx ();
9244 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9245 start_cleanup_deferral ();
9246 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9247 end_cleanup_deferral ();
9252 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9253 preserve_temp_slots (NULL_RTX);
9257 do_pending_stack_adjust ();
9258 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9264 case ARRAY_RANGE_REF:
9266 HOST_WIDE_INT bitsize, bitpos;
9268 enum machine_mode mode;
9273 /* Get description of this reference. We don't actually care
9274 about the underlying object here. */
9275 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9276 &unsignedp, &volatilep);
9278 type = type_for_size (bitsize, unsignedp);
9279 if (! SLOW_BYTE_ACCESS
9280 && type != 0 && bitsize >= 0
9281 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9282 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9283 != CODE_FOR_nothing))
9285 do_jump (convert (type, exp), if_false_label, if_true_label);
9292 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9293 if (integer_onep (TREE_OPERAND (exp, 1))
9294 && integer_zerop (TREE_OPERAND (exp, 2)))
9295 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9297 else if (integer_zerop (TREE_OPERAND (exp, 1))
9298 && integer_onep (TREE_OPERAND (exp, 2)))
9299 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9303 rtx label1 = gen_label_rtx ();
9304 drop_through_label = gen_label_rtx ();
9306 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9308 start_cleanup_deferral ();
9309 /* Now the THEN-expression. */
9310 do_jump (TREE_OPERAND (exp, 1),
9311 if_false_label ? if_false_label : drop_through_label,
9312 if_true_label ? if_true_label : drop_through_label);
9313 /* In case the do_jump just above never jumps. */
9314 do_pending_stack_adjust ();
9315 emit_label (label1);
9317 /* Now the ELSE-expression. */
9318 do_jump (TREE_OPERAND (exp, 2),
9319 if_false_label ? if_false_label : drop_through_label,
9320 if_true_label ? if_true_label : drop_through_label);
9321 end_cleanup_deferral ();
9327 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9329 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9330 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9332 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9333 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9336 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9337 fold (build (EQ_EXPR, TREE_TYPE (exp),
9338 fold (build1 (REALPART_EXPR,
9339 TREE_TYPE (inner_type),
9341 fold (build1 (REALPART_EXPR,
9342 TREE_TYPE (inner_type),
9344 fold (build (EQ_EXPR, TREE_TYPE (exp),
9345 fold (build1 (IMAGPART_EXPR,
9346 TREE_TYPE (inner_type),
9348 fold (build1 (IMAGPART_EXPR,
9349 TREE_TYPE (inner_type),
9351 if_false_label, if_true_label);
9354 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9355 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9357 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9358 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9359 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9361 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9367 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9369 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9370 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9372 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9373 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9376 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9377 fold (build (NE_EXPR, TREE_TYPE (exp),
9378 fold (build1 (REALPART_EXPR,
9379 TREE_TYPE (inner_type),
9381 fold (build1 (REALPART_EXPR,
9382 TREE_TYPE (inner_type),
9384 fold (build (NE_EXPR, TREE_TYPE (exp),
9385 fold (build1 (IMAGPART_EXPR,
9386 TREE_TYPE (inner_type),
9388 fold (build1 (IMAGPART_EXPR,
9389 TREE_TYPE (inner_type),
9391 if_false_label, if_true_label);
9394 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9395 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9397 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9398 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9399 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9401 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9406 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9407 if (GET_MODE_CLASS (mode) == MODE_INT
9408 && ! can_compare_p (LT, mode, ccp_jump))
9409 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9411 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9415 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9416 if (GET_MODE_CLASS (mode) == MODE_INT
9417 && ! can_compare_p (LE, mode, ccp_jump))
9418 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9420 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9424 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9425 if (GET_MODE_CLASS (mode) == MODE_INT
9426 && ! can_compare_p (GT, mode, ccp_jump))
9427 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9429 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9433 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9434 if (GET_MODE_CLASS (mode) == MODE_INT
9435 && ! can_compare_p (GE, mode, ccp_jump))
9436 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9438 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9441 case UNORDERED_EXPR:
9444 enum rtx_code cmp, rcmp;
9447 if (code == UNORDERED_EXPR)
9448 cmp = UNORDERED, rcmp = ORDERED;
9450 cmp = ORDERED, rcmp = UNORDERED;
9451 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9454 if (! can_compare_p (cmp, mode, ccp_jump)
9455 && (can_compare_p (rcmp, mode, ccp_jump)
9456 /* If the target doesn't provide either UNORDERED or ORDERED
9457 comparisons, canonicalize on UNORDERED for the library. */
9458 || rcmp == UNORDERED))
9462 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9464 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9469 enum rtx_code rcode1;
9470 enum tree_code tcode2;
9494 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9495 if (can_compare_p (rcode1, mode, ccp_jump))
9496 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9500 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9501 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9504 /* If the target doesn't support combined unordered
9505 compares, decompose into UNORDERED + comparison. */
9506 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9507 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9508 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9509 do_jump (exp, if_false_label, if_true_label);
9515 __builtin_expect (<test>, 0) and
9516 __builtin_expect (<test>, 1)
9518 We need to do this here, so that <test> is not converted to a SCC
9519 operation on machines that use condition code registers and COMPARE
9520 like the PowerPC, and then the jump is done based on whether the SCC
9521 operation produced a 1 or 0. */
9523 /* Check for a built-in function. */
9524 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9526 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9527 tree arglist = TREE_OPERAND (exp, 1);
9529 if (TREE_CODE (fndecl) == FUNCTION_DECL
9530 && DECL_BUILT_IN (fndecl)
9531 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9532 && arglist != NULL_TREE
9533 && TREE_CHAIN (arglist) != NULL_TREE)
9535 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9538 if (seq != NULL_RTX)
9545 /* fall through and generate the normal code. */
9549 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9551 /* This is not needed any more and causes poor code since it causes
9552 comparisons and tests from non-SI objects to have different code paths. */
9554 /* Copy to register to avoid generating bad insns by cse
9555 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9556 if (!cse_not_expected && GET_CODE (temp) == MEM)
9557 temp = copy_to_reg (temp);
9559 do_pending_stack_adjust ();
9560 /* Do any postincrements in the expression that was tested. */
9563 if (GET_CODE (temp) == CONST_INT
9564 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9565 || GET_CODE (temp) == LABEL_REF)
9567 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9571 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9572 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9573 /* Note swapping the labels gives us not-equal. */
9574 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9575 else if (GET_MODE (temp) != VOIDmode)
9576 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9577 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9578 GET_MODE (temp), NULL_RTX,
9579 if_false_label, if_true_label);
9584 if (drop_through_label)
9586 /* If do_jump produces code that might be jumped around,
9587 do any stack adjusts from that code, before the place
9588 where control merges in. */
9589 do_pending_stack_adjust ();
9590 emit_label (drop_through_label);
9594 /* Given a comparison expression EXP for values too wide to be compared
9595 with one insn, test the comparison and jump to the appropriate label.
9596 The code of EXP is ignored; we always test GT if SWAP is 0,
9597 and LT if SWAP is 1. */
9600 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9603 rtx if_false_label, if_true_label;
9605 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9606 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9607 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9608 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9610 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9613 /* Compare OP0 with OP1, word at a time, in mode MODE.
9614 UNSIGNEDP says to do unsigned comparison.
9615 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
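/* For instance, a DImode comparison on a 32-bit target compares the two high-order words first and looks at the low-order words only when the high-order words are equal. */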
9618 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9619 enum machine_mode mode;
9622 rtx if_false_label, if_true_label;
9624 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9625 rtx drop_through_label = 0;
9628 if (! if_true_label || ! if_false_label)
9629 drop_through_label = gen_label_rtx ();
9630 if (! if_true_label)
9631 if_true_label = drop_through_label;
9632 if (! if_false_label)
9633 if_false_label = drop_through_label;
9635 /* Compare a word at a time, high order first. */
9636 for (i = 0; i < nwords; i++)
9638 rtx op0_word, op1_word;
9640 if (WORDS_BIG_ENDIAN)
9642 op0_word = operand_subword_force (op0, i, mode);
9643 op1_word = operand_subword_force (op1, i, mode);
9647 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9648 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9651 /* All but the high-order word must be compared as unsigned. */
9652 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9653 (unsignedp || i > 0), word_mode, NULL_RTX,
9654 NULL_RTX, if_true_label);
9656 /* Consider lower words only if these are equal. */
9657 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9658 NULL_RTX, NULL_RTX, if_false_label);
9662 emit_jump (if_false_label);
9663 if (drop_through_label)
9664 emit_label (drop_through_label);
9667 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9668 with one insn, test the comparison and jump to the appropriate label. */
9671 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9673 rtx if_false_label, if_true_label;
9675 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9676 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9677 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9678 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9680 rtx drop_through_label = 0;
9682 if (! if_false_label)
9683 drop_through_label = if_false_label = gen_label_rtx ();
9685 for (i = 0; i < nwords; i++)
9686 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9687 operand_subword_force (op1, i, mode),
9688 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9689 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9692 emit_jump (if_true_label);
9693 if (drop_through_label)
9694 emit_label (drop_through_label);
9697 /* Jump according to whether OP0 is 0.
9698 We assume that OP0 has an integer mode that is too wide
9699 for the available compare insns. */
9702 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9704 rtx if_false_label, if_true_label;
9706 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9709 rtx drop_through_label = 0;
9711 /* The fastest way of doing this comparison on almost any machine is to
9712 "or" all the words and compare the result. If all have to be loaded
9713 from memory and this is a very wide item, it's possible this may
9714 be slower, but that's highly unlikely. */
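/* For instance, a DImode value on a 32-bit target: IOR the two SImode words and do a single compare of the result against zero. */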
9716 part = gen_reg_rtx (word_mode);
9717 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9718 for (i = 1; i < nwords && part != 0; i++)
9719 part = expand_binop (word_mode, ior_optab, part,
9720 operand_subword_force (op0, i, GET_MODE (op0)),
9721 part, 1, OPTAB_WIDEN);
9725 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9726 NULL_RTX, if_false_label, if_true_label);
9731 /* If we couldn't do the "or" simply, do this with a series of compares. */
9732 if (! if_false_label)
9733 drop_through_label = if_false_label = gen_label_rtx ();
9735 for (i = 0; i < nwords; i++)
9736 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9737 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9738 if_false_label, NULL_RTX);
9741 emit_jump (if_true_label);
9743 if (drop_through_label)
9744 emit_label (drop_through_label);
9747 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9748 (including code to compute the values to be compared)
9749 and set (CC0) according to the result.
9750 The decision as to signed or unsigned comparison must be made by the caller.
9752 We force a stack adjustment unless there are currently
9753 things pushed on the stack that aren't yet used.
9755 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared. */
9759 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9763 enum machine_mode mode;
9768 /* If one operand is constant, make it the second one. Only do this
9769 if the other operand is not constant as well. */
9771 if (swap_commutative_operands_p (op0, op1))
9776 code = swap_condition (code);
9781 op0 = force_not_mem (op0);
9782 op1 = force_not_mem (op1);
9785 do_pending_stack_adjust ();
9787 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9788 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9792 /* There's no need to do this now that combine.c can eliminate lots of
9793 sign extensions. This can be less efficient in certain cases on other machines. */
9796 /* If this is a signed equality comparison, we can do it as an
9797 unsigned comparison since zero-extension is cheaper than sign
9798 extension and comparisons with zero are done as unsigned. This is
9799 the case even on machines that can do fast sign extension, since
9800 zero-extension is easier to combine with other operations than
9801 sign-extension is. If we are comparing against a constant, we must
9802 convert it to what it would look like unsigned. */
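/* For instance, comparing a QImode value against -1 for equality: mask the constant down to 0xff so the unsigned comparison sees the same bit pattern. */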
9803 if ((code == EQ || code == NE) && ! unsignedp
9804 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9806 if (GET_CODE (op1) == CONST_INT
9807 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9808 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9813 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9815 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9818 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9819 The decision as to signed or unsigned comparison must be made by the caller.
9821 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared. */
9825 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9826 if_false_label, if_true_label)
9830 enum machine_mode mode;
9832 rtx if_false_label, if_true_label;
9835 int dummy_true_label = 0;
9837 /* Reverse the comparison if that is safe and we want to jump if it is false. */
9839 if (! if_true_label && ! FLOAT_MODE_P (mode))
9841 if_true_label = if_false_label;
9843 code = reverse_condition (code);
9846 /* If one operand is constant, make it the second one. Only do this
9847 if the other operand is not constant as well. */
9849 if (swap_commutative_operands_p (op0, op1))
9854 code = swap_condition (code);
9859 op0 = force_not_mem (op0);
9860 op1 = force_not_mem (op1);
9863 do_pending_stack_adjust ();
9865 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9866 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9868 if (tem == const_true_rtx)
9871 emit_jump (if_true_label);
9876 emit_jump (if_false_label);
9882 /* There's no need to do this now that combine.c can eliminate lots of
9883 sign extensions. This can be less efficient in certain cases on other machines. */
9886 /* If this is a signed equality comparison, we can do it as an
9887 unsigned comparison since zero-extension is cheaper than sign
9888 extension and comparisons with zero are done as unsigned. This is
9889 the case even on machines that can do fast sign extension, since
9890 zero-extension is easier to combine with other operations than
9891 sign-extension is. If we are comparing against a constant, we must
9892 convert it to what it would look like unsigned. */
9893 if ((code == EQ || code == NE) && ! unsignedp
9894 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9896 if (GET_CODE (op1) == CONST_INT
9897 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9898 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9903 if (! if_true_label)
9905 dummy_true_label = 1;
9906 if_true_label = gen_label_rtx ();
9909 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9913 emit_jump (if_false_label);
9914 if (dummy_true_label)
9915 emit_label (if_true_label);
9918 /* Generate code for a comparison expression EXP (including code to compute
9919 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9920 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9921 generated code will drop through.
9922 SIGNED_CODE should be the rtx operation for this comparison for
9923 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9925 We force a stack adjustment unless there are currently
9926 things pushed on the stack that aren't yet used. */
9929 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9932 enum rtx_code signed_code, unsigned_code;
9933 rtx if_false_label, if_true_label;
9937 enum machine_mode mode;
9941 /* Don't crash if the comparison was erroneous. */
9942 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9943 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9946 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9947 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9950 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9951 mode = TYPE_MODE (type);
9952 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9953 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9954 || (GET_MODE_BITSIZE (mode)
9955 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9958 /* op0 might have been replaced by a promoted constant, in which
9959 case the type of the second argument should be used. */
9960 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9961 mode = TYPE_MODE (type);
9963 unsignedp = TREE_UNSIGNED (type);
9964 code = unsignedp ? unsigned_code : signed_code;
9966 #ifdef HAVE_canonicalize_funcptr_for_compare
9967 /* If function pointers need to be "canonicalized" before they can
9968 be reliably compared, then canonicalize them. */
9969 if (HAVE_canonicalize_funcptr_for_compare
9970 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9971 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9974 rtx new_op0 = gen_reg_rtx (mode);
9976 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9980 if (HAVE_canonicalize_funcptr_for_compare
9981 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9982 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9985 rtx new_op1 = gen_reg_rtx (mode);
9987 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9992 /* Do any postincrements in the expression that was tested. */
9995 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9997 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9998 if_false_label, if_true_label);
10001 /* Generate code to calculate EXP using a store-flag instruction
10002 and return an rtx for the result. EXP is either a comparison
10003 or a TRUTH_NOT_EXPR whose operand is a comparison.
10005 If TARGET is nonzero, store the result there if convenient.
10007 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
10010 Return zero if there is no suitable set-flag instruction
10011 available on this machine.
10013 Once expand_expr has been called on the arguments of the comparison,
10014 we are committed to doing the store flag, since it is not safe to
10015 re-evaluate the expression. We emit the store-flag insn by calling
10016 emit_store_flag, but only expand the arguments if we have a reason
10017 to believe that emit_store_flag will be successful. If we think that
10018 it will, but it isn't, we have to simulate the store-flag with a
10019 set/jump/set sequence. */
10022 do_store_flag (exp, target, mode, only_cheap)
10025 enum machine_mode mode;
10028 enum rtx_code code;
10029 tree arg0, arg1, type;
10031 enum machine_mode operand_mode;
10035 enum insn_code icode;
10036 rtx subtarget = target;
10039 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10040 result at the end. We can't simply invert the test since it would
10041 have already been inverted if it were valid. This case occurs for
10042 some floating-point comparisons. */
10044 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10045 invert = 1, exp = TREE_OPERAND (exp, 0);
10047 arg0 = TREE_OPERAND (exp, 0);
10048 arg1 = TREE_OPERAND (exp, 1);
10050 /* Don't crash if the comparison was erroneous. */
10051 if (arg0 == error_mark_node || arg1 == error_mark_node)
10054 type = TREE_TYPE (arg0);
10055 operand_mode = TYPE_MODE (type);
10056 unsignedp = TREE_UNSIGNED (type);
10058 /* We won't bother with BLKmode store-flag operations because it would mean
10059 passing a lot of information to emit_store_flag. */
10060 if (operand_mode == BLKmode)
10063 /* We won't bother with store-flag operations involving function pointers
10064 when function pointers must be canonicalized before comparisons. */
10065 #ifdef HAVE_canonicalize_funcptr_for_compare
10066 if (HAVE_canonicalize_funcptr_for_compare
10067 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10068 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10070 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10071 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10072 == FUNCTION_TYPE))))
10079 /* Get the rtx comparison code to use. We know that EXP is a comparison
10080 operation of some type. Some comparisons against 1 and -1 can be
10081 converted to comparisons with zero. Do so here so that the tests
10082 below will be aware that we have a comparison with zero. These
10083 tests will not catch constants in the first operand, but constants
10084 are rarely passed as the first operand. */
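/* For instance, a signed "x < 1" becomes "x <= 0" and a signed "x > -1" becomes "x >= 0". */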
10086 switch (TREE_CODE (exp))
10095 if (integer_onep (arg1))
10096 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10098 code = unsignedp ? LTU : LT;
10101 if (! unsignedp && integer_all_onesp (arg1))
10102 arg1 = integer_zero_node, code = LT;
10104 code = unsignedp ? LEU : LE;
10107 if (! unsignedp && integer_all_onesp (arg1))
10108 arg1 = integer_zero_node, code = GE;
10110 code = unsignedp ? GTU : GT;
10113 if (integer_onep (arg1))
10114 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10116 code = unsignedp ? GEU : GE;
10119 case UNORDERED_EXPR:
10145 /* Put a constant second. */
10146 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10148 tem = arg0; arg0 = arg1; arg1 = tem;
10149 code = swap_condition (code);
10152 /* If this is an equality or inequality test of a single bit, we can
10153 do this by shifting the bit being tested to the low-order bit and
10154 masking the result with the constant 1. If the condition was EQ,
10155 we xor it with 1. This does not require an scc insn and is faster
10156 than an scc insn even if we have it. */
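/* For instance, "(x & 8) != 0" becomes "(x >> 3) & 1"; for the EQ case the result is additionally XORed with 1. */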
10158 if ((code == NE || code == EQ)
10159 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10160 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10162 tree inner = TREE_OPERAND (arg0, 0);
10163 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
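      /* (Editorial illustration: testing bit 1 of "x >> 2" is the same
         as testing bit 3 of x, so the shift is folded into BITNUM and
         we test x directly.)  */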
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }
      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );
      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);
      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);
  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);
  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
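/* (Editorial note: with the default definition above, a dispatch table
   is not considered until a switch has at least 4 case values when a
   casesi pattern exists, or 5 when it does not; below that, a
   compare/branch chain is cheaper.)  */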
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
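  /* (Editorial example: with a DImode index on a target whose
     index_mode is SImode, a value like 0x100000003 must be rejected
     against the range while still in DImode; truncating to SImode first
     would alias it to 3 and let an out-of-range value reach the
     table.)  */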
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (type_for_size (index_bits, 0),
                                index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
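/* (Editorial note: each casesi operand above follows the same recipe --
   expand it, convert it to the mode recorded in insn_data for that
   operand, then force it into a register whenever the operand predicate
   rejects the rtx as it stands.)  */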
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
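  /* (Editorial example: for case values 10..15 the caller passes
     INDEX = x - 10 and RANGE = 5.  If x < 10 the subtraction wraps, so
     the unsigned INDEX exceeds 5 exactly as it does when x > 15; a
     single GTU branch catches both ends.)  */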
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
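  /* (Editorial sketch: with 4-byte table entries the address built
     below is

        (plus:P (mult:P index (const_int 4))
                (label_ref:P table_label))

     i.e. table_label + index * entry_size.)  */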
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;
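  /* (Editorial example: for a switch whose case values span 3..7 the
     caller passes minval = 3 and range = 4; below we expand x - 3 and
     let do_tablejump bounds-check it against 4.)  */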
  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}