/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */
#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
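/* An illustrative reading of the heuristic above (hypothetical numbers):
   a 16-byte copy between word-aligned operands on a 32-bit target takes
   four SImode moves, so with the default MOVE_RATIO of 15 it is expanded
   inline, while with -Os (ratio 3) it falls through to a movstr pattern
   or a library call.  */
#if 0
  if (MOVE_BY_PIECES_P (16, 32))	/* 4 insns < MOVE_RATIO?  */
    move_by_pieces (dst, src, 16, 32);	/* DST, SRC are hypothetical MEMs */
#endif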
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
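/* To make the warning above concrete, a sketch of the unsafe pattern:
   obtaining the protected value, letting the queue flush, and only then
   using it is exactly the "hold on to it for a while" mistake the
   comment before protect_from_queue describes.  */
#if 0
  rtx val = protect_from_queue (x, 0);
  emit_queue ();			/* queue flushed in between...  */
  emit_move_insn (target, val);		/* ...so VAL may describe stale state */
#endif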
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
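/* A minimal sketch (not from the original file) of how the queue machinery
   fits together when expanding VAR++: the QUEUED rtx returned by
   enqueue_insn stands for the pre-increment value of VAR until emit_queue
   flushes the pending increment.  */
#if 0
  rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
  rtx pre = protect_from_queue (q, 0);	/* usable in an insn now */
  emit_queue ();			/* the increment is emitted here */
#endif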
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
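/* Typical use of convert_move (an illustrative sketch): widen a QImode
   value into an SImode pseudo with zero-extension.  */
#if 0
  rtx byte = gen_reg_rtx (QImode);
  rtx word = gen_reg_rtx (SImode);
  convert_move (word, byte, 1);		/* UNSIGNEDP = 1: zero-extend */
#endif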
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
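/* The practical difference from convert_move (sketch): the caller gets
   back an rtx for the converted value, which may simply be X viewed in
   the other mode rather than a freshly converted register.  */
#if 0
  rtx narrow = convert_to_mode (HImode, wide_reg, 0);	/* 0 = signed */
#endif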
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);
  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
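/* A worked instance of the special case above (illustrative, assuming a
   32-bit HOST_WIDE_INT and a 64-bit MODE): converting (const_int -1)
   whose OLDMODE was QImode must yield the constant 0xFF with a zero
   high-order word, not a sign-extended block of ones.  */
#if 0
  val = -1;				/* INTVAL (x), OLDMODE = QImode */
  val &= ((HOST_WIDE_INT) 1 << 8) - 1;	/* zero extend: 0xFF */
  /* immed_double_const (0xFF, 0, mode) is the desired result.  */
#endif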
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
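/* Worked example (illustrative): L = 7 with 32-bit alignment on a target
   whose widest piece is 4 bytes costs one SImode, one HImode and one
   QImode move, so move_by_pieces_ninsns returns 3.  */
#if 0
  n = move_by_pieces_ninsns (7, 32);	/* 7 = 4 + 2 + 1 -> 3 insns */
#endif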
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size)
     rtx x, y, size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;
      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif
1733 #ifdef TARGET_MEM_FUNCTIONS
1734 /* It is incorrect to use the libcall calling conventions to call
1735 memcpy in this context.
1737 This could be a user call to memcpy and the user may wish to
1738 examine the return value from memcpy.
1740 For targets where libcalls and normal calls have different conventions
1741 for returning pointers, we could end up generating incorrect code.
1743 So instead of using a libcall sequence we build up a suitable
1744 CALL_EXPR and expand the call in the normal fashion. */
1745 if (fn == NULL_TREE)
1749 /* This was copied from except.c, I don't know if all this is
1750 necessary in this context or not. */
1751 fn = get_identifier ("memcpy");
1752 fntype = build_pointer_type (void_type_node);
1753 fntype = build_function_type (fntype, NULL_TREE);
1754 fn = build_decl (FUNCTION_DECL, fn, fntype);
1755 ggc_add_tree_root (&fn, 1);
1756 DECL_EXTERNAL (fn) = 1;
1757 TREE_PUBLIC (fn) = 1;
1758 DECL_ARTIFICIAL (fn) = 1;
1759 TREE_NOTHROW (fn) = 1;
1760 make_decl_rtl (fn, NULL);
1761 assemble_external (fn);
      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      /* If we are initializing a readonly value, show the above call
	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
	 from a loop.  */
      if (RTX_UNCHANGING_P (x))
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
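/* Illustrative call (hypothetical operands): copy 64 bytes between two
   BLKmode MEMs; emit_block_move picks inline moves, a movstr pattern or
   a memcpy/bcopy call based on the size and alignment it sees.  */
#if 0
  retval = emit_block_move (dest_mem, src_mem, GEN_INT (64));
#endif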
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
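/* What a PARALLEL destination can look like (sketch): a 16-byte value
   whose halves live in two registers, as some ABIs return small
   structures.  Each element pairs a register with its byte offset
   within the value; REG0, REG1 and SRC_MEM here are hypothetical.  */
#if 0
  dst = gen_rtx_PARALLEL (VOIDmode,
			  gen_rtvec (2,
				     gen_rtx_EXPR_LIST (VOIDmode, reg0,
							GEN_INT (0)),
				     gen_rtx_EXPR_LIST (VOIDmode, reg1,
							GEN_INT (8))));
  emit_group_load (dst, src_mem, 16);
#endif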
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }
  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */

  /* If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does
     a copy, the wrong part of the register gets copied so we fake
     a type conversion in place.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
      srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
    else
      srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  BITS_PER_WORD),
		       BITS_PER_WORD);
    }

  return tgtblk;
}
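/* Typical caller (sketch): copying a small BLKmode return value out of
   the return register; passing 0 for TGTBLK lets the routine allocate
   the stack temporary itself.  HARD_RETURN_REG is hypothetical.  */
#if 0
  rtx blk = copy_blkmode_from_reg (0, hard_return_reg, type);
#endif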
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
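/* Sketch: recording that a call reads a hard argument register, so that
   flow does not consider the register dead at the call site.  Register
   number 0 here is hypothetical.  */
#if 0
  rtx call_fusage = NULL_RTX;
  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
#endif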
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
2284 can_store_by_pieces (len, constfun, constfundata, align)
2285 unsigned HOST_WIDE_INT len;
2286 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2290 unsigned HOST_WIDE_INT max_size, l;
2291 HOST_WIDE_INT offset = 0;
2292 enum machine_mode mode, tmode;
2293 enum insn_code icode;
2297 if (! MOVE_BY_PIECES_P (len, align))
2300 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2301 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2302 align = MOVE_MAX * BITS_PER_UNIT;
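  /* I.e. when unaligned accesses are cheap, or the claimed alignment
     already exceeds anything the move patterns can exploit, pretend
     the maximum usable alignment, e.g. 32 bits when MOVE_MAX == 4.  */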
2304 /* We would first store what we can in the largest integer mode, then go to
2305 successively smaller modes. */
2308 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2313 max_size = MOVE_MAX_PIECES + 1;
2314 while (max_size > 1)
2316 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2317 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2318 if (GET_MODE_SIZE (tmode) < max_size)
2321 if (mode == VOIDmode)
2324 icode = mov_optab->handlers[(int) mode].insn_code;
2325 if (icode != CODE_FOR_nothing
2326 && align >= GET_MODE_ALIGNMENT (mode))
2328 unsigned int size = GET_MODE_SIZE (mode);
2335 cst = (*constfun) (constfundata, offset, mode);
2336 if (!LEGITIMATE_CONSTANT_P (cst))
2346 max_size = GET_MODE_SIZE (mode);
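      /* Each iteration handles what it can in MODE and then shrinks
	 MAX_SIZE so the next pass tries the next narrower integer mode;
	 e.g. 6 bytes on a 32-bit target would be checked as one SImode
	 piece followed by one HImode piece (a sketch; the exact split
	 depends on MOVE_MAX_PIECES and the available mov patterns).  */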
2349 /* The code above should have handled everything. */
2357 /* Generate several move instructions to store LEN bytes generated by
2358 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2359 pointer which will be passed as argument in every CONSTFUN call.
2360 ALIGN is maximum alignment we can assume. */
2363 store_by_pieces (to, len, constfun, constfundata, align)
2365 unsigned HOST_WIDE_INT len;
2366 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2370 struct store_by_pieces data;
2372 if (! MOVE_BY_PIECES_P (len, align))
2374 to = protect_from_queue (to, 1);
2375 data.constfun = constfun;
2376 data.constfundata = constfundata;
2379 store_by_pieces_1 (&data, align);
2382 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2383 rtx with BLKmode). The caller must pass TO through protect_from_queue
2384 before calling. ALIGN is maximum alignment we can assume. */
2387 clear_by_pieces (to, len, align)
2389 unsigned HOST_WIDE_INT len;
2392 struct store_by_pieces data;
2394 data.constfun = clear_by_pieces_1;
2395 data.constfundata = NULL;
2398 store_by_pieces_1 (&data, align);
2401 /* Callback routine for clear_by_pieces.
2402 Return const0_rtx unconditionally. */
2405 clear_by_pieces_1 (data, offset, mode)
2406 PTR data ATTRIBUTE_UNUSED;
2407 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2408 enum machine_mode mode ATTRIBUTE_UNUSED;
2413 /* Subroutine of clear_by_pieces and store_by_pieces.
2414 Generate several move instructions to store LEN bytes of block TO. (A MEM
2415 rtx with BLKmode). The caller must pass TO through protect_from_queue
2416 before calling. ALIGN is maximum alignment we can assume. */
2419 store_by_pieces_1 (data, align)
2420 struct store_by_pieces *data;
2423 rtx to_addr = XEXP (data->to, 0);
2424 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2425 enum machine_mode mode = VOIDmode, tmode;
2426 enum insn_code icode;
2429 data->to_addr = to_addr;
2431 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2432 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2434 data->explicit_inc_to = 0;
2436 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2438 data->offset = data->len;
2440 /* If storing requires more than two move insns,
2441 copy addresses to registers (to make displacements shorter)
2442 and use post-increment if available. */
2443 if (!data->autinc_to
2444 && move_by_pieces_ninsns (data->len, align) > 2)
2446 /* Determine the main mode we'll be using. */
2447 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2448 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2449 if (GET_MODE_SIZE (tmode) < max_size)
2452 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2454 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2455 data->autinc_to = 1;
2456 data->explicit_inc_to = -1;
2459 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2460 && ! data->autinc_to)
2462 data->to_addr = copy_addr_to_reg (to_addr);
2463 data->autinc_to = 1;
2464 data->explicit_inc_to = 1;
  if (!data->autinc_to && CONSTANT_P (to_addr))
2468 data->to_addr = copy_addr_to_reg (to_addr);
2471 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2472 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2473 align = MOVE_MAX * BITS_PER_UNIT;
2475 /* First store what we can in the largest integer mode, then go to
2476 successively smaller modes. */
2478 while (max_size > 1)
2480 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2481 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2482 if (GET_MODE_SIZE (tmode) < max_size)
2485 if (mode == VOIDmode)
2488 icode = mov_optab->handlers[(int) mode].insn_code;
2489 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2490 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2492 max_size = GET_MODE_SIZE (mode);
2495 /* The code above should have handled everything. */
2500 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2501 with move instructions for mode MODE. GENFUN is the gen_... function
2502 to make a move insn for that mode. DATA has all the other info. */
2505 store_by_pieces_2 (genfun, mode, data)
2506 rtx (*genfun) PARAMS ((rtx, ...));
2507 enum machine_mode mode;
2508 struct store_by_pieces *data;
2510 unsigned int size = GET_MODE_SIZE (mode);
2513 while (data->len >= size)
      if (data->reverse)
	data->offset -= size;
2518 if (data->autinc_to)
2519 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2522 to1 = adjust_address (data->to, mode, data->offset);
2524 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2525 emit_insn (gen_add2_insn (data->to_addr,
2526 GEN_INT (-(HOST_WIDE_INT) size)));
2528 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2529 emit_insn ((*genfun) (to1, cst));
2531 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2532 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2534 if (! data->reverse)
2535 data->offset += size;
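      /* Net effect per iteration: one MODE-sized piece is stored and
	 DATA->offset (and, with explicit_inc_to, DATA->to_addr) steps
	 by SIZE bytes, downward when DATA->reverse is set and upward
	 otherwise.  */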
2541 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2542 its length in bytes. */
2545 clear_storage (object, size)
2549 #ifdef TARGET_MEM_FUNCTIONS
2551 tree call_expr, arg_list;
2554 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2555 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2557 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2558 just move a zero. Otherwise, do this a piece at a time. */
2559 if (GET_MODE (object) != BLKmode
2560 && GET_CODE (size) == CONST_INT
2561 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2562 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
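  /* For instance, clearing a 4-byte SImode object becomes a single
     (set (mem:SI ...) (const_int 0)); no loop or library call is
     needed.  */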
2565 object = protect_from_queue (object, 1);
2566 size = protect_from_queue (size, 0);
2568 if (GET_CODE (size) == CONST_INT
2569 && MOVE_BY_PIECES_P (INTVAL (size), align))
2570 clear_by_pieces (object, INTVAL (size), align);
2573 /* Try the most limited insn first, because there's no point
2574 including more than one in the machine description unless
2575 the more limited one has some advantage. */
2577 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2578 enum machine_mode mode;
2580 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2581 mode = GET_MODE_WIDER_MODE (mode))
2583 enum insn_code code = clrstr_optab[(int) mode];
2584 insn_operand_predicate_fn pred;
2586 if (code != CODE_FOR_nothing
2587 /* We don't need MODE to be narrower than
2588 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2589 the mode mask, as it is returned by the macro, it will
2590 definitely be less than the actual mode mask. */
2591 && ((GET_CODE (size) == CONST_INT
2592 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2593 <= (GET_MODE_MASK (mode) >> 1)))
2594 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2595 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2596 || (*pred) (object, BLKmode))
2597 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2598 || (*pred) (opalign, VOIDmode)))
2601 rtx last = get_last_insn ();
2604 op1 = convert_to_mode (mode, size, 1);
2605 pred = insn_data[(int) code].operand[1].predicate;
2606 if (pred != 0 && ! (*pred) (op1, mode))
2607 op1 = copy_to_mode_reg (mode, op1);
2609 pat = GEN_FCN ((int) code) (object, op1, opalign);
2616 delete_insns_since (last);
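	      /* When the clrstr expansion fails (PAT == 0), the insns
		 emitted while forming its operands are deleted here and
		 we fall through to the call sequence below.  */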
2620 /* OBJECT or SIZE may have been passed through protect_from_queue.
2622 It is unsafe to save the value generated by protect_from_queue
2623 and reuse it later. Consider what happens if emit_queue is
2624 called before the return value from protect_from_queue is used.
2626 Expansion of the CALL_EXPR below will call emit_queue before
2627 we are finished emitting RTL for argument setup. So if we are
2628 not careful we could get the wrong value for an argument.
2630 To avoid this problem we go ahead and emit code to copy OBJECT
2631 and SIZE into new pseudos. We can then place those new pseudos
2632 into an RTL_EXPR and use them later, even after a call to
2635 Note this is not strictly needed for library calls since they
2636 do not call emit_queue before loading their arguments. However,
2637 we may need to have library calls call emit_queue in the future
2638 since failing to do so could cause problems for targets which
2639 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2640 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2642 #ifdef TARGET_MEM_FUNCTIONS
2643 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2645 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2646 TREE_UNSIGNED (integer_type_node));
2647 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2650 #ifdef TARGET_MEM_FUNCTIONS
2651 /* It is incorrect to use the libcall calling conventions to call
2652 memset in this context.
2654 This could be a user call to memset and the user may wish to
2655 examine the return value from memset.
2657 For targets where libcalls and normal calls have different
2658 conventions for returning pointers, we could end up generating
2661 So instead of using a libcall sequence we build up a suitable
2662 CALL_EXPR and expand the call in the normal fashion. */
2663 if (fn == NULL_TREE)
      /* This was copied from except.c; I don't know whether all of it is
	 necessary in this context.  */
2669 fn = get_identifier ("memset");
2670 fntype = build_pointer_type (void_type_node);
2671 fntype = build_function_type (fntype, NULL_TREE);
2672 fn = build_decl (FUNCTION_DECL, fn, fntype);
2673 ggc_add_tree_root (&fn, 1);
2674 DECL_EXTERNAL (fn) = 1;
2675 TREE_PUBLIC (fn) = 1;
2676 DECL_ARTIFICIAL (fn) = 1;
2677 TREE_NOTHROW (fn) = 1;
2678 make_decl_rtl (fn, NULL);
2679 assemble_external (fn);
2682 /* We need to make an argument list for the function call.
	 memset has three arguments: the first is a void * address, the
	 second an integer with the initialization value, and the last is a
	 size_t byte count for the copy.  */
2688 = build_tree_list (NULL_TREE,
2689 make_tree (build_pointer_type (void_type_node),
2691 TREE_CHAIN (arg_list)
2692 = build_tree_list (NULL_TREE,
2693 make_tree (integer_type_node, const0_rtx));
2694 TREE_CHAIN (TREE_CHAIN (arg_list))
2695 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2696 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
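      /* The list built above corresponds to the C call
	 memset ((void *) OBJECT, 0, (size_t) SIZE), which is then
	 expanded as an ordinary call rather than as a libcall.  */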
2698 /* Now we have to build up the CALL_EXPR itself. */
2699 call_expr = build1 (ADDR_EXPR,
2700 build_pointer_type (TREE_TYPE (fn)), fn);
2701 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2702 call_expr, arg_list, NULL_TREE);
2703 TREE_SIDE_EFFECTS (call_expr) = 1;
2705 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2707 emit_library_call (bzero_libfunc, LCT_NORMAL,
2708 VOIDmode, 2, object, Pmode, size,
2709 TYPE_MODE (integer_type_node));
2712 /* If we are initializing a readonly value, show the above call
2713 clobbered it. Otherwise, a load from it may erroneously be
2714 hoisted from a loop. */
2715 if (RTX_UNCHANGING_P (object))
2716 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2723 /* Generate code to copy Y into X.
2724 Both Y and X must have the same mode, except that
2725 Y can be a constant with VOIDmode.
2726 This mode cannot be BLKmode; use emit_block_move for that.
2728 Return the last instruction emitted. */
2731 emit_move_insn (x, y)
2734 enum machine_mode mode = GET_MODE (x);
2735 rtx y_cst = NULL_RTX;
2738 x = protect_from_queue (x, 1);
2739 y = protect_from_queue (y, 0);
2741 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2744 /* Never force constant_p_rtx to memory. */
2745 if (GET_CODE (y) == CONSTANT_P_RTX)
2747 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
	{
	  y_cst = y;
	  y = force_const_mem (mode, y);
	}
2753 /* If X or Y are memory references, verify that their addresses are valid
2755 if (GET_CODE (x) == MEM
2756 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2757 && ! push_operand (x, GET_MODE (x)))
2759 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2760 x = validize_mem (x);
2762 if (GET_CODE (y) == MEM
2763 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2765 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2766 y = validize_mem (y);
2768 if (mode == BLKmode)
2771 last_insn = emit_move_insn_1 (x, y);
2773 if (y_cst && GET_CODE (x) == REG)
2774 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2779 /* Low level part of emit_move_insn.
2780 Called just like emit_move_insn, but assumes X and Y
2781 are basically valid. */
2784 emit_move_insn_1 (x, y)
2787 enum machine_mode mode = GET_MODE (x);
2788 enum machine_mode submode;
2789 enum mode_class class = GET_MODE_CLASS (mode);
2792 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2795 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2797 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2799 /* Expand complex moves by moving real part and imag part, if possible. */
2800 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2801 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2803 (class == MODE_COMPLEX_INT
2804 ? MODE_INT : MODE_FLOAT),
2806 && (mov_optab->handlers[(int) submode].insn_code
2807 != CODE_FOR_nothing))
2809 /* Don't split destination if it is a stack push. */
2810 int stack = push_operand (x, GET_MODE (x));
2812 #ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
	 machine can push exactly, we need to use move instructions.  */
2816 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2819 int offset1, offset2;
2821 /* Do not use anti_adjust_stack, since we don't want to update
2822 stack_pointer_delta. */
2823 temp = expand_binop (Pmode,
2824 #ifdef STACK_GROWS_DOWNWARD
2831 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2835 if (temp != stack_pointer_rtx)
2836 emit_move_insn (stack_pointer_rtx, temp);
2837 #ifdef STACK_GROWS_DOWNWARD
2839 offset2 = GET_MODE_SIZE (submode);
2841 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2842 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2843 + GET_MODE_SIZE (submode));
2845 emit_move_insn (change_address (x, submode,
2846 gen_rtx_PLUS (Pmode,
2848 GEN_INT (offset1))),
2849 gen_realpart (submode, y));
2850 emit_move_insn (change_address (x, submode,
2851 gen_rtx_PLUS (Pmode,
2853 GEN_INT (offset2))),
2854 gen_imagpart (submode, y));
	  /* If this is a stack push, push the highpart first so it
	     ends up in the argument order.
2861 In that case, change_address is used only to convert
2862 the mode, not to change the address. */
2865 /* Note that the real part always precedes the imag part in memory
2866 regardless of machine's endianness. */
2867 #ifdef STACK_GROWS_DOWNWARD
2868 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2869 (gen_rtx_MEM (submode, XEXP (x, 0)),
2870 gen_imagpart (submode, y)));
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_realpart (submode, y)));
2875 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2876 (gen_rtx_MEM (submode, XEXP (x, 0)),
2877 gen_realpart (submode, y)));
2878 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2879 (gen_rtx_MEM (submode, XEXP (x, 0)),
2880 gen_imagpart (submode, y)));
2885 rtx realpart_x, realpart_y;
2886 rtx imagpart_x, imagpart_y;
2888 /* If this is a complex value with each part being smaller than a
2889 word, the usual calling sequence will likely pack the pieces into
2890 a single register. Unfortunately, SUBREG of hard registers only
2891 deals in terms of words, so we have a problem converting input
2892 arguments to the CONCAT of two registers that is used elsewhere
2893 for complex values. If this is before reload, we can copy it into
2894 memory and reload. FIXME, we should see about using extract and
2895 insert on integer registers, but complex short and complex char
2896 variables should be rarely used. */
2897 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2898 && (reload_in_progress | reload_completed) == 0)
2900 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2901 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2903 if (packed_dest_p || packed_src_p)
2905 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2906 ? MODE_FLOAT : MODE_INT);
2908 enum machine_mode reg_mode
2909 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2911 if (reg_mode != BLKmode)
2913 rtx mem = assign_stack_temp (reg_mode,
2914 GET_MODE_SIZE (mode), 0);
2915 rtx cmem = adjust_address (mem, mode, 0);
2918 = N_("function using short complex types cannot be inline");
2922 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2923 emit_move_insn_1 (cmem, y);
2924 return emit_move_insn_1 (sreg, mem);
2928 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2929 emit_move_insn_1 (mem, sreg);
2930 return emit_move_insn_1 (x, cmem);
2936 realpart_x = gen_realpart (submode, x);
2937 realpart_y = gen_realpart (submode, y);
2938 imagpart_x = gen_imagpart (submode, x);
2939 imagpart_y = gen_imagpart (submode, y);
2941 /* Show the output dies here. This is necessary for SUBREGs
2942 of pseudos since we cannot track their lifetimes correctly;
2943 hard regs shouldn't appear here except as return values.
2944 We never want to emit such a clobber after reload. */
2946 && ! (reload_in_progress || reload_completed)
2947 && (GET_CODE (realpart_x) == SUBREG
2948 || GET_CODE (imagpart_x) == SUBREG))
2950 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2953 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2954 (realpart_x, realpart_y));
2955 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2956 (imagpart_x, imagpart_y));
2959 return get_last_insn ();
2962 /* This will handle any multi-word mode that lacks a move_insn pattern.
2963 However, you will get better code if you define such patterns,
2964 even if they must turn into multiple assembler instructions. */
2965 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2971 #ifdef PUSH_ROUNDING
2973 /* If X is a push on the stack, do the push now and replace
2974 X with a reference to the stack pointer. */
2975 if (push_operand (x, GET_MODE (x)))
2980 /* Do not use anti_adjust_stack, since we don't want to update
2981 stack_pointer_delta. */
2982 temp = expand_binop (Pmode,
2983 #ifdef STACK_GROWS_DOWNWARD
2990 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2994 if (temp != stack_pointer_rtx)
2995 emit_move_insn (stack_pointer_rtx, temp);
2997 code = GET_CODE (XEXP (x, 0));
2998 /* Just hope that small offsets off SP are OK. */
2999 if (code == POST_INC)
3000 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3001 GEN_INT (-(HOST_WIDE_INT)
3002 GET_MODE_SIZE (GET_MODE (x))));
3003 else if (code == POST_DEC)
3004 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3005 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3007 temp = stack_pointer_rtx;
3009 x = change_address (x, VOIDmode, temp);
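      /* E.g. for a PRE_DEC push: the stack pointer was already adjusted
	 above, so X is rewritten as a plain MEM at the new stack pointer
	 (POST_INC and POST_DEC get a compensating constant offset
	 instead).  */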
3013 /* If we are in reload, see if either operand is a MEM whose address
3014 is scheduled for replacement. */
3015 if (reload_in_progress && GET_CODE (x) == MEM
3016 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3017 x = replace_equiv_address_nv (x, inner);
3018 if (reload_in_progress && GET_CODE (y) == MEM
3019 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3020 y = replace_equiv_address_nv (y, inner);
3026 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3029 rtx xpart = operand_subword (x, i, 1, mode);
3030 rtx ypart = operand_subword (y, i, 1, mode);
3032 /* If we can't get a part of Y, put Y into memory if it is a
3033 constant. Otherwise, force it into a register. If we still
3034 can't get a part of Y, abort. */
3035 if (ypart == 0 && CONSTANT_P (y))
3037 y = force_const_mem (mode, y);
3038 ypart = operand_subword (y, i, 1, mode);
3040 else if (ypart == 0)
3041 ypart = operand_subword_force (y, i, mode);
3043 if (xpart == 0 || ypart == 0)
3046 need_clobber |= (GET_CODE (xpart) == SUBREG);
3048 last_insn = emit_move_insn (xpart, ypart);
3051 seq = gen_sequence ();
3054 /* Show the output dies here. This is necessary for SUBREGs
3055 of pseudos since we cannot track their lifetimes correctly;
3056 hard regs shouldn't appear here except as return values.
3057 We never want to emit such a clobber after reload. */
3059 && ! (reload_in_progress || reload_completed)
3060 && need_clobber != 0)
3062 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3073 /* Pushing data onto the stack. */
3075 /* Push a block of length SIZE (perhaps variable)
3076 and return an rtx to address the beginning of the block.
3077 Note that it is not possible for the value returned to be a QUEUED.
3078 The value may be virtual_outgoing_args_rtx.
3080 EXTRA is the number of bytes of padding to push in addition to SIZE.
3081 BELOW nonzero means this padding comes at low addresses;
3082 otherwise, the padding comes at high addresses. */
3085 push_block (size, extra, below)
3091 size = convert_modes (Pmode, ptr_mode, size, 1);
3092 if (CONSTANT_P (size))
3093 anti_adjust_stack (plus_constant (size, extra));
3094 else if (GET_CODE (size) == REG && extra == 0)
3095 anti_adjust_stack (size);
3098 temp = copy_to_mode_reg (Pmode, size);
3100 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3101 temp, 0, OPTAB_LIB_WIDEN);
3102 anti_adjust_stack (temp);
3105 #ifndef STACK_GROWS_DOWNWARD
3111 temp = virtual_outgoing_args_rtx;
3112 if (extra != 0 && below)
3113 temp = plus_constant (temp, extra);
3117 if (GET_CODE (size) == CONST_INT)
3118 temp = plus_constant (virtual_outgoing_args_rtx,
3119 -INTVAL (size) - (below ? 0 : extra));
3120 else if (extra != 0 && !below)
3121 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3122 negate_rtx (Pmode, plus_constant (size, extra)));
3124 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3125 negate_rtx (Pmode, size));
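  /* So with a downward-growing stack and a constant SIZE, the block
     starts at virtual_outgoing_args_rtx - INTVAL (SIZE)
     - (BELOW ? 0 : EXTRA).  */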
3128 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3131 #ifdef PUSH_ROUNDING
3133 /* Emit single push insn. */
3136 emit_single_push_insn (mode, x, type)
3138 enum machine_mode mode;
3142 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3144 enum insn_code icode;
3145 insn_operand_predicate_fn pred;
3147 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation at the move expander.  */
3150 icode = push_optab->handlers[(int) mode].insn_code;
3151 if (icode != CODE_FOR_nothing)
3153 if (((pred = insn_data[(int) icode].operand[0].predicate)
3154 && !((*pred) (x, mode))))
3155 x = force_reg (mode, x);
3156 emit_insn (GEN_FCN (icode) (x));
3159 if (GET_MODE_SIZE (mode) == rounded_size)
3160 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3163 #ifdef STACK_GROWS_DOWNWARD
3164 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3165 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3167 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3168 GEN_INT (rounded_size));
3170 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
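      /* E.g. when the natural size is smaller than ROUNDED_SIZE on a
	 downward-growing stack, the destination becomes
	 (mem:MODE (pre_modify (reg sp) (plus (reg sp) (const_int -N))))
	 with N == ROUNDED_SIZE.  */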
3173 dest = gen_rtx_MEM (mode, dest_addr);
3177 set_mem_attributes (dest, type, 1);
3179 if (flag_optimize_sibling_calls)
3180 /* Function incoming arguments may overlap with sibling call
3181 outgoing arguments and we cannot allow reordering of reads
3182 from function arguments with stores to outgoing arguments
3183 of sibling calls. */
3184 set_mem_alias_set (dest, 0);
3186 emit_move_insn (dest, x);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
3194 SIZE is an rtx for the size of data to be copied (in bytes),
3195 needed only if X is BLKmode.
3197 ALIGN (in bits) is maximum alignment we can assume.
3199 If PARTIAL and REG are both nonzero, then copy that many of the first
3200 words of X into registers starting with REG, and push the rest of X.
3201 The amount of space pushed is decreased by PARTIAL words,
3202 rounded *down* to a multiple of PARM_BOUNDARY.
3203 REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.
3208 EXTRA is the amount in bytes of extra space to leave next to this arg.
3209 This is ignored if an argument block has already been allocated.
3211 On a machine that lacks real push insns, ARGS_ADDR is the address of
3212 the bottom of the argument block for this call. We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.
3216 ARGS_SO_FAR is the size of args previously pushed for this call.
3218 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3219 for arguments passed in registers. If nonzero, it will be the number
3220 of bytes required. */
3223 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3224 args_addr, args_so_far, reg_parm_stack_space,
3227 enum machine_mode mode;
3236 int reg_parm_stack_space;
3240 enum direction stack_direction
3241 #ifdef STACK_GROWS_DOWNWARD
3247 /* Decide where to pad the argument: `downward' for below,
3248 `upward' for above, or `none' for don't pad it.
3249 Default is below for small data on big-endian machines; else above. */
3250 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3252 /* Invert direction if stack is post-decrement.
3254 if (STACK_PUSH_CODE == POST_DEC)
3255 if (where_pad != none)
3256 where_pad = (where_pad == downward ? upward : downward);
3258 xinner = x = protect_from_queue (x, 0);
3260 if (mode == BLKmode)
3262 /* Copy a block into the stack, entirely or partially. */
3265 int used = partial * UNITS_PER_WORD;
3266 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3274 /* USED is now the # of bytes we need not copy to the stack
3275 because registers will take care of them. */
3278 xinner = adjust_address (xinner, BLKmode, used);
3280 /* If the partial register-part of the arg counts in its stack size,
3281 skip the part of stack space corresponding to the registers.
3282 Otherwise, start copying to the beginning of the stack space,
3283 by setting SKIP to 0. */
3284 skip = (reg_parm_stack_space == 0) ? 0 : used;
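      /* For example, PARTIAL == 2 on a 32-bit target gives USED == 8:
	 the first eight bytes travel in registers, and SKIP is 8 only
	 when the ABI counts that register part in the argument's stack
	 size (reg_parm_stack_space != 0).  */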
3286 #ifdef PUSH_ROUNDING
3287 /* Do it with several push insns if that doesn't take lots of insns
3288 and if there is no difficulty with push insns that skip bytes
3289 on the stack for alignment purposes. */
3292 && GET_CODE (size) == CONST_INT
3294 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3295 /* Here we avoid the case of a structure whose weak alignment
3296 forces many pushes of a small amount of data,
3297 and such small pushes do rounding that causes trouble. */
3298 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3299 || align >= BIGGEST_ALIGNMENT
3300 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3301 == (align / BITS_PER_UNIT)))
3302 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3304 /* Push padding now if padding above and stack grows down,
3305 or if padding below and stack grows up.
3306 But if space already allocated, this has already been done. */
3307 if (extra && args_addr == 0
3308 && where_pad != none && where_pad != stack_direction)
3309 anti_adjust_stack (GEN_INT (extra));
3311 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3314 #endif /* PUSH_ROUNDING */
3318 /* Otherwise make space on the stack and copy the data
3319 to the address of that space. */
3321 /* Deduct words put into registers from the size we must copy. */
3324 if (GET_CODE (size) == CONST_INT)
3325 size = GEN_INT (INTVAL (size) - used);
3327 size = expand_binop (GET_MODE (size), sub_optab, size,
3328 GEN_INT (used), NULL_RTX, 0,
3332 /* Get the address of the stack space.
3333 In this case, we do not deal with EXTRA separately.
3334 A single stack adjust will do. */
3337 temp = push_block (size, extra, where_pad == downward);
3340 else if (GET_CODE (args_so_far) == CONST_INT)
3341 temp = memory_address (BLKmode,
3342 plus_constant (args_addr,
3343 skip + INTVAL (args_so_far)));
3345 temp = memory_address (BLKmode,
3346 plus_constant (gen_rtx_PLUS (Pmode,
3350 target = gen_rtx_MEM (BLKmode, temp);
3354 set_mem_attributes (target, type, 1);
3355 /* Function incoming arguments may overlap with sibling call
3356 outgoing arguments and we cannot allow reordering of reads
3357 from function arguments with stores to outgoing arguments
3358 of sibling calls. */
3359 set_mem_alias_set (target, 0);
3362 set_mem_align (target, align);
3364 /* TEMP is the address of the block. Copy the data there. */
3365 if (GET_CODE (size) == CONST_INT
3366 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3368 move_by_pieces (target, xinner, INTVAL (size), align);
3373 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3374 enum machine_mode mode;
3376 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3378 mode = GET_MODE_WIDER_MODE (mode))
3380 enum insn_code code = movstr_optab[(int) mode];
3381 insn_operand_predicate_fn pred;
3383 if (code != CODE_FOR_nothing
3384 && ((GET_CODE (size) == CONST_INT
3385 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3386 <= (GET_MODE_MASK (mode) >> 1)))
3387 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3388 && (!(pred = insn_data[(int) code].operand[0].predicate)
3389 || ((*pred) (target, BLKmode)))
3390 && (!(pred = insn_data[(int) code].operand[1].predicate)
3391 || ((*pred) (xinner, BLKmode)))
3392 && (!(pred = insn_data[(int) code].operand[3].predicate)
3393 || ((*pred) (opalign, VOIDmode))))
3395 rtx op2 = convert_to_mode (mode, size, 1);
3396 rtx last = get_last_insn ();
3399 pred = insn_data[(int) code].operand[2].predicate;
3400 if (pred != 0 && ! (*pred) (op2, mode))
3401 op2 = copy_to_mode_reg (mode, op2);
3403 pat = GEN_FCN ((int) code) (target, xinner,
3411 delete_insns_since (last);
3416 if (!ACCUMULATE_OUTGOING_ARGS)
3418 /* If the source is referenced relative to the stack pointer,
3419 copy it to another register to stabilize it. We do not need
3420 to do this if we know that we won't be changing sp. */
3422 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3423 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3424 temp = copy_to_reg (temp);
3427 /* Make inhibit_defer_pop nonzero around the library call
3428 to force it to pop the bcopy-arguments right away. */
3430 #ifdef TARGET_MEM_FUNCTIONS
3431 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3432 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3433 convert_to_mode (TYPE_MODE (sizetype),
3434 size, TREE_UNSIGNED (sizetype)),
3435 TYPE_MODE (sizetype));
3437 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3438 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3439 convert_to_mode (TYPE_MODE (integer_type_node),
3441 TREE_UNSIGNED (integer_type_node)),
3442 TYPE_MODE (integer_type_node));
3447 else if (partial > 0)
3449 /* Scalar partly in registers. */
3451 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3454 /* # words of start of argument
3455 that we must make space for but need not store. */
3456 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3457 int args_offset = INTVAL (args_so_far);
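      /* E.g. a DImode scalar on a 32-bit target occupies SIZE == 2
	 words; with PARTIAL == 1, one word travels in REG and the loop
	 below pushes the remaining word.  */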
3460 /* Push padding now if padding above and stack grows down,
3461 or if padding below and stack grows up.
3462 But if space already allocated, this has already been done. */
3463 if (extra && args_addr == 0
3464 && where_pad != none && where_pad != stack_direction)
3465 anti_adjust_stack (GEN_INT (extra));
3467 /* If we make space by pushing it, we might as well push
3468 the real data. Otherwise, we can leave OFFSET nonzero
3469 and leave the space uninitialized. */
3473 /* Now NOT_STACK gets the number of words that we don't need to
3474 allocate on the stack. */
3475 not_stack = partial - offset;
3477 /* If the partial register-part of the arg counts in its stack size,
3478 skip the part of stack space corresponding to the registers.
3479 Otherwise, start copying to the beginning of the stack space,
3480 by setting SKIP to 0. */
3481 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3483 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3484 x = validize_mem (force_const_mem (mode, x));
3486 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3487 SUBREGs of such registers are not allowed. */
3488 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3489 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3490 x = copy_to_reg (x);
3492 /* Loop over all the words allocated on the stack for this arg. */
  /* We can do it by words, because any scalar bigger than a word
     has a size that is a multiple of a word.  */
3495 #ifndef PUSH_ARGS_REVERSED
3496 for (i = not_stack; i < size; i++)
3498 for (i = size - 1; i >= not_stack; i--)
3500 if (i >= not_stack + offset)
3501 emit_push_insn (operand_subword_force (x, i, mode),
3502 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3504 GEN_INT (args_offset + ((i - not_stack + skip)
3506 reg_parm_stack_space, alignment_pad);
3511 rtx target = NULL_RTX;
3514 /* Push padding now if padding above and stack grows down,
3515 or if padding below and stack grows up.
3516 But if space already allocated, this has already been done. */
3517 if (extra && args_addr == 0
3518 && where_pad != none && where_pad != stack_direction)
3519 anti_adjust_stack (GEN_INT (extra));
3521 #ifdef PUSH_ROUNDING
3522 if (args_addr == 0 && PUSH_ARGS)
3523 emit_single_push_insn (mode, x, type);
3527 if (GET_CODE (args_so_far) == CONST_INT)
3529 = memory_address (mode,
3530 plus_constant (args_addr,
3531 INTVAL (args_so_far)));
3533 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3536 dest = gen_rtx_MEM (mode, addr);
3539 set_mem_attributes (dest, type, 1);
3540 /* Function incoming arguments may overlap with sibling call
3541 outgoing arguments and we cannot allow reordering of reads
3542 from function arguments with stores to outgoing arguments
3543 of sibling calls. */
3544 set_mem_alias_set (dest, 0);
3547 emit_move_insn (dest, x);
3553 /* If part should go in registers, copy that part
3554 into the appropriate registers. Do this now, at the end,
3555 since mem-to-mem copies above may do function calls. */
3556 if (partial > 0 && reg != 0)
3558 /* Handle calls that pass values in multiple non-contiguous locations.
3559 The Irix 6 ABI has examples of this. */
3560 if (GET_CODE (reg) == PARALLEL)
3561 emit_group_load (reg, x, -1); /* ??? size? */
3563 move_block_to_reg (REGNO (reg), x, partial, mode);
3566 if (extra && args_addr == 0 && where_pad == stack_direction)
3567 anti_adjust_stack (GEN_INT (extra));
3569 if (alignment_pad && args_addr == 0)
3570 anti_adjust_stack (alignment_pad);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */
3581 /* Only registers can be subtargets. */
3582 || GET_CODE (x) != REG
3583 /* If the register is readonly, it can't be set more than once. */
3584 || RTX_UNCHANGING_P (x)
3585 /* Don't use hard regs to avoid extending their life. */
3586 || REGNO (x) < FIRST_PSEUDO_REGISTER
3587 /* Avoid subtargets inside loops,
3588 since they hide some invariant expressions. */
3589 || preserve_subexpressions_p ())
3593 /* Expand an assignment that stores the value of FROM into TO.
3594 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3595 (This may contain a QUEUED rtx;
3596 if the value is constant, this rtx is a constant.)
3597 Otherwise, the returned value is NULL_RTX.
3599 SUGGEST_REG is no longer actually used.
3600 It used to mean, copy the value through a register
3601 and return that register, if that is possible.
3602 We now use WANT_VALUE to decide whether to do this. */
3605 expand_assignment (to, from, want_value, suggest_reg)
3608 int suggest_reg ATTRIBUTE_UNUSED;
3613 /* Don't crash if the lhs of the assignment was erroneous. */
3615 if (TREE_CODE (to) == ERROR_MARK)
3617 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3618 return want_value ? result : NULL_RTX;
3621 /* Assignment of a structure component needs special treatment
3622 if the structure component's rtx is not simply a MEM.
3623 Assignment of an array element at a constant index, and assignment of
3624 an array element in an unaligned packed structure field, has the same
3627 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3628 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3630 enum machine_mode mode1;
3631 HOST_WIDE_INT bitsize, bitpos;
3639 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3640 &unsignedp, &volatilep);
3642 /* If we are going to use store_bit_field and extract_bit_field,
3643 make sure to_rtx will be safe for multiple use. */
3645 if (mode1 == VOIDmode && want_value)
3646 tem = stabilize_reference (tem);
3648 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode,
3652 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3654 if (GET_CODE (to_rtx) != MEM)
3657 if (GET_MODE (offset_rtx) != ptr_mode)
3658 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3660 #ifdef POINTERS_EXTEND_UNSIGNED
3661 if (GET_MODE (offset_rtx) != Pmode)
3662 offset_rtx = convert_memory_address (Pmode, offset_rtx);
	  /* A constant address in TO_RTX can have VOIDmode; we must not
	     try to call force_reg in that case, so avoid it.  */
3667 if (GET_CODE (to_rtx) == MEM
3668 && GET_MODE (to_rtx) == BLKmode
3669 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3671 && (bitpos % bitsize) == 0
3672 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3673 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3676 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3678 if (GET_CODE (XEXP (temp, 0)) == REG)
3681 to_rtx = (replace_equiv_address
3682 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3687 to_rtx = offset_address (to_rtx, offset_rtx,
3688 highest_pow2_factor (offset));
3691 if (GET_CODE (to_rtx) == MEM)
3693 tree old_expr = MEM_EXPR (to_rtx);
3695 /* If the field is at offset zero, we could have been given the
3696 DECL_RTX of the parent struct. Don't munge it. */
3697 to_rtx = shallow_copy_rtx (to_rtx);
3699 set_mem_attributes (to_rtx, to, 0);
3701 /* If we changed MEM_EXPR, that means we're now referencing
3702 the COMPONENT_REF, which means that MEM_OFFSET must be
3703 relative to that field. But we've not yet reflected BITPOS
3704 in TO_RTX. This will be done in store_field. Adjust for
3705 that by biasing MEM_OFFSET by -bitpos. */
3706 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3707 && (bitpos / BITS_PER_UNIT) != 0)
3708 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3709 - (bitpos / BITS_PER_UNIT)));
3712 /* Deal with volatile and readonly fields. The former is only done
3713 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3714 if (volatilep && GET_CODE (to_rtx) == MEM)
3716 if (to_rtx == orig_to_rtx)
3717 to_rtx = copy_rtx (to_rtx);
3718 MEM_VOLATILE_P (to_rtx) = 1;
3721 if (TREE_CODE (to) == COMPONENT_REF
3722 && TREE_READONLY (TREE_OPERAND (to, 1)))
3724 if (to_rtx == orig_to_rtx)
3725 to_rtx = copy_rtx (to_rtx);
3726 RTX_UNCHANGING_P (to_rtx) = 1;
3729 if (! can_address_p (to))
3731 if (to_rtx == orig_to_rtx)
3732 to_rtx = copy_rtx (to_rtx);
3733 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3736 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3738 /* Spurious cast for HPUX compiler. */
3739 ? ((enum machine_mode)
3740 TYPE_MODE (TREE_TYPE (to)))
3742 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3744 preserve_temp_slots (result);
3748 /* If the value is meaningful, convert RESULT to the proper mode.
3749 Otherwise, return nothing. */
3750 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3751 TYPE_MODE (TREE_TYPE (from)),
3753 TREE_UNSIGNED (TREE_TYPE (to)))
3757 /* If the rhs is a function call and its value is not an aggregate,
3758 call the function before we start to compute the lhs.
3759 This is needed for correct code for cases such as
3760 val = setjmp (buf) on machines where reference to val
3761 requires loading up part of an address in a separate insn.
3763 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3764 since it might be a promoted variable where the zero- or sign- extension
3765 needs to be done. Handling this in the normal way is safe because no
3766 computation is done before the call. */
3767 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3768 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3769 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3770 && GET_CODE (DECL_RTL (to)) == REG))
3775 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3777 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3779 /* Handle calls that return values in multiple non-contiguous locations.
3780 The Irix 6 ABI has examples of this. */
3781 if (GET_CODE (to_rtx) == PARALLEL)
3782 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3783 else if (GET_MODE (to_rtx) == BLKmode)
3784 emit_block_move (to_rtx, value, expr_size (from));
3787 #ifdef POINTERS_EXTEND_UNSIGNED
3788 if (POINTER_TYPE_P (TREE_TYPE (to))
3789 && GET_MODE (to_rtx) != GET_MODE (value))
3790 value = convert_memory_address (GET_MODE (to_rtx), value);
3792 emit_move_insn (to_rtx, value);
3794 preserve_temp_slots (to_rtx);
3797 return want_value ? to_rtx : NULL_RTX;
3800 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3801 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3804 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3806 /* Don't move directly into a return register. */
3807 if (TREE_CODE (to) == RESULT_DECL
3808 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3813 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3815 if (GET_CODE (to_rtx) == PARALLEL)
3816 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3818 emit_move_insn (to_rtx, temp);
3820 preserve_temp_slots (to_rtx);
3823 return want_value ? to_rtx : NULL_RTX;
3826 /* In case we are returning the contents of an object which overlaps
3827 the place the value is being stored, use a safe function when copying
3828 a value through a pointer into a structure value return block. */
3829 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3830 && current_function_returns_struct
3831 && !current_function_returns_pcc_struct)
3836 size = expr_size (from);
3837 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3839 #ifdef TARGET_MEM_FUNCTIONS
3840 emit_library_call (memmove_libfunc, LCT_NORMAL,
3841 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3842 XEXP (from_rtx, 0), Pmode,
3843 convert_to_mode (TYPE_MODE (sizetype),
3844 size, TREE_UNSIGNED (sizetype)),
3845 TYPE_MODE (sizetype));
3847 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3848 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3849 XEXP (to_rtx, 0), Pmode,
3850 convert_to_mode (TYPE_MODE (integer_type_node),
3851 size, TREE_UNSIGNED (integer_type_node)),
3852 TYPE_MODE (integer_type_node));
3855 preserve_temp_slots (to_rtx);
3858 return want_value ? to_rtx : NULL_RTX;
3861 /* Compute FROM and store the value in the rtx we got. */
3864 result = store_expr (from, to_rtx, want_value);
3865 preserve_temp_slots (result);
3868 return want_value ? result : NULL_RTX;
3871 /* Generate code for computing expression EXP,
3872 and storing the value into TARGET.
3873 TARGET may contain a QUEUED rtx.
3875 If WANT_VALUE is nonzero, return a copy of the value
3876 not in TARGET, so that we can be sure to use the proper
3877 value in a containing expression even if TARGET has something
3878 else stored in it. If possible, we copy the value through a pseudo
3879 and return that pseudo. Or, if the value is constant, we try to
3880 return the constant. In some cases, we return a pseudo
3881 copied *from* TARGET.
3883 If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?
3890 If WANT_VALUE is 0, we return NULL, to make sure
3891 to catch quickly any cases where the caller uses the value
3892 and fails to set WANT_VALUE. */
3895 store_expr (exp, target, want_value)
3901 int dont_return_target = 0;
3902 int dont_store_target = 0;
3904 if (TREE_CODE (exp) == COMPOUND_EXPR)
3906 /* Perform first part of compound expression, then assign from second
3908 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3910 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3912 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3914 /* For conditional expression, get safe form of the target. Then
3915 test the condition, doing the appropriate assignment on either
3916 side. This avoids the creation of unnecessary temporaries.
3917 For non-BLKmode, it is more efficient not to do this. */
3919 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3922 target = protect_from_queue (target, 1);
3924 do_pending_stack_adjust ();
3926 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3927 start_cleanup_deferral ();
3928 store_expr (TREE_OPERAND (exp, 1), target, 0);
3929 end_cleanup_deferral ();
3931 emit_jump_insn (gen_jump (lab2));
3934 start_cleanup_deferral ();
3935 store_expr (TREE_OPERAND (exp, 2), target, 0);
3936 end_cleanup_deferral ();
3941 return want_value ? target : NULL_RTX;
3943 else if (queued_subexp_p (target))
3944 /* If target contains a postincrement, let's not risk
3945 using it as the place to generate the rhs. */
3947 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3949 /* Expand EXP into a new pseudo. */
3950 temp = gen_reg_rtx (GET_MODE (target));
3951 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3954 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3956 /* If target is volatile, ANSI requires accessing the value
3957 *from* the target, if it is accessed. So make that happen.
3958 In no case return the target itself. */
3959 if (! MEM_VOLATILE_P (target) && want_value)
3960 dont_return_target = 1;
3962 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3963 && GET_MODE (target) != BLKmode)
3964 /* If target is in memory and caller wants value in a register instead,
3965 arrange that. Pass TARGET as target for expand_expr so that,
3966 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3967 We know expand_expr will not use the target in that case.
3968 Don't do this if TARGET is volatile because we are supposed
3969 to write it and then read it. */
3971 temp = expand_expr (exp, target, GET_MODE (target), 0);
3972 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3974 /* If TEMP is already in the desired TARGET, only copy it from
3975 memory and don't store it there again. */
3977 || (rtx_equal_p (temp, target)
3978 && ! side_effects_p (temp) && ! side_effects_p (target)))
3979 dont_store_target = 1;
3980 temp = copy_to_reg (temp);
3982 dont_return_target = 1;
3984 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
3986 than the declared mode, compute the result into its declared mode
3987 and then convert to the wider mode. Our value is the computed
3990 /* If we don't want a value, we can do the conversion inside EXP,
3991 which will often result in some optimizations. Do the conversion
3992 in two steps: first change the signedness, if needed, then
3993 the extend. But don't do this if the type of EXP is a subtype
3994 of something else since then the conversion might involve
3995 more than just converting modes. */
3996 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3997 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3999 if (TREE_UNSIGNED (TREE_TYPE (exp))
4000 != SUBREG_PROMOTED_UNSIGNED_P (target))
4003 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4007 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4008 SUBREG_PROMOTED_UNSIGNED_P (target)),
4012 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4014 /* If TEMP is a volatile MEM and we want a result value, make
4015 the access now so it gets done only once. Likewise if
4016 it contains TARGET. */
4017 if (GET_CODE (temp) == MEM && want_value
4018 && (MEM_VOLATILE_P (temp)
4019 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4020 temp = copy_to_reg (temp);
4022 /* If TEMP is a VOIDmode constant, use convert_modes to make
4023 sure that we properly convert it. */
4024 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4026 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4027 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4028 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4029 GET_MODE (target), temp,
4030 SUBREG_PROMOTED_UNSIGNED_P (target));
4033 convert_move (SUBREG_REG (target), temp,
4034 SUBREG_PROMOTED_UNSIGNED_P (target));
4036 /* If we promoted a constant, change the mode back down to match
4037 target. Otherwise, the caller might get confused by a result whose
4038 mode is larger than expected. */
4040 if (want_value && GET_MODE (temp) != GET_MODE (target)
4041 && GET_MODE (temp) != VOIDmode)
4043 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4044 SUBREG_PROMOTED_VAR_P (temp) = 1;
4045 SUBREG_PROMOTED_UNSIGNED_P (temp)
4046 = SUBREG_PROMOTED_UNSIGNED_P (target);
4049 return want_value ? temp : NULL_RTX;
4053 temp = expand_expr (exp, target, GET_MODE (target), 0);
4054 /* Return TARGET if it's a specified hardware register.
4055 If TARGET is a volatile mem ref, either return TARGET
4056 or return a reg copied *from* TARGET; ANSI requires this.
4058 Otherwise, if TEMP is not TARGET, return TEMP
4059 if it is constant (for efficiency),
4060 or if we really want the correct value. */
4061 if (!(target && GET_CODE (target) == REG
4062 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4063 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4064 && ! rtx_equal_p (temp, target)
4065 && (CONSTANT_P (temp) || want_value))
4066 dont_return_target = 1;
4069 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4070 the same as that of TARGET, adjust the constant. This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
4073 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4074 && TREE_CODE (exp) != ERROR_MARK
4075 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4076 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4077 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4079 /* If value was not generated in the target, store it there.
4080 Convert the value to TARGET's type first if necessary.
4081 If TEMP and TARGET compare equal according to rtx_equal_p, but
4082 one or both of them are volatile memory refs, we have to distinguish
4084 - expand_expr has used TARGET. In this case, we must not generate
4085 another copy. This can be detected by TARGET being equal according
4087 - expand_expr has not used TARGET - that means that the source just
4088 happens to have the same RTX form. Since temp will have been created
4089 by expand_expr, it will compare unequal according to == .
4090 We must generate a copy in this case, to reach the correct number
4091 of volatile memory references. */
4093 if ((! rtx_equal_p (temp, target)
4094 || (temp != target && (side_effects_p (temp)
4095 || side_effects_p (target))))
4096 && TREE_CODE (exp) != ERROR_MARK
4097 && ! dont_store_target)
4099 target = protect_from_queue (target, 1);
4100 if (GET_MODE (temp) != GET_MODE (target)
4101 && GET_MODE (temp) != VOIDmode)
4103 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4104 if (dont_return_target)
4106 /* In this case, we will return TEMP,
4107 so make sure it has the proper mode.
4108 But don't forget to store the value into TARGET. */
4109 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4110 emit_move_insn (target, temp);
4113 convert_move (target, temp, unsignedp);
4116 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4118 /* Handle copying a string constant into an array. The string
4119 constant may be shorter than the array. So copy just the string's
4120 actual length, and clear the rest. First get the size of the data
4121 type of the string, which is actually the size of the target. */
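      /* E.g. for char buf[8] initialized with "hi", SIZE is 8 while the
	 string supplies only 3 bytes (counting the trailing nul,
	 assuming a C-style STRING_CST); the remaining 5 bytes are
	 cleared below.  */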
4122 rtx size = expr_size (exp);
4124 if (GET_CODE (size) == CONST_INT
4125 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4126 emit_block_move (target, temp, size);
4129 /* Compute the size of the data to copy from the string. */
4131 = size_binop (MIN_EXPR,
4132 make_tree (sizetype, size),
4133 size_int (TREE_STRING_LENGTH (exp)));
4134 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4138 /* Copy that much. */
4139 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4140 emit_block_move (target, temp, copy_size_rtx);
4142 /* Figure out how much is left in TARGET that we have to clear.
4143 Do all calculations in ptr_mode. */
4144 if (GET_CODE (copy_size_rtx) == CONST_INT)
4146 size = plus_constant (size, -INTVAL (copy_size_rtx));
4147 target = adjust_address (target, BLKmode,
4148 INTVAL (copy_size_rtx));
4152 size = expand_binop (ptr_mode, sub_optab, size,
4153 copy_size_rtx, NULL_RTX, 0,
4156 #ifdef POINTERS_EXTEND_UNSIGNED
4157 if (GET_MODE (copy_size_rtx) != Pmode)
4158 copy_size_rtx = convert_memory_address (Pmode,
4162 target = offset_address (target, copy_size_rtx,
4163 highest_pow2_factor (copy_size));
4164 label = gen_label_rtx ();
4165 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4166 GET_MODE (size), 0, label);
4169 if (size != const0_rtx)
4170 clear_storage (target, size);
4176 /* Handle calls that return values in multiple non-contiguous locations.
4177 The Irix 6 ABI has examples of this. */
4178 else if (GET_CODE (target) == PARALLEL)
4179 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4180 else if (GET_MODE (temp) == BLKmode)
4181 emit_block_move (target, temp, expr_size (exp));
4183 emit_move_insn (target, temp);
4186 /* If we don't want a value, return NULL_RTX. */
4190 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4191 ??? The latter test doesn't seem to make sense. */
4192 else if (dont_return_target && GET_CODE (temp) != MEM)
4195 /* Return TARGET itself if it is a hard register. */
4196 else if (want_value && GET_MODE (target) != BLKmode
4197 && ! (GET_CODE (target) == REG
4198 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4199 return copy_to_reg (target);
4205 /* Return 1 if EXP just contains zeros. */
4213 switch (TREE_CODE (exp))
4217 case NON_LVALUE_EXPR:
4218 case VIEW_CONVERT_EXPR:
4219 return is_zeros_p (TREE_OPERAND (exp, 0));
4222 return integer_zerop (exp);
4226 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4229 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4232 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4233 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4234 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4235 if (! is_zeros_p (TREE_VALUE (elt)))
4245 /* Return 1 if EXP contains mostly (3/4) zeros. */
4248 mostly_zeros_p (exp)
4251 if (TREE_CODE (exp) == CONSTRUCTOR)
4253 int elts = 0, zeros = 0;
4254 tree elt = CONSTRUCTOR_ELTS (exp);
4255 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4257 /* If there are no ranges of true bits, it is all zero. */
4258 return elt == NULL_TREE;
4260 for (; elt; elt = TREE_CHAIN (elt))
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     and since this function is used only for nested array elements,
	     this should be close enough.  */
4267 if (mostly_zeros_p (TREE_VALUE (elt)))
4272 return 4 * zeros >= 3 * elts;
4275 return is_zeros_p (exp);
4278 /* Helper function for store_constructor.
4279 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4280 TYPE is the type of the CONSTRUCTOR, not the element type.
4281 CLEARED is as for store_constructor.
4282 ALIAS_SET is the alias set to use for any stores.
4284 This provides a recursive shortcut back to store_constructor when it isn't
4285 necessary to go through store_field. This is so that we can pass through
4286 the cleared field to let store_constructor know that we may not have to
4287 clear a substructure if the outer structure has already been cleared. */
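/* Illustrative sketch of the shortcut: given
     struct inner { int a, b; };
     struct outer { struct inner i; int c; } x = { { 0, 0 }, 1 };
   once the outer aggregate has been cleared, the recursive call for
   the all-zero constructor of x.i sees CLEARED set and can avoid
   both re-clearing and storing that substructure.  */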
4290 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4293 unsigned HOST_WIDE_INT bitsize;
4294 HOST_WIDE_INT bitpos;
4295 enum machine_mode mode;
4300 if (TREE_CODE (exp) == CONSTRUCTOR
4301 && bitpos % BITS_PER_UNIT == 0
4302 /* If we have a non-zero bitpos for a register target, then we just
4303 let store_field do the bitfield handling. This is unlikely to
4304 generate unnecessary clear instructions anyway. */
4305 && (bitpos == 0 || GET_CODE (target) == MEM))
4307 if (GET_CODE (target) == MEM)
4309 = adjust_address (target,
4310 GET_MODE (target) == BLKmode
4312 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4313 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4316 /* Update the alias set, if required. */
4317 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4318 && MEM_ALIAS_SET (target) != 0)
4320 target = copy_rtx (target);
4321 set_mem_alias_set (target, alias_set);
4324 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4327 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4331 /* Store the value of constructor EXP into the rtx TARGET.
4332 TARGET is either a REG or a MEM; we know it cannot conflict, since
4333 safe_from_p has been called.
4334 CLEARED is true if TARGET is known to have been zero'd.
4335 SIZE is the number of bytes of TARGET we are allowed to modify: this
4336 may not be the same as the size of EXP if we are assigning to a field
4337 which has been packed to exclude padding bits. */
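/* For example (illustrative only):
     struct s { int a, b, c; } x = { 1 };
   supplies fewer constructor elements than the type has fields, so
   the code below first clears all of TARGET and then stores just
   the explicit value 1 into the field for `a'.  */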
4340 store_constructor (exp, target, cleared, size)
4346 tree type = TREE_TYPE (exp);
4347 #ifdef WORD_REGISTER_OPERATIONS
4348 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4351 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4352 || TREE_CODE (type) == QUAL_UNION_TYPE)
4356 /* We either clear the aggregate or indicate the value is dead. */
4357 if ((TREE_CODE (type) == UNION_TYPE
4358 || TREE_CODE (type) == QUAL_UNION_TYPE)
4360 && ! CONSTRUCTOR_ELTS (exp))
4361 /* If the constructor is empty, clear the union. */
4363 clear_storage (target, expr_size (exp));
4367 /* If we are building a static constructor into a register,
4368 set the initial value as zero so we can fold the value into
4369 a constant. But if more than one register is involved,
4370 this probably loses. */
4371 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4372 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4374 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4378 /* If the constructor has fewer fields than the structure
4379 or if we are initializing the structure to mostly zeros,
4380 clear the whole structure first. Don't do this if TARGET is a
4381 register whose mode size isn't equal to SIZE since clear_storage
4382 can't handle this case. */
4383 else if (! cleared && size > 0
4384 && ((list_length (CONSTRUCTOR_ELTS (exp))
4385 != fields_length (type))
4386 || mostly_zeros_p (exp))
4387 && (GET_CODE (target) != REG
4388 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4391 clear_storage (target, GEN_INT (size));
4396 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4398 /* Store each element of the constructor into
4399 the corresponding field of TARGET. */
4401 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4403 tree field = TREE_PURPOSE (elt);
4404 tree value = TREE_VALUE (elt);
4405 enum machine_mode mode;
4406 HOST_WIDE_INT bitsize;
4407 HOST_WIDE_INT bitpos = 0;
4410 rtx to_rtx = target;
4412 /* Just ignore missing fields.
4413 We cleared the whole structure, above,
4414 if any fields are missing. */
4418 if (cleared && is_zeros_p (value))
4421 if (host_integerp (DECL_SIZE (field), 1))
4422 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4426 unsignedp = TREE_UNSIGNED (field);
4427 mode = DECL_MODE (field);
4428 if (DECL_BIT_FIELD (field))
4431 offset = DECL_FIELD_OFFSET (field);
4432 if (host_integerp (offset, 0)
4433 && host_integerp (bit_position (field), 0))
4435 bitpos = int_bit_position (field);
4439 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4445 if (contains_placeholder_p (offset))
4446 offset = build (WITH_RECORD_EXPR, sizetype,
4447 offset, make_tree (TREE_TYPE (exp), target));
4449 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4450 if (GET_CODE (to_rtx) != MEM)
4453 if (GET_MODE (offset_rtx) != ptr_mode)
4454 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4456 #ifdef POINTERS_EXTEND_UNSIGNED
4457 if (GET_MODE (offset_rtx) != Pmode)
4458 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4461 to_rtx = offset_address (to_rtx, offset_rtx,
4462 highest_pow2_factor (offset));
4465 if (TREE_READONLY (field))
4467 if (GET_CODE (to_rtx) == MEM)
4468 to_rtx = copy_rtx (to_rtx);
4470 RTX_UNCHANGING_P (to_rtx) = 1;
4473 #ifdef WORD_REGISTER_OPERATIONS
4474 /* If this initializes a field that is smaller than a word, at the
4475 start of a word, try to widen it to a full word.
4476 This special case allows us to output C++ member function
4477 initializations in a form that the optimizers can understand. */
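/* Illustrative case: storing the constant 5 into a `char' member
   that starts at bit 0 of a word of a REG target becomes a single
   full-word move of 5 (shifted to the top of the word first on
   big-endian machines, as arranged below).  */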
4478 if (GET_CODE (target) == REG
4479 && bitsize < BITS_PER_WORD
4480 && bitpos % BITS_PER_WORD == 0
4481 && GET_MODE_CLASS (mode) == MODE_INT
4482 && TREE_CODE (value) == INTEGER_CST
4484 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4486 tree type = TREE_TYPE (value);
4488 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4490 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4491 value = convert (type, value);
4494 if (BYTES_BIG_ENDIAN)
4496 = fold (build (LSHIFT_EXPR, type, value,
4497 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4498 bitsize = BITS_PER_WORD;
4503 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4504 && DECL_NONADDRESSABLE_P (field))
4506 to_rtx = copy_rtx (to_rtx);
4507 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4510 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4511 value, type, cleared,
4512 get_alias_set (TREE_TYPE (field)));
4515 else if (TREE_CODE (type) == ARRAY_TYPE)
4520 tree domain = TYPE_DOMAIN (type);
4521 tree elttype = TREE_TYPE (type);
4522 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4523 && TYPE_MAX_VALUE (domain)
4524 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4525 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4526 HOST_WIDE_INT minelt = 0;
4527 HOST_WIDE_INT maxelt = 0;
4529 /* If we have constant bounds for the range of the type, get them. */
4532 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4533 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4536 /* If the constructor has fewer elements than the array,
4537 clear the whole array first. Similarly if this is
4538 a static constructor of a non-BLKmode object. */
4539 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4543 HOST_WIDE_INT count = 0, zero_count = 0;
4544 need_to_clear = ! const_bounds_p;
4546 /* This loop is a more accurate version of the loop in
4547 mostly_zeros_p (it handles RANGE_EXPR in an index).
4548 It is also needed to check for missing elements. */
4549 for (elt = CONSTRUCTOR_ELTS (exp);
4550 elt != NULL_TREE && ! need_to_clear;
4551 elt = TREE_CHAIN (elt))
4553 tree index = TREE_PURPOSE (elt);
4554 HOST_WIDE_INT this_node_count;
4556 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4558 tree lo_index = TREE_OPERAND (index, 0);
4559 tree hi_index = TREE_OPERAND (index, 1);
4561 if (! host_integerp (lo_index, 1)
4562 || ! host_integerp (hi_index, 1))
4568 this_node_count = (tree_low_cst (hi_index, 1)
4569 - tree_low_cst (lo_index, 1) + 1);
4572 this_node_count = 1;
4574 count += this_node_count;
4575 if (mostly_zeros_p (TREE_VALUE (elt)))
4576 zero_count += this_node_count;
4579 /* Clear the entire array first if there are any missing elements,
4580 or if the incidence of zero elements is >= 75%. */
4582 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4586 if (need_to_clear && size > 0)
4589 clear_storage (target, GEN_INT (size));
4592 else if (REG_P (target))
4593 /* Inform later passes that the old value is dead. */
4594 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4596 /* Store each element of the constructor into
4597 the corresponding element of TARGET, determined
4598 by counting the elements. */
4599 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4601 elt = TREE_CHAIN (elt), i++)
4603 enum machine_mode mode;
4604 HOST_WIDE_INT bitsize;
4605 HOST_WIDE_INT bitpos;
4607 tree value = TREE_VALUE (elt);
4608 tree index = TREE_PURPOSE (elt);
4609 rtx xtarget = target;
4611 if (cleared && is_zeros_p (value))
4614 unsignedp = TREE_UNSIGNED (elttype);
4615 mode = TYPE_MODE (elttype);
4616 if (mode == BLKmode)
4617 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4618 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4621 bitsize = GET_MODE_BITSIZE (mode);
4623 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4625 tree lo_index = TREE_OPERAND (index, 0);
4626 tree hi_index = TREE_OPERAND (index, 1);
4627 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4628 struct nesting *loop;
4629 HOST_WIDE_INT lo, hi, count;
4632 /* If the range is constant and "small", unroll the loop. */
4634 && host_integerp (lo_index, 0)
4635 && host_integerp (hi_index, 0)
4636 && (lo = tree_low_cst (lo_index, 0),
4637 hi = tree_low_cst (hi_index, 0),
4638 count = hi - lo + 1,
4639 (GET_CODE (target) != MEM
4641 || (host_integerp (TYPE_SIZE (elttype), 1)
4642 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4645 lo -= minelt; hi -= minelt;
4646 for (; lo <= hi; lo++)
4648 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4650 if (GET_CODE (target) == MEM
4651 && !MEM_KEEP_ALIAS_SET_P (target)
4652 && TYPE_NONALIASED_COMPONENT (type))
4654 target = copy_rtx (target);
4655 MEM_KEEP_ALIAS_SET_P (target) = 1;
4658 store_constructor_field
4659 (target, bitsize, bitpos, mode, value, type, cleared,
4660 get_alias_set (elttype));
4665 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4666 loop_top = gen_label_rtx ();
4667 loop_end = gen_label_rtx ();
4669 unsignedp = TREE_UNSIGNED (domain);
4671 index = build_decl (VAR_DECL, NULL_TREE, domain);
4674 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4676 SET_DECL_RTL (index, index_r);
4677 if (TREE_CODE (value) == SAVE_EXPR
4678 && SAVE_EXPR_RTL (value) == 0)
4680 /* Make sure value gets expanded once before the loop. */
4682 expand_expr (value, const0_rtx, VOIDmode, 0);
4685 store_expr (lo_index, index_r, 0);
4686 loop = expand_start_loop (0);
4688 /* Assign value to element index. */
4690 = convert (ssizetype,
4691 fold (build (MINUS_EXPR, TREE_TYPE (index),
4692 index, TYPE_MIN_VALUE (domain))));
4693 position = size_binop (MULT_EXPR, position,
4695 TYPE_SIZE_UNIT (elttype)));
4697 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4698 xtarget = offset_address (target, pos_rtx,
4699 highest_pow2_factor (position));
4700 xtarget = adjust_address (xtarget, mode, 0);
4701 if (TREE_CODE (value) == CONSTRUCTOR)
4702 store_constructor (value, xtarget, cleared,
4703 bitsize / BITS_PER_UNIT);
4705 store_expr (value, xtarget, 0);
4707 expand_exit_loop_if_false (loop,
4708 build (LT_EXPR, integer_type_node,
4711 expand_increment (build (PREINCREMENT_EXPR,
4713 index, integer_one_node), 0, 0);
4715 emit_label (loop_end);
4718 else if ((index != 0 && ! host_integerp (index, 0))
4719 || ! host_integerp (TYPE_SIZE (elttype), 1))
4724 index = ssize_int (1);
4727 index = convert (ssizetype,
4728 fold (build (MINUS_EXPR, index,
4729 TYPE_MIN_VALUE (domain))));
4731 position = size_binop (MULT_EXPR, index,
4733 TYPE_SIZE_UNIT (elttype)));
4734 xtarget = offset_address (target,
4735 expand_expr (position, 0, VOIDmode, 0),
4736 highest_pow2_factor (position));
4737 xtarget = adjust_address (xtarget, mode, 0);
4738 store_expr (value, xtarget, 0);
4743 bitpos = ((tree_low_cst (index, 0) - minelt)
4744 * tree_low_cst (TYPE_SIZE (elttype), 1));
4746 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4748 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4749 && TYPE_NONALIASED_COMPONENT (type))
4751 target = copy_rtx (target);
4752 MEM_KEEP_ALIAS_SET_P (target) = 1;
4755 store_constructor_field (target, bitsize, bitpos, mode, value,
4756 type, cleared, get_alias_set (elttype));
4762 /* Set constructor assignments. */
4763 else if (TREE_CODE (type) == SET_TYPE)
4765 tree elt = CONSTRUCTOR_ELTS (exp);
4766 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4767 tree domain = TYPE_DOMAIN (type);
4768 tree domain_min, domain_max, bitlength;
4770 /* The default implementation strategy is to extract the constant
4771 parts of the constructor, use that to initialize the target,
4772 and then "or" in whatever non-constant ranges we need in addition.
4774 If a large set is all zero or all ones, it is
4775 probably better to set it using memset (if available) or bzero.
4776 Also, if a large set has just a single range, it may be
4777 better to first clear the whole set (using
4778 bzero/memset), and then set the bits we want. */
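/* Illustrative examples, assuming a Pascal-like source language:
   a set like [0..255] over a 256-bit domain is all ones and is best
   built with memset; ['a'..'z'] is one range, so clearing the set
   and then setting 26 bits wins; and a small constant set such as
   [1, 3, 5] is simply assembled word by word from constants.  */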
4780 /* Check for all zeros. */
4781 if (elt == NULL_TREE && size > 0)
4784 clear_storage (target, GEN_INT (size));
4788 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4789 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4790 bitlength = size_binop (PLUS_EXPR,
4791 size_diffop (domain_max, domain_min),
4794 nbits = tree_low_cst (bitlength, 1);
4796 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4797 are "complicated" (more than one range), initialize (the
4798 constant parts) by copying from a constant. */
4799 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4800 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4802 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4803 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4804 char *bit_buffer = (char *) alloca (nbits);
4805 HOST_WIDE_INT word = 0;
4806 unsigned int bit_pos = 0;
4807 unsigned int ibit = 0;
4808 unsigned int offset = 0; /* In bytes from beginning of set. */
4810 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4813 if (bit_buffer[ibit])
4815 if (BYTES_BIG_ENDIAN)
4816 word |= (1 << (set_word_size - 1 - bit_pos));
4818 word |= 1 << bit_pos;
4822 if (bit_pos >= set_word_size || ibit == nbits)
4824 if (word != 0 || ! cleared)
4826 rtx datum = GEN_INT (word);
4829 /* The assumption here is that it is safe to use
4830 XEXP if the set is multi-word, but not if
4831 it's single-word. */
4832 if (GET_CODE (target) == MEM)
4833 to_rtx = adjust_address (target, mode, offset);
4834 else if (offset == 0)
4838 emit_move_insn (to_rtx, datum);
4845 offset += set_word_size / BITS_PER_UNIT;
4850 /* Don't bother clearing storage if the set is all ones. */
4851 if (TREE_CHAIN (elt) != NULL_TREE
4852 || (TREE_PURPOSE (elt) == NULL_TREE
4854 : ( ! host_integerp (TREE_VALUE (elt), 0)
4855 || ! host_integerp (TREE_PURPOSE (elt), 0)
4856 || (tree_low_cst (TREE_VALUE (elt), 0)
4857 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4858 != (HOST_WIDE_INT) nbits))))
4859 clear_storage (target, expr_size (exp));
4861 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4863 /* Start of range of element or NULL. */
4864 tree startbit = TREE_PURPOSE (elt);
4865 /* End of range of element, or element value. */
4866 tree endbit = TREE_VALUE (elt);
4867 #ifdef TARGET_MEM_FUNCTIONS
4868 HOST_WIDE_INT startb, endb;
4870 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4872 bitlength_rtx = expand_expr (bitlength,
4873 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4875 /* Handle non-range tuple element like [ expr ]. */
4876 if (startbit == NULL_TREE)
4878 startbit = save_expr (endbit);
4882 startbit = convert (sizetype, startbit);
4883 endbit = convert (sizetype, endbit);
4884 if (! integer_zerop (domain_min))
4886 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4887 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4889 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4890 EXPAND_CONST_ADDRESS);
4891 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4892 EXPAND_CONST_ADDRESS);
4898 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4901 emit_move_insn (targetx, target);
4904 else if (GET_CODE (target) == MEM)
4909 #ifdef TARGET_MEM_FUNCTIONS
4910 /* Optimization: If startbit and endbit are
4911 constants divisible by BITS_PER_UNIT,
4912 call memset instead. */
4913 if (TREE_CODE (startbit) == INTEGER_CST
4914 && TREE_CODE (endbit) == INTEGER_CST
4915 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4916 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4918 emit_library_call (memset_libfunc, LCT_NORMAL,
4920 plus_constant (XEXP (targetx, 0),
4921 startb / BITS_PER_UNIT),
4923 constm1_rtx, TYPE_MODE (integer_type_node),
4924 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4925 TYPE_MODE (sizetype));
4929 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4930 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4931 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4932 startbit_rtx, TYPE_MODE (sizetype),
4933 endbit_rtx, TYPE_MODE (sizetype));
4936 emit_move_insn (target, targetx);
4944 /* Store the value of EXP (an expression tree)
4945 into a subfield of TARGET which has mode MODE and occupies
4946 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4947 If MODE is VOIDmode, it means that we are storing into a bit-field.
4949 If VALUE_MODE is VOIDmode, return nothing in particular.
4950 UNSIGNEDP is not used in this case.
4952 Otherwise, return an rtx for the value stored. This rtx
4953 has mode VALUE_MODE if that is convenient to do.
4954 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4956 TYPE is the type of the underlying object,
4958 ALIAS_SET is the alias set for the destination. This value will
4959 (in general) be different from that for TARGET, since TARGET is a
4960 reference to the containing structure. */
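/* As an illustration (not from the original sources), an assignment
   to a bit-field such as
     struct s { unsigned f : 3; } *p;  p->f = 7;
   arrives here with BITSIZE == 3, BITPOS == 0 and MODE == VOIDmode,
   and is performed by the store_bit_field call below.  */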
4963 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
4966 HOST_WIDE_INT bitsize;
4967 HOST_WIDE_INT bitpos;
4968 enum machine_mode mode;
4970 enum machine_mode value_mode;
4975 HOST_WIDE_INT width_mask = 0;
4977 if (TREE_CODE (exp) == ERROR_MARK)
4980 /* If we have nothing to store, do nothing unless the expression has side-effects. */
4983 return expand_expr (exp, const0_rtx, VOIDmode, 0);
4984 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
4985 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4987 /* If we are storing into an unaligned field of an aligned union that is
4988 in a register, we may have the mode of TARGET being an integer mode but
4989 MODE == BLKmode. In that case, get an aligned object whose size and
4990 alignment are the same as TARGET and store TARGET into it (we can avoid
4991 the store if the field being stored is the entire width of TARGET). Then
4992 call ourselves recursively to store the field into a BLKmode version of
4993 that object. Finally, load from the object into TARGET. This is not
4994 very efficient in general, but should only be slightly more expensive
4995 than the otherwise-required unaligned accesses. Perhaps this can be
4996 cleaned up later. */
4999 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5003 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5005 rtx blk_object = copy_rtx (object);
5007 PUT_MODE (blk_object, BLKmode);
5009 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5010 emit_move_insn (object, target);
5012 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5015 emit_move_insn (target, object);
5017 /* We want to return the BLKmode version of the data. */
5021 if (GET_CODE (target) == CONCAT)
5023 /* We're storing into a struct containing a single __complex. */
5027 return store_expr (exp, target, 0);
5030 /* If the structure is in a register or if the component
5031 is a bit field, we cannot use addressing to access it.
5032 Use bit-field techniques or SUBREG to store in it. */
5034 if (mode == VOIDmode
5035 || (mode != BLKmode && ! direct_store[(int) mode]
5036 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5037 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5038 || GET_CODE (target) == REG
5039 || GET_CODE (target) == SUBREG
5040 /* If the field isn't aligned enough to store as an ordinary memref,
5041 store it as a bit field. */
5042 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5043 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5044 || bitpos % GET_MODE_ALIGNMENT (mode)))
5045 /* If the RHS and field are a constant size and the size of the
5046 RHS isn't the same size as the bitfield, we must use bitfield
5049 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5050 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5052 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5054 /* If BITSIZE is narrower than the size of the type of EXP
5055 we will be narrowing TEMP. Normally, what's wanted are the
5056 low-order bits. However, if EXP's type is a record and this is a
5057 big-endian machine, we want the upper BITSIZE bits. */
5058 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5059 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5060 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5061 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5062 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5066 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5068 if (mode != VOIDmode && mode != BLKmode
5069 && mode != TYPE_MODE (TREE_TYPE (exp)))
5070 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5072 /* If the modes of TARGET and TEMP are both BLKmode, both
5073 must be in memory and BITPOS must be aligned on a byte
5074 boundary. If so, we simply do a block copy. */
5075 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5077 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5078 || bitpos % BITS_PER_UNIT != 0)
5081 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5082 emit_block_move (target, temp,
5083 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5086 return value_mode == VOIDmode ? const0_rtx : target;
5089 /* Store the value in the bitfield. */
5090 store_bit_field (target, bitsize, bitpos, mode, temp,
5091 int_size_in_bytes (type));
5093 if (value_mode != VOIDmode)
5095 /* The caller wants an rtx for the value.
5096 If possible, avoid refetching from the bitfield itself. */
5098 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5101 enum machine_mode tmode;
5104 return expand_and (temp,
5108 GET_MODE (temp) == VOIDmode
5110 : GET_MODE (temp))), NULL_RTX);
5112 tmode = GET_MODE (temp);
5113 if (tmode == VOIDmode)
5115 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5116 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5117 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5120 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5121 NULL_RTX, value_mode, VOIDmode,
5122 int_size_in_bytes (type));
5128 rtx addr = XEXP (target, 0);
5129 rtx to_rtx = target;
5131 /* If a value is wanted, it must be the lhs;
5132 so make the address stable for multiple use. */
5134 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5135 && ! CONSTANT_ADDRESS_P (addr)
5136 /* A frame-pointer reference is already stable. */
5137 && ! (GET_CODE (addr) == PLUS
5138 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5139 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5140 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5141 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5143 /* Now build a reference to just the desired component. */
5145 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5147 if (to_rtx == target)
5148 to_rtx = copy_rtx (to_rtx);
5150 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5151 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5152 set_mem_alias_set (to_rtx, alias_set);
5154 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5158 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5159 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5160 codes and find the ultimate containing object, which we return.
5162 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5163 bit position, and *PUNSIGNEDP to the signedness of the field.
5164 If the position of the field is variable, we store a tree
5165 giving the variable offset (in units) in *POFFSET.
5166 This offset is in addition to the bit position.
5167 If the position is not variable, we store 0 in *POFFSET.
5169 If any of the extraction expressions is volatile,
5170 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5172 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5173 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5176 If the field describes a variable-sized object, *PMODE is set to
5177 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5178 this case, but the address of the object can be found. */
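/* Worked example (illustrative only): given
     struct s { int pad; struct { char c[4]; } in; } x;
   the reference x.in.c[2] peels off one ARRAY_REF and two
   COMPONENT_REFs, returns the decl for `x', and, on a target with
   32-bit ints and 8-bit chars, leaves *PBITSIZE == 8, *PBITPOS ==
   48 and *POFFSET == 0 since every position here is constant.  */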
5181 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5182 punsignedp, pvolatilep)
5184 HOST_WIDE_INT *pbitsize;
5185 HOST_WIDE_INT *pbitpos;
5187 enum machine_mode *pmode;
5192 enum machine_mode mode = VOIDmode;
5193 tree offset = size_zero_node;
5194 tree bit_offset = bitsize_zero_node;
5195 tree placeholder_ptr = 0;
5198 /* First get the mode, signedness, and size. We do this from just the
5199 outermost expression. */
5200 if (TREE_CODE (exp) == COMPONENT_REF)
5202 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5203 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5204 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5206 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5208 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5210 size_tree = TREE_OPERAND (exp, 1);
5211 *punsignedp = TREE_UNSIGNED (exp);
5215 mode = TYPE_MODE (TREE_TYPE (exp));
5216 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5218 if (mode == BLKmode)
5219 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5221 *pbitsize = GET_MODE_BITSIZE (mode);
5226 if (! host_integerp (size_tree, 1))
5227 mode = BLKmode, *pbitsize = -1;
5229 *pbitsize = tree_low_cst (size_tree, 1);
5232 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5233 and find the ultimate containing object. */
5236 if (TREE_CODE (exp) == BIT_FIELD_REF)
5237 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5238 else if (TREE_CODE (exp) == COMPONENT_REF)
5240 tree field = TREE_OPERAND (exp, 1);
5241 tree this_offset = DECL_FIELD_OFFSET (field);
5243 /* If this field hasn't been filled in yet, don't go
5244 past it. This should only happen when folding expressions
5245 made during type construction. */
5246 if (this_offset == 0)
5248 else if (! TREE_CONSTANT (this_offset)
5249 && contains_placeholder_p (this_offset))
5250 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5252 offset = size_binop (PLUS_EXPR, offset, this_offset);
5253 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5254 DECL_FIELD_BIT_OFFSET (field));
5256 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5259 else if (TREE_CODE (exp) == ARRAY_REF
5260 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5262 tree index = TREE_OPERAND (exp, 1);
5263 tree array = TREE_OPERAND (exp, 0);
5264 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5265 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5266 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5268 /* We assume all arrays have sizes that are a multiple of a byte.
5269 First subtract the lower bound, if any, in the type of the
5270 index, then convert to sizetype and multiply by the size of the array element. */
5272 if (low_bound != 0 && ! integer_zerop (low_bound))
5273 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5276 /* If the index has a self-referential type, pass it to a
5277 WITH_RECORD_EXPR; if the component size does, pass our
5278 component to one. */
5279 if (! TREE_CONSTANT (index)
5280 && contains_placeholder_p (index))
5281 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5282 if (! TREE_CONSTANT (unit_size)
5283 && contains_placeholder_p (unit_size))
5284 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5286 offset = size_binop (PLUS_EXPR, offset,
5287 size_binop (MULT_EXPR,
5288 convert (sizetype, index),
5292 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5294 tree new = find_placeholder (exp, &placeholder_ptr);
5296 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5297 We might have been called from tree optimization where we
5298 haven't set up an object yet. */
5306 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5307 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5308 && ! ((TREE_CODE (exp) == NOP_EXPR
5309 || TREE_CODE (exp) == CONVERT_EXPR)
5310 && (TYPE_MODE (TREE_TYPE (exp))
5311 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5314 /* If any reference in the chain is volatile, the effect is volatile. */
5315 if (TREE_THIS_VOLATILE (exp))
5318 exp = TREE_OPERAND (exp, 0);
5321 /* If OFFSET is constant, see if we can return the whole thing as a
5322 constant bit position. Otherwise, split it up. */
5323 if (host_integerp (offset, 0)
5324 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5326 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5327 && host_integerp (tem, 0))
5328 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5330 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5336 /* Return 1 if T is an expression that get_inner_reference handles. */
5339 handled_component_p (t)
5342 switch (TREE_CODE (t))
5347 case ARRAY_RANGE_REF:
5348 case NON_LVALUE_EXPR:
5349 case VIEW_CONVERT_EXPR:
5354 return (TYPE_MODE (TREE_TYPE (t))
5355 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5362 /* Given an rtx VALUE that may contain additions and multiplications, return
5363 an equivalent value that just refers to a register, memory, or constant.
5364 This is done by generating instructions to perform the arithmetic and
5365 returning a pseudo-register containing the value.
5367 The returned value may be a REG, SUBREG, MEM or constant. */
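/* For instance (illustrative only), given
     value = (plus:SI (reg:SI 100) (const_int 4))
   this routine emits an add insn and hands back a pseudo register
   holding the sum, so the caller sees a plain REG rather than an
   arithmetic expression; the exact insns depend on the target.  */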
5370 force_operand (value, target)
5374 /* Use a temporary to force order of execution of calls to `force_operand'. */
5378 /* Use subtarget as the target for operand 0 of a binary operation. */
5379 rtx subtarget = get_subtarget (target);
5381 /* Check for a PIC address load. */
5383 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5384 && XEXP (value, 0) == pic_offset_table_rtx
5385 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5386 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5387 || GET_CODE (XEXP (value, 1)) == CONST))
5390 subtarget = gen_reg_rtx (GET_MODE (value));
5391 emit_move_insn (subtarget, value);
5395 if (GET_CODE (value) == PLUS)
5396 binoptab = add_optab;
5397 else if (GET_CODE (value) == MINUS)
5398 binoptab = sub_optab;
5399 else if (GET_CODE (value) == MULT)
5401 op2 = XEXP (value, 1);
5402 if (!CONSTANT_P (op2)
5403 && !(GET_CODE (op2) == REG && op2 != subtarget))
5405 tmp = force_operand (XEXP (value, 0), subtarget);
5406 return expand_mult (GET_MODE (value), tmp,
5407 force_operand (op2, NULL_RTX),
5413 op2 = XEXP (value, 1);
5414 if (!CONSTANT_P (op2)
5415 && !(GET_CODE (op2) == REG && op2 != subtarget))
5417 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5419 binoptab = add_optab;
5420 op2 = negate_rtx (GET_MODE (value), op2);
5423 /* Check for an addition with OP2 a constant integer and our first
5424 operand a PLUS of a virtual register and something else. In that
5425 case, we want to emit the sum of the virtual register and the
5426 constant first and then add the other value. This allows virtual
5427 register instantiation to simply modify the constant rather than
5428 creating another one around this addition. */
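/* Illustrative example: for
     (plus (plus (reg virtual-stack-vars) (reg 101)) (const_int 8))
   we first form virtual-stack-vars + 8, which instantiation can
   later rewrite as a single frame-pointer offset, and only then
   add in register 101.  */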
5429 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5430 && GET_CODE (XEXP (value, 0)) == PLUS
5431 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5432 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5433 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5435 rtx temp = expand_binop (GET_MODE (value), binoptab,
5436 XEXP (XEXP (value, 0), 0), op2,
5437 subtarget, 0, OPTAB_LIB_WIDEN);
5438 return expand_binop (GET_MODE (value), binoptab, temp,
5439 force_operand (XEXP (XEXP (value, 0), 1), 0),
5440 target, 0, OPTAB_LIB_WIDEN);
5443 tmp = force_operand (XEXP (value, 0), subtarget);
5444 return expand_binop (GET_MODE (value), binoptab, tmp,
5445 force_operand (op2, NULL_RTX),
5446 target, 0, OPTAB_LIB_WIDEN);
5447 /* We give UNSIGNEDP = 0 to expand_binop
5448 because the only operations we are expanding here are signed ones. */
5451 #ifdef INSN_SCHEDULING
5452 /* On machines that have insn scheduling, we want all memory references to be
5453 explicit, so we need to deal with such paradoxical SUBREGs. */
5454 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5455 && (GET_MODE_SIZE (GET_MODE (value))
5456 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5458 = simplify_gen_subreg (GET_MODE (value),
5459 force_reg (GET_MODE (SUBREG_REG (value)),
5460 force_operand (SUBREG_REG (value),
5462 GET_MODE (SUBREG_REG (value)),
5463 SUBREG_BYTE (value));
5469 /* Subroutine of expand_expr: return nonzero iff there is no way that
5470 EXP can reference X, which is being modified. TOP_P is nonzero if this
5471 call is going to be used to determine whether we need a temporary
5472 for EXP, as opposed to a recursive call to this function.
5474 It is always safe for this routine to return zero since it merely
5475 searches for optimization opportunities. */
5478 safe_from_p (x, exp, top_p)
5485 static tree save_expr_list;
5488 /* If EXP has varying size, we MUST use a target since we currently
5489 have no way of allocating temporaries of variable size
5490 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5491 So we assume here that something at a higher level has prevented a
5492 clash. This is somewhat bogus, but the best we can do. Only
5493 do this when X is BLKmode and when we are at the top level. */
5494 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5495 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5496 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5497 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5498 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5500 && GET_MODE (x) == BLKmode)
5501 /* If X is in the outgoing argument area, it is always safe. */
5502 || (GET_CODE (x) == MEM
5503 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5504 || (GET_CODE (XEXP (x, 0)) == PLUS
5505 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5508 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5509 find the underlying pseudo. */
5510 if (GET_CODE (x) == SUBREG)
5513 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5517 /* A SAVE_EXPR might appear many times in the expression passed to the
5518 top-level safe_from_p call, and if it has a complex subexpression,
5519 examining it multiple times could result in a combinatorial explosion.
5520 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5521 with optimization took about 28 minutes to compile -- even though it was
5522 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5523 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5524 we have processed. Note that the only test of top_p was above. */
5533 rtn = safe_from_p (x, exp, 0);
5535 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5536 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5541 /* Now look at our tree code and possibly recurse. */
5542 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5545 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5552 if (TREE_CODE (exp) == TREE_LIST)
5553 return ((TREE_VALUE (exp) == 0
5554 || safe_from_p (x, TREE_VALUE (exp), 0))
5555 && (TREE_CHAIN (exp) == 0
5556 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5557 else if (TREE_CODE (exp) == ERROR_MARK)
5558 return 1; /* An already-visited SAVE_EXPR? */
5563 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5567 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5568 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5572 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5573 the expression. If it is set, we conflict iff we are that rtx or
5574 both are in memory. Otherwise, we check all operands of the
5575 expression recursively. */
5577 switch (TREE_CODE (exp))
5580 /* If the operand is static or we are static, we can't conflict.
5581 Likewise if we don't conflict with the operand at all. */
5582 if (staticp (TREE_OPERAND (exp, 0))
5583 || TREE_STATIC (exp)
5584 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5587 /* Otherwise, the only way this can conflict is if we are taking
5588 the address of a DECL whose address is part of X, which is very rare. */
5590 exp = TREE_OPERAND (exp, 0);
5593 if (!DECL_RTL_SET_P (exp)
5594 || GET_CODE (DECL_RTL (exp)) != MEM)
5597 exp_rtl = XEXP (DECL_RTL (exp), 0);
5602 if (GET_CODE (x) == MEM
5603 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5604 get_alias_set (exp)))
5609 /* Assume that the call will clobber all hard registers and all of memory. */
5611 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5612 || GET_CODE (x) == MEM)
5617 /* If a sequence exists, we would have to scan every instruction
5618 in the sequence to see if it was safe. This is probably not worthwhile. */
5620 if (RTL_EXPR_SEQUENCE (exp))
5623 exp_rtl = RTL_EXPR_RTL (exp);
5626 case WITH_CLEANUP_EXPR:
5627 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5630 case CLEANUP_POINT_EXPR:
5631 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5634 exp_rtl = SAVE_EXPR_RTL (exp);
5638 /* If we've already scanned this, don't do it again. Otherwise,
5639 show we've scanned it and record for clearing the flag if we're going on. */
5641 if (TREE_PRIVATE (exp))
5644 TREE_PRIVATE (exp) = 1;
5645 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5647 TREE_PRIVATE (exp) = 0;
5651 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5655 /* The only operand we look at is operand 1. The rest aren't
5656 part of the expression. */
5657 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5659 case METHOD_CALL_EXPR:
5660 /* This takes an rtx argument, but shouldn't appear here. */
5667 /* If we have an rtx, we do not need to scan our operands. */
5671 nops = first_rtl_op (TREE_CODE (exp));
5672 for (i = 0; i < nops; i++)
5673 if (TREE_OPERAND (exp, i) != 0
5674 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5677 /* If this is a language-specific tree code, it may require
5678 special handling. */
5679 if ((unsigned int) TREE_CODE (exp)
5680 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5681 && !(*lang_hooks.safe_from_p) (x, exp))
5685 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5689 if (GET_CODE (exp_rtl) == SUBREG)
5691 exp_rtl = SUBREG_REG (exp_rtl);
5692 if (GET_CODE (exp_rtl) == REG
5693 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5697 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5698 are memory and they conflict. */
5699 return ! (rtx_equal_p (x, exp_rtl)
5700 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5701 && true_dependence (exp_rtl, GET_MODE (x), x,
5702 rtx_addr_varies_p)));
5705 /* If we reach here, it is safe. */
5709 /* Subroutine of expand_expr: return rtx if EXP is a
5710 variable or parameter; else return 0. */
5717 switch (TREE_CODE (exp))
5721 return DECL_RTL (exp);
5727 #ifdef MAX_INTEGER_COMPUTATION_MODE
5730 check_max_integer_computation_mode (exp)
5733 enum tree_code code;
5734 enum machine_mode mode;
5736 /* Strip any NOPs that don't change the mode. */
5738 code = TREE_CODE (exp);
5740 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5741 if (code == NOP_EXPR
5742 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5745 /* First check the type of the overall operation. We need only look at
5746 unary, binary and relational operations. */
5747 if (TREE_CODE_CLASS (code) == '1'
5748 || TREE_CODE_CLASS (code) == '2'
5749 || TREE_CODE_CLASS (code) == '<')
5751 mode = TYPE_MODE (TREE_TYPE (exp));
5752 if (GET_MODE_CLASS (mode) == MODE_INT
5753 && mode > MAX_INTEGER_COMPUTATION_MODE)
5754 internal_error ("unsupported wide integer operation");
5757 /* Check operand of a unary op. */
5758 if (TREE_CODE_CLASS (code) == '1')
5760 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5761 if (GET_MODE_CLASS (mode) == MODE_INT
5762 && mode > MAX_INTEGER_COMPUTATION_MODE)
5763 internal_error ("unsupported wide integer operation");
5766 /* Check operands of a binary/comparison op. */
5767 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5769 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5770 if (GET_MODE_CLASS (mode) == MODE_INT
5771 && mode > MAX_INTEGER_COMPUTATION_MODE)
5772 internal_error ("unsupported wide integer operation");
5774 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5775 if (GET_MODE_CLASS (mode) == MODE_INT
5776 && mode > MAX_INTEGER_COMPUTATION_MODE)
5777 internal_error ("unsupported wide integer operation");
5782 /* Return the highest power of two that EXP is known to be a multiple of.
5783 This is used in updating alignment of MEMs in array references. */
5785 static HOST_WIDE_INT
5786 highest_pow2_factor (exp)
5789 HOST_WIDE_INT c0, c1;
5791 switch (TREE_CODE (exp))
5794 /* If the integer is expressible in a HOST_WIDE_INT, we can find the
5795 lowest bit that's a one. If the result is zero, pessimize by
5796 returning 1. This is overly-conservative, but such things should not
5797 happen in the offset expressions that we are called with. */
5798 if (host_integerp (exp, 0))
5800 c0 = tree_low_cst (exp, 0);
5801 c0 = c0 < 0 ? - c0 : c0;
5802 return c0 != 0 ? c0 & -c0 : 1;
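/* The expression c0 & -c0 above isolates the lowest set bit (a
   worked example, added for illustration): for c0 == 24, binary
   11000, -c0 is ...101000 in two's complement, and the AND yields
   01000 == 8, so an offset of 24 is known to be a multiple of 8.  */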
5806 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5807 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5808 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5809 return MIN (c0, c1);
5812 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5813 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5816 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5818 if (integer_pow2p (TREE_OPERAND (exp, 1))
5819 && host_integerp (TREE_OPERAND (exp, 1), 1))
5821 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5822 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5823 return MAX (1, c0 / c1);
5827 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5828 case SAVE_EXPR: case WITH_RECORD_EXPR:
5829 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5832 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5835 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5836 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5837 return MIN (c0, c1);
5846 /* Return an object on the placeholder list that matches EXP, a
5847 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5848 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5849 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5850 is a location which initially points to a starting location in the
5851 placeholder list (zero means start of the list) and where a pointer into
5852 the placeholder list at which the object is found is placed. */
5855 find_placeholder (exp, plist)
5859 tree type = TREE_TYPE (exp);
5860 tree placeholder_expr;
5862 for (placeholder_expr
5863 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5864 placeholder_expr != 0;
5865 placeholder_expr = TREE_CHAIN (placeholder_expr))
5867 tree need_type = TYPE_MAIN_VARIANT (type);
5870 /* Find the outermost reference that is of the type we want. If none,
5871 see if any object has a type that is a pointer to the type we want. */
5873 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5874 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5875 || TREE_CODE (elt) == COND_EXPR)
5876 ? TREE_OPERAND (elt, 1)
5877 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5878 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5879 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5880 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5881 ? TREE_OPERAND (elt, 0) : 0))
5882 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5885 *plist = placeholder_expr;
5889 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5891 = ((TREE_CODE (elt) == COMPOUND_EXPR
5892 || TREE_CODE (elt) == COND_EXPR)
5893 ? TREE_OPERAND (elt, 1)
5894 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5895 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5896 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5897 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5898 ? TREE_OPERAND (elt, 0) : 0))
5899 if (POINTER_TYPE_P (TREE_TYPE (elt))
5900 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5904 *plist = placeholder_expr;
5905 return build1 (INDIRECT_REF, need_type, elt);
5912 /* expand_expr: generate code for computing expression EXP.
5913 An rtx for the computed value is returned. The value is never null.
5914 In the case of a void EXP, const0_rtx is returned.
5916 The value may be stored in TARGET if TARGET is nonzero.
5917 TARGET is just a suggestion; callers must assume that
5918 the rtx returned may not be the same as TARGET.
5920 If TARGET is CONST0_RTX, it means that the value will be ignored.
5922 If TMODE is not VOIDmode, it suggests generating the
5923 result in mode TMODE. But this is done only when convenient.
5924 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5925 TMODE is just a suggestion; callers must assume that
5926 the rtx returned may not have mode TMODE.
5928 Note that TARGET may have neither TMODE nor MODE. In that case, it
5929 probably will not be used.
5931 If MODIFIER is EXPAND_SUM then when EXP is an addition
5932 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5933 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5934 products as above, or REG or MEM, or constant.
5935 Ordinarily in such cases we would output mul or add instructions
5936 and then return a pseudo reg containing the sum.
5938 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5939 it also marks a label as absolutely required (it can't be dead).
5940 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5941 This is used for outputting expressions used in initializers.
5943 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5944 with a constant address even if that address is not normally legitimate.
5945 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
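/* As an illustrative example, expanding the address computation
   a + i * 4 with MODIFIER == EXPAND_SUM may simply return
     (plus (reg A) (mult (reg I) (const_int 4)))
   leaving the caller to fold the whole expression into an
   addressing mode instead of emitting mul and add insns.  */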
5948 expand_expr (exp, target, tmode, modifier)
5951 enum machine_mode tmode;
5952 enum expand_modifier modifier;
5955 tree type = TREE_TYPE (exp);
5956 int unsignedp = TREE_UNSIGNED (type);
5957 enum machine_mode mode;
5958 enum tree_code code = TREE_CODE (exp);
5960 rtx subtarget, original_target;
5964 /* Handle ERROR_MARK before anybody tries to access its type. */
5965 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5967 op0 = CONST0_RTX (tmode);
5973 mode = TYPE_MODE (type);
5974 /* Use subtarget as the target for operand 0 of a binary operation. */
5975 subtarget = get_subtarget (target);
5976 original_target = target;
5977 ignore = (target == const0_rtx
5978 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5979 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5980 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
5981 && TREE_CODE (type) == VOID_TYPE));
5983 /* If we are going to ignore this result, we need only do something
5984 if there is a side-effect somewhere in the expression. If there
5985 is, short-circuit the most common cases here. Note that we must
5986 not call expand_expr with anything but const0_rtx in case this
5987 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5991 if (! TREE_SIDE_EFFECTS (exp))
5994 /* Ensure we reference a volatile object even if value is ignored, but
5995 don't do this if all we are doing is taking its address. */
5996 if (TREE_THIS_VOLATILE (exp)
5997 && TREE_CODE (exp) != FUNCTION_DECL
5998 && mode != VOIDmode && mode != BLKmode
5999 && modifier != EXPAND_CONST_ADDRESS)
6001 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6002 if (GET_CODE (temp) == MEM)
6003 temp = copy_to_reg (temp);
6007 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6008 || code == INDIRECT_REF || code == BUFFER_REF)
6009 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6012 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6013 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6015 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6016 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6019 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6020 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6021 /* If the second operand has no side effects, just evaluate the first. */
6023 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6025 else if (code == BIT_FIELD_REF)
6027 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6028 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6029 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6036 #ifdef MAX_INTEGER_COMPUTATION_MODE
6037 /* Only check stuff here if the mode we want is different from the mode
6038 of the expression; if it's the same, check_max_integer_computation_mode
6039 will handle it. Do we really need to check this stuff at all? */
6042 && GET_MODE (target) != mode
6043 && TREE_CODE (exp) != INTEGER_CST
6044 && TREE_CODE (exp) != PARM_DECL
6045 && TREE_CODE (exp) != ARRAY_REF
6046 && TREE_CODE (exp) != ARRAY_RANGE_REF
6047 && TREE_CODE (exp) != COMPONENT_REF
6048 && TREE_CODE (exp) != BIT_FIELD_REF
6049 && TREE_CODE (exp) != INDIRECT_REF
6050 && TREE_CODE (exp) != CALL_EXPR
6051 && TREE_CODE (exp) != VAR_DECL
6052 && TREE_CODE (exp) != RTL_EXPR)
6054 enum machine_mode mode = GET_MODE (target);
6056 if (GET_MODE_CLASS (mode) == MODE_INT
6057 && mode > MAX_INTEGER_COMPUTATION_MODE)
6058 internal_error ("unsupported wide integer operation");
6062 && TREE_CODE (exp) != INTEGER_CST
6063 && TREE_CODE (exp) != PARM_DECL
6064 && TREE_CODE (exp) != ARRAY_REF
6065 && TREE_CODE (exp) != ARRAY_RANGE_REF
6066 && TREE_CODE (exp) != COMPONENT_REF
6067 && TREE_CODE (exp) != BIT_FIELD_REF
6068 && TREE_CODE (exp) != INDIRECT_REF
6069 && TREE_CODE (exp) != VAR_DECL
6070 && TREE_CODE (exp) != CALL_EXPR
6071 && TREE_CODE (exp) != RTL_EXPR
6072 && GET_MODE_CLASS (tmode) == MODE_INT
6073 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6074 internal_error ("unsupported wide integer operation");
6076 check_max_integer_computation_mode (exp);
6079 /* If we will do cse, generate all results into pseudo registers
6080 since 1) that allows cse to find more things
6081 and 2) otherwise cse could produce an insn the machine
6082 cannot support. An exception is a CONSTRUCTOR into a multi-word
6083 MEM: that's much more likely to be most efficient into the MEM. */
6085 if (! cse_not_expected && mode != BLKmode && target
6086 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6087 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6094 tree function = decl_function_context (exp);
6095 /* Handle using a label in a containing function. */
6096 if (function != current_function_decl
6097 && function != inline_function_decl && function != 0)
6099 struct function *p = find_function_data (function);
6100 p->expr->x_forced_labels
6101 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6102 p->expr->x_forced_labels);
6106 if (modifier == EXPAND_INITIALIZER)
6107 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6112 temp = gen_rtx_MEM (FUNCTION_MODE,
6113 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6114 if (function != current_function_decl
6115 && function != inline_function_decl && function != 0)
6116 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6121 if (DECL_RTL (exp) == 0)
6123 error_with_decl (exp, "prior parameter's size depends on `%s'");
6124 return CONST0_RTX (mode);
6127 /* ... fall through ... */
6130 /* If a static var's type was incomplete when the decl was written,
6131 but the type is complete now, lay out the decl now. */
6132 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6133 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6135 rtx value = DECL_RTL_IF_SET (exp);
6137 layout_decl (exp, 0);
6139 /* If the RTL was already set, update its mode and memory attributes. */
6143 PUT_MODE (value, DECL_MODE (exp));
6144 SET_DECL_RTL (exp, 0);
6145 set_mem_attributes (value, exp, 1);
6146 SET_DECL_RTL (exp, value);
6150 /* ... fall through ... */
6154 if (DECL_RTL (exp) == 0)
6157 /* Ensure variable marked as used even if it doesn't go through
6158 a parser. If it hasn't been used yet, write out an external definition. */
6160 if (! TREE_USED (exp))
6162 assemble_external (exp);
6163 TREE_USED (exp) = 1;
6166 /* Show we haven't gotten RTL for this yet. */
6169 /* Handle variables inherited from containing functions. */
6170 context = decl_function_context (exp);
6172 /* We treat inline_function_decl as an alias for the current function
6173 because that is the inline function whose vars, types, etc.
6174 are being merged into the current function.
6175 See expand_inline_function. */
6177 if (context != 0 && context != current_function_decl
6178 && context != inline_function_decl
6179 /* If var is static, we don't need a static chain to access it. */
6180 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6181 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6185 /* Mark as non-local and addressable. */
6186 DECL_NONLOCAL (exp) = 1;
6187 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6189 mark_addressable (exp);
6190 if (GET_CODE (DECL_RTL (exp)) != MEM)
6192 addr = XEXP (DECL_RTL (exp), 0);
6193 if (GET_CODE (addr) == MEM)
6195 = replace_equiv_address (addr,
6196 fix_lexical_addr (XEXP (addr, 0), exp));
6198 addr = fix_lexical_addr (addr, exp);
6200 temp = replace_equiv_address (DECL_RTL (exp), addr);
6203 /* This is the case of an array whose size is to be determined
6204 from its initializer, while the initializer is still being parsed.
6207 else if (GET_CODE (DECL_RTL (exp)) == MEM
6208 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6209 temp = validize_mem (DECL_RTL (exp));
6211 /* If DECL_RTL is memory, we are in the normal case and either
6212 the address is not valid or it is not a register and -fforce-addr
6213 is specified, get the address into a register. */
6215 else if (GET_CODE (DECL_RTL (exp)) == MEM
6216 && modifier != EXPAND_CONST_ADDRESS
6217 && modifier != EXPAND_SUM
6218 && modifier != EXPAND_INITIALIZER
6219 && (! memory_address_p (DECL_MODE (exp),
6220 XEXP (DECL_RTL (exp), 0))
6222 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6223 temp = replace_equiv_address (DECL_RTL (exp),
6224 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6226 /* If we got something, return it. But first, set the alignment
6227 if the address is a register. */
6230 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6231 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6236 /* If the mode of DECL_RTL does not match that of the decl, it
6237 must be a promoted value. We return a SUBREG of the wanted mode,
6238 but mark it so that we know that it was already extended. */
6240 if (GET_CODE (DECL_RTL (exp)) == REG
6241 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6243 /* Get the signedness used for this variable. Ensure we get the
6244 same mode we got when the variable was declared. */
6245 if (GET_MODE (DECL_RTL (exp))
6246 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6249 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6250 SUBREG_PROMOTED_VAR_P (temp) = 1;
6251 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6255 return DECL_RTL (exp);
6258 return immed_double_const (TREE_INT_CST_LOW (exp),
6259 TREE_INT_CST_HIGH (exp), mode);
6262 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6265 /* If optimized, generate immediate CONST_DOUBLE
6266 which will be turned into memory by reload if necessary.
6268 We used to force a register so that loop.c could see it. But
6269 this does not allow gen_* patterns to perform optimizations with
6270 the constants. It also produces two insns in cases like "x = 1.0;".
6271 On most machines, floating-point constants are not permitted in
6272 many insns, so we'd end up copying it to a register in any case.
6274 Now, we do the copying in expand_binop, if appropriate. */
6275 return immed_real_const (exp);
6279 if (! TREE_CST_RTL (exp))
6280 output_constant_def (exp, 1);
6282 /* TREE_CST_RTL probably contains a constant address.
6283 On RISC machines where a constant address isn't valid,
6284 make some insns to get that address into a register. */
6285 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6286 && modifier != EXPAND_CONST_ADDRESS
6287 && modifier != EXPAND_INITIALIZER
6288 && modifier != EXPAND_SUM
6289 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6291 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6292 return replace_equiv_address (TREE_CST_RTL (exp),
6293 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6294 return TREE_CST_RTL (exp);
6296 case EXPR_WITH_FILE_LOCATION:
6299 const char *saved_input_filename = input_filename;
6300 int saved_lineno = lineno;
6301 input_filename = EXPR_WFL_FILENAME (exp);
6302 lineno = EXPR_WFL_LINENO (exp);
6303 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6304 emit_line_note (input_filename, lineno);
6305 /* Possibly avoid switching back and forth here. */
6306 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6307 input_filename = saved_input_filename;
6308 lineno = saved_lineno;
6313 context = decl_function_context (exp);
6315 /* If this SAVE_EXPR was at global context, assume we are an
6316 initialization function and move it into our context. */
6318 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6320 /* We treat inline_function_decl as an alias for the current function
6321 because that is the inline function whose vars, types, etc.
6322 are being merged into the current function.
6323 See expand_inline_function. */
6324 if (context == current_function_decl || context == inline_function_decl)
6327 /* If this is non-local, handle it. */
6330 /* The following call just exists to abort if the context is
6331 not of a containing function. */
6332 find_function_data (context);
6334 temp = SAVE_EXPR_RTL (exp);
6335 if (temp && GET_CODE (temp) == REG)
6337 put_var_into_stack (exp);
6338 temp = SAVE_EXPR_RTL (exp);
6340 if (temp == 0 || GET_CODE (temp) != MEM)
6343 replace_equiv_address (temp,
6344 fix_lexical_addr (XEXP (temp, 0), exp));
6346 if (SAVE_EXPR_RTL (exp) == 0)
6348 if (mode == VOIDmode)
6351 temp = assign_temp (build_qualified_type (type,
6353 | TYPE_QUAL_CONST)),
6356 SAVE_EXPR_RTL (exp) = temp;
6357 if (!optimize && GET_CODE (temp) == REG)
6358 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6361 /* If the mode of TEMP does not match that of the expression, it
6362 must be a promoted value. We pass store_expr a SUBREG of the
6363 wanted mode but mark it so that we know that it was already
6364 extended.  Note that `unsignedp' was modified above in this case.  */
6367 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6369 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6370 SUBREG_PROMOTED_VAR_P (temp) = 1;
6371 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6374 if (temp == const0_rtx)
6375 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6377 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6379 TREE_USED (exp) = 1;
6382 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6383 must be a promoted value. We return a SUBREG of the wanted mode,
6384 but mark it so that we know that it was already extended. */
6386 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6387 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6389 /* Compute the signedness and make the proper SUBREG. */
6390 promote_mode (type, mode, &unsignedp, 0);
6391 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6392 SUBREG_PROMOTED_VAR_P (temp) = 1;
6393 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6397 return SAVE_EXPR_RTL (exp);
6402 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6403 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6407 case PLACEHOLDER_EXPR:
6409 tree old_list = placeholder_list;
6410 tree placeholder_expr = 0;
6412 exp = find_placeholder (exp, &placeholder_expr);
6416 placeholder_list = TREE_CHAIN (placeholder_expr);
6417 temp = expand_expr (exp, original_target, tmode, modifier);
6418 placeholder_list = old_list;
6422 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6425 case WITH_RECORD_EXPR:
6426 /* Put the object on the placeholder list, expand our first operand,
6427 and pop the list. */
6428 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6430 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6432 placeholder_list = TREE_CHAIN (placeholder_list);
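/* Editorial sketch (hypothetical front-end trees): when a field's
offset or size depends on the record object itself, as in Ada
variable-sized records, the size expression contains a
PLACEHOLDER_EXPR.  Wrapping a reference as
WITH_RECORD_EXPR <ref, object> lets find_placeholder substitute
`object' for the placeholder while `ref' is being expanded.  */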
6436 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6437 expand_goto (TREE_OPERAND (exp, 0));
6439 expand_computed_goto (TREE_OPERAND (exp, 0));
6443 expand_exit_loop_if_false (NULL,
6444 invert_truthvalue (TREE_OPERAND (exp, 0)));
6447 case LABELED_BLOCK_EXPR:
6448 if (LABELED_BLOCK_BODY (exp))
6449 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6450 /* Should perhaps use expand_label, but this is simpler and safer. */
6451 do_pending_stack_adjust ();
6452 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6455 case EXIT_BLOCK_EXPR:
6456 if (EXIT_BLOCK_RETURN (exp))
6457 sorry ("returned value in block_exit_expr");
6458 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6463 expand_start_loop (1);
6464 expand_expr_stmt (TREE_OPERAND (exp, 0));
6472 tree vars = TREE_OPERAND (exp, 0);
6473 int vars_need_expansion = 0;
6475 /* Need to open a binding contour here because
6476 if there are any cleanups they must be contained here. */
6477 expand_start_bindings (2);
6479 /* Mark the corresponding BLOCK for output in its proper place. */
6480 if (TREE_OPERAND (exp, 2) != 0
6481 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6482 insert_block (TREE_OPERAND (exp, 2));
6484 /* If VARS have not yet been expanded, expand them now. */
6487 if (!DECL_RTL_SET_P (vars))
6489 vars_need_expansion = 1;
6492 expand_decl_init (vars);
6493 vars = TREE_CHAIN (vars);
6496 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6498 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6504 if (RTL_EXPR_SEQUENCE (exp))
6506 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6508 emit_insns (RTL_EXPR_SEQUENCE (exp));
6509 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6511 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6512 free_temps_for_rtl_expr (exp);
6513 return RTL_EXPR_RTL (exp);
6516 /* If we don't need the result, just ensure we evaluate any subexpressions.  */
6522 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6523 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6528 /* All elts simple constants => refer to a constant in memory. But
6529 if this is a non-BLKmode mode, let it store a field at a time
6530 since that should make a CONST_INT or CONST_DOUBLE when we
6531 fold. Likewise, if we have a target we can use, it is best to
6532 store directly into the target unless the type is large enough
6533 that memcpy will be used. If we are making an initializer and
6534 all operands are constant, put it in memory as well. */
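/* Editorial examples (hypothetical source): `static int v[] = {1, 2, 3};'
takes the output_constant_def branch below and is emitted as a
constant in memory, while a small automatic aggregate such as
`struct { short a, b; } s = {4, 5};' with SImode can be built a
field at a time, which later folding can turn into a single
constant store.  */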
6535 else if ((TREE_STATIC (exp)
6536 && ((mode == BLKmode
6537 && ! (target != 0 && safe_from_p (target, exp, 1)))
6538 || TREE_ADDRESSABLE (exp)
6539 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6540 && (! MOVE_BY_PIECES_P
6541 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6543 && ! mostly_zeros_p (exp))))
6544 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6546 rtx constructor = output_constant_def (exp, 1);
6548 if (modifier != EXPAND_CONST_ADDRESS
6549 && modifier != EXPAND_INITIALIZER
6550 && modifier != EXPAND_SUM)
6551 constructor = validize_mem (constructor);
6557 /* Handle calls that pass values in multiple non-contiguous
6558 locations. The Irix 6 ABI has examples of this. */
6559 if (target == 0 || ! safe_from_p (target, exp, 1)
6560 || GET_CODE (target) == PARALLEL)
6562 = assign_temp (build_qualified_type (type,
6564 | (TREE_READONLY (exp)
6565 * TYPE_QUAL_CONST))),
6566 0, TREE_ADDRESSABLE (exp), 1);
6568 store_constructor (exp, target, 0,
6569 int_size_in_bytes (TREE_TYPE (exp)));
6575 tree exp1 = TREE_OPERAND (exp, 0);
6577 tree string = string_constant (exp1, &index);
6579 /* Try to optimize reads from const strings. */
6581 && TREE_CODE (string) == STRING_CST
6582 && TREE_CODE (index) == INTEGER_CST
6583 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6584 && GET_MODE_CLASS (mode) == MODE_INT
6585 && GET_MODE_SIZE (mode) == 1
6586 && modifier != EXPAND_WRITE)
6588 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
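/* Editorial example (hypothetical source): for `c = "abc"[1];' the
conditions above hold, so the read folds directly to the character
constant 'b' via GEN_INT, and no MEM for the string is generated.  */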
6590 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6591 op0 = memory_address (mode, op0);
6592 temp = gen_rtx_MEM (mode, op0);
6593 set_mem_attributes (temp, exp, 0);
6595 /* If we are writing to this object and its type is a record with
6596 readonly fields, we must mark it as readonly so it will
6597 conflict with readonly references to those fields. */
6598 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6599 RTX_UNCHANGING_P (temp) = 1;
6605 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6609 tree array = TREE_OPERAND (exp, 0);
6610 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6611 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6612 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6615 /* Optimize the special-case of a zero lower bound.
6617 We convert the low_bound to sizetype to avoid some problems
6618 with constant folding. (E.g. suppose the lower bound is 1,
6619 and its mode is QI. Without the conversion, (ARRAY
6620 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6621 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6623 if (! integer_zerop (low_bound))
6624 index = size_diffop (index, convert (sizetype, low_bound));
6626 /* Fold an expression like: "foo"[2].
6627 This is not done in fold so it won't happen inside &.
6628 Don't fold if this is for wide characters since it's too
6629 difficult to do correctly and this is a very rare case. */
6631 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6632 && TREE_CODE (array) == STRING_CST
6633 && TREE_CODE (index) == INTEGER_CST
6634 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6635 && GET_MODE_CLASS (mode) == MODE_INT
6636 && GET_MODE_SIZE (mode) == 1)
6638 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6640 /* If this is a constant index into a constant array,
6641 just get the value from the array. Handle both the cases when
6642 we have an explicit constructor and when our operand is a variable
6643 that was declared const. */
6645 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6646 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6647 && TREE_CODE (index) == INTEGER_CST
6648 && 0 > compare_tree_int (index,
6649 list_length (CONSTRUCTOR_ELTS
6650 (TREE_OPERAND (exp, 0)))))
6654 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6655 i = TREE_INT_CST_LOW (index);
6656 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6660 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6664 else if (optimize >= 1
6665 && modifier != EXPAND_CONST_ADDRESS
6666 && modifier != EXPAND_INITIALIZER
6667 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6668 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6669 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6671 if (TREE_CODE (index) == INTEGER_CST)
6673 tree init = DECL_INITIAL (array);
6675 if (TREE_CODE (init) == CONSTRUCTOR)
6679 for (elem = CONSTRUCTOR_ELTS (init);
6681 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6682 elem = TREE_CHAIN (elem))
6685 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6686 return expand_expr (fold (TREE_VALUE (elem)), target,
6689 else if (TREE_CODE (init) == STRING_CST
6690 && 0 > compare_tree_int (index,
6691 TREE_STRING_LENGTH (init)))
6693 tree type = TREE_TYPE (TREE_TYPE (init));
6694 enum machine_mode mode = TYPE_MODE (type);
6696 if (GET_MODE_CLASS (mode) == MODE_INT
6697 && GET_MODE_SIZE (mode) == 1)
6699 (TREE_STRING_POINTER
6700 (init)[TREE_INT_CST_LOW (index)]));
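/* Editorial example (hypothetical source): with optimization enabled,
`static const char msg[] = "hi";' indexed by a constant is folded
here from DECL_INITIAL, so `msg[0]' becomes GEN_INT ('h') even
though `msg' is an ordinary object rather than a string literal.  */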
6709 case ARRAY_RANGE_REF:
6710 /* If the operand is a CONSTRUCTOR, we can just extract the
6711 appropriate field if it is present. Don't do this if we have
6712 already written the data since we want to refer to that copy
6713 and varasm.c assumes that's what we'll do. */
6714 if (code == COMPONENT_REF
6715 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6716 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6720 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6721 elt = TREE_CHAIN (elt))
6722 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6723 /* We can normally use the value of the field in the
6724 CONSTRUCTOR. However, if this is a bitfield in
6725 an integral mode that we can fit in a HOST_WIDE_INT,
6726 we must mask only the number of bits in the bitfield,
6727 since this is done implicitly by the constructor. If
6728 the bitfield does not meet either of those conditions,
6729 we can't do this optimization. */
6730 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6731 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6733 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6734 <= HOST_BITS_PER_WIDE_INT))))
6736 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6737 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6739 HOST_WIDE_INT bitsize
6740 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6742 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6744 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6745 op0 = expand_and (op0, op1, target);
6749 enum machine_mode imode
6750 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6752 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6755 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6757 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6767 enum machine_mode mode1;
6768 HOST_WIDE_INT bitsize, bitpos;
6771 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6772 &mode1, &unsignedp, &volatilep);
6775 /* If we got back the original object, something is wrong. Perhaps
6776 we are evaluating an expression too early. In any event, don't
6777 infinitely recurse. */
6781 /* If TEM's type is a union of variable size, pass TARGET to the inner
6782 computation, since the inner computation will need a temporary and
6783 TARGET is known to be usable as one.  This occurs in unchecked conversion in Ada.  */
6787 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6788 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6790 ? target : NULL_RTX),
6792 (modifier == EXPAND_INITIALIZER
6793 || modifier == EXPAND_CONST_ADDRESS)
6794 ? modifier : EXPAND_NORMAL);
6796 /* If this is a constant, put it into a register if it is a legitimate
6797 constant and OFFSET is 0; put it into memory if it isn't.  */
6798 if (CONSTANT_P (op0))
6800 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6801 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6803 op0 = force_reg (mode, op0);
6805 op0 = validize_mem (force_const_mem (mode, op0));
6810 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6812 /* If this object is in a register, put it into memory.
6813 This case can't occur in C, but can in Ada if we have
6814 unchecked conversion of an expression from a scalar type to
6815 an array or record type. */
6816 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6817 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6819 /* If the operand is a SAVE_EXPR, we can deal with this by
6820 forcing the SAVE_EXPR into memory. */
6821 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6823 put_var_into_stack (TREE_OPERAND (exp, 0));
6824 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6829 = build_qualified_type (TREE_TYPE (tem),
6830 (TYPE_QUALS (TREE_TYPE (tem))
6831 | TYPE_QUAL_CONST));
6832 rtx memloc = assign_temp (nt, 1, 1, 1);
6834 emit_move_insn (memloc, op0);
6839 if (GET_CODE (op0) != MEM)
6842 if (GET_MODE (offset_rtx) != ptr_mode)
6843 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6845 #ifdef POINTERS_EXTEND_UNSIGNED
6846 if (GET_MODE (offset_rtx) != Pmode)
6847 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6850 /* A constant address in OP0 can have VOIDmode; we must not try
6851 to call force_reg for that case, so avoid it.  */
6852 if (GET_CODE (op0) == MEM
6853 && GET_MODE (op0) == BLKmode
6854 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6856 && (bitpos % bitsize) == 0
6857 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6858 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6860 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6862 if (GET_CODE (XEXP (temp, 0)) == REG)
6865 op0 = (replace_equiv_address
6867 force_reg (GET_MODE (XEXP (temp, 0)),
6872 op0 = offset_address (op0, offset_rtx,
6873 highest_pow2_factor (offset));
6876 /* Don't forget about volatility even if this is a bitfield. */
6877 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6879 if (op0 == orig_op0)
6880 op0 = copy_rtx (op0);
6882 MEM_VOLATILE_P (op0) = 1;
6885 /* In cases where an aligned union has an unaligned object
6886 as a field, we might be extracting a BLKmode value from
6887 an integer-mode (e.g., SImode) object. Handle this case
6888 by doing the extract into an object as wide as the field
6889 (which we know to be the width of a basic mode), then
6890 storing into memory, and changing the mode to BLKmode. */
6891 if (mode1 == VOIDmode
6892 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6893 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6894 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6895 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6896 && modifier != EXPAND_CONST_ADDRESS
6897 && modifier != EXPAND_INITIALIZER)
6898 /* If the field isn't aligned enough to fetch as a memref,
6899 fetch it as a bit field. */
6900 || (mode1 != BLKmode
6901 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6902 && ((TYPE_ALIGN (TREE_TYPE (tem))
6903 < GET_MODE_ALIGNMENT (mode))
6904 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6905 /* If the type and the field are a constant size and the
6906 size of the type isn't the same size as the bitfield,
6907 we must use bitfield operations. */
6909 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6911 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6914 enum machine_mode ext_mode = mode;
6916 if (ext_mode == BLKmode
6917 && ! (target != 0 && GET_CODE (op0) == MEM
6918 && GET_CODE (target) == MEM
6919 && bitpos % BITS_PER_UNIT == 0))
6920 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6922 if (ext_mode == BLKmode)
6924 /* In this case, BITPOS must start at a byte boundary and
6925 TARGET, if specified, must be a MEM. */
6926 if (GET_CODE (op0) != MEM
6927 || (target != 0 && GET_CODE (target) != MEM)
6928 || bitpos % BITS_PER_UNIT != 0)
6931 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6933 target = assign_temp (type, 0, 1, 1);
6935 emit_block_move (target, op0,
6936 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6942 op0 = validize_mem (op0);
6944 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6945 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6947 op0 = extract_bit_field (op0, bitsize, bitpos,
6948 unsignedp, target, ext_mode, ext_mode,
6949 int_size_in_bytes (TREE_TYPE (tem)));
6951 /* If the result is a record type and BITSIZE is narrower than
6952 the mode of OP0, an integral mode, and this is a big endian
6953 machine, we must put the field into the high-order bits. */
6954 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6955 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6956 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6957 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6958 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6962 if (mode == BLKmode)
6964 rtx new = assign_temp (build_qualified_type
6965 (type_for_mode (ext_mode, 0),
6966 TYPE_QUAL_CONST), 0, 1, 1);
6968 emit_move_insn (new, op0);
6969 op0 = copy_rtx (new);
6970 PUT_MODE (op0, BLKmode);
6971 set_mem_attributes (op0, exp, 1);
6977 /* If the result is BLKmode, use that to access the object now as well.  */
6979 if (mode == BLKmode)
6982 /* Get a reference to just this component. */
6983 if (modifier == EXPAND_CONST_ADDRESS
6984 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6985 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
6987 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6989 if (op0 == orig_op0)
6990 op0 = copy_rtx (op0);
6992 set_mem_attributes (op0, exp, 0);
6993 if (GET_CODE (XEXP (op0, 0)) == REG)
6994 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6996 MEM_VOLATILE_P (op0) |= volatilep;
6997 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6998 || modifier == EXPAND_CONST_ADDRESS
6999 || modifier == EXPAND_INITIALIZER)
7001 else if (target == 0)
7002 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7004 convert_move (target, op0, unsignedp);
7010 rtx insn, before = get_last_insn (), vtbl_ref;
7012 /* Evaluate the interior expression. */
7013 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7016 /* Get or create an instruction off which to hang a note. */
7017 if (REG_P (subtarget))
7020 insn = get_last_insn ();
7023 if (! INSN_P (insn))
7024 insn = prev_nonnote_insn (insn);
7028 target = gen_reg_rtx (GET_MODE (subtarget));
7029 insn = emit_move_insn (target, subtarget);
7032 /* Collect the data for the note. */
7033 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7034 vtbl_ref = plus_constant (vtbl_ref,
7035 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7036 /* Discard the initial CONST that was added. */
7037 vtbl_ref = XEXP (vtbl_ref, 0);
7040 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7045 /* Intended for a reference to a buffer of a file-object in Pascal.
7046 But it's not certain that a special tree code will really be
7047 necessary for these. INDIRECT_REF might work for them. */
7053 /* Pascal set IN expression.
7056 rlo = set_low - (set_low%bits_per_word);
7057 the_word = set [ (index - rlo)/bits_per_word ];
7058 bit_index = index % bits_per_word;
7059 bitmask = 1 << bit_index;
7060 return !!(the_word & bitmask); */
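/* Editorial worked example of the algorithm above, taking 8-bit
units: for a Pascal-style `s : set of 8..71' and the test `13 in s',
rlo is 8, the_word is element (13-8)/8 == 0 of the set,
bit_index is 13 % 8 == 5, so the result is bit 5 of the set's
first byte.  */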
7062 tree set = TREE_OPERAND (exp, 0);
7063 tree index = TREE_OPERAND (exp, 1);
7064 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7065 tree set_type = TREE_TYPE (set);
7066 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7067 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7068 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7069 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7070 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7071 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7072 rtx setaddr = XEXP (setval, 0);
7073 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7075 rtx diff, quo, rem, addr, bit, result;
7077 /* If domain is empty, answer is no. Likewise if index is constant
7078 and out of bounds. */
7079 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7080 && TREE_CODE (set_low_bound) == INTEGER_CST
7081 && tree_int_cst_lt (set_high_bound, set_low_bound))
7082 || (TREE_CODE (index) == INTEGER_CST
7083 && TREE_CODE (set_low_bound) == INTEGER_CST
7084 && tree_int_cst_lt (index, set_low_bound))
7085 || (TREE_CODE (set_high_bound) == INTEGER_CST
7086 && TREE_CODE (index) == INTEGER_CST
7087 && tree_int_cst_lt (set_high_bound, index))))
7091 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7093 /* If we get here, we have to generate the code for both cases
7094 (in range and out of range). */
7096 op0 = gen_label_rtx ();
7097 op1 = gen_label_rtx ();
7099 if (! (GET_CODE (index_val) == CONST_INT
7100 && GET_CODE (lo_r) == CONST_INT))
7101 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7102 GET_MODE (index_val), iunsignedp, op1);
7104 if (! (GET_CODE (index_val) == CONST_INT
7105 && GET_CODE (hi_r) == CONST_INT))
7106 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7107 GET_MODE (index_val), iunsignedp, op1);
7109 /* Calculate the element number of bit zero in the first word of the set.  */
7111 if (GET_CODE (lo_r) == CONST_INT)
7112 rlow = GEN_INT (INTVAL (lo_r)
7113 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7115 rlow = expand_binop (index_mode, and_optab, lo_r,
7116 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7117 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7119 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7120 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7122 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7123 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7124 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7125 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7127 addr = memory_address (byte_mode,
7128 expand_binop (index_mode, add_optab, diff,
7129 setaddr, NULL_RTX, iunsignedp,
7132 /* Extract the bit we want to examine. */
7133 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7134 gen_rtx_MEM (byte_mode, addr),
7135 make_tree (TREE_TYPE (index), rem),
7137 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7138 GET_MODE (target) == byte_mode ? target : 0,
7139 1, OPTAB_LIB_WIDEN);
7141 if (result != target)
7142 convert_move (target, result, 1);
7144 /* Output the code to handle the out-of-range case. */
7147 emit_move_insn (target, const0_rtx);
7152 case WITH_CLEANUP_EXPR:
7153 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7155 WITH_CLEANUP_EXPR_RTL (exp)
7156 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7157 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7159 /* That's it for this cleanup. */
7160 TREE_OPERAND (exp, 1) = 0;
7162 return WITH_CLEANUP_EXPR_RTL (exp);
7164 case CLEANUP_POINT_EXPR:
7166 /* Start a new binding layer that will keep track of all cleanup
7167 actions to be performed. */
7168 expand_start_bindings (2);
7170 target_temp_slot_level = temp_slot_level;
7172 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7173 /* If we're going to use this value, load it up now. */
7175 op0 = force_not_mem (op0);
7176 preserve_temp_slots (op0);
7177 expand_end_bindings (NULL_TREE, 0, 0);
7182 /* Check for a built-in function. */
7183 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7184 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7186 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7188 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7189 == BUILT_IN_FRONTEND)
7190 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7192 return expand_builtin (exp, target, subtarget, tmode, ignore);
7195 return expand_call (exp, target, ignore);
7197 case NON_LVALUE_EXPR:
7200 case REFERENCE_EXPR:
7201 if (TREE_OPERAND (exp, 0) == error_mark_node)
7204 if (TREE_CODE (type) == UNION_TYPE)
7206 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7208 /* If both input and output are BLKmode, this conversion isn't doing
7209 anything except possibly changing memory attributes.  */
7210 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7212 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7215 result = copy_rtx (result);
7216 set_mem_attributes (result, exp, 0);
7221 target = assign_temp (type, 0, 1, 1);
7223 if (GET_CODE (target) == MEM)
7224 /* Store data into beginning of memory target. */
7225 store_expr (TREE_OPERAND (exp, 0),
7226 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7228 else if (GET_CODE (target) == REG)
7229 /* Store this field into a union of the proper type. */
7230 store_field (target,
7231 MIN ((int_size_in_bytes (TREE_TYPE
7232 (TREE_OPERAND (exp, 0)))
7234 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7235 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7236 VOIDmode, 0, type, 0);
7240 /* Return the entire union. */
7244 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7246 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7249 /* If the signedness of the conversion differs and OP0 is
7250 a promoted SUBREG, clear that indication since we now
7251 have to do the proper extension. */
7252 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7253 && GET_CODE (op0) == SUBREG)
7254 SUBREG_PROMOTED_VAR_P (op0) = 0;
7259 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7260 if (GET_MODE (op0) == mode)
7263 /* If OP0 is a constant, just convert it into the proper mode. */
7264 if (CONSTANT_P (op0))
7266 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7267 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7269 if (modifier == EXPAND_INITIALIZER)
7270 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7274 convert_to_mode (mode, op0,
7275 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7277 convert_move (target, op0,
7278 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7281 case VIEW_CONVERT_EXPR:
7282 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7284 /* If the input and output modes are both the same, we are done.
7285 Otherwise, if neither mode is BLKmode and both are within a word, we
7286 can use gen_lowpart. If neither is true, make sure the operand is
7287 in memory and convert the MEM to the new mode. */
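/* Editorial illustration (hypothetical source): a type pun such as
VIEW_CONVERT_EXPR (float, int_var) on a 32-bit target takes the
gen_lowpart path below, since SFmode and SImode both fit in one
word; a BLKmode operand instead goes through a memory temporary.  */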
7288 if (TYPE_MODE (type) == GET_MODE (op0))
7290 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7291 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7292 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7293 op0 = gen_lowpart (TYPE_MODE (type), op0);
7294 else if (GET_CODE (op0) != MEM)
7296 /* If the operand is not a MEM, force it into memory.  Since we
7297 are going to be changing the mode of the MEM, don't call
7298 force_const_mem for constants because we don't allow pool
7299 constants to change mode.  */
7300 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7302 if (TREE_ADDRESSABLE (exp))
7305 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7307 = assign_stack_temp_for_type
7308 (TYPE_MODE (inner_type),
7309 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7311 emit_move_insn (target, op0);
7315 /* At this point, OP0 is in the correct mode. If the output type is such
7316 that the operand is known to be aligned, indicate that it is.
7317 Otherwise, we need only be concerned about alignment for non-BLKmode results.  */
7319 if (GET_CODE (op0) == MEM)
7321 op0 = copy_rtx (op0);
7323 if (TYPE_ALIGN_OK (type))
7324 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7325 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7326 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7328 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7329 HOST_WIDE_INT temp_size
7330 = MAX (int_size_in_bytes (inner_type),
7331 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7332 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7333 temp_size, 0, type);
7334 rtx new_with_op0_mode = copy_rtx (new);
7336 if (TREE_ADDRESSABLE (exp))
7339 PUT_MODE (new_with_op0_mode, GET_MODE (op0));
7340 if (GET_MODE (op0) == BLKmode)
7341 emit_block_move (new_with_op0_mode, op0,
7342 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7344 emit_move_insn (new_with_op0_mode, op0);
7349 PUT_MODE (op0, TYPE_MODE (type));
7355 /* We come here from MINUS_EXPR when the second operand is a constant.  */
7358 this_optab = ! unsignedp && flag_trapv
7359 && (GET_MODE_CLASS(mode) == MODE_INT)
7360 ? addv_optab : add_optab;
7362 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7363 something else, make sure we add the register to the constant and
7364 then to the other thing. This case can occur during strength
7365 reduction and doing it this way will produce better code if the
7366 frame pointer or argument pointer is eliminated.
7368 fold-const.c will ensure that the constant is always in the inner
7369 PLUS_EXPR, so the only case we need to do anything about is if
7370 sp, ap, or fp is our second argument, in which case we must swap
7371 the innermost first argument and our second argument. */
7373 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7374 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7375 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7376 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7377 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7378 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7380 tree t = TREE_OPERAND (exp, 1);
7382 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7383 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7386 /* If the result is to be ptr_mode and we are adding an integer to
7387 something, we might be forming a constant. So try to use
7388 plus_constant. If it produces a sum and we can't accept it,
7389 use force_operand. This allows P = &ARR[const] to generate
7390 efficient code on machines where a SYMBOL_REF is not a valid
7393 If this is an EXPAND_SUM call, always return the sum. */
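/* Editorial illustration (hypothetical source): for `p = &arr[3];'
with 4-byte elements, plus_constant can fold the address to
(plus (symbol_ref "arr") (const_int 12)) at expand time instead of
emitting an explicit addition.  */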
7394 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7395 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7397 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7398 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7399 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7403 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7405 /* Use immed_double_const to ensure that the constant is
7406 truncated according to the mode of OP1, then sign extended
7407 to a HOST_WIDE_INT. Using the constant directly can result
7408 in non-canonical RTL in a 64x32 cross compile. */
7410 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7412 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7413 op1 = plus_constant (op1, INTVAL (constant_part));
7414 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7415 op1 = force_operand (op1, target);
7419 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7420 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7421 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7425 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7427 if (! CONSTANT_P (op0))
7429 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7430 VOIDmode, modifier);
7431 /* Don't go to both_summands if modifier
7432 says it's not right to return a PLUS. */
7433 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7437 /* Use immed_double_const to ensure that the constant is
7438 truncated according to the mode of OP1, then sign extended
7439 to a HOST_WIDE_INT. Using the constant directly can result
7440 in non-canonical RTL in a 64x32 cross compile. */
7442 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7444 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7445 op0 = plus_constant (op0, INTVAL (constant_part));
7446 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7447 op0 = force_operand (op0, target);
7452 /* No sense saving up arithmetic to be done
7453 if it's all in the wrong mode to form part of an address.
7454 And force_operand won't know whether to sign-extend or zero-extend.  */
7456 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7457 || mode != ptr_mode)
7460 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7463 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7464 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7467 /* Make sure any term that's a sum with a constant comes last. */
7468 if (GET_CODE (op0) == PLUS
7469 && CONSTANT_P (XEXP (op0, 1)))
7475 /* If adding to a sum including a constant,
7476 associate it to put the constant outside. */
7477 if (GET_CODE (op1) == PLUS
7478 && CONSTANT_P (XEXP (op1, 1)))
7480 rtx constant_term = const0_rtx;
7482 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7485 /* Ensure that MULT comes first if there is one. */
7486 else if (GET_CODE (op0) == MULT)
7487 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7489 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7491 /* Let's also eliminate constants from op0 if possible. */
7492 op0 = eliminate_constant_term (op0, &constant_term);
7494 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7495 their sum should be a constant. Form it into OP1, since the
7496 result we want will then be OP0 + OP1. */
7498 temp = simplify_binary_operation (PLUS, mode, constant_term,
7503 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7506 /* Put a constant term last and put a multiplication first. */
7507 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7508 temp = op1, op1 = op0, op0 = temp;
7510 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7511 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
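/* Editorial note: the reassociation above keeps addresses in the
canonical shape the addressing-mode patterns expect, e.g.
(plus (plus (mult (reg) (const_int 4)) (reg)) (const_int 8)),
with any MULT first and the constant term outermost.  */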
7514 /* For initializers, we are allowed to return a MINUS of two
7515 symbolic constants.  Here we handle all cases when both operands are constant.  */
7517 /* Handle difference of two symbolic constants,
7518 for the sake of an initializer. */
7519 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7520 && really_constant_p (TREE_OPERAND (exp, 0))
7521 && really_constant_p (TREE_OPERAND (exp, 1)))
7523 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7525 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7528 /* If the last operand is a CONST_INT, use plus_constant of
7529 the negated constant. Else make the MINUS. */
7530 if (GET_CODE (op1) == CONST_INT)
7531 return plus_constant (op0, - INTVAL (op1));
7533 return gen_rtx_MINUS (mode, op0, op1);
7535 /* Convert A - const to A + (-const). */
7536 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7538 tree negated = fold (build1 (NEGATE_EXPR, type,
7539 TREE_OPERAND (exp, 1)));
7541 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7542 /* If we can't negate the constant in TYPE, leave it alone and
7543 expand_binop will negate it for us. We used to try to do it
7544 here in the signed version of TYPE, but that doesn't work
7545 on POINTER_TYPEs. */;
7548 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7552 this_optab = ! unsignedp && flag_trapv
7553 && (GET_MODE_CLASS(mode) == MODE_INT)
7554 ? subv_optab : sub_optab;
7558 /* If first operand is constant, swap them.
7559 Thus the following special case checks need only
7560 check the second operand. */
7561 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7563 tree t1 = TREE_OPERAND (exp, 0);
7564 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7565 TREE_OPERAND (exp, 1) = t1;
7568 /* Attempt to return something suitable for generating an
7569 indexed address, for machines that support that. */
7571 if (modifier == EXPAND_SUM && mode == ptr_mode
7572 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7573 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7575 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7578 /* Apply distributive law if OP0 is x+c. */
7579 if (GET_CODE (op0) == PLUS
7580 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7585 (mode, XEXP (op0, 0),
7586 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7587 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7588 * INTVAL (XEXP (op0, 1))));
7590 if (GET_CODE (op0) != REG)
7591 op0 = force_operand (op0, NULL_RTX);
7592 if (GET_CODE (op0) != REG)
7593 op0 = copy_to_mode_reg (mode, op0);
7596 gen_rtx_MULT (mode, op0,
7597 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7600 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7603 /* Check for multiplying things that have been extended
7604 from a narrower type. If this machine supports multiplying
7605 in that narrower type with a result in the desired type,
7606 do it that way, and avoid the explicit type-conversion. */
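/* Editorial example (hypothetical source): on a 32-bit target that
provides mulhisi3, `(int) a * (int) b' with `short a, b' expands as
a single HImode-to-SImode widening multiply instead of two
extensions followed by a full SImode multiply.  */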
7607 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7608 && TREE_CODE (type) == INTEGER_TYPE
7609 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7610 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7611 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7612 && int_fits_type_p (TREE_OPERAND (exp, 1),
7613 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7614 /* Don't use a widening multiply if a shift will do. */
7615 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7616 > HOST_BITS_PER_WIDE_INT)
7617 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7619 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7620 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7622 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7623 /* If both operands are extended, they must either both
7624 be zero-extended or both be sign-extended. */
7625 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7627 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7629 enum machine_mode innermode
7630 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7631 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7632 ? smul_widen_optab : umul_widen_optab);
7633 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7634 ? umul_widen_optab : smul_widen_optab);
7635 if (mode == GET_MODE_WIDER_MODE (innermode))
7637 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7639 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7640 NULL_RTX, VOIDmode, 0);
7641 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7642 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7645 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7646 NULL_RTX, VOIDmode, 0);
7649 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7650 && innermode == word_mode)
7653 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7654 NULL_RTX, VOIDmode, 0);
7655 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7656 op1 = convert_modes (innermode, mode,
7657 expand_expr (TREE_OPERAND (exp, 1),
7658 NULL_RTX, VOIDmode, 0),
7661 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7662 NULL_RTX, VOIDmode, 0);
7663 temp = expand_binop (mode, other_optab, op0, op1, target,
7664 unsignedp, OPTAB_LIB_WIDEN);
7665 htem = expand_mult_highpart_adjust (innermode,
7666 gen_highpart (innermode, temp),
7668 gen_highpart (innermode, temp),
7670 emit_move_insn (gen_highpart (innermode, temp), htem);
7675 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7676 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7677 return expand_mult (mode, op0, op1, target, unsignedp);
7679 case TRUNC_DIV_EXPR:
7680 case FLOOR_DIV_EXPR:
7682 case ROUND_DIV_EXPR:
7683 case EXACT_DIV_EXPR:
7684 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7686 /* Possible optimization: compute the dividend with EXPAND_SUM;
7687 then, if the divisor is constant, we can optimize the case
7688 where some terms of the dividend have coefficients divisible by it.  */
7689 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7690 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7691 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7694 /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
7695 saving an expensive divide.  If not, combine will rebuild the original division.  */
7697 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7698 && !real_onep (TREE_OPERAND (exp, 0)))
7699 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7700 build (RDIV_EXPR, type,
7701 build_real (type, dconst1),
7702 TREE_OPERAND (exp, 1))),
7703 target, tmode, unsignedp);
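/* Editorial illustration: with -funsafe-math-optimizations, a loop
evaluating `x[i] / y' repeatedly becomes `x[i] * (1.0 / y)', and CSE
can then hoist the single reciprocal computation out of the loop.  */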
7704 this_optab = sdiv_optab;
7707 case TRUNC_MOD_EXPR:
7708 case FLOOR_MOD_EXPR:
7710 case ROUND_MOD_EXPR:
7711 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7713 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7714 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7715 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7717 case FIX_ROUND_EXPR:
7718 case FIX_FLOOR_EXPR:
7720 abort (); /* Not used for C. */
7722 case FIX_TRUNC_EXPR:
7723 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7725 target = gen_reg_rtx (mode);
7726 expand_fix (target, op0, unsignedp);
7730 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7732 target = gen_reg_rtx (mode);
7733 /* expand_float can't figure out what to do if FROM has VOIDmode.
7734 So give it the correct mode. With -O, cse will optimize this. */
7735 if (GET_MODE (op0) == VOIDmode)
7736 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7738 expand_float (target, op0,
7739 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7743 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7744 temp = expand_unop (mode,
7745 ! unsignedp && flag_trapv
7746 && (GET_MODE_CLASS(mode) == MODE_INT)
7747 ? negv_optab : neg_optab, op0, target, 0);
7753 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7755 /* Handle complex values specially. */
7756 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7757 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7758 return expand_complex_abs (mode, op0, target, unsignedp);
7760 /* Unsigned abs is simply the operand. Testing here means we don't
7761 risk generating incorrect code below. */
7762 if (TREE_UNSIGNED (type))
7765 return expand_abs (mode, op0, target, unsignedp,
7766 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7770 target = original_target;
7771 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7772 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7773 || GET_MODE (target) != mode
7774 || (GET_CODE (target) == REG
7775 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7776 target = gen_reg_rtx (mode);
7777 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7778 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7780 /* First try to do it with a special MIN or MAX instruction.
7781 If that does not win, use a conditional jump to select the proper value.  */
7783 this_optab = (TREE_UNSIGNED (type)
7784 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7785 : (code == MIN_EXPR ? smin_optab : smax_optab));
7787 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7792 /* At this point, a MEM target is no longer useful; we will get better code without it.  */
7795 if (GET_CODE (target) == MEM)
7796 target = gen_reg_rtx (mode);
7799 emit_move_insn (target, op0);
7801 op0 = gen_label_rtx ();
7803 /* If this mode is an integer too wide to compare properly,
7804 compare word by word. Rely on cse to optimize constant cases. */
7805 if (GET_MODE_CLASS (mode) == MODE_INT
7806 && ! can_compare_p (GE, mode, ccp_jump))
7808 if (code == MAX_EXPR)
7809 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7810 target, op1, NULL_RTX, op0);
7812 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7813 op1, target, NULL_RTX, op0);
7817 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7818 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7819 unsignedp, mode, NULL_RTX, NULL_RTX,
7822 emit_move_insn (target, op1);
7827 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7828 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7834 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7835 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7840 /* ??? Can optimize bitwise operations with one arg constant.
7841 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7842 and (a bitwise1 b) bitwise2 b (etc)
7843 but that is probably not worthwhile.  */
7845 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7846 boolean values when we want in all cases to compute both of them. In
7847 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7848 as actual zero-or-1 values and then bitwise anding. In cases where
7849 there cannot be any side effects, better code would be made by
7850 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7851 how to recognize those cases. */
7853 case TRUTH_AND_EXPR:
7855 this_optab = and_optab;
7860 this_optab = ior_optab;
7863 case TRUTH_XOR_EXPR:
7865 this_optab = xor_optab;
7872 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7874 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7875 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7878 /* Could determine the answer when only additive constants differ. Also,
7879 the addition of one can be handled by changing the condition. */
7886 case UNORDERED_EXPR:
7893 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7897 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7898 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7900 && GET_CODE (original_target) == REG
7901 && (GET_MODE (original_target)
7902 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7904 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7907 if (temp != original_target)
7908 temp = copy_to_reg (temp);
7910 op1 = gen_label_rtx ();
7911 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7912 GET_MODE (temp), unsignedp, op1);
7913 emit_move_insn (temp, const1_rtx);
7918 /* If no set-flag instruction, must generate a conditional
7919 store into a temporary variable. Drop through
7920 and handle this like && and ||. */
7922 case TRUTH_ANDIF_EXPR:
7923 case TRUTH_ORIF_EXPR:
7925 && (target == 0 || ! safe_from_p (target, exp, 1)
7926 /* Make sure we don't have a hard reg (such as function's return
7927 value) live across basic blocks, if not optimizing. */
7928 || (!optimize && GET_CODE (target) == REG
7929 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7930 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7933 emit_clr_insn (target);
7935 op1 = gen_label_rtx ();
7936 jumpifnot (exp, op1);
7939 emit_0_to_1_insn (target);
7942 return ignore ? const0_rtx : target;
7944 case TRUTH_NOT_EXPR:
7945 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7946 /* The parser is careful to generate TRUTH_NOT_EXPR
7947 only with operands that are always zero or one. */
7948 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7949 target, 1, OPTAB_LIB_WIDEN);
7955 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7957 return expand_expr (TREE_OPERAND (exp, 1),
7958 (ignore ? const0_rtx : target),
7962 /* If we would have a "singleton" (see below) were it not for a
7963 conversion in each arm, bring that conversion back out. */
7964 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7965 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7966 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7967 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7969 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7970 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7972 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7973 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7974 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7975 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7976 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7977 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7978 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7979 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7980 return expand_expr (build1 (NOP_EXPR, type,
7981 build (COND_EXPR, TREE_TYPE (iftrue),
7982 TREE_OPERAND (exp, 0),
7984 target, tmode, modifier);
7988 /* Note that COND_EXPRs whose type is a structure or union
7989 are required to be constructed to contain assignments of
7990 a temporary variable, so that we can evaluate them here
7991 for side effect only. If type is void, we must do likewise. */
7993 /* If an arm of the branch requires a cleanup,
7994 only that cleanup is performed. */
7997 tree binary_op = 0, unary_op = 0;
7999 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8000 convert it to our mode, if necessary. */
8001 if (integer_onep (TREE_OPERAND (exp, 1))
8002 && integer_zerop (TREE_OPERAND (exp, 2))
8003 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8007 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8012 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8013 if (GET_MODE (op0) == mode)
8017 target = gen_reg_rtx (mode);
8018 convert_move (target, op0, unsignedp);
8022 /* Check for X ? A + B : A. If we have this, we can copy A to the
8023 output and conditionally add B. Similarly for unary operations.
8024 Don't do this if X has side-effects because those side effects
8025 might affect A or B and the "?" operation is a sequence point in
8026 ANSI. (operand_equal_p tests for side effects.) */
8028 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8029 && operand_equal_p (TREE_OPERAND (exp, 2),
8030 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8031 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8032 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8033 && operand_equal_p (TREE_OPERAND (exp, 1),
8034 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8035 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8036 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8037 && operand_equal_p (TREE_OPERAND (exp, 2),
8038 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8039 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8040 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8041 && operand_equal_p (TREE_OPERAND (exp, 1),
8042 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8043 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8045 /* If we are not to produce a result, we have no target. Otherwise,
8046 if a target was specified use it; it will not be used as an
8047 intermediate target unless it is safe. If no target, use a
8052 else if (original_target
8053 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8054 || (singleton && GET_CODE (original_target) == REG
8055 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8056 && original_target == var_rtx (singleton)))
8057 && GET_MODE (original_target) == mode
8058 #ifdef HAVE_conditional_move
8059 && (! can_conditionally_move_p (mode)
8060 || GET_CODE (original_target) == REG
8061 || TREE_ADDRESSABLE (type))
8063 && (GET_CODE (original_target) != MEM
8064 || TREE_ADDRESSABLE (type)))
8065 temp = original_target;
8066 else if (TREE_ADDRESSABLE (type))
8069 temp = assign_temp (type, 0, 0, 1);
8071 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8072 do the test of X as a store-flag operation, do this as
8073 A + ((X != 0) << log C). Similarly for other simple binary
8074 operators. Only do for C == 1 if BRANCH_COST is low. */
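/* Concrete case (editorial sketch): `t = x ? a + 4 : a;' can be
emitted branch-free as `a + ((x != 0) << 2)' when do_store_flag can
compute `x != 0' directly into a register.  */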
8075 if (temp && singleton && binary_op
8076 && (TREE_CODE (binary_op) == PLUS_EXPR
8077 || TREE_CODE (binary_op) == MINUS_EXPR
8078 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8079 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8080 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8081 : integer_onep (TREE_OPERAND (binary_op, 1)))
8082 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8085 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8086 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8087 ? addv_optab : add_optab)
8088 : TREE_CODE (binary_op) == MINUS_EXPR
8089 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8090 ? subv_optab : sub_optab)
8091 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8094 /* If we had X ? A : A + 1, do this as A + (X == 0).
8096 We have to invert the truth value here and then put it
8097 back later if do_store_flag fails. We cannot simply copy
8098 TREE_OPERAND (exp, 0) to another variable and modify that
8099 because invert_truthvalue can modify the tree pointed to by its argument.  */
8101 if (singleton == TREE_OPERAND (exp, 1))
8102 TREE_OPERAND (exp, 0)
8103 = invert_truthvalue (TREE_OPERAND (exp, 0));
8105 result = do_store_flag (TREE_OPERAND (exp, 0),
8106 (safe_from_p (temp, singleton, 1)
8108 mode, BRANCH_COST <= 1);
8110 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8111 result = expand_shift (LSHIFT_EXPR, mode, result,
8112 build_int_2 (tree_log2
8116 (safe_from_p (temp, singleton, 1)
8117 ? temp : NULL_RTX), 0);
8121 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8122 return expand_binop (mode, boptab, op1, result, temp,
8123 unsignedp, OPTAB_LIB_WIDEN);
8125 else if (singleton == TREE_OPERAND (exp, 1))
8126 TREE_OPERAND (exp, 0)
8127 = invert_truthvalue (TREE_OPERAND (exp, 0));
8130 do_pending_stack_adjust ();
8132 op0 = gen_label_rtx ();
8134 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8138 /* If the target conflicts with the other operand of the
8139 binary op, we can't use it. Also, we can't use the target
8140 if it is a hard register, because evaluating the condition
8141 might clobber it. */
if ((binary_op
     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8144 || (GET_CODE (temp) == REG
8145 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8146 temp = gen_reg_rtx (mode);
8147 store_expr (singleton, temp, 0);
8150 expand_expr (singleton,
8151 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8152 if (singleton == TREE_OPERAND (exp, 1))
8153 jumpif (TREE_OPERAND (exp, 0), op0);
else
  jumpifnot (TREE_OPERAND (exp, 0), op0);
8157 start_cleanup_deferral ();
8158 if (binary_op && temp == 0)
8159 /* Just touch the other operand. */
8160 expand_expr (TREE_OPERAND (binary_op, 1),
8161 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
else if (binary_op)
  store_expr (build (TREE_CODE (binary_op), type,
                     make_tree (type, temp),
                     TREE_OPERAND (binary_op, 1)),
              temp, 0);
else
  store_expr (build1 (TREE_CODE (unary_op), type,
                      make_tree (type, temp)),
              temp, 0);
8173 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8174 comparison operator. If we have one of these cases, set the
8175 output to A, branch on A (cse will merge these two references),
8176 then set the output to FOO. */
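/* Illustrative sketch (editor's note, not part of the original source):
   for

     int f (int a) { return a > 0 ? a : -1; }

   the condition tests A itself, so A is stored into the target first,
   the branch is taken on the same value of A (cse merges the two
   references), and only the fall-through path overwrites the target
   with FOO, here -1.  */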
else if (temp
         && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8179 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8180 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8181 TREE_OPERAND (exp, 1), 0)
8182 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8183 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8184 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8186 if (GET_CODE (temp) == REG
8187 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8188 temp = gen_reg_rtx (mode);
8189 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8190 jumpif (TREE_OPERAND (exp, 0), op0);
8192 start_cleanup_deferral ();
8193 store_expr (TREE_OPERAND (exp, 2), temp, 0);
else if (temp
         && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8198 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8199 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8200 TREE_OPERAND (exp, 2), 0)
8201 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8202 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8203 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8205 if (GET_CODE (temp) == REG
8206 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8207 temp = gen_reg_rtx (mode);
8208 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8209 jumpifnot (TREE_OPERAND (exp, 0), op0);
8211 start_cleanup_deferral ();
8212 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8217 op1 = gen_label_rtx ();
8218 jumpifnot (TREE_OPERAND (exp, 0), op0);
8220 start_cleanup_deferral ();
8222 /* One branch of the cond can be void, if it never returns. For
8223 example A ? throw : E */
if (temp != 0
    && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8226 store_expr (TREE_OPERAND (exp, 1), temp, 0);
else
  expand_expr (TREE_OPERAND (exp, 1),
8229 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8230 end_cleanup_deferral ();
8232 emit_jump_insn (gen_jump (op1));
8235 start_cleanup_deferral ();
if (temp != 0
    && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8238 store_expr (TREE_OPERAND (exp, 2), temp, 0);
else
  expand_expr (TREE_OPERAND (exp, 2),
8241 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8244 end_cleanup_deferral ();
8255 /* Something needs to be initialized, but we didn't know
8256 where that thing was when building the tree. For example,
8257 it could be the return value of a function, or a parameter
to a function which is laid out on the stack, or a temporary
8259 variable which must be passed by reference.
8261 We guarantee that the expression will either be constructed
8262 or copied into our original target. */
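/* Illustrative sketch (editor's note, not part of the original source):
   in C++,

     S s = f ();   // f returns S by value

   reaches here as a TARGET_EXPR whose slot is `s'; the initializer is
   expanded so that it constructs its value directly in the slot,
   avoiding a separate temporary and copy.  */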
8264 tree slot = TREE_OPERAND (exp, 0);
8265 tree cleanups = NULL_TREE;
8268 if (TREE_CODE (slot) != VAR_DECL)
8272 target = original_target;
8274 /* Set this here so that if we get a target that refers to a
8275 register variable that's already been used, put_reg_into_stack
8276 knows that it should fix up those uses. */
8277 TREE_USED (slot) = 1;
8281 if (DECL_RTL_SET_P (slot))
8283 target = DECL_RTL (slot);
/* We have already expanded the slot, so don't do
   anything else.  */
if (TREE_OPERAND (exp, 1) == NULL_TREE)
  return target;
8291 target = assign_temp (type, 2, 0, 1);
8292 /* All temp slots at this level must not conflict. */
8293 preserve_temp_slots (target);
8294 SET_DECL_RTL (slot, target);
8295 if (TREE_ADDRESSABLE (slot))
8296 put_var_into_stack (slot);
8298 /* Since SLOT is not known to the called function
8299 to belong to its stack frame, we must build an explicit
8300 cleanup. This case occurs when we must build up a reference
8301 to pass the reference as an argument. In this case,
it is very likely that such a reference need not be
built here.  */
8305 if (TREE_OPERAND (exp, 2) == 0)
8306 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8307 cleanups = TREE_OPERAND (exp, 2);
8312 /* This case does occur, when expanding a parameter which
8313 needs to be constructed on the stack. The target
8314 is the actual stack address that we want to initialize.
8315 The function we call will perform the cleanup in this case. */
8317 /* If we have already assigned it space, use that space,
not the target that we were passed in, as our target
8319 parameter is only a hint. */
8320 if (DECL_RTL_SET_P (slot))
8322 target = DECL_RTL (slot);
/* We have already expanded the slot, so don't do
   anything else.  */
if (TREE_OPERAND (exp, 1) == NULL_TREE)
  return target;
8330 SET_DECL_RTL (slot, target);
8331 /* If we must have an addressable slot, then make sure that
8332 the RTL that we just stored in slot is OK. */
8333 if (TREE_ADDRESSABLE (slot))
8334 put_var_into_stack (slot);
8338 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8339 /* Mark it as expanded. */
8340 TREE_OPERAND (exp, 1) = NULL_TREE;
8342 store_expr (exp1, target, 0);
8344 expand_decl_cleanup (NULL_TREE, cleanups);
8351 tree lhs = TREE_OPERAND (exp, 0);
8352 tree rhs = TREE_OPERAND (exp, 1);
8354 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8360 /* If lhs is complex, expand calls in rhs before computing it.
8361 That's so we don't compute a pointer and save it over a
8362 call. If lhs is simple, compute it first so we can give it
8363 as a target if the rhs is just a call. This avoids an
8364 extra temp and copy and that prevents a partial-subsumption
8365 which makes bad code. Actually we could treat
8366 component_ref's of vars like vars. */
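/* Illustrative sketch (editor's note, not part of the original source):

     p->x = g ();  // complex lhs: expand the call before the address of p->x
     x = g ();     // simple lhs: compute X first so it can be g ()'s target

   in the second form the call can write its result straight into X,
   with no extra temporary or copy.  */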
8368 tree lhs = TREE_OPERAND (exp, 0);
8369 tree rhs = TREE_OPERAND (exp, 1);
8373 /* Check for |= or &= of a bitfield of size one into another bitfield
8374 of size 1. In this case, (unless we need the result of the
8375 assignment) we can do this more efficiently with a
8376 test followed by an assignment, if necessary.
8378 ??? At this point, we can't get a BIT_FIELD_REF here. But if
things change so we do, this code should be enhanced to
support it.  */
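/* Illustrative sketch (editor's note, not part of the original source):

     struct { unsigned a : 1, b : 1; } s;
     s.a |= s.b;

   can be emitted as  if (s.b) s.a = 1;  -- a test and a one-bit store
   instead of a read-modify-write of the A bitfield.  */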
8382 && TREE_CODE (lhs) == COMPONENT_REF
8383 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8384 || TREE_CODE (rhs) == BIT_AND_EXPR)
8385 && TREE_OPERAND (rhs, 0) == lhs
8386 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8387 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8388 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8390 rtx label = gen_label_rtx ();
8392 do_jump (TREE_OPERAND (rhs, 1),
8393 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8394 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8395 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8396 (TREE_CODE (rhs) == BIT_IOR_EXPR
8398 : integer_zero_node)),
8400 do_pending_stack_adjust ();
8405 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8411 if (!TREE_OPERAND (exp, 0))
8412 expand_null_return ();
else
  expand_return (TREE_OPERAND (exp, 0));
8417 case PREINCREMENT_EXPR:
8418 case PREDECREMENT_EXPR:
8419 return expand_increment (exp, 0, ignore);
8421 case POSTINCREMENT_EXPR:
8422 case POSTDECREMENT_EXPR:
8423 /* Faster to treat as pre-increment if result is not used. */
8424 return expand_increment (exp, ! ignore, ignore);
8427 /* Are we taking the address of a nested function? */
8428 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8429 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8430 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8431 && ! TREE_STATIC (exp))
8433 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8434 op0 = force_operand (op0, target);
/* If we are taking the address of something erroneous, just
   use zero.  */
else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
  return const0_rtx;
8440 /* If we are taking the address of a constant and are at the
8441 top level, we have to use output_constant_def since we can't
8442 call force_const_mem at top level. */
else if (cfun == 0
         && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
             || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
                 == 'c')))
  op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8450 /* We make sure to pass const0_rtx down if we came in with
8451 ignore set, to avoid doing the cleanups twice for something. */
8452 op0 = expand_expr (TREE_OPERAND (exp, 0),
8453 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8454 (modifier == EXPAND_INITIALIZER
8455 ? modifier : EXPAND_CONST_ADDRESS));
8457 /* If we are going to ignore the result, OP0 will have been set
8458 to const0_rtx, so just return it. Don't get confused and
8459 think we are taking the address of the constant. */
8463 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8464 clever and returns a REG when given a MEM. */
8465 op0 = protect_from_queue (op0, 1);
8467 /* We would like the object in memory. If it is a constant, we can
8468 have it be statically allocated into memory. For a non-constant,
8469 we need to allocate some memory and store the value into it. */
8471 if (CONSTANT_P (op0))
8472 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8474 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8475 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8476 || GET_CODE (op0) == PARALLEL)
/* If this object is in a register, it can't be BLKmode.  */
8479 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8480 tree nt = build_qualified_type (inner_type,
8481 (TYPE_QUALS (inner_type)
8482 | TYPE_QUAL_CONST));
8483 rtx memloc = assign_temp (nt, 1, 1, 1);
8485 if (GET_CODE (op0) == PARALLEL)
8486 /* Handle calls that pass values in multiple non-contiguous
8487 locations. The Irix 6 ABI has examples of this. */
8488 emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
8490 emit_move_insn (memloc, op0);
8495 if (GET_CODE (op0) != MEM)
8498 mark_temp_addr_taken (op0);
8499 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8501 op0 = XEXP (op0, 0);
8502 #ifdef POINTERS_EXTEND_UNSIGNED
8503 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8504 && mode == ptr_mode)
8505 op0 = convert_memory_address (ptr_mode, op0);
/* If OP0 is not aligned at least as much as the type requires, we
   need to make a temporary, copy OP0 to it, and take the address of
   the temporary.  We want to use the alignment of the type, not of
   the operand.  Note that this is incorrect for FUNCTION_TYPE, but
   the test for BLKmode means that can't happen.  The test for
   BLKmode is because we never make mis-aligned MEMs with
   non-BLKmode.

   We don't need to do this at all if the machine doesn't have
   strict alignment.  */
8520 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8521 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8523 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8525 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8527 = assign_stack_temp_for_type
8528 (TYPE_MODE (inner_type),
8529 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8530 : int_size_in_bytes (inner_type),
8531 1, build_qualified_type (inner_type,
8532 (TYPE_QUALS (inner_type)
8533 | TYPE_QUAL_CONST)));
8535 if (TYPE_ALIGN_OK (inner_type))
8538 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8542 op0 = force_operand (XEXP (op0, 0), target);
8545 if (flag_force_addr && GET_CODE (op0) != REG)
8546 op0 = force_reg (Pmode, op0);
8548 if (GET_CODE (op0) == REG
8549 && ! REG_USERVAR_P (op0))
8550 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8552 #ifdef POINTERS_EXTEND_UNSIGNED
8553 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8554 && mode == ptr_mode)
8555 op0 = convert_memory_address (ptr_mode, op0);
8560 case ENTRY_VALUE_EXPR:
8563 /* COMPLEX type for Extended Pascal & Fortran */
8566 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8569 /* Get the rtx code of the operands. */
8570 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8571 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8574 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8578 /* Move the real (op0) and imaginary (op1) parts to their location. */
8579 emit_move_insn (gen_realpart (mode, target), op0);
8580 emit_move_insn (gen_imagpart (mode, target), op1);
8582 insns = get_insns ();
8585 /* Complex construction should appear as a single unit. */
8586 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8587 each with a separate pseudo as destination.
8588 It's not correct for flow to treat them as a unit. */
8589 if (GET_CODE (target) != CONCAT)
8590 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8598 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8599 return gen_realpart (mode, op0);
8602 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8603 return gen_imagpart (mode, op0);
8607 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8611 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8614 target = gen_reg_rtx (mode);
8618 /* Store the realpart and the negated imagpart to target. */
8619 emit_move_insn (gen_realpart (partmode, target),
8620 gen_realpart (partmode, op0));
8622 imag_t = gen_imagpart (partmode, target);
8623 temp = expand_unop (partmode,
8624 ! unsignedp && flag_trapv
8625 && (GET_MODE_CLASS(partmode) == MODE_INT)
8626 ? negv_optab : neg_optab,
8627 gen_imagpart (partmode, op0), imag_t, 0);
8629 emit_move_insn (imag_t, temp);
8631 insns = get_insns ();
8634 /* Conjugate should appear as a single unit
8635 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8636 each with a separate pseudo as destination.
8637 It's not correct for flow to treat them as a unit. */
8638 if (GET_CODE (target) != CONCAT)
8639 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8646 case TRY_CATCH_EXPR:
8648 tree handler = TREE_OPERAND (exp, 1);
8650 expand_eh_region_start ();
8652 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8654 expand_eh_region_end_cleanup (handler);
8659 case TRY_FINALLY_EXPR:
8661 tree try_block = TREE_OPERAND (exp, 0);
8662 tree finally_block = TREE_OPERAND (exp, 1);
8663 rtx finally_label = gen_label_rtx ();
8664 rtx done_label = gen_label_rtx ();
8665 rtx return_link = gen_reg_rtx (Pmode);
8666 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8667 (tree) finally_label, (tree) return_link);
8668 TREE_SIDE_EFFECTS (cleanup) = 1;
8670 /* Start a new binding layer that will keep track of all cleanup
8671 actions to be performed. */
8672 expand_start_bindings (2);
8674 target_temp_slot_level = temp_slot_level;
8676 expand_decl_cleanup (NULL_TREE, cleanup);
8677 op0 = expand_expr (try_block, target, tmode, modifier);
8679 preserve_temp_slots (op0);
8680 expand_end_bindings (NULL_TREE, 0, 0);
8681 emit_jump (done_label);
8682 emit_label (finally_label);
8683 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8684 emit_indirect_jump (return_link);
8685 emit_label (done_label);
8689 case GOTO_SUBROUTINE_EXPR:
8691 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8692 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8693 rtx return_address = gen_label_rtx ();
8694 emit_move_insn (return_link,
8695 gen_rtx_LABEL_REF (Pmode, return_address));
8697 emit_label (return_address);
8702 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8705 return get_exception_pointer (cfun);
/* Function descriptors are not valid except as initialization
   constants, and should not be expanded.  */
8713 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8716 /* Here to do an ordinary binary operator, generating an instruction
8717 from the optab already placed in `this_optab'. */
8719 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8721 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8722 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8724 temp = expand_binop (mode, this_optab, op0, op1, target,
8725 unsignedp, OPTAB_LIB_WIDEN);
/* Return the tree node if ARG corresponds to a string constant or zero
8732 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8733 in bytes within the string that ARG is accessing. The type of the
8734 offset will be `sizetype'. */
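/* Illustrative sketch (editor's note, not part of the original source):
   for the argument  "hello" + 3  (a PLUS_EXPR whose first operand is
   the ADDR_EXPR of a STRING_CST), this returns the STRING_CST "hello"
   and sets *PTR_OFFSET to 3.  */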
8737 string_constant (arg, ptr_offset)
8743 if (TREE_CODE (arg) == ADDR_EXPR
8744 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8746 *ptr_offset = size_zero_node;
8747 return TREE_OPERAND (arg, 0);
8749 else if (TREE_CODE (arg) == PLUS_EXPR)
8751 tree arg0 = TREE_OPERAND (arg, 0);
8752 tree arg1 = TREE_OPERAND (arg, 1);
8757 if (TREE_CODE (arg0) == ADDR_EXPR
8758 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8760 *ptr_offset = convert (sizetype, arg1);
8761 return TREE_OPERAND (arg0, 0);
8763 else if (TREE_CODE (arg1) == ADDR_EXPR
8764 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8766 *ptr_offset = convert (sizetype, arg0);
8767 return TREE_OPERAND (arg1, 0);
8774 /* Expand code for a post- or pre- increment or decrement
8775 and return the RTX for the result.
8776 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
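/* Illustrative sketch (editor's note, not part of the original source):
   for  y = x++;  POST is 1 and the rtx returned is the old value of X;
   for  y = ++x;  POST is 0 and the rtx returned is the incremented
   value.  */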
8779 expand_increment (exp, post, ignore)
8785 tree incremented = TREE_OPERAND (exp, 0);
8786 optab this_optab = add_optab;
8788 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8789 int op0_is_copy = 0;
8790 int single_insn = 0;
8791 /* 1 means we can't store into OP0 directly,
8792 because it is a subreg narrower than a word,
and we don't dare clobber the rest of the word.  */
int bad_subreg = 0;
8796 /* Stabilize any component ref that might need to be
8797 evaluated more than once below. */
8799 || TREE_CODE (incremented) == BIT_FIELD_REF
8800 || (TREE_CODE (incremented) == COMPONENT_REF
8801 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8802 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8803 incremented = stabilize_reference (incremented);
8804 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8805 ones into save exprs so that they don't accidentally get evaluated
8806 more than once by the code below. */
8807 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8808 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8809 incremented = save_expr (incremented);
8811 /* Compute the operands as RTX.
8812 Note whether OP0 is the actual lvalue or a copy of it:
8813 I believe it is a copy iff it is a register or subreg
8814 and insns were generated in computing it. */
8816 temp = get_last_insn ();
8817 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8819 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8820 in place but instead must do sign- or zero-extension during assignment,
8821 so we copy it into a new register and let the code below use it as
Note that we can safely modify this SUBREG since it is known not to be
8825 shared (it was made by the expand_expr call above). */
8827 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8830 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8834 else if (GET_CODE (op0) == SUBREG
8835 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8837 /* We cannot increment this SUBREG in place. If we are
8838 post-incrementing, get a copy of the old value. Otherwise,
8839 just mark that we cannot increment in place. */
8841 op0 = copy_to_reg (op0);
8846 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8847 && temp != get_last_insn ());
8848 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8850 /* Decide whether incrementing or decrementing. */
8851 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8852 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8853 this_optab = sub_optab;
8855 /* Convert decrement by a constant into a negative increment. */
8856 if (this_optab == sub_optab
8857 && GET_CODE (op1) == CONST_INT)
8859 op1 = GEN_INT (-INTVAL (op1));
8860 this_optab = add_optab;
8863 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8864 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8866 /* For a preincrement, see if we can do this with a single instruction. */
8869 icode = (int) this_optab->handlers[(int) mode].insn_code;
8870 if (icode != (int) CODE_FOR_nothing
8871 /* Make sure that OP0 is valid for operands 0 and 1
8872 of the insn we want to queue. */
8873 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8874 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8875 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8879 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8880 then we cannot just increment OP0. We must therefore contrive to
8881 increment the original value. Then, for postincrement, we can return
8882 OP0 since it is a copy of the old value. For preincrement, expand here
8883 unless we can do it with a single insn.
8885 Likewise if storing directly into OP0 would clobber high bits
8886 we need to preserve (bad_subreg). */
8887 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8889 /* This is the easiest way to increment the value wherever it is.
8890 Problems with multiple evaluation of INCREMENTED are prevented
8891 because either (1) it is a component_ref or preincrement,
8892 in which case it was stabilized above, or (2) it is an array_ref
8893 with constant index in an array in a register, which is
8894 safe to reevaluate. */
8895 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8896 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8897 ? MINUS_EXPR : PLUS_EXPR),
8900 TREE_OPERAND (exp, 1));
8902 while (TREE_CODE (incremented) == NOP_EXPR
8903 || TREE_CODE (incremented) == CONVERT_EXPR)
8905 newexp = convert (TREE_TYPE (incremented), newexp);
8906 incremented = TREE_OPERAND (incremented, 0);
8909 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8910 return post ? op0 : temp;
8915 /* We have a true reference to the value in OP0.
8916 If there is an insn to add or subtract in this mode, queue it.
8917 Queueing the increment insn avoids the register shuffling
8918 that often results if we must increment now and first save
8919 the old value for subsequent use. */
8921 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8922 op0 = stabilize (op0);
8925 icode = (int) this_optab->handlers[(int) mode].insn_code;
8926 if (icode != (int) CODE_FOR_nothing
8927 /* Make sure that OP0 is valid for operands 0 and 1
8928 of the insn we want to queue. */
8929 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8930 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8932 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8933 op1 = force_reg (mode, op1);
8935 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8937 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8939 rtx addr = (general_operand (XEXP (op0, 0), mode)
8940 ? force_reg (Pmode, XEXP (op0, 0))
8941 : copy_to_reg (XEXP (op0, 0)));
8944 op0 = replace_equiv_address (op0, addr);
8945 temp = force_reg (GET_MODE (op0), op0);
8946 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8947 op1 = force_reg (mode, op1);
8949 /* The increment queue is LIFO, thus we have to `queue'
8950 the instructions in reverse order. */
8951 enqueue_insn (op0, gen_move_insn (op0, temp));
8952 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8957 /* Preincrement, or we can't increment with one simple insn. */
8959 /* Save a copy of the value before inc or dec, to return it later. */
8960 temp = value = copy_to_reg (op0);
8962 /* Arrange to return the incremented value. */
8963 /* Copy the rtx because expand_binop will protect from the queue,
8964 and the results of that would be invalid for us to return
8965 if our caller does emit_queue before using our result. */
8966 temp = copy_rtx (value = op0);
8968 /* Increment however we can. */
8969 op1 = expand_binop (mode, this_optab, value, op1, op0,
8970 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8972 /* Make sure the value is stored into OP0. */
8974 emit_move_insn (op0, op1);
8979 /* At the start of a function, record that we have no previously-pushed
8980 arguments waiting to be popped. */
void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}
8988 /* When exiting from function, if safe, clear out any pending stack adjust
8989 so the adjustment won't get done.
8991 Note, if the current function calls alloca, then it must have a
8992 frame pointer regardless of the value of flag_omit_frame_pointer. */
void
clear_pending_stack_adjust ()
{
8997 #ifdef EXIT_IGNORE_STACK
if (optimize > 0
    && (! flag_omit_frame_pointer || current_function_calls_alloca)
9000 && EXIT_IGNORE_STACK
9001 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9002 && ! flag_inline_functions)
9004 stack_pointer_delta -= pending_stack_adjust,
9005 pending_stack_adjust = 0;
9010 /* Pop any previously-pushed arguments that have not been popped yet. */
void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
9023 /* Expand conditional expressions. */
/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */
void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}
9037 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
9047 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9048 the result is zero, or IF_TRUE_LABEL if the result is one.
9049 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9050 meaning fall through in that case.
9052 do_jump always does any pending stack adjust except when it does not
9053 actually perform a jump. An example where there is no jump
9054 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9056 This function is responsible for optimizing cases such as
9057 &&, || and comparison operators in EXP. */
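/* Illustrative sketch (editor's note, not part of the original source):
   for

     if (a && b) f ();

   the TRUTH_ANDIF_EXPR case below first emits a jump-if-false on A to
   the false label and then tests B the same way, so neither operand is
   ever materialized as a 0/1 value in a register.  */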
9060 do_jump (exp, if_false_label, if_true_label)
9062 rtx if_false_label, if_true_label;
9064 enum tree_code code = TREE_CODE (exp);
9065 /* Some cases need to create a label to jump to
9066 in order to properly fall through.
9067 These cases set DROP_THROUGH_LABEL nonzero. */
9068 rtx drop_through_label = 0;
9072 enum machine_mode mode;
9074 #ifdef MAX_INTEGER_COMPUTATION_MODE
9075 check_max_integer_computation_mode (exp);
9086 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9092 /* This is not true with #pragma weak */
9094 /* The address of something can never be zero. */
if (if_true_label)
  emit_jump (if_true_label);
9101 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9102 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9103 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9104 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
/* If we are narrowing the operand, we have to do the compare in the
   narrower type.  */
9109 if ((TYPE_PRECISION (TREE_TYPE (exp))
9110 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9112 case NON_LVALUE_EXPR:
9113 case REFERENCE_EXPR:
9118 /* These cannot change zero->non-zero or vice versa. */
9119 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9122 case WITH_RECORD_EXPR:
9123 /* Put the object on the placeholder list, recurse through our first
9124 operand, and pop the list. */
9125 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9127 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9128 placeholder_list = TREE_CHAIN (placeholder_list);
/* This is never fewer insns than evaluating the PLUS_EXPR followed by
   a test and can be longer if the test is eliminated.  */
9135 /* Reduce to minus. */
9136 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9137 TREE_OPERAND (exp, 0),
9138 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9139 TREE_OPERAND (exp, 1))));
9140 /* Process as MINUS. */
9144 /* Non-zero iff operands of minus differ. */
9145 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9146 TREE_OPERAND (exp, 0),
9147 TREE_OPERAND (exp, 1)),
9148 NE, NE, if_false_label, if_true_label);
9152 /* If we are AND'ing with a small constant, do this comparison in the
9153 smallest type that fits. If the machine doesn't have comparisons
9154 that small, it will be converted back to the wider comparison.
9155 This helps if we are testing the sign bit of a narrower object.
9156 combine can't do this for us because it can't know whether a
9157 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
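/* Illustrative sketch (editor's note, not part of the original source):
   for an int X,  if (x & 0x80)  can be narrowed to a QImode test of the
   low byte, whose sign bit is exactly the bit being examined; that is
   often a shorter instruction than the full-width test.  */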
9159 if (! SLOW_BYTE_ACCESS
9160 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9161 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9162 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9163 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9164 && (type = type_for_mode (mode, 1)) != 0
9165 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9166 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9167 != CODE_FOR_nothing))
9169 do_jump (convert (type, exp), if_false_label, if_true_label);
9174 case TRUTH_NOT_EXPR:
9175 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9178 case TRUTH_ANDIF_EXPR:
9179 if (if_false_label == 0)
9180 if_false_label = drop_through_label = gen_label_rtx ();
9181 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9182 start_cleanup_deferral ();
9183 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9184 end_cleanup_deferral ();
9187 case TRUTH_ORIF_EXPR:
9188 if (if_true_label == 0)
9189 if_true_label = drop_through_label = gen_label_rtx ();
9190 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9191 start_cleanup_deferral ();
9192 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9193 end_cleanup_deferral ();
9198 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9199 preserve_temp_slots (NULL_RTX);
9203 do_pending_stack_adjust ();
9204 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9210 case ARRAY_RANGE_REF:
9212 HOST_WIDE_INT bitsize, bitpos;
9214 enum machine_mode mode;
9219 /* Get description of this reference. We don't actually care
9220 about the underlying object here. */
9221 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9222 &unsignedp, &volatilep);
9224 type = type_for_size (bitsize, unsignedp);
9225 if (! SLOW_BYTE_ACCESS
9226 && type != 0 && bitsize >= 0
9227 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9228 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9229 != CODE_FOR_nothing))
9231 do_jump (convert (type, exp), if_false_label, if_true_label);
9238 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9239 if (integer_onep (TREE_OPERAND (exp, 1))
9240 && integer_zerop (TREE_OPERAND (exp, 2)))
9241 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9243 else if (integer_zerop (TREE_OPERAND (exp, 1))
9244 && integer_onep (TREE_OPERAND (exp, 2)))
9245 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9249 rtx label1 = gen_label_rtx ();
9250 drop_through_label = gen_label_rtx ();
9252 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9254 start_cleanup_deferral ();
9255 /* Now the THEN-expression. */
9256 do_jump (TREE_OPERAND (exp, 1),
9257 if_false_label ? if_false_label : drop_through_label,
9258 if_true_label ? if_true_label : drop_through_label);
9259 /* In case the do_jump just above never jumps. */
9260 do_pending_stack_adjust ();
9261 emit_label (label1);
9263 /* Now the ELSE-expression. */
9264 do_jump (TREE_OPERAND (exp, 2),
9265 if_false_label ? if_false_label : drop_through_label,
9266 if_true_label ? if_true_label : drop_through_label);
9267 end_cleanup_deferral ();
9273 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9275 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9276 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9278 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9279 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9282 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9283 fold (build (EQ_EXPR, TREE_TYPE (exp),
9284 fold (build1 (REALPART_EXPR,
9285 TREE_TYPE (inner_type),
9287 fold (build1 (REALPART_EXPR,
9288 TREE_TYPE (inner_type),
9290 fold (build (EQ_EXPR, TREE_TYPE (exp),
9291 fold (build1 (IMAGPART_EXPR,
9292 TREE_TYPE (inner_type),
9294 fold (build1 (IMAGPART_EXPR,
9295 TREE_TYPE (inner_type),
9297 if_false_label, if_true_label);
9300 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9301 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9303 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9304 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9305 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9307 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9313 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9315 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9316 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9318 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9319 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9322 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9323 fold (build (NE_EXPR, TREE_TYPE (exp),
9324 fold (build1 (REALPART_EXPR,
9325 TREE_TYPE (inner_type),
9327 fold (build1 (REALPART_EXPR,
9328 TREE_TYPE (inner_type),
9330 fold (build (NE_EXPR, TREE_TYPE (exp),
9331 fold (build1 (IMAGPART_EXPR,
9332 TREE_TYPE (inner_type),
9334 fold (build1 (IMAGPART_EXPR,
9335 TREE_TYPE (inner_type),
9337 if_false_label, if_true_label);
9340 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9341 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9343 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9344 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9345 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9347 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9352 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9353 if (GET_MODE_CLASS (mode) == MODE_INT
9354 && ! can_compare_p (LT, mode, ccp_jump))
9355 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9357 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9361 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9362 if (GET_MODE_CLASS (mode) == MODE_INT
9363 && ! can_compare_p (LE, mode, ccp_jump))
9364 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9366 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9370 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9371 if (GET_MODE_CLASS (mode) == MODE_INT
9372 && ! can_compare_p (GT, mode, ccp_jump))
9373 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9375 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9379 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9380 if (GET_MODE_CLASS (mode) == MODE_INT
9381 && ! can_compare_p (GE, mode, ccp_jump))
9382 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9384 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9387 case UNORDERED_EXPR:
9390 enum rtx_code cmp, rcmp;
9393 if (code == UNORDERED_EXPR)
9394 cmp = UNORDERED, rcmp = ORDERED;
9396 cmp = ORDERED, rcmp = UNORDERED;
9397 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9400 if (! can_compare_p (cmp, mode, ccp_jump)
9401 && (can_compare_p (rcmp, mode, ccp_jump)
9402 /* If the target doesn't provide either UNORDERED or ORDERED
9403 comparisons, canonicalize on UNORDERED for the library. */
9404 || rcmp == UNORDERED))
9408 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9410 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9415 enum rtx_code rcode1;
9416 enum tree_code tcode2;
9440 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9441 if (can_compare_p (rcode1, mode, ccp_jump))
9442 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9446 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9447 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9450 /* If the target doesn't support combined unordered
9451 compares, decompose into UNORDERED + comparison. */
9452 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9453 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9454 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9455 do_jump (exp, if_false_label, if_true_label);
/* Recognize the special cases
   __builtin_expect (<test>, 0) and
   __builtin_expect (<test>, 1).
9464 We need to do this here, so that <test> is not converted to a SCC
9465 operation on machines that use condition code registers and COMPARE
9466 like the PowerPC, and then the jump is done based on whether the SCC
9467 operation produced a 1 or 0. */
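/* Illustrative sketch (editor's note, not part of the original source):
   for

     if (__builtin_expect (x == 0, 0)) slow_path ();

   the call is expanded directly as a conditional jump on  x == 0
   marked as unlikely, instead of first computing the comparison result
   into a register and jumping on that.  */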
9469 /* Check for a built-in function. */
9470 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9472 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9473 tree arglist = TREE_OPERAND (exp, 1);
9475 if (TREE_CODE (fndecl) == FUNCTION_DECL
9476 && DECL_BUILT_IN (fndecl)
9477 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9478 && arglist != NULL_TREE
9479 && TREE_CHAIN (arglist) != NULL_TREE)
9481 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9484 if (seq != NULL_RTX)
9491 /* fall through and generate the normal code. */
9495 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
/* This is not needed any more and causes poor code since it causes
   comparisons and tests from non-SI objects to have different code
   patterns.  */
9500 /* Copy to register to avoid generating bad insns by cse
9501 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9502 if (!cse_not_expected && GET_CODE (temp) == MEM)
9503 temp = copy_to_reg (temp);
9505 do_pending_stack_adjust ();
9506 /* Do any postincrements in the expression that was tested. */
9509 if (GET_CODE (temp) == CONST_INT
9510 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9511 || GET_CODE (temp) == LABEL_REF)
9513 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9517 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9518 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9519 /* Note swapping the labels gives us not-equal. */
9520 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9521 else if (GET_MODE (temp) != VOIDmode)
9522 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9523 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9524 GET_MODE (temp), NULL_RTX,
9525 if_false_label, if_true_label);
9530 if (drop_through_label)
9532 /* If do_jump produces code that might be jumped around,
9533 do any stack adjusts from that code, before the place
9534 where control merges in. */
9535 do_pending_stack_adjust ();
9536 emit_label (drop_through_label);
9540 /* Given a comparison expression EXP for values too wide to be compared
9541 with one insn, test the comparison and jump to the appropriate label.
9542 The code of EXP is ignored; we always test GT if SWAP is 0,
9543 and LT if SWAP is 1. */
9546 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9549 rtx if_false_label, if_true_label;
9551 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9552 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9553 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9554 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9556 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9559 /* Compare OP0 with OP1, word at a time, in mode MODE.
9560 UNSIGNEDP says to do unsigned comparison.
9561 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
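/* Illustrative sketch (editor's note, not part of the original source):
   comparing two DImode values on a 32-bit target proceeds roughly as

     if (hi0 > hi1) goto if_true_label;
     if (hi0 != hi1) goto if_false_label;
     if (lo0 > lo1) goto if_true_label;
     goto if_false_label;

   where only the high-order comparison honors the signedness; lower
   words are always compared unsigned.  */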
9564 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9565 enum machine_mode mode;
9568 rtx if_false_label, if_true_label;
9570 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9571 rtx drop_through_label = 0;
9574 if (! if_true_label || ! if_false_label)
9575 drop_through_label = gen_label_rtx ();
9576 if (! if_true_label)
9577 if_true_label = drop_through_label;
9578 if (! if_false_label)
9579 if_false_label = drop_through_label;
9581 /* Compare a word at a time, high order first. */
9582 for (i = 0; i < nwords; i++)
9584 rtx op0_word, op1_word;
9586 if (WORDS_BIG_ENDIAN)
9588 op0_word = operand_subword_force (op0, i, mode);
9589 op1_word = operand_subword_force (op1, i, mode);
9593 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9594 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
/* All but the high-order word must be compared as unsigned.  */
9598 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9599 (unsignedp || i > 0), word_mode, NULL_RTX,
9600 NULL_RTX, if_true_label);
9602 /* Consider lower words only if these are equal. */
9603 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9604 NULL_RTX, NULL_RTX, if_false_label);
if (if_false_label)
  emit_jump (if_false_label);
9609 if (drop_through_label)
9610 emit_label (drop_through_label);
9613 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9614 with one insn, test the comparison and jump to the appropriate label. */
9617 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9619 rtx if_false_label, if_true_label;
9621 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9622 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9623 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9624 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9626 rtx drop_through_label = 0;
9628 if (! if_false_label)
9629 drop_through_label = if_false_label = gen_label_rtx ();
9631 for (i = 0; i < nwords; i++)
9632 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9633 operand_subword_force (op1, i, mode),
9634 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9635 word_mode, NULL_RTX, if_false_label, NULL_RTX);
if (if_true_label)
  emit_jump (if_true_label);
9639 if (drop_through_label)
9640 emit_label (drop_through_label);
9643 /* Jump according to whether OP0 is 0.
9644 We assume that OP0 has an integer mode that is too wide
9645 for the available compare insns. */
9648 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9650 rtx if_false_label, if_true_label;
9652 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9655 rtx drop_through_label = 0;
9657 /* The fastest way of doing this comparison on almost any machine is to
9658 "or" all the words and compare the result. If all have to be loaded
9659 from memory and this is a very wide item, it's possible this may
9660 be slower, but that's highly unlikely. */
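/* Illustrative sketch (editor's note, not part of the original source):
   a DImode zero test on a 32-bit target becomes

     (hi | lo) == 0

   one IOR and a single word-sized compare instead of two compares and
   two branches.  */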
9662 part = gen_reg_rtx (word_mode);
9663 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9664 for (i = 1; i < nwords && part != 0; i++)
9665 part = expand_binop (word_mode, ior_optab, part,
9666 operand_subword_force (op0, i, GET_MODE (op0)),
9667 part, 1, OPTAB_WIDEN);
9671 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9672 NULL_RTX, if_false_label, if_true_label);
9677 /* If we couldn't do the "or" simply, do this with a series of compares. */
9678 if (! if_false_label)
9679 drop_through_label = if_false_label = gen_label_rtx ();
9681 for (i = 0; i < nwords; i++)
9682 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9683 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9684 if_false_label, NULL_RTX);
if (if_true_label)
  emit_jump (if_true_label);
9689 if (drop_through_label)
9690 emit_label (drop_through_label);
9693 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9694 (including code to compute the values to be compared)
9695 and set (CC0) according to the result.
9696 The decision as to signed or unsigned comparison must be made by the caller.
9698 We force a stack adjustment unless there are currently
9699 things pushed on the stack that aren't yet used.
If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
compared.  */
9705 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9709 enum machine_mode mode;
9714 /* If one operand is constant, make it the second one. Only do this
9715 if the other operand is not constant as well. */
9717 if (swap_commutative_operands_p (op0, op1))
9722 code = swap_condition (code);
9727 op0 = force_not_mem (op0);
9728 op1 = force_not_mem (op1);
9731 do_pending_stack_adjust ();
9733 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9734 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
/* There's no need to do this now that combine.c can eliminate lots of
   sign extensions.  This can be less efficient in certain cases on other
   machines.  */
9742 /* If this is a signed equality comparison, we can do it as an
9743 unsigned comparison since zero-extension is cheaper than sign
9744 extension and comparisons with zero are done as unsigned. This is
9745 the case even on machines that can do fast sign extension, since
9746 zero-extension is easier to combine with other operations than
9747 sign-extension is. If we are comparing against a constant, we must
9748 convert it to what it would look like unsigned. */
9749 if ((code == EQ || code == NE) && ! unsignedp
9750 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9752 if (GET_CODE (op1) == CONST_INT
9753 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9754 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9759 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9761 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9764 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9765 The decision as to signed or unsigned comparison must be made by the caller.
If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
compared.  */
9771 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9772 if_false_label, if_true_label)
9776 enum machine_mode mode;
9778 rtx if_false_label, if_true_label;
9781 int dummy_true_label = 0;
/* Reverse the comparison if that is safe and we want to jump if it is
   false.  */
9785 if (! if_true_label && ! FLOAT_MODE_P (mode))
9787 if_true_label = if_false_label;
9789 code = reverse_condition (code);
9792 /* If one operand is constant, make it the second one. Only do this
9793 if the other operand is not constant as well. */
9795 if (swap_commutative_operands_p (op0, op1))
9800 code = swap_condition (code);
9805 op0 = force_not_mem (op0);
9806 op1 = force_not_mem (op1);
9809 do_pending_stack_adjust ();
9811 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9812 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9814 if (tem == const_true_rtx)
{
  if (if_true_label)
    emit_jump (if_true_label);
}
else
{
  if (if_false_label)
    emit_jump (if_false_label);
}
return;
/* There's no need to do this now that combine.c can eliminate lots of
   sign extensions.  This can be less efficient in certain cases on other
   machines.  */
9832 /* If this is a signed equality comparison, we can do it as an
9833 unsigned comparison since zero-extension is cheaper than sign
9834 extension and comparisons with zero are done as unsigned. This is
9835 the case even on machines that can do fast sign extension, since
9836 zero-extension is easier to combine with other operations than
9837 sign-extension is. If we are comparing against a constant, we must
9838 convert it to what it would look like unsigned. */
9839 if ((code == EQ || code == NE) && ! unsignedp
9840 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9842 if (GET_CODE (op1) == CONST_INT
9843 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9844 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9849 if (! if_true_label)
9851 dummy_true_label = 1;
9852 if_true_label = gen_label_rtx ();
9855 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
if_true_label);

if (if_false_label)
  emit_jump (if_false_label);
9860 if (dummy_true_label)
9861 emit_label (if_true_label);
9864 /* Generate code for a comparison expression EXP (including code to compute
9865 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9866 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9867 generated code will drop through.
9868 SIGNED_CODE should be the rtx operation for this comparison for
9869 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9871 We force a stack adjustment unless there are currently
9872 things pushed on the stack that aren't yet used. */
9875 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9878 enum rtx_code signed_code, unsigned_code;
9879 rtx if_false_label, if_true_label;
9883 enum machine_mode mode;
9887 /* Don't crash if the comparison was erroneous. */
9888 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9889 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9892 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9893 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9896 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9897 mode = TYPE_MODE (type);
9898 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9899 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9900 || (GET_MODE_BITSIZE (mode)
> GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                        1)))))))
/* op0 might have been replaced by a promoted constant, in which
   case the type of the second argument should be used.  */
9906 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9907 mode = TYPE_MODE (type);
9909 unsignedp = TREE_UNSIGNED (type);
9910 code = unsignedp ? unsigned_code : signed_code;
9912 #ifdef HAVE_canonicalize_funcptr_for_compare
9913 /* If function pointers need to be "canonicalized" before they can
9914 be reliably compared, then canonicalize them. */
9915 if (HAVE_canonicalize_funcptr_for_compare
9916 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9917 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9920 rtx new_op0 = gen_reg_rtx (mode);
9922 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9926 if (HAVE_canonicalize_funcptr_for_compare
9927 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9928 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9931 rtx new_op1 = gen_reg_rtx (mode);
9933 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9938 /* Do any postincrements in the expression that was tested. */
9941 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
((mode == BLKmode)
 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9944 if_false_label, if_true_label);
9947 /* Generate code to calculate EXP using a store-flag instruction
9948 and return an rtx for the result. EXP is either a comparison
9949 or a TRUTH_NOT_EXPR whose operand is a comparison.
9951 If TARGET is nonzero, store the result there if convenient.
If ONLY_CHEAP is non-zero, only do this if it is likely to be very
cheap.
9956 Return zero if there is no suitable set-flag instruction
9957 available on this machine.
9959 Once expand_expr has been called on the arguments of the comparison,
9960 we are committed to doing the store flag, since it is not safe to
9961 re-evaluate the expression. We emit the store-flag insn by calling
9962 emit_store_flag, but only expand the arguments if we have a reason
9963 to believe that emit_store_flag will be successful. If we think that
9964 it will, but it isn't, we have to simulate the store-flag with a
9965 set/jump/set sequence. */
9968 do_store_flag (exp, target, mode, only_cheap)
9971 enum machine_mode mode;
9975 tree arg0, arg1, type;
9977 enum machine_mode operand_mode;
9981 enum insn_code icode;
9982 rtx subtarget = target;
9985 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9986 result at the end. We can't simply invert the test since it would
9987 have already been inverted if it were valid. This case occurs for
9988 some floating-point comparisons. */
9990 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9991 invert = 1, exp = TREE_OPERAND (exp, 0);
9993 arg0 = TREE_OPERAND (exp, 0);
9994 arg1 = TREE_OPERAND (exp, 1);
9996 /* Don't crash if the comparison was erroneous. */
9997 if (arg0 == error_mark_node || arg1 == error_mark_node)
10000 type = TREE_TYPE (arg0);
10001 operand_mode = TYPE_MODE (type);
10002 unsignedp = TREE_UNSIGNED (type);
10004 /* We won't bother with BLKmode store-flag operations because it would mean
10005 passing a lot of information to emit_store_flag. */
10006 if (operand_mode == BLKmode)
10009 /* We won't bother with store-flag operations involving function pointers
10010 when function pointers must be canonicalized before comparisons. */
10011 #ifdef HAVE_canonicalize_funcptr_for_compare
10012 if (HAVE_canonicalize_funcptr_for_compare
10013 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10014 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10016 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10017 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10018 == FUNCTION_TYPE))))
10025 /* Get the rtx comparison code to use. We know that EXP is a comparison
10026 operation of some type. Some comparisons against 1 and -1 can be
10027 converted to comparisons with zero. Do so here so that the tests
10028 below will be aware that we have a comparison with zero. These
10029 tests will not catch constants in the first operand, but constants
10030 are rarely passed as the first operand. */
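/* Illustrative sketch (editor's note, not part of the original source):
   for signed X,  x < 1  becomes  x <= 0  and  x > -1  becomes  x >= 0,
   so the code below only has to recognize comparisons with zero.  */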
10032 switch (TREE_CODE (exp))
10041 if (integer_onep (arg1))
10042 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10044 code = unsignedp ? LTU : LT;
10047 if (! unsignedp && integer_all_onesp (arg1))
10048 arg1 = integer_zero_node, code = LT;
10050 code = unsignedp ? LEU : LE;
10053 if (! unsignedp && integer_all_onesp (arg1))
10054 arg1 = integer_zero_node, code = GE;
10056 code = unsignedp ? GTU : GT;
10059 if (integer_onep (arg1))
10060 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10062 code = unsignedp ? GEU : GE;
10065 case UNORDERED_EXPR:
10091 /* Put a constant second. */
10092 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10094 tem = arg0; arg0 = arg1; arg1 = tem;
10095 code = swap_condition (code);
10098 /* If this is an equality or inequality test of a single bit, we can
10099 do this by shifting the bit being tested to the low-order bit and
10100 masking the result with the constant 1. If the condition was EQ,
10101 we xor it with 1. This does not require an scc insn and is faster
10102 than an scc insn even if we have it. */
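/* Illustrative sketch (editor's note, not part of the original source):

     (x & 8) != 0   becomes   (x >> 3) & 1
     (x & 8) == 0   becomes   ((x >> 3) & 1) ^ 1

   and when the tested bit is the sign bit of the operand's mode the
   final AND with 1 can be omitted entirely.  */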
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
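  /* Expand the operands, reusing SUBTARGET for ARG0 only when its mode
     matches and expanding ARG1 cannot clobber it.  */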
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
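/* That is, with a casesi insn the bounds check comes for free, so a
   dispatch table pays off at four case values instead of five.  */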
unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (type_for_size (index_bits, 0),
                                index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
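  /* Each operand of the casesi pattern must satisfy the predicate
     recorded in insn_data; when a predicate rejects a value, copy it
     into a register of the operand's mode.  */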
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
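  /* For instance, for a case range of [5, 12] the lower bound 5 has
     already been subtracted, so "INDEX - 5 <= 7" (unsigned) holds
     exactly when 5 <= INDEX <= 12; an INDEX below 5 wraps around to a
     huge unsigned value and fails the test.  */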
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
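  /* Compute the address of the table entry:
     TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE).  */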
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
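  /* Load the dispatch address from the table.  The table lives in
     read-only memory, so the load may be treated as unchanging.  */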
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;
  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
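  /* RANGE is compared against INDEX inside do_tablejump, so convert it
     to INDEX's mode, TYPE_MODE (index_type), first.  */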
  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}