1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
42 #include "typeclass.h"
45 #include "langhooks.h"
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
84 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
85 static tree placeholder_list = 0;
87 /* This structure is used by move_by_pieces to describe the move to
98 int explicit_inc_from;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
104 /* This structure is used by store_by_pieces to describe the clear to
107 struct store_by_pieces
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
120 extern struct obstack permanent_obstack;
122 static rtx enqueue_insn PARAMS ((rtx, rtx));
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124 PARAMS ((unsigned HOST_WIDE_INT,
126 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *));
128 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
130 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
132 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
134 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
136 struct store_by_pieces *));
137 static rtx get_subtarget PARAMS ((rtx));
138 static int is_zeros_p PARAMS ((tree));
139 static int mostly_zeros_p PARAMS ((tree));
140 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int));
143 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
145 HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree,
148 static rtx var_rtx PARAMS ((tree));
149 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150 static rtx expand_increment PARAMS ((tree, int, int));
151 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
152 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
153 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
155 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
157 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
159 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* If a memory-to-memory move would take MOVE_RATIO or more simple
169 move-instruction sequences, we will do a movstr or libcall instead. */
172 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
175 /* If we are optimizing for space (-Os), cut down the default move ratio. */
176 #define MOVE_RATIO (optimize_size ? 3 : 15)
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
187 /* This array records the insn_code of insns to perform block moves. */
188 enum insn_code movstr_optab[NUM_MACHINE_MODES];
190 /* This array records the insn_code of insns to perform block clears. */
191 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
193 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
195 #ifndef SLOW_UNALIGNED_ACCESS
196 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
199 /* This is run once per compilation to set up which modes can be used
200 directly in memory and to initialize the block move optab. */
206 enum machine_mode mode;
212 /* Try indexing by frame ptr and try by stack ptr.
213 It is known that on the Convex the stack ptr isn't a valid index.
214 With luck, one or the other is valid on any machine. */
215 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
216 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
218 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
219 pat = PATTERN (insn);
221 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
222 mode = (enum machine_mode) ((int) mode + 1))
227 direct_load[(int) mode] = direct_store[(int) mode] = 0;
228 PUT_MODE (mem, mode);
229 PUT_MODE (mem1, mode);
231 /* See if there is some register that can be used in this mode and
232 directly loaded or stored from memory. */
234 if (mode != VOIDmode && mode != BLKmode)
235 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
236 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
239 if (! HARD_REGNO_MODE_OK (regno, mode))
242 reg = gen_rtx_REG (mode, regno);
245 SET_DEST (pat) = reg;
246 if (recog (pat, insn, &num_clobbers) >= 0)
247 direct_load[(int) mode] = 1;
249 SET_SRC (pat) = mem1;
250 SET_DEST (pat) = reg;
251 if (recog (pat, insn, &num_clobbers) >= 0)
252 direct_load[(int) mode] = 1;
255 SET_DEST (pat) = mem;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_store[(int) mode] = 1;
260 SET_DEST (pat) = mem1;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_store[(int) mode] = 1;
269 /* This is run at the start of compiling a function. */
274 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
277 pending_stack_adjust = 0;
278 stack_pointer_delta = 0;
279 inhibit_defer_pop = 0;
281 apply_args_value = 0;
287 struct expr_status *p;
292 ggc_mark_rtx (p->x_saveregs_value);
293 ggc_mark_rtx (p->x_apply_args_value);
294 ggc_mark_rtx (p->x_forced_labels);
305 /* Small sanity check that the queue is empty at the end of a function. */
308 finish_expr_for_function ()
314 /* Manage the queue of increment instructions to be output
315 for POSTINCREMENT_EXPR expressions, etc. */
317 /* Queue up to increment (or change) VAR later. BODY says how:
318 BODY should be the same thing you would pass to emit_insn
319 to increment right away. It will go to emit_insn later on.
321 The value is a QUEUED expression to be used in place of VAR
322 where you want to guarantee the pre-incrementation value of VAR. */
325 enqueue_insn (var, body)
328 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
329 body, pending_chain);
330 return pending_chain;
333 /* Use protect_from_queue to convert a QUEUED expression
334 into something that you can put immediately into an instruction.
335 If the queued incrementation has not happened yet,
336 protect_from_queue returns the variable itself.
337 If the incrementation has happened, protect_from_queue returns a temp
338 that contains a copy of the old value of the variable.
340 Any time an rtx which might possibly be a QUEUED is to be put
341 into an instruction, it must be passed through protect_from_queue first.
342 QUEUED expressions are not meaningful in instructions.
344 Do not pass a value through protect_from_queue and then hold
345 on to it for a while before putting it in an instruction!
346 If the queue is flushed in between, incorrect code will result. */
349 protect_from_queue (x, modify)
353 RTX_CODE code = GET_CODE (x);
355 #if 0 /* A QUEUED can hang around after the queue is forced out. */
356 /* Shortcut for most common case. */
357 if (pending_chain == 0)
363 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
364 use of autoincrement. Make a copy of the contents of the memory
365 location rather than a copy of the address, but not if the value is
366 of mode BLKmode. Don't modify X in place since it might be
368 if (code == MEM && GET_MODE (x) != BLKmode
369 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
372 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
376 rtx temp = gen_reg_rtx (GET_MODE (x));
378 emit_insn_before (gen_move_insn (temp, new),
383 /* Copy the address into a pseudo, so that the returned value
384 remains correct across calls to emit_queue. */
385 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
388 /* Otherwise, recursively protect the subexpressions of all
389 the kinds of rtx's that can contain a QUEUED. */
392 rtx tem = protect_from_queue (XEXP (x, 0), 0);
393 if (tem != XEXP (x, 0))
399 else if (code == PLUS || code == MULT)
401 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
402 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
403 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
412 /* If the increment has not happened, use the variable itself. Copy it
413 into a new pseudo so that the value remains correct across calls to
415 if (QUEUED_INSN (x) == 0)
416 return copy_to_reg (QUEUED_VAR (x));
417 /* If the increment has happened and a pre-increment copy exists,
419 if (QUEUED_COPY (x) != 0)
420 return QUEUED_COPY (x);
421 /* The increment has happened but we haven't set up a pre-increment copy.
422 Set one up now, and use it. */
423 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
424 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
426 return QUEUED_COPY (x);
429 /* Return nonzero if X contains a QUEUED expression:
430 if it contains anything that will be altered by a queued increment.
431 We handle only combinations of MEM, PLUS, MINUS and MULT operators
432 since memory addresses generally contain only those. */
438 enum rtx_code code = GET_CODE (x);
444 return queued_subexp_p (XEXP (x, 0));
448 return (queued_subexp_p (XEXP (x, 0))
449 || queued_subexp_p (XEXP (x, 1)));
455 /* Perform all the pending incrementations. */
461 while ((p = pending_chain))
463 rtx body = QUEUED_BODY (p);
465 if (GET_CODE (body) == SEQUENCE)
467 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
468 emit_insn (QUEUED_BODY (p));
471 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
472 pending_chain = QUEUED_NEXT (p);
476 /* Copy data from FROM to TO, where the machine modes are not the same.
477 Both modes may be integer, or both may be floating.
478 UNSIGNEDP should be nonzero if FROM is an unsigned type.
479 This causes zero-extension instead of sign-extension. */
482 convert_move (to, from, unsignedp)
486 enum machine_mode to_mode = GET_MODE (to);
487 enum machine_mode from_mode = GET_MODE (from);
488 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
489 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
493 /* rtx code for making an equivalent value. */
494 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
496 to = protect_from_queue (to, 1);
497 from = protect_from_queue (from, 0);
499 if (to_real != from_real)
502 /* If FROM is a SUBREG that indicates that we have already done at least
503 the required extension, strip it. We don't handle such SUBREGs as
506 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
507 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
508 >= GET_MODE_SIZE (to_mode))
509 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
510 from = gen_lowpart (to_mode, from), from_mode = to_mode;
512 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
515 if (to_mode == from_mode
516 || (from_mode == VOIDmode && CONSTANT_P (from)))
518 emit_move_insn (to, from);
522 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
524 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
527 if (VECTOR_MODE_P (to_mode))
528 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
530 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
532 emit_move_insn (to, from);
536 if (to_real != from_real)
543 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
545 /* Try converting directly if the insn is supported. */
546 if ((code = can_extend_p (to_mode, from_mode, 0))
549 emit_unop_insn (code, to, from, UNKNOWN);
554 #ifdef HAVE_trunchfqf2
555 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
557 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
561 #ifdef HAVE_trunctqfqf2
562 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
564 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
568 #ifdef HAVE_truncsfqf2
569 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
571 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
575 #ifdef HAVE_truncdfqf2
576 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
578 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
582 #ifdef HAVE_truncxfqf2
583 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
589 #ifdef HAVE_trunctfqf2
590 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
597 #ifdef HAVE_trunctqfhf2
598 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
600 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
604 #ifdef HAVE_truncsfhf2
605 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
607 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
611 #ifdef HAVE_truncdfhf2
612 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
614 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
618 #ifdef HAVE_truncxfhf2
619 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
621 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
625 #ifdef HAVE_trunctfhf2
626 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncsftqf2
634 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
636 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
640 #ifdef HAVE_truncdftqf2
641 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
643 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxftqf2
648 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
650 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctftqf2
655 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
657 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
662 #ifdef HAVE_truncdfsf2
663 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
665 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
669 #ifdef HAVE_truncxfsf2
670 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
672 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
676 #ifdef HAVE_trunctfsf2
677 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
679 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
683 #ifdef HAVE_truncxfdf2
684 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
686 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
690 #ifdef HAVE_trunctfdf2
691 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
693 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
705 libcall = extendsfdf2_libfunc;
709 libcall = extendsfxf2_libfunc;
713 libcall = extendsftf2_libfunc;
725 libcall = truncdfsf2_libfunc;
729 libcall = extenddfxf2_libfunc;
733 libcall = extenddftf2_libfunc;
745 libcall = truncxfsf2_libfunc;
749 libcall = truncxfdf2_libfunc;
761 libcall = trunctfsf2_libfunc;
765 libcall = trunctfdf2_libfunc;
777 if (libcall == (rtx) 0)
778 /* This conversion is not implemented yet. */
782 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
784 insns = get_insns ();
786 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
791 /* Now both modes are integers. */
793 /* Handle expanding beyond a word. */
794 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
795 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
802 enum machine_mode lowpart_mode;
803 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
805 /* Try converting directly if the insn is supported. */
806 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
809 /* If FROM is a SUBREG, put it into a register. Do this
810 so that we always generate the same set of insns for
811 better cse'ing; if an intermediate assignment occurred,
812 we won't be doing the operation directly on the SUBREG. */
813 if (optimize > 0 && GET_CODE (from) == SUBREG)
814 from = force_reg (from_mode, from);
815 emit_unop_insn (code, to, from, equiv_code);
818 /* Next, try converting via full word. */
819 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
820 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
821 != CODE_FOR_nothing))
823 if (GET_CODE (to) == REG)
824 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
825 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
826 emit_unop_insn (code, to,
827 gen_lowpart (word_mode, to), equiv_code);
831 /* No special multiword conversion insn; do it by hand. */
834 /* Since we will turn this into a no conflict block, we must ensure
835 that the source does not overlap the target. */
837 if (reg_overlap_mentioned_p (to, from))
838 from = force_reg (from_mode, from);
840 /* Get a copy of FROM widened to a word, if necessary. */
841 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
842 lowpart_mode = word_mode;
844 lowpart_mode = from_mode;
846 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
848 lowpart = gen_lowpart (lowpart_mode, to);
849 emit_move_insn (lowpart, lowfrom);
851 /* Compute the value to put in each remaining word. */
853 fill_value = const0_rtx;
858 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
859 && STORE_FLAG_VALUE == -1)
861 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
863 fill_value = gen_reg_rtx (word_mode);
864 emit_insn (gen_slt (fill_value));
870 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
871 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
873 fill_value = convert_to_mode (word_mode, fill_value, 1);
877 /* Fill the remaining words. */
878 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
880 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
881 rtx subword = operand_subword (to, index, 1, to_mode);
886 if (fill_value != subword)
887 emit_move_insn (subword, fill_value);
890 insns = get_insns ();
893 emit_no_conflict_block (insns, to, from, NULL_RTX,
894 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
898 /* Truncating multi-word to a word or less. */
899 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
900 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
902 if (!((GET_CODE (from) == MEM
903 && ! MEM_VOLATILE_P (from)
904 && direct_load[(int) to_mode]
905 && ! mode_dependent_address_p (XEXP (from, 0)))
906 || GET_CODE (from) == REG
907 || GET_CODE (from) == SUBREG))
908 from = force_reg (from_mode, from);
909 convert_move (to, gen_lowpart (word_mode, from), 0);
913 /* Handle pointer conversion. */ /* SPEE 900220. */
914 if (to_mode == PQImode)
916 if (from_mode != QImode)
917 from = convert_to_mode (QImode, from, unsignedp);
919 #ifdef HAVE_truncqipqi2
920 if (HAVE_truncqipqi2)
922 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
925 #endif /* HAVE_truncqipqi2 */
929 if (from_mode == PQImode)
931 if (to_mode != QImode)
933 from = convert_to_mode (QImode, from, unsignedp);
938 #ifdef HAVE_extendpqiqi2
939 if (HAVE_extendpqiqi2)
941 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
944 #endif /* HAVE_extendpqiqi2 */
949 if (to_mode == PSImode)
951 if (from_mode != SImode)
952 from = convert_to_mode (SImode, from, unsignedp);
954 #ifdef HAVE_truncsipsi2
955 if (HAVE_truncsipsi2)
957 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
960 #endif /* HAVE_truncsipsi2 */
964 if (from_mode == PSImode)
966 if (to_mode != SImode)
968 from = convert_to_mode (SImode, from, unsignedp);
973 #ifdef HAVE_extendpsisi2
974 if (! unsignedp && HAVE_extendpsisi2)
976 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
979 #endif /* HAVE_extendpsisi2 */
980 #ifdef HAVE_zero_extendpsisi2
981 if (unsignedp && HAVE_zero_extendpsisi2)
983 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
986 #endif /* HAVE_zero_extendpsisi2 */
991 if (to_mode == PDImode)
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1002 #endif /* HAVE_truncdipdi2 */
1006 if (from_mode == PDImode)
1008 if (to_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1021 #endif /* HAVE_extendpdidi2 */
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1056 from = force_not_mem (from);
1058 emit_unop_insn (code, to, from, equiv_code);
1063 enum machine_mode intermediate;
1067 /* Search for a mode to convert via. */
1068 for (intermediate = from_mode; intermediate != VOIDmode;
1069 intermediate = GET_MODE_WIDER_MODE (intermediate))
1070 if (((can_extend_p (to_mode, intermediate, unsignedp)
1071 != CODE_FOR_nothing)
1072 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1073 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1074 GET_MODE_BITSIZE (intermediate))))
1075 && (can_extend_p (intermediate, from_mode, unsignedp)
1076 != CODE_FOR_nothing))
1078 convert_move (to, convert_to_mode (intermediate, from,
1079 unsignedp), unsignedp);
1083 /* No suitable intermediate mode.
1084 Generate what we need with shifts. */
1085 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1086 - GET_MODE_BITSIZE (from_mode), 0);
1087 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1088 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1090 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1093 emit_move_insn (to, tmp);
1098 /* Support special truncate insns for certain modes. */
1100 if (from_mode == DImode && to_mode == SImode)
1102 #ifdef HAVE_truncdisi2
1103 if (HAVE_truncdisi2)
1105 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1113 if (from_mode == DImode && to_mode == HImode)
1115 #ifdef HAVE_truncdihi2
1116 if (HAVE_truncdihi2)
1118 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 if (from_mode == DImode && to_mode == QImode)
1128 #ifdef HAVE_truncdiqi2
1129 if (HAVE_truncdiqi2)
1131 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 if (from_mode == SImode && to_mode == HImode)
1141 #ifdef HAVE_truncsihi2
1142 if (HAVE_truncsihi2)
1144 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 if (from_mode == SImode && to_mode == QImode)
1154 #ifdef HAVE_truncsiqi2
1155 if (HAVE_truncsiqi2)
1157 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 if (from_mode == HImode && to_mode == QImode)
1167 #ifdef HAVE_trunchiqi2
1168 if (HAVE_trunchiqi2)
1170 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 if (from_mode == TImode && to_mode == DImode)
1180 #ifdef HAVE_trunctidi2
1181 if (HAVE_trunctidi2)
1183 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 if (from_mode == TImode && to_mode == SImode)
1193 #ifdef HAVE_trunctisi2
1194 if (HAVE_trunctisi2)
1196 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 if (from_mode == TImode && to_mode == HImode)
1206 #ifdef HAVE_trunctihi2
1207 if (HAVE_trunctihi2)
1209 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 if (from_mode == TImode && to_mode == QImode)
1219 #ifdef HAVE_trunctiqi2
1220 if (HAVE_trunctiqi2)
1222 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1230 /* Handle truncation of volatile memrefs, and so on;
1231 the things that couldn't be truncated directly,
1232 and for which there was no special instruction. */
1233 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1235 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1236 emit_move_insn (to, temp);
1240 /* Mode combination is not recognized. */
1244 /* Return an rtx for a value that would result
1245 from converting X to mode MODE.
1246 Both X and MODE may be floating, or both integer.
1247 UNSIGNEDP is nonzero if X is an unsigned value.
1248 This can be done by referring to a part of X in place
1249 or by copying to a new temporary with conversion.
1251 This function *must not* call protect_from_queue
1252 except when putting X into an insn (in which case convert_move does it). */
1255 convert_to_mode (mode, x, unsignedp)
1256 enum machine_mode mode;
1260 return convert_modes (mode, VOIDmode, x, unsignedp);
1263 /* Return an rtx for a value that would result
1264 from converting X from mode OLDMODE to mode MODE.
1265 Both modes may be floating, or both integer.
1266 UNSIGNEDP is nonzero if X is an unsigned value.
1268 This can be done by referring to a part of X in place
1269 or by copying to a new temporary with conversion.
1271 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1273 This function *must not* call protect_from_queue
1274 except when putting X into an insn (in which case convert_move does it). */
1277 convert_modes (mode, oldmode, x, unsignedp)
1278 enum machine_mode mode, oldmode;
1284 /* If FROM is a SUBREG that indicates that we have already done at least
1285 the required extension, strip it. */
1287 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1288 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1289 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1290 x = gen_lowpart (mode, x);
1292 if (GET_MODE (x) != VOIDmode)
1293 oldmode = GET_MODE (x);
1295 if (mode == oldmode)
1298 /* There is one case that we must handle specially: If we are converting
1299 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1300 we are to interpret the constant as unsigned, gen_lowpart will do
1301 the wrong if the constant appears negative. What we want to do is
1302 make the high-order word of the constant zero, not all ones. */
1304 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1305 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1306 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1308 HOST_WIDE_INT val = INTVAL (x);
1310 if (oldmode != VOIDmode
1311 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1313 int width = GET_MODE_BITSIZE (oldmode);
1315 /* We need to zero extend VAL. */
1316 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1319 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1322 /* We can do this with a gen_lowpart if both desired and current modes
1323 are integer, and this is either a constant integer, a register, or a
1324 non-volatile MEM. Except for the constant case where MODE is no
1325 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1327 if ((GET_CODE (x) == CONST_INT
1328 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1329 || (GET_MODE_CLASS (mode) == MODE_INT
1330 && GET_MODE_CLASS (oldmode) == MODE_INT
1331 && (GET_CODE (x) == CONST_DOUBLE
1332 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1333 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1334 && direct_load[(int) mode])
1335 || (GET_CODE (x) == REG
1336 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1337 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1339 /* ?? If we don't know OLDMODE, we have to assume here that
1340 X does not need sign- or zero-extension. This may not be
1341 the case, but it's the best we can do. */
1342 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1343 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1345 HOST_WIDE_INT val = INTVAL (x);
1346 int width = GET_MODE_BITSIZE (oldmode);
1348 /* We must sign or zero-extend in this case. Start by
1349 zero-extending, then sign extend if we need to. */
1350 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1352 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1353 val |= (HOST_WIDE_INT) (-1) << width;
1355 return gen_int_mode (val, mode);
1358 return gen_lowpart (mode, x);
1361 temp = gen_reg_rtx (mode);
1362 convert_move (temp, x, unsignedp);
1366 /* This macro is used to determine what the largest unit size that
1367 move_by_pieces can use is. */
1369 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1370 move efficiently, as opposed to MOVE_MAX which is the maximum
1371 number of bytes we can move with a single instruction. */
1373 #ifndef MOVE_MAX_PIECES
1374 #define MOVE_MAX_PIECES MOVE_MAX
1377 /* Generate several move instructions to copy LEN bytes from block FROM to
1378 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1379 and TO through protect_from_queue before calling.
1381 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1382 used to push FROM to the stack.
1384 ALIGN is maximum alignment we can assume. */
1387 move_by_pieces (to, from, len, align)
1389 unsigned HOST_WIDE_INT len;
1392 struct move_by_pieces data;
1393 rtx to_addr, from_addr = XEXP (from, 0);
1394 unsigned int max_size = MOVE_MAX_PIECES + 1;
1395 enum machine_mode mode = VOIDmode, tmode;
1396 enum insn_code icode;
1399 data.from_addr = from_addr;
1402 to_addr = XEXP (to, 0);
1405 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1406 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1408 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1415 #ifdef STACK_GROWS_DOWNWARD
1421 data.to_addr = to_addr;
1424 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1425 || GET_CODE (from_addr) == POST_INC
1426 || GET_CODE (from_addr) == POST_DEC);
1428 data.explicit_inc_from = 0;
1429 data.explicit_inc_to = 0;
1430 if (data.reverse) data.offset = len;
1433 /* If copying requires more than two move insns,
1434 copy addresses to registers (to make displacements shorter)
1435 and use post-increment if available. */
1436 if (!(data.autinc_from && data.autinc_to)
1437 && move_by_pieces_ninsns (len, align) > 2)
1439 /* Find the mode of the largest move... */
1440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1442 if (GET_MODE_SIZE (tmode) < max_size)
1445 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1447 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = -1;
1451 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1453 data.from_addr = copy_addr_to_reg (from_addr);
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = 1;
1457 if (!data.autinc_from && CONSTANT_P (from_addr))
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1461 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1463 data.explicit_inc_to = -1;
1465 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1467 data.to_addr = copy_addr_to_reg (to_addr);
1469 data.explicit_inc_to = 1;
1471 if (!data.autinc_to && CONSTANT_P (to_addr))
1472 data.to_addr = copy_addr_to_reg (to_addr);
1475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1477 align = MOVE_MAX * BITS_PER_UNIT;
1479 /* First move what we can in the largest integer mode, then go to
1480 successively smaller modes. */
1482 while (max_size > 1)
1484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1486 if (GET_MODE_SIZE (tmode) < max_size)
1489 if (mode == VOIDmode)
1492 icode = mov_optab->handlers[(int) mode].insn_code;
1493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1494 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1496 max_size = GET_MODE_SIZE (mode);
1499 /* The code above should have handled everything. */
1504 /* Return number of insns required to move L bytes by pieces.
1505 ALIGN (in bits) is maximum alignment we can assume. */
1507 static unsigned HOST_WIDE_INT
1508 move_by_pieces_ninsns (l, align)
1509 unsigned HOST_WIDE_INT l;
1512 unsigned HOST_WIDE_INT n_insns = 0;
1513 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1515 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1516 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1517 align = MOVE_MAX * BITS_PER_UNIT;
1519 while (max_size > 1)
1521 enum machine_mode mode = VOIDmode, tmode;
1522 enum insn_code icode;
1524 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1525 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1526 if (GET_MODE_SIZE (tmode) < max_size)
1529 if (mode == VOIDmode)
1532 icode = mov_optab->handlers[(int) mode].insn_code;
1533 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1534 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1536 max_size = GET_MODE_SIZE (mode);
1544 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1545 with move instructions for mode MODE. GENFUN is the gen_... function
1546 to make a move insn for that mode. DATA has all the other info. */
1549 move_by_pieces_1 (genfun, mode, data)
1550 rtx (*genfun) PARAMS ((rtx, ...));
1551 enum machine_mode mode;
1552 struct move_by_pieces *data;
1554 unsigned int size = GET_MODE_SIZE (mode);
1555 rtx to1 = NULL_RTX, from1;
1557 while (data->len >= size)
1560 data->offset -= size;
1564 if (data->autinc_to)
1565 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1568 to1 = adjust_address (data->to, mode, data->offset);
1571 if (data->autinc_from)
1572 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1575 from1 = adjust_address (data->from, mode, data->offset);
1577 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1578 emit_insn (gen_add2_insn (data->to_addr,
1579 GEN_INT (-(HOST_WIDE_INT)size)));
1580 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1581 emit_insn (gen_add2_insn (data->from_addr,
1582 GEN_INT (-(HOST_WIDE_INT)size)));
1585 emit_insn ((*genfun) (to1, from1));
1588 #ifdef PUSH_ROUNDING
1589 emit_single_push_insn (mode, from1, NULL);
1595 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1596 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1597 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1598 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1600 if (! data->reverse)
1601 data->offset += size;
1607 /* Emit code to move a block Y to a block X.
1608 This may be done with string-move instructions,
1609 with multiple scalar move instructions, or with a library call.
1611 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1613 SIZE is an rtx that says how long they are.
1614 ALIGN is the maximum alignment we can assume they have.
1616 Return the address of the new block, if memcpy is called and returns it,
1620 emit_block_move (x, y, size)
1625 #ifdef TARGET_MEM_FUNCTIONS
1627 tree call_expr, arg_list;
1629 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1631 if (GET_MODE (x) != BLKmode)
1634 if (GET_MODE (y) != BLKmode)
1637 x = protect_from_queue (x, 1);
1638 y = protect_from_queue (y, 0);
1639 size = protect_from_queue (size, 0);
1641 if (GET_CODE (x) != MEM)
1643 if (GET_CODE (y) != MEM)
1648 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1649 move_by_pieces (x, y, INTVAL (size), align);
1652 /* Try the most limited insn first, because there's no point
1653 including more than one in the machine description unless
1654 the more limited one has some advantage. */
1656 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1657 enum machine_mode mode;
1659 /* Since this is a move insn, we don't care about volatility. */
1662 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1663 mode = GET_MODE_WIDER_MODE (mode))
1665 enum insn_code code = movstr_optab[(int) mode];
1666 insn_operand_predicate_fn pred;
1668 if (code != CODE_FOR_nothing
1669 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1670 here because if SIZE is less than the mode mask, as it is
1671 returned by the macro, it will definitely be less than the
1672 actual mode mask. */
1673 && ((GET_CODE (size) == CONST_INT
1674 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1675 <= (GET_MODE_MASK (mode) >> 1)))
1676 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1677 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1678 || (*pred) (x, BLKmode))
1679 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1680 || (*pred) (y, BLKmode))
1681 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1682 || (*pred) (opalign, VOIDmode)))
1685 rtx last = get_last_insn ();
1688 op2 = convert_to_mode (mode, size, 1);
1689 pred = insn_data[(int) code].operand[2].predicate;
1690 if (pred != 0 && ! (*pred) (op2, mode))
1691 op2 = copy_to_mode_reg (mode, op2);
1693 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1701 delete_insns_since (last);
1707 /* X, Y, or SIZE may have been passed through protect_from_queue.
1709 It is unsafe to save the value generated by protect_from_queue
1710 and reuse it later. Consider what happens if emit_queue is
1711 called before the return value from protect_from_queue is used.
1713 Expansion of the CALL_EXPR below will call emit_queue before
1714 we are finished emitting RTL for argument setup. So if we are
1715 not careful we could get the wrong value for an argument.
1717 To avoid this problem we go ahead and emit code to copy X, Y &
1718 SIZE into new pseudos. We can then place those new pseudos
1719 into an RTL_EXPR and use them later, even after a call to
1722 Note this is not strictly needed for library calls since they
1723 do not call emit_queue before loading their arguments. However,
1724 we may need to have library calls call emit_queue in the future
1725 since failing to do so could cause problems for targets which
1726 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1727 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1728 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1730 #ifdef TARGET_MEM_FUNCTIONS
1731 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1733 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1734 TREE_UNSIGNED (integer_type_node));
1735 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1738 #ifdef TARGET_MEM_FUNCTIONS
1739 /* It is incorrect to use the libcall calling conventions to call
1740 memcpy in this context.
1742 This could be a user call to memcpy and the user may wish to
1743 examine the return value from memcpy.
1745 For targets where libcalls and normal calls have different conventions
1746 for returning pointers, we could end up generating incorrect code.
1748 So instead of using a libcall sequence we build up a suitable
1749 CALL_EXPR and expand the call in the normal fashion. */
1750 if (fn == NULL_TREE)
1754 /* This was copied from except.c, I don't know if all this is
1755 necessary in this context or not. */
1756 fn = get_identifier ("memcpy");
1757 fntype = build_pointer_type (void_type_node);
1758 fntype = build_function_type (fntype, NULL_TREE);
1759 fn = build_decl (FUNCTION_DECL, fn, fntype);
1760 ggc_add_tree_root (&fn, 1);
1761 DECL_EXTERNAL (fn) = 1;
1762 TREE_PUBLIC (fn) = 1;
1763 DECL_ARTIFICIAL (fn) = 1;
1764 TREE_NOTHROW (fn) = 1;
1765 make_decl_rtl (fn, NULL);
1766 assemble_external (fn);
1769 /* We need to make an argument list for the function call.
1771 memcpy has three arguments, the first two are void * addresses and
1772 the last is a size_t byte count for the copy. */
1774 = build_tree_list (NULL_TREE,
1775 make_tree (build_pointer_type (void_type_node), x));
1776 TREE_CHAIN (arg_list)
1777 = build_tree_list (NULL_TREE,
1778 make_tree (build_pointer_type (void_type_node), y));
1779 TREE_CHAIN (TREE_CHAIN (arg_list))
1780 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1781 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1783 /* Now we have to build up the CALL_EXPR itself. */
1784 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1785 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1786 call_expr, arg_list, NULL_TREE);
1787 TREE_SIDE_EFFECTS (call_expr) = 1;
1789 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1791 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1792 VOIDmode, 3, y, Pmode, x, Pmode,
1793 convert_to_mode (TYPE_MODE (integer_type_node), size,
1794 TREE_UNSIGNED (integer_type_node)),
1795 TYPE_MODE (integer_type_node));
1798 /* If we are initializing a readonly value, show the above call
1799 clobbered it. Otherwise, a load from it may erroneously be hoisted
1801 if (RTX_UNCHANGING_P (x))
1802 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1808 /* Copy all or part of a value X into registers starting at REGNO.
1809 The number of registers to be filled is NREGS. */
1812 move_block_to_reg (regno, x, nregs, mode)
1816 enum machine_mode mode;
1819 #ifdef HAVE_load_multiple
1827 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1828 x = validize_mem (force_const_mem (mode, x));
1830 /* See if the machine can do this with a load multiple insn. */
1831 #ifdef HAVE_load_multiple
1832 if (HAVE_load_multiple)
1834 last = get_last_insn ();
1835 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1843 delete_insns_since (last);
1847 for (i = 0; i < nregs; i++)
1848 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1849 operand_subword_force (x, i, mode));
1852 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1853 The number of registers to be filled is NREGS. SIZE indicates the number
1854 of bytes in the object X. */
1857 move_block_from_reg (regno, x, nregs, size)
1864 #ifdef HAVE_store_multiple
1868 enum machine_mode mode;
1873 /* If SIZE is that of a mode no bigger than a word, just use that
1874 mode's store operation. */
1875 if (size <= UNITS_PER_WORD
1876 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1877 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1879 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1883 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1884 to the left before storing to memory. Note that the previous test
1885 doesn't handle all cases (e.g. SIZE == 3). */
1886 if (size < UNITS_PER_WORD
1888 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1890 rtx tem = operand_subword (x, 0, 1, BLKmode);
1896 shift = expand_shift (LSHIFT_EXPR, word_mode,
1897 gen_rtx_REG (word_mode, regno),
1898 build_int_2 ((UNITS_PER_WORD - size)
1899 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1900 emit_move_insn (tem, shift);
1904 /* See if the machine can do this with a store multiple insn. */
1905 #ifdef HAVE_store_multiple
1906 if (HAVE_store_multiple)
1908 last = get_last_insn ();
1909 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1917 delete_insns_since (last);
1921 for (i = 0; i < nregs; i++)
1923 rtx tem = operand_subword (x, i, 1, BLKmode);
1928 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1932 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1933 registers represented by a PARALLEL. SSIZE represents the total size of
1934 block SRC in bytes, or -1 if not known. */
1935 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1936 the balance will be in what would be the low-order memory addresses, i.e.
1937 left justified for big endian, right justified for little endian. This
1938 happens to be true for the targets currently using this support. If this
1939 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1943 emit_group_load (dst, orig_src, ssize)
1950 if (GET_CODE (dst) != PARALLEL)
1953 /* Check for a NULL entry, used to indicate that the parameter goes
1954 both on the stack and in registers. */
1955 if (XEXP (XVECEXP (dst, 0, 0), 0))
1960 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1962 /* Process the pieces. */
1963 for (i = start; i < XVECLEN (dst, 0); i++)
1965 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1966 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1967 unsigned int bytelen = GET_MODE_SIZE (mode);
1970 /* Handle trailing fragments that run over the size of the struct. */
1971 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1973 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1974 bytelen = ssize - bytepos;
1979 /* If we won't be loading directly from memory, protect the real source
1980 from strange tricks we might play; but make sure that the source can
1981 be loaded directly into the destination. */
1983 if (GET_CODE (orig_src) != MEM
1984 && (!CONSTANT_P (orig_src)
1985 || (GET_MODE (orig_src) != mode
1986 && GET_MODE (orig_src) != VOIDmode)))
1988 if (GET_MODE (orig_src) == VOIDmode)
1989 src = gen_reg_rtx (mode);
1991 src = gen_reg_rtx (GET_MODE (orig_src));
1993 emit_move_insn (src, orig_src);
1996 /* Optimize the access just a bit. */
1997 if (GET_CODE (src) == MEM
1998 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
1999 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2000 && bytelen == GET_MODE_SIZE (mode))
2002 tmps[i] = gen_reg_rtx (mode);
2003 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2005 else if (GET_CODE (src) == CONCAT)
2008 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2009 || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2010 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
2012 tmps[i] = XEXP (src, bytepos != 0);
2013 if (! CONSTANT_P (tmps[i])
2014 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2015 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2016 0, 1, NULL_RTX, mode, mode, ssize);
2018 else if (bytepos == 0)
2020 rtx mem = assign_stack_temp (GET_MODE (src),
2021 GET_MODE_SIZE (GET_MODE (src)), 0);
2022 emit_move_insn (mem, src);
2023 tmps[i] = adjust_address (mem, mode, 0);
2028 else if (CONSTANT_P (src)
2029 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2032 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2033 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2036 if (BYTES_BIG_ENDIAN && shift)
2037 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2038 tmps[i], 0, OPTAB_WIDEN);
2043 /* Copy the extracted pieces into the proper (probable) hard regs. */
2044 for (i = start; i < XVECLEN (dst, 0); i++)
2045 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2048 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2049 registers represented by a PARALLEL. SSIZE represents the total size of
2050 block DST, or -1 if not known. */
2053 emit_group_store (orig_dst, src, ssize)
2060 if (GET_CODE (src) != PARALLEL)
2063 /* Check for a NULL entry, used to indicate that the parameter goes
2064 both on the stack and in registers. */
2065 if (XEXP (XVECEXP (src, 0, 0), 0))
2070 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2072 /* Copy the (probable) hard regs into pseudos. */
2073 for (i = start; i < XVECLEN (src, 0); i++)
2075 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2076 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2077 emit_move_insn (tmps[i], reg);
2081 /* If we won't be storing directly into memory, protect the real destination
2082 from strange tricks we might play. */
2084 if (GET_CODE (dst) == PARALLEL)
2088 /* We can get a PARALLEL dst if there is a conditional expression in
2089 a return statement. In that case, the dst and src are the same,
2090 so no action is necessary. */
2091 if (rtx_equal_p (dst, src))
2094 /* It is unclear if we can ever reach here, but we may as well handle
2095 it. Allocate a temporary, and split this into a store/load to/from
2098 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2099 emit_group_store (temp, src, ssize);
2100 emit_group_load (dst, temp, ssize);
2103 else if (GET_CODE (dst) != MEM)
2105 dst = gen_reg_rtx (GET_MODE (orig_dst));
2106 /* Make life a bit easier for combine. */
2107 emit_move_insn (dst, const0_rtx);
2110 /* Process the pieces. */
2111 for (i = start; i < XVECLEN (src, 0); i++)
2113 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2114 enum machine_mode mode = GET_MODE (tmps[i]);
2115 unsigned int bytelen = GET_MODE_SIZE (mode);
2117 /* Handle trailing fragments that run over the size of the struct. */
2118 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2120 if (BYTES_BIG_ENDIAN)
2122 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2123 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2124 tmps[i], 0, OPTAB_WIDEN);
2126 bytelen = ssize - bytepos;
2129 /* Optimize the access just a bit. */
2130 if (GET_CODE (dst) == MEM
2131 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2132 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2133 && bytelen == GET_MODE_SIZE (mode))
2134 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2136 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2137 mode, tmps[i], ssize);
2142 /* Copy from the pseudo into the (probable) hard reg. */
2143 if (GET_CODE (dst) == REG)
2144 emit_move_insn (orig_dst, dst);
2147 /* Generate code to copy a BLKmode object of TYPE out of a
2148 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2149 is null, a stack temporary is created. TGTBLK is returned.
2151 The primary purpose of this routine is to handle functions
2152 that return BLKmode structures in registers. Some machines
2153 (the PA for example) want to return all small structures
2154 in registers regardless of the structure's alignment. */
2157 copy_blkmode_from_reg (tgtblk, srcreg, type)
2162 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2163 rtx src = NULL, dst = NULL;
2164 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2165 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2169 tgtblk = assign_temp (build_qualified_type (type,
2171 | TYPE_QUAL_CONST)),
2173 preserve_temp_slots (tgtblk);
2176 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2177 into a new pseudo which is a full word.
2179 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2180 the wrong part of the register gets copied so we fake a type conversion
2182 if (GET_MODE (srcreg) != BLKmode
2183 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2185 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2186 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2188 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2191 /* Structures whose size is not a multiple of a word are aligned
2192 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2193 machine, this means we must skip the empty high order bytes when
2194 calculating the bit offset. */
2195 if (BYTES_BIG_ENDIAN
2196 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2197 && bytes % UNITS_PER_WORD)
2198 big_endian_correction
2199 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2201 /* Copy the structure BITSIZE bites at a time.
2203 We could probably emit more efficient code for machines which do not use
2204 strict alignment, but it doesn't seem worth the effort at the current
2206 for (bitpos = 0, xbitpos = big_endian_correction;
2207 bitpos < bytes * BITS_PER_UNIT;
2208 bitpos += bitsize, xbitpos += bitsize)
2210 /* We need a new source operand each time xbitpos is on a
2211 word boundary and when xbitpos == big_endian_correction
2212 (the first time through). */
2213 if (xbitpos % BITS_PER_WORD == 0
2214 || xbitpos == big_endian_correction)
2215 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2218 /* We need a new destination operand each time bitpos is on
2220 if (bitpos % BITS_PER_WORD == 0)
2221 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2223 /* Use xbitpos for the source extraction (right justified) and
2224 xbitpos for the destination store (left justified). */
2225 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2226 extract_bit_field (src, bitsize,
2227 xbitpos % BITS_PER_WORD, 1,
2228 NULL_RTX, word_mode, word_mode,
2236 /* Add a USE expression for REG to the (possibly empty) list pointed
2237 to by CALL_FUSAGE. REG must denote a hard register. */
2240 use_reg (call_fusage, reg)
2241 rtx *call_fusage, reg;
2243 if (GET_CODE (reg) != REG
2244 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2248 = gen_rtx_EXPR_LIST (VOIDmode,
2249 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2252 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2253 starting at REGNO. All of these registers must be hard registers. */
2256 use_regs (call_fusage, regno, nregs)
2263 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2266 for (i = 0; i < nregs; i++)
2267 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2270 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2271 PARALLEL REGS. This is for calls that pass values in multiple
2272 non-contiguous locations. The Irix 6 ABI has examples of this. */
2275 use_group_regs (call_fusage, regs)
2281 for (i = 0; i < XVECLEN (regs, 0); i++)
2283 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2285 /* A NULL entry means the parameter goes both on the stack and in
2286 registers. This can also be a MEM for targets that pass values
2287 partially on the stack and partially in registers. */
2288 if (reg != 0 && GET_CODE (reg) == REG)
2289 use_reg (call_fusage, reg);
2295 can_store_by_pieces (len, constfun, constfundata, align)
2296 unsigned HOST_WIDE_INT len;
2297 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2301 unsigned HOST_WIDE_INT max_size, l;
2302 HOST_WIDE_INT offset = 0;
2303 enum machine_mode mode, tmode;
2304 enum insn_code icode;
2308 if (! MOVE_BY_PIECES_P (len, align))
2311 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2312 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2313 align = MOVE_MAX * BITS_PER_UNIT;
2315 /* We would first store what we can in the largest integer mode, then go to
2316 successively smaller modes. */
2319 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2324 max_size = MOVE_MAX_PIECES + 1;
2325 while (max_size > 1)
2327 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2328 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2329 if (GET_MODE_SIZE (tmode) < max_size)
2332 if (mode == VOIDmode)
2335 icode = mov_optab->handlers[(int) mode].insn_code;
2336 if (icode != CODE_FOR_nothing
2337 && align >= GET_MODE_ALIGNMENT (mode))
2339 unsigned int size = GET_MODE_SIZE (mode);
2346 cst = (*constfun) (constfundata, offset, mode);
2347 if (!LEGITIMATE_CONSTANT_P (cst))
2357 max_size = GET_MODE_SIZE (mode);
2360 /* The code above should have handled everything. */
2368 /* Generate several move instructions to store LEN bytes generated by
2369 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2370 pointer which will be passed as argument in every CONSTFUN call.
2371 ALIGN is maximum alignment we can assume. */
2374 store_by_pieces (to, len, constfun, constfundata, align)
2376 unsigned HOST_WIDE_INT len;
2377 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2381 struct store_by_pieces data;
2383 if (! MOVE_BY_PIECES_P (len, align))
2385 to = protect_from_queue (to, 1);
2386 data.constfun = constfun;
2387 data.constfundata = constfundata;
2390 store_by_pieces_1 (&data, align);
2393 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2394 rtx with BLKmode). The caller must pass TO through protect_from_queue
2395 before calling. ALIGN is maximum alignment we can assume. */
2398 clear_by_pieces (to, len, align)
2400 unsigned HOST_WIDE_INT len;
2403 struct store_by_pieces data;
2405 data.constfun = clear_by_pieces_1;
2406 data.constfundata = NULL;
2409 store_by_pieces_1 (&data, align);
2412 /* Callback routine for clear_by_pieces.
2413 Return const0_rtx unconditionally. */
2416 clear_by_pieces_1 (data, offset, mode)
2417 PTR data ATTRIBUTE_UNUSED;
2418 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2419 enum machine_mode mode ATTRIBUTE_UNUSED;
2424 /* Subroutine of clear_by_pieces and store_by_pieces.
2425 Generate several move instructions to store LEN bytes of block TO. (A MEM
2426 rtx with BLKmode). The caller must pass TO through protect_from_queue
2427 before calling. ALIGN is maximum alignment we can assume. */
2430 store_by_pieces_1 (data, align)
2431 struct store_by_pieces *data;
2434 rtx to_addr = XEXP (data->to, 0);
2435 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2436 enum machine_mode mode = VOIDmode, tmode;
2437 enum insn_code icode;
2440 data->to_addr = to_addr;
2442 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2443 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2445 data->explicit_inc_to = 0;
2447 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2449 data->offset = data->len;
2451 /* If storing requires more than two move insns,
2452 copy addresses to registers (to make displacements shorter)
2453 and use post-increment if available. */
2454 if (!data->autinc_to
2455 && move_by_pieces_ninsns (data->len, align) > 2)
2457 /* Determine the main mode we'll be using. */
2458 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2459 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2460 if (GET_MODE_SIZE (tmode) < max_size)
2463 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2465 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2466 data->autinc_to = 1;
2467 data->explicit_inc_to = -1;
2470 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2471 && ! data->autinc_to)
2473 data->to_addr = copy_addr_to_reg (to_addr);
2474 data->autinc_to = 1;
2475 data->explicit_inc_to = 1;
2478 if ( !data->autinc_to && CONSTANT_P (to_addr))
2479 data->to_addr = copy_addr_to_reg (to_addr);
2482 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2483 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2484 align = MOVE_MAX * BITS_PER_UNIT;
2486 /* First store what we can in the largest integer mode, then go to
2487 successively smaller modes. */
2489 while (max_size > 1)
2491 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2492 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2493 if (GET_MODE_SIZE (tmode) < max_size)
2496 if (mode == VOIDmode)
2499 icode = mov_optab->handlers[(int) mode].insn_code;
2500 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2501 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2503 max_size = GET_MODE_SIZE (mode);
2506 /* The code above should have handled everything. */
2511 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2512 with move instructions for mode MODE. GENFUN is the gen_... function
2513 to make a move insn for that mode. DATA has all the other info. */
2516 store_by_pieces_2 (genfun, mode, data)
2517 rtx (*genfun) PARAMS ((rtx, ...));
2518 enum machine_mode mode;
2519 struct store_by_pieces *data;
2521 unsigned int size = GET_MODE_SIZE (mode);
2524 while (data->len >= size)
2527 data->offset -= size;
2529 if (data->autinc_to)
2530 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2533 to1 = adjust_address (data->to, mode, data->offset);
2535 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2536 emit_insn (gen_add2_insn (data->to_addr,
2537 GEN_INT (-(HOST_WIDE_INT) size)));
2539 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2540 emit_insn ((*genfun) (to1, cst));
2542 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2543 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2545 if (! data->reverse)
2546 data->offset += size;
2552 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2553 its length in bytes. */
2556 clear_storage (object, size)
2560 #ifdef TARGET_MEM_FUNCTIONS
2562 tree call_expr, arg_list;
2565 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2566 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2568 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2569 just move a zero. Otherwise, do this a piece at a time. */
2570 if (GET_MODE (object) != BLKmode
2571 && GET_CODE (size) == CONST_INT
2572 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2573 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2576 object = protect_from_queue (object, 1);
2577 size = protect_from_queue (size, 0);
2579 if (GET_CODE (size) == CONST_INT
2580 && MOVE_BY_PIECES_P (INTVAL (size), align))
2581 clear_by_pieces (object, INTVAL (size), align);
2584 /* Try the most limited insn first, because there's no point
2585 including more than one in the machine description unless
2586 the more limited one has some advantage. */
2588 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2589 enum machine_mode mode;
2591 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2592 mode = GET_MODE_WIDER_MODE (mode))
2594 enum insn_code code = clrstr_optab[(int) mode];
2595 insn_operand_predicate_fn pred;
2597 if (code != CODE_FOR_nothing
2598 /* We don't need MODE to be narrower than
2599 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2600 the mode mask, as it is returned by the macro, it will
2601 definitely be less than the actual mode mask. */
2602 && ((GET_CODE (size) == CONST_INT
2603 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2604 <= (GET_MODE_MASK (mode) >> 1)))
2605 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2606 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2607 || (*pred) (object, BLKmode))
2608 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2609 || (*pred) (opalign, VOIDmode)))
2612 rtx last = get_last_insn ();
2615 op1 = convert_to_mode (mode, size, 1);
2616 pred = insn_data[(int) code].operand[1].predicate;
2617 if (pred != 0 && ! (*pred) (op1, mode))
2618 op1 = copy_to_mode_reg (mode, op1);
2620 pat = GEN_FCN ((int) code) (object, op1, opalign);
2627 delete_insns_since (last);
2631 /* OBJECT or SIZE may have been passed through protect_from_queue.
2633 It is unsafe to save the value generated by protect_from_queue
2634 and reuse it later. Consider what happens if emit_queue is
2635 called before the return value from protect_from_queue is used.
2637 Expansion of the CALL_EXPR below will call emit_queue before
2638 we are finished emitting RTL for argument setup. So if we are
2639 not careful we could get the wrong value for an argument.
2641 To avoid this problem we go ahead and emit code to copy OBJECT
2642 and SIZE into new pseudos. We can then place those new pseudos
2643 into an RTL_EXPR and use them later, even after a call to
2646 Note this is not strictly needed for library calls since they
2647 do not call emit_queue before loading their arguments. However,
2648 we may need to have library calls call emit_queue in the future
2649 since failing to do so could cause problems for targets which
2650 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2651 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2653 #ifdef TARGET_MEM_FUNCTIONS
2654 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2656 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2657 TREE_UNSIGNED (integer_type_node));
2658 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2661 #ifdef TARGET_MEM_FUNCTIONS
2662 /* It is incorrect to use the libcall calling conventions to call
2663 memset in this context.
2665 This could be a user call to memset and the user may wish to
2666 examine the return value from memset.
2668 For targets where libcalls and normal calls have different
2669 conventions for returning pointers, we could end up generating
2672 So instead of using a libcall sequence we build up a suitable
2673 CALL_EXPR and expand the call in the normal fashion. */
2674 if (fn == NULL_TREE)
2678 /* This was copied from except.c, I don't know if all this is
2679 necessary in this context or not. */
2680 fn = get_identifier ("memset");
2681 fntype = build_pointer_type (void_type_node);
2682 fntype = build_function_type (fntype, NULL_TREE);
2683 fn = build_decl (FUNCTION_DECL, fn, fntype);
2684 ggc_add_tree_root (&fn, 1);
2685 DECL_EXTERNAL (fn) = 1;
2686 TREE_PUBLIC (fn) = 1;
2687 DECL_ARTIFICIAL (fn) = 1;
2688 TREE_NOTHROW (fn) = 1;
2689 make_decl_rtl (fn, NULL);
2690 assemble_external (fn);
2693 /* We need to make an argument list for the function call.
2695 memset has three arguments, the first is a void * addresses, the
2696 second an integer with the initialization value, the last is a
2697 size_t byte count for the copy. */
2699 = build_tree_list (NULL_TREE,
2700 make_tree (build_pointer_type (void_type_node),
2702 TREE_CHAIN (arg_list)
2703 = build_tree_list (NULL_TREE,
2704 make_tree (integer_type_node, const0_rtx));
2705 TREE_CHAIN (TREE_CHAIN (arg_list))
2706 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2707 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2709 /* Now we have to build up the CALL_EXPR itself. */
2710 call_expr = build1 (ADDR_EXPR,
2711 build_pointer_type (TREE_TYPE (fn)), fn);
2712 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2713 call_expr, arg_list, NULL_TREE);
2714 TREE_SIDE_EFFECTS (call_expr) = 1;
2716 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2718 emit_library_call (bzero_libfunc, LCT_NORMAL,
2719 VOIDmode, 2, object, Pmode, size,
2720 TYPE_MODE (integer_type_node));
2723 /* If we are initializing a readonly value, show the above call
2724 clobbered it. Otherwise, a load from it may erroneously be
2725 hoisted from a loop. */
2726 if (RTX_UNCHANGING_P (object))
2727 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2734 /* Generate code to copy Y into X.
2735 Both Y and X must have the same mode, except that
2736 Y can be a constant with VOIDmode.
2737 This mode cannot be BLKmode; use emit_block_move for that.
2739 Return the last instruction emitted. */
2742 emit_move_insn (x, y)
2745 enum machine_mode mode = GET_MODE (x);
2746 rtx y_cst = NULL_RTX;
2749 x = protect_from_queue (x, 1);
2750 y = protect_from_queue (y, 0);
2752 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2755 /* Never force constant_p_rtx to memory. */
2756 if (GET_CODE (y) == CONSTANT_P_RTX)
2758 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2761 y = force_const_mem (mode, y);
2764 /* If X or Y are memory references, verify that their addresses are valid
2766 if (GET_CODE (x) == MEM
2767 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2768 && ! push_operand (x, GET_MODE (x)))
2770 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2771 x = validize_mem (x);
2773 if (GET_CODE (y) == MEM
2774 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2776 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2777 y = validize_mem (y);
2779 if (mode == BLKmode)
2782 last_insn = emit_move_insn_1 (x, y);
2784 if (y_cst && GET_CODE (x) == REG)
2785 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2790 /* Low level part of emit_move_insn.
2791 Called just like emit_move_insn, but assumes X and Y
2792 are basically valid. */
2795 emit_move_insn_1 (x, y)
2798 enum machine_mode mode = GET_MODE (x);
2799 enum machine_mode submode;
2800 enum mode_class class = GET_MODE_CLASS (mode);
2802 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2805 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2807 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2809 /* Expand complex moves by moving real part and imag part, if possible. */
2810 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2811 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2813 (class == MODE_COMPLEX_INT
2814 ? MODE_INT : MODE_FLOAT),
2816 && (mov_optab->handlers[(int) submode].insn_code
2817 != CODE_FOR_nothing))
2819 /* Don't split destination if it is a stack push. */
2820 int stack = push_operand (x, GET_MODE (x));
2822 #ifdef PUSH_ROUNDING
2823 /* In case we output to the stack, but the size is smaller machine can
2824 push exactly, we need to use move instructions. */
2826 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2827 != GET_MODE_SIZE (submode)))
2830 HOST_WIDE_INT offset1, offset2;
2832 /* Do not use anti_adjust_stack, since we don't want to update
2833 stack_pointer_delta. */
2834 temp = expand_binop (Pmode,
2835 #ifdef STACK_GROWS_DOWNWARD
2843 (GET_MODE_SIZE (GET_MODE (x)))),
2844 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2846 if (temp != stack_pointer_rtx)
2847 emit_move_insn (stack_pointer_rtx, temp);
2849 #ifdef STACK_GROWS_DOWNWARD
2851 offset2 = GET_MODE_SIZE (submode);
2853 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2854 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2855 + GET_MODE_SIZE (submode));
2858 emit_move_insn (change_address (x, submode,
2859 gen_rtx_PLUS (Pmode,
2861 GEN_INT (offset1))),
2862 gen_realpart (submode, y));
2863 emit_move_insn (change_address (x, submode,
2864 gen_rtx_PLUS (Pmode,
2866 GEN_INT (offset2))),
2867 gen_imagpart (submode, y));
2871 /* If this is a stack, push the highpart first, so it
2872 will be in the argument order.
2874 In that case, change_address is used only to convert
2875 the mode, not to change the address. */
2878 /* Note that the real part always precedes the imag part in memory
2879 regardless of machine's endianness. */
2880 #ifdef STACK_GROWS_DOWNWARD
2881 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2882 (gen_rtx_MEM (submode, XEXP (x, 0)),
2883 gen_imagpart (submode, y)));
2884 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2885 (gen_rtx_MEM (submode, XEXP (x, 0)),
2886 gen_realpart (submode, y)));
2888 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2889 (gen_rtx_MEM (submode, XEXP (x, 0)),
2890 gen_realpart (submode, y)));
2891 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2892 (gen_rtx_MEM (submode, XEXP (x, 0)),
2893 gen_imagpart (submode, y)));
2898 rtx realpart_x, realpart_y;
2899 rtx imagpart_x, imagpart_y;
2901 /* If this is a complex value with each part being smaller than a
2902 word, the usual calling sequence will likely pack the pieces into
2903 a single register. Unfortunately, SUBREG of hard registers only
2904 deals in terms of words, so we have a problem converting input
2905 arguments to the CONCAT of two registers that is used elsewhere
2906 for complex values. If this is before reload, we can copy it into
2907 memory and reload. FIXME, we should see about using extract and
2908 insert on integer registers, but complex short and complex char
2909 variables should be rarely used. */
2910 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2911 && (reload_in_progress | reload_completed) == 0)
2914 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2916 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2918 if (packed_dest_p || packed_src_p)
2920 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2921 ? MODE_FLOAT : MODE_INT);
2923 enum machine_mode reg_mode
2924 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2926 if (reg_mode != BLKmode)
2928 rtx mem = assign_stack_temp (reg_mode,
2929 GET_MODE_SIZE (mode), 0);
2930 rtx cmem = adjust_address (mem, mode, 0);
2933 = N_("function using short complex types cannot be inline");
2937 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2939 emit_move_insn_1 (cmem, y);
2940 return emit_move_insn_1 (sreg, mem);
2944 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2946 emit_move_insn_1 (mem, sreg);
2947 return emit_move_insn_1 (x, cmem);
2953 realpart_x = gen_realpart (submode, x);
2954 realpart_y = gen_realpart (submode, y);
2955 imagpart_x = gen_imagpart (submode, x);
2956 imagpart_y = gen_imagpart (submode, y);
2958 /* Show the output dies here. This is necessary for SUBREGs
2959 of pseudos since we cannot track their lifetimes correctly;
2960 hard regs shouldn't appear here except as return values.
2961 We never want to emit such a clobber after reload. */
2963 && ! (reload_in_progress || reload_completed)
2964 && (GET_CODE (realpart_x) == SUBREG
2965 || GET_CODE (imagpart_x) == SUBREG))
2966 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2968 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2969 (realpart_x, realpart_y));
2970 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2971 (imagpart_x, imagpart_y));
2974 return get_last_insn ();
2977 /* This will handle any multi-word mode that lacks a move_insn pattern.
2978 However, you will get better code if you define such patterns,
2979 even if they must turn into multiple assembler instructions. */
2980 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2987 #ifdef PUSH_ROUNDING
2989 /* If X is a push on the stack, do the push now and replace
2990 X with a reference to the stack pointer. */
2991 if (push_operand (x, GET_MODE (x)))
2996 /* Do not use anti_adjust_stack, since we don't want to update
2997 stack_pointer_delta. */
2998 temp = expand_binop (Pmode,
2999 #ifdef STACK_GROWS_DOWNWARD
3007 (GET_MODE_SIZE (GET_MODE (x)))),
3008 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3010 if (temp != stack_pointer_rtx)
3011 emit_move_insn (stack_pointer_rtx, temp);
3013 code = GET_CODE (XEXP (x, 0));
3015 /* Just hope that small offsets off SP are OK. */
3016 if (code == POST_INC)
3017 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3018 GEN_INT (-((HOST_WIDE_INT)
3019 GET_MODE_SIZE (GET_MODE (x)))));
3020 else if (code == POST_DEC)
3021 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3022 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3024 temp = stack_pointer_rtx;
3026 x = change_address (x, VOIDmode, temp);
3030 /* If we are in reload, see if either operand is a MEM whose address
3031 is scheduled for replacement. */
3032 if (reload_in_progress && GET_CODE (x) == MEM
3033 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3034 x = replace_equiv_address_nv (x, inner);
3035 if (reload_in_progress && GET_CODE (y) == MEM
3036 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3037 y = replace_equiv_address_nv (y, inner);
3043 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3046 rtx xpart = operand_subword (x, i, 1, mode);
3047 rtx ypart = operand_subword (y, i, 1, mode);
3049 /* If we can't get a part of Y, put Y into memory if it is a
3050 constant. Otherwise, force it into a register. If we still
3051 can't get a part of Y, abort. */
3052 if (ypart == 0 && CONSTANT_P (y))
3054 y = force_const_mem (mode, y);
3055 ypart = operand_subword (y, i, 1, mode);
3057 else if (ypart == 0)
3058 ypart = operand_subword_force (y, i, mode);
3060 if (xpart == 0 || ypart == 0)
3063 need_clobber |= (GET_CODE (xpart) == SUBREG);
3065 last_insn = emit_move_insn (xpart, ypart);
3068 seq = gen_sequence ();
3071 /* Show the output dies here. This is necessary for SUBREGs
3072 of pseudos since we cannot track their lifetimes correctly;
3073 hard regs shouldn't appear here except as return values.
3074 We never want to emit such a clobber after reload. */
3076 && ! (reload_in_progress || reload_completed)
3077 && need_clobber != 0)
3078 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3088 /* Pushing data onto the stack. */
3090 /* Push a block of length SIZE (perhaps variable)
3091 and return an rtx to address the beginning of the block.
3092 Note that it is not possible for the value returned to be a QUEUED.
3093 The value may be virtual_outgoing_args_rtx.
3095 EXTRA is the number of bytes of padding to push in addition to SIZE.
3096 BELOW nonzero means this padding comes at low addresses;
3097 otherwise, the padding comes at high addresses. */
3100 push_block (size, extra, below)
3106 size = convert_modes (Pmode, ptr_mode, size, 1);
3107 if (CONSTANT_P (size))
3108 anti_adjust_stack (plus_constant (size, extra));
3109 else if (GET_CODE (size) == REG && extra == 0)
3110 anti_adjust_stack (size);
3113 temp = copy_to_mode_reg (Pmode, size);
3115 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3116 temp, 0, OPTAB_LIB_WIDEN);
3117 anti_adjust_stack (temp);
3120 #ifndef STACK_GROWS_DOWNWARD
3126 temp = virtual_outgoing_args_rtx;
3127 if (extra != 0 && below)
3128 temp = plus_constant (temp, extra);
3132 if (GET_CODE (size) == CONST_INT)
3133 temp = plus_constant (virtual_outgoing_args_rtx,
3134 -INTVAL (size) - (below ? 0 : extra));
3135 else if (extra != 0 && !below)
3136 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3137 negate_rtx (Pmode, plus_constant (size, extra)));
3139 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3140 negate_rtx (Pmode, size));
3143 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3146 #ifdef PUSH_ROUNDING
3148 /* Emit single push insn. */
3151 emit_single_push_insn (mode, x, type)
3153 enum machine_mode mode;
3157 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3159 enum insn_code icode;
3160 insn_operand_predicate_fn pred;
3162 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3163 /* If there is push pattern, use it. Otherwise try old way of throwing
3164 MEM representing push operation to move expander. */
3165 icode = push_optab->handlers[(int) mode].insn_code;
3166 if (icode != CODE_FOR_nothing)
3168 if (((pred = insn_data[(int) icode].operand[0].predicate)
3169 && !((*pred) (x, mode))))
3170 x = force_reg (mode, x);
3171 emit_insn (GEN_FCN (icode) (x));
3174 if (GET_MODE_SIZE (mode) == rounded_size)
3175 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3178 #ifdef STACK_GROWS_DOWNWARD
3179 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3180 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3182 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3183 GEN_INT (rounded_size));
3185 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3188 dest = gen_rtx_MEM (mode, dest_addr);
3192 set_mem_attributes (dest, type, 1);
3194 if (flag_optimize_sibling_calls)
3195 /* Function incoming arguments may overlap with sibling call
3196 outgoing arguments and we cannot allow reordering of reads
3197 from function arguments with stores to outgoing arguments
3198 of sibling calls. */
3199 set_mem_alias_set (dest, 0);
3201 emit_move_insn (dest, x);
3205 /* Generate code to push X onto the stack, assuming it has mode MODE and
3207 MODE is redundant except when X is a CONST_INT (since they don't
3209 SIZE is an rtx for the size of data to be copied (in bytes),
3210 needed only if X is BLKmode.
3212 ALIGN (in bits) is maximum alignment we can assume.
3214 If PARTIAL and REG are both nonzero, then copy that many of the first
3215 words of X into registers starting with REG, and push the rest of X.
3216 The amount of space pushed is decreased by PARTIAL words,
3217 rounded *down* to a multiple of PARM_BOUNDARY.
3218 REG must be a hard register in this case.
3219 If REG is zero but PARTIAL is not, take any all others actions for an
3220 argument partially in registers, but do not actually load any
3223 EXTRA is the amount in bytes of extra space to leave next to this arg.
3224 This is ignored if an argument block has already been allocated.
3226 On a machine that lacks real push insns, ARGS_ADDR is the address of
3227 the bottom of the argument block for this call. We use indexing off there
3228 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3229 argument block has not been preallocated.
3231 ARGS_SO_FAR is the size of args previously pushed for this call.
3233 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3234 for arguments passed in registers. If nonzero, it will be the number
3235 of bytes required. */
3238 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3239 args_addr, args_so_far, reg_parm_stack_space,
3242 enum machine_mode mode;
3251 int reg_parm_stack_space;
3255 enum direction stack_direction
3256 #ifdef STACK_GROWS_DOWNWARD
3262 /* Decide where to pad the argument: `downward' for below,
3263 `upward' for above, or `none' for don't pad it.
3264 Default is below for small data on big-endian machines; else above. */
3265 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3267 /* Invert direction if stack is post-decrement.
3269 if (STACK_PUSH_CODE == POST_DEC)
3270 if (where_pad != none)
3271 where_pad = (where_pad == downward ? upward : downward);
3273 xinner = x = protect_from_queue (x, 0);
3275 if (mode == BLKmode)
3277 /* Copy a block into the stack, entirely or partially. */
3280 int used = partial * UNITS_PER_WORD;
3281 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3289 /* USED is now the # of bytes we need not copy to the stack
3290 because registers will take care of them. */
3293 xinner = adjust_address (xinner, BLKmode, used);
3295 /* If the partial register-part of the arg counts in its stack size,
3296 skip the part of stack space corresponding to the registers.
3297 Otherwise, start copying to the beginning of the stack space,
3298 by setting SKIP to 0. */
3299 skip = (reg_parm_stack_space == 0) ? 0 : used;
3301 #ifdef PUSH_ROUNDING
3302 /* Do it with several push insns if that doesn't take lots of insns
3303 and if there is no difficulty with push insns that skip bytes
3304 on the stack for alignment purposes. */
3307 && GET_CODE (size) == CONST_INT
3309 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3310 /* Here we avoid the case of a structure whose weak alignment
3311 forces many pushes of a small amount of data,
3312 and such small pushes do rounding that causes trouble. */
3313 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3314 || align >= BIGGEST_ALIGNMENT
3315 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3316 == (align / BITS_PER_UNIT)))
3317 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3319 /* Push padding now if padding above and stack grows down,
3320 or if padding below and stack grows up.
3321 But if space already allocated, this has already been done. */
3322 if (extra && args_addr == 0
3323 && where_pad != none && where_pad != stack_direction)
3324 anti_adjust_stack (GEN_INT (extra));
3326 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3329 #endif /* PUSH_ROUNDING */
3333 /* Otherwise make space on the stack and copy the data
3334 to the address of that space. */
3336 /* Deduct words put into registers from the size we must copy. */
3339 if (GET_CODE (size) == CONST_INT)
3340 size = GEN_INT (INTVAL (size) - used);
3342 size = expand_binop (GET_MODE (size), sub_optab, size,
3343 GEN_INT (used), NULL_RTX, 0,
3347 /* Get the address of the stack space.
3348 In this case, we do not deal with EXTRA separately.
3349 A single stack adjust will do. */
3352 temp = push_block (size, extra, where_pad == downward);
3355 else if (GET_CODE (args_so_far) == CONST_INT)
3356 temp = memory_address (BLKmode,
3357 plus_constant (args_addr,
3358 skip + INTVAL (args_so_far)));
3360 temp = memory_address (BLKmode,
3361 plus_constant (gen_rtx_PLUS (Pmode,
3365 target = gen_rtx_MEM (BLKmode, temp);
3369 set_mem_attributes (target, type, 1);
3370 /* Function incoming arguments may overlap with sibling call
3371 outgoing arguments and we cannot allow reordering of reads
3372 from function arguments with stores to outgoing arguments
3373 of sibling calls. */
3374 set_mem_alias_set (target, 0);
3377 set_mem_align (target, align);
3379 /* TEMP is the address of the block. Copy the data there. */
3380 if (GET_CODE (size) == CONST_INT
3381 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3383 move_by_pieces (target, xinner, INTVAL (size), align);
3388 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3389 enum machine_mode mode;
3391 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3393 mode = GET_MODE_WIDER_MODE (mode))
3395 enum insn_code code = movstr_optab[(int) mode];
3396 insn_operand_predicate_fn pred;
3398 if (code != CODE_FOR_nothing
3399 && ((GET_CODE (size) == CONST_INT
3400 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3401 <= (GET_MODE_MASK (mode) >> 1)))
3402 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3403 && (!(pred = insn_data[(int) code].operand[0].predicate)
3404 || ((*pred) (target, BLKmode)))
3405 && (!(pred = insn_data[(int) code].operand[1].predicate)
3406 || ((*pred) (xinner, BLKmode)))
3407 && (!(pred = insn_data[(int) code].operand[3].predicate)
3408 || ((*pred) (opalign, VOIDmode))))
3410 rtx op2 = convert_to_mode (mode, size, 1);
3411 rtx last = get_last_insn ();
3414 pred = insn_data[(int) code].operand[2].predicate;
3415 if (pred != 0 && ! (*pred) (op2, mode))
3416 op2 = copy_to_mode_reg (mode, op2);
3418 pat = GEN_FCN ((int) code) (target, xinner,
3426 delete_insns_since (last);
3431 if (!ACCUMULATE_OUTGOING_ARGS)
3433 /* If the source is referenced relative to the stack pointer,
3434 copy it to another register to stabilize it. We do not need
3435 to do this if we know that we won't be changing sp. */
3437 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3438 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3439 temp = copy_to_reg (temp);
3442 /* Make inhibit_defer_pop nonzero around the library call
3443 to force it to pop the bcopy-arguments right away. */
3445 #ifdef TARGET_MEM_FUNCTIONS
3446 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3447 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3448 convert_to_mode (TYPE_MODE (sizetype),
3449 size, TREE_UNSIGNED (sizetype)),
3450 TYPE_MODE (sizetype));
3452 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3453 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3454 convert_to_mode (TYPE_MODE (integer_type_node),
3456 TREE_UNSIGNED (integer_type_node)),
3457 TYPE_MODE (integer_type_node));
3462 else if (partial > 0)
3464 /* Scalar partly in registers. */
3466 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3469 /* # words of start of argument
3470 that we must make space for but need not store. */
3471 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3472 int args_offset = INTVAL (args_so_far);
3475 /* Push padding now if padding above and stack grows down,
3476 or if padding below and stack grows up.
3477 But if space already allocated, this has already been done. */
3478 if (extra && args_addr == 0
3479 && where_pad != none && where_pad != stack_direction)
3480 anti_adjust_stack (GEN_INT (extra));
3482 /* If we make space by pushing it, we might as well push
3483 the real data. Otherwise, we can leave OFFSET nonzero
3484 and leave the space uninitialized. */
3488 /* Now NOT_STACK gets the number of words that we don't need to
3489 allocate on the stack. */
3490 not_stack = partial - offset;
3492 /* If the partial register-part of the arg counts in its stack size,
3493 skip the part of stack space corresponding to the registers.
3494 Otherwise, start copying to the beginning of the stack space,
3495 by setting SKIP to 0. */
3496 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3498 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3499 x = validize_mem (force_const_mem (mode, x));
3501 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3502 SUBREGs of such registers are not allowed. */
3503 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3504 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3505 x = copy_to_reg (x);
3507 /* Loop over all the words allocated on the stack for this arg. */
3508 /* We can do it by words, because any scalar bigger than a word
3509 has a size a multiple of a word. */
3510 #ifndef PUSH_ARGS_REVERSED
3511 for (i = not_stack; i < size; i++)
3513 for (i = size - 1; i >= not_stack; i--)
3515 if (i >= not_stack + offset)
3516 emit_push_insn (operand_subword_force (x, i, mode),
3517 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3519 GEN_INT (args_offset + ((i - not_stack + skip)
3521 reg_parm_stack_space, alignment_pad);
3526 rtx target = NULL_RTX;
3529 /* Push padding now if padding above and stack grows down,
3530 or if padding below and stack grows up.
3531 But if space already allocated, this has already been done. */
3532 if (extra && args_addr == 0
3533 && where_pad != none && where_pad != stack_direction)
3534 anti_adjust_stack (GEN_INT (extra));
3536 #ifdef PUSH_ROUNDING
3537 if (args_addr == 0 && PUSH_ARGS)
3538 emit_single_push_insn (mode, x, type);
3542 if (GET_CODE (args_so_far) == CONST_INT)
3544 = memory_address (mode,
3545 plus_constant (args_addr,
3546 INTVAL (args_so_far)));
3548 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3551 dest = gen_rtx_MEM (mode, addr);
3554 set_mem_attributes (dest, type, 1);
3555 /* Function incoming arguments may overlap with sibling call
3556 outgoing arguments and we cannot allow reordering of reads
3557 from function arguments with stores to outgoing arguments
3558 of sibling calls. */
3559 set_mem_alias_set (dest, 0);
3562 emit_move_insn (dest, x);
3568 /* If part should go in registers, copy that part
3569 into the appropriate registers. Do this now, at the end,
3570 since mem-to-mem copies above may do function calls. */
3571 if (partial > 0 && reg != 0)
3573 /* Handle calls that pass values in multiple non-contiguous locations.
3574 The Irix 6 ABI has examples of this. */
3575 if (GET_CODE (reg) == PARALLEL)
3576 emit_group_load (reg, x, -1); /* ??? size? */
3578 move_block_to_reg (REGNO (reg), x, partial, mode);
3581 if (extra && args_addr == 0 && where_pad == stack_direction)
3582 anti_adjust_stack (GEN_INT (extra));
3584 if (alignment_pad && args_addr == 0)
3585 anti_adjust_stack (alignment_pad);
3588 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3596 /* Only registers can be subtargets. */
3597 || GET_CODE (x) != REG
3598 /* If the register is readonly, it can't be set more than once. */
3599 || RTX_UNCHANGING_P (x)
3600 /* Don't use hard regs to avoid extending their life. */
3601 || REGNO (x) < FIRST_PSEUDO_REGISTER
3602 /* Avoid subtargets inside loops,
3603 since they hide some invariant expressions. */
3604 || preserve_subexpressions_p ())
3608 /* Expand an assignment that stores the value of FROM into TO.
3609 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3610 (This may contain a QUEUED rtx;
3611 if the value is constant, this rtx is a constant.)
3612 Otherwise, the returned value is NULL_RTX.
3614 SUGGEST_REG is no longer actually used.
3615 It used to mean, copy the value through a register
3616 and return that register, if that is possible.
3617 We now use WANT_VALUE to decide whether to do this. */
3620 expand_assignment (to, from, want_value, suggest_reg)
3623 int suggest_reg ATTRIBUTE_UNUSED;
3628 /* Don't crash if the lhs of the assignment was erroneous. */
3630 if (TREE_CODE (to) == ERROR_MARK)
3632 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3633 return want_value ? result : NULL_RTX;
3636 /* Assignment of a structure component needs special treatment
3637 if the structure component's rtx is not simply a MEM.
3638 Assignment of an array element at a constant index, and assignment of
3639 an array element in an unaligned packed structure field, has the same
3642 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3643 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3645 enum machine_mode mode1;
3646 HOST_WIDE_INT bitsize, bitpos;
3654 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3655 &unsignedp, &volatilep);
3657 /* If we are going to use store_bit_field and extract_bit_field,
3658 make sure to_rtx will be safe for multiple use. */
3660 if (mode1 == VOIDmode && want_value)
3661 tem = stabilize_reference (tem);
3663 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3667 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3669 if (GET_CODE (to_rtx) != MEM)
3672 if (GET_MODE (offset_rtx) != ptr_mode)
3673 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3675 #ifdef POINTERS_EXTEND_UNSIGNED
3676 if (GET_MODE (offset_rtx) != Pmode)
3677 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3680 /* A constant address in TO_RTX can have VOIDmode, we must not try
3681 to call force_reg for that case. Avoid that case. */
3682 if (GET_CODE (to_rtx) == MEM
3683 && GET_MODE (to_rtx) == BLKmode
3684 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3686 && (bitpos % bitsize) == 0
3687 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3688 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3690 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3694 to_rtx = offset_address (to_rtx, offset_rtx,
3695 highest_pow2_factor (offset));
3698 if (GET_CODE (to_rtx) == MEM)
3700 tree old_expr = MEM_EXPR (to_rtx);
3702 /* If the field is at offset zero, we could have been given the
3703 DECL_RTX of the parent struct. Don't munge it. */
3704 to_rtx = shallow_copy_rtx (to_rtx);
3706 set_mem_attributes (to_rtx, to, 0);
3708 /* If we changed MEM_EXPR, that means we're now referencing
3709 the COMPONENT_REF, which means that MEM_OFFSET must be
3710 relative to that field. But we've not yet reflected BITPOS
3711 in TO_RTX. This will be done in store_field. Adjust for
3712 that by biasing MEM_OFFSET by -bitpos. */
3713 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3714 && (bitpos / BITS_PER_UNIT) != 0)
3715 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3716 - (bitpos / BITS_PER_UNIT)));
3719 /* Deal with volatile and readonly fields. The former is only done
3720 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3721 if (volatilep && GET_CODE (to_rtx) == MEM)
3723 if (to_rtx == orig_to_rtx)
3724 to_rtx = copy_rtx (to_rtx);
3725 MEM_VOLATILE_P (to_rtx) = 1;
3728 if (TREE_CODE (to) == COMPONENT_REF
3729 && TREE_READONLY (TREE_OPERAND (to, 1)))
3731 if (to_rtx == orig_to_rtx)
3732 to_rtx = copy_rtx (to_rtx);
3733 RTX_UNCHANGING_P (to_rtx) = 1;
3736 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3738 if (to_rtx == orig_to_rtx)
3739 to_rtx = copy_rtx (to_rtx);
3740 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3743 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3745 /* Spurious cast for HPUX compiler. */
3746 ? ((enum machine_mode)
3747 TYPE_MODE (TREE_TYPE (to)))
3749 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3751 preserve_temp_slots (result);
3755 /* If the value is meaningful, convert RESULT to the proper mode.
3756 Otherwise, return nothing. */
3757 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3758 TYPE_MODE (TREE_TYPE (from)),
3760 TREE_UNSIGNED (TREE_TYPE (to)))
3764 /* If the rhs is a function call and its value is not an aggregate,
3765 call the function before we start to compute the lhs.
3766 This is needed for correct code for cases such as
3767 val = setjmp (buf) on machines where reference to val
3768 requires loading up part of an address in a separate insn.
3770 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3771 since it might be a promoted variable where the zero- or sign- extension
3772 needs to be done. Handling this in the normal way is safe because no
3773 computation is done before the call. */
3774 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3775 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3776 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3777 && GET_CODE (DECL_RTL (to)) == REG))
3782 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3784 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3786 /* Handle calls that return values in multiple non-contiguous locations.
3787 The Irix 6 ABI has examples of this. */
3788 if (GET_CODE (to_rtx) == PARALLEL)
3789 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3790 else if (GET_MODE (to_rtx) == BLKmode)
3791 emit_block_move (to_rtx, value, expr_size (from));
3794 #ifdef POINTERS_EXTEND_UNSIGNED
3795 if (POINTER_TYPE_P (TREE_TYPE (to))
3796 && GET_MODE (to_rtx) != GET_MODE (value))
3797 value = convert_memory_address (GET_MODE (to_rtx), value);
3799 emit_move_insn (to_rtx, value);
3801 preserve_temp_slots (to_rtx);
3804 return want_value ? to_rtx : NULL_RTX;
3807 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3808 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3811 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3813 /* Don't move directly into a return register. */
3814 if (TREE_CODE (to) == RESULT_DECL
3815 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3820 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3822 if (GET_CODE (to_rtx) == PARALLEL)
3823 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3825 emit_move_insn (to_rtx, temp);
3827 preserve_temp_slots (to_rtx);
3830 return want_value ? to_rtx : NULL_RTX;
3833 /* In case we are returning the contents of an object which overlaps
3834 the place the value is being stored, use a safe function when copying
3835 a value through a pointer into a structure value return block. */
3836 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3837 && current_function_returns_struct
3838 && !current_function_returns_pcc_struct)
3843 size = expr_size (from);
3844 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3846 #ifdef TARGET_MEM_FUNCTIONS
3847 emit_library_call (memmove_libfunc, LCT_NORMAL,
3848 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3849 XEXP (from_rtx, 0), Pmode,
3850 convert_to_mode (TYPE_MODE (sizetype),
3851 size, TREE_UNSIGNED (sizetype)),
3852 TYPE_MODE (sizetype));
3854 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3855 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3856 XEXP (to_rtx, 0), Pmode,
3857 convert_to_mode (TYPE_MODE (integer_type_node),
3858 size, TREE_UNSIGNED (integer_type_node)),
3859 TYPE_MODE (integer_type_node));
3862 preserve_temp_slots (to_rtx);
3865 return want_value ? to_rtx : NULL_RTX;
3868 /* Compute FROM and store the value in the rtx we got. */
3871 result = store_expr (from, to_rtx, want_value);
3872 preserve_temp_slots (result);
3875 return want_value ? result : NULL_RTX;
3878 /* Generate code for computing expression EXP,
3879 and storing the value into TARGET.
3880 TARGET may contain a QUEUED rtx.
3882 If WANT_VALUE is nonzero, return a copy of the value
3883 not in TARGET, so that we can be sure to use the proper
3884 value in a containing expression even if TARGET has something
3885 else stored in it. If possible, we copy the value through a pseudo
3886 and return that pseudo. Or, if the value is constant, we try to
3887 return the constant. In some cases, we return a pseudo
3888 copied *from* TARGET.
3890 If the mode is BLKmode then we may return TARGET itself.
3891 It turns out that in BLKmode it doesn't cause a problem.
3892 because C has no operators that could combine two different
3893 assignments into the same BLKmode object with different values
3894 with no sequence point. Will other languages need this to
3897 If WANT_VALUE is 0, we return NULL, to make sure
3898 to catch quickly any cases where the caller uses the value
3899 and fails to set WANT_VALUE. */
3902 store_expr (exp, target, want_value)
3908 int dont_return_target = 0;
3909 int dont_store_target = 0;
3911 if (TREE_CODE (exp) == COMPOUND_EXPR)
3913 /* Perform first part of compound expression, then assign from second
3915 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3917 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3919 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3921 /* For conditional expression, get safe form of the target. Then
3922 test the condition, doing the appropriate assignment on either
3923 side. This avoids the creation of unnecessary temporaries.
3924 For non-BLKmode, it is more efficient not to do this. */
3926 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3929 target = protect_from_queue (target, 1);
3931 do_pending_stack_adjust ();
3933 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3934 start_cleanup_deferral ();
3935 store_expr (TREE_OPERAND (exp, 1), target, 0);
3936 end_cleanup_deferral ();
3938 emit_jump_insn (gen_jump (lab2));
3941 start_cleanup_deferral ();
3942 store_expr (TREE_OPERAND (exp, 2), target, 0);
3943 end_cleanup_deferral ();
3948 return want_value ? target : NULL_RTX;
3950 else if (queued_subexp_p (target))
3951 /* If target contains a postincrement, let's not risk
3952 using it as the place to generate the rhs. */
3954 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3956 /* Expand EXP into a new pseudo. */
3957 temp = gen_reg_rtx (GET_MODE (target));
3958 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3961 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3963 /* If target is volatile, ANSI requires accessing the value
3964 *from* the target, if it is accessed. So make that happen.
3965 In no case return the target itself. */
3966 if (! MEM_VOLATILE_P (target) && want_value)
3967 dont_return_target = 1;
3969 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3970 && GET_MODE (target) != BLKmode)
3971 /* If target is in memory and caller wants value in a register instead,
3972 arrange that. Pass TARGET as target for expand_expr so that,
3973 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3974 We know expand_expr will not use the target in that case.
3975 Don't do this if TARGET is volatile because we are supposed
3976 to write it and then read it. */
3978 temp = expand_expr (exp, target, GET_MODE (target), 0);
3979 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3981 /* If TEMP is already in the desired TARGET, only copy it from
3982 memory and don't store it there again. */
3984 || (rtx_equal_p (temp, target)
3985 && ! side_effects_p (temp) && ! side_effects_p (target)))
3986 dont_store_target = 1;
3987 temp = copy_to_reg (temp);
3989 dont_return_target = 1;
3991 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3992 /* If this is an scalar in a register that is stored in a wider mode
3993 than the declared mode, compute the result into its declared mode
3994 and then convert to the wider mode. Our value is the computed
3997 rtx inner_target = 0;
3999 /* If we don't want a value, we can do the conversion inside EXP,
4000 which will often result in some optimizations. Do the conversion
4001 in two steps: first change the signedness, if needed, then
4002 the extend. But don't do this if the type of EXP is a subtype
4003 of something else since then the conversion might involve
4004 more than just converting modes. */
4005 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4006 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4008 if (TREE_UNSIGNED (TREE_TYPE (exp))
4009 != SUBREG_PROMOTED_UNSIGNED_P (target))
4012 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4016 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4017 SUBREG_PROMOTED_UNSIGNED_P (target)),
4020 inner_target = SUBREG_REG (target);
4023 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4025 /* If TEMP is a volatile MEM and we want a result value, make
4026 the access now so it gets done only once. Likewise if
4027 it contains TARGET. */
4028 if (GET_CODE (temp) == MEM && want_value
4029 && (MEM_VOLATILE_P (temp)
4030 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4031 temp = copy_to_reg (temp);
4033 /* If TEMP is a VOIDmode constant, use convert_modes to make
4034 sure that we properly convert it. */
4035 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4037 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4038 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4039 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4040 GET_MODE (target), temp,
4041 SUBREG_PROMOTED_UNSIGNED_P (target));
4044 convert_move (SUBREG_REG (target), temp,
4045 SUBREG_PROMOTED_UNSIGNED_P (target));
4047 /* If we promoted a constant, change the mode back down to match
4048 target. Otherwise, the caller might get confused by a result whose
4049 mode is larger than expected. */
4051 if (want_value && GET_MODE (temp) != GET_MODE (target))
4053 if (GET_MODE (temp) != VOIDmode)
4055 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4056 SUBREG_PROMOTED_VAR_P (temp) = 1;
4057 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4058 SUBREG_PROMOTED_UNSIGNED_P (target));
4061 temp = convert_modes (GET_MODE (target),
4062 GET_MODE (SUBREG_REG (target)),
4063 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4066 return want_value ? temp : NULL_RTX;
4070 temp = expand_expr (exp, target, GET_MODE (target), 0);
4071 /* Return TARGET if it's a specified hardware register.
4072 If TARGET is a volatile mem ref, either return TARGET
4073 or return a reg copied *from* TARGET; ANSI requires this.
4075 Otherwise, if TEMP is not TARGET, return TEMP
4076 if it is constant (for efficiency),
4077 or if we really want the correct value. */
4078 if (!(target && GET_CODE (target) == REG
4079 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4080 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4081 && ! rtx_equal_p (temp, target)
4082 && (CONSTANT_P (temp) || want_value))
4083 dont_return_target = 1;
4086 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4087 the same as that of TARGET, adjust the constant. This is needed, for
4088 example, in case it is a CONST_DOUBLE and we want only a word-sized
4090 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4091 && TREE_CODE (exp) != ERROR_MARK
4092 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4093 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4094 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4096 /* If value was not generated in the target, store it there.
4097 Convert the value to TARGET's type first if necessary.
4098 If TEMP and TARGET compare equal according to rtx_equal_p, but
4099 one or both of them are volatile memory refs, we have to distinguish
4101 - expand_expr has used TARGET. In this case, we must not generate
4102 another copy. This can be detected by TARGET being equal according
4104 - expand_expr has not used TARGET - that means that the source just
4105 happens to have the same RTX form. Since temp will have been created
4106 by expand_expr, it will compare unequal according to == .
4107 We must generate a copy in this case, to reach the correct number
4108 of volatile memory references. */
4110 if ((! rtx_equal_p (temp, target)
4111 || (temp != target && (side_effects_p (temp)
4112 || side_effects_p (target))))
4113 && TREE_CODE (exp) != ERROR_MARK
4114 && ! dont_store_target)
4116 target = protect_from_queue (target, 1);
4117 if (GET_MODE (temp) != GET_MODE (target)
4118 && GET_MODE (temp) != VOIDmode)
4120 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4121 if (dont_return_target)
4123 /* In this case, we will return TEMP,
4124 so make sure it has the proper mode.
4125 But don't forget to store the value into TARGET. */
4126 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4127 emit_move_insn (target, temp);
4130 convert_move (target, temp, unsignedp);
4133 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4135 /* Handle copying a string constant into an array. The string
4136 constant may be shorter than the array. So copy just the string's
4137 actual length, and clear the rest. First get the size of the data
4138 type of the string, which is actually the size of the target. */
4139 rtx size = expr_size (exp);
4141 if (GET_CODE (size) == CONST_INT
4142 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4143 emit_block_move (target, temp, size);
4146 /* Compute the size of the data to copy from the string. */
4148 = size_binop (MIN_EXPR,
4149 make_tree (sizetype, size),
4150 size_int (TREE_STRING_LENGTH (exp)));
4151 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4155 /* Copy that much. */
4156 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4157 emit_block_move (target, temp, copy_size_rtx);
4159 /* Figure out how much is left in TARGET that we have to clear.
4160 Do all calculations in ptr_mode. */
4161 if (GET_CODE (copy_size_rtx) == CONST_INT)
4163 size = plus_constant (size, -INTVAL (copy_size_rtx));
4164 target = adjust_address (target, BLKmode,
4165 INTVAL (copy_size_rtx));
4169 size = expand_binop (ptr_mode, sub_optab, size,
4170 copy_size_rtx, NULL_RTX, 0,
4173 #ifdef POINTERS_EXTEND_UNSIGNED
4174 if (GET_MODE (copy_size_rtx) != Pmode)
4175 copy_size_rtx = convert_memory_address (Pmode,
4179 target = offset_address (target, copy_size_rtx,
4180 highest_pow2_factor (copy_size));
4181 label = gen_label_rtx ();
4182 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4183 GET_MODE (size), 0, label);
4186 if (size != const0_rtx)
4187 clear_storage (target, size);
4193 /* Handle calls that return values in multiple non-contiguous locations.
4194 The Irix 6 ABI has examples of this. */
4195 else if (GET_CODE (target) == PARALLEL)
4196 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4197 else if (GET_MODE (temp) == BLKmode)
4198 emit_block_move (target, temp, expr_size (exp));
4200 emit_move_insn (target, temp);
4203 /* If we don't want a value, return NULL_RTX. */
4207 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4208 ??? The latter test doesn't seem to make sense. */
4209 else if (dont_return_target && GET_CODE (temp) != MEM)
4212 /* Return TARGET itself if it is a hard register. */
4213 else if (want_value && GET_MODE (target) != BLKmode
4214 && ! (GET_CODE (target) == REG
4215 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4216 return copy_to_reg (target);
4222 /* Return 1 if EXP just contains zeros. */
4230 switch (TREE_CODE (exp))
4234 case NON_LVALUE_EXPR:
4235 case VIEW_CONVERT_EXPR:
4236 return is_zeros_p (TREE_OPERAND (exp, 0));
4239 return integer_zerop (exp);
4243 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4246 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4249 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4250 elt = TREE_CHAIN (elt))
4251 if (!is_zeros_p (TREE_VALUE (elt)))
4257 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4258 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4259 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4260 if (! is_zeros_p (TREE_VALUE (elt)))
4270 /* Return 1 if EXP contains mostly (3/4) zeros. */
4273 mostly_zeros_p (exp)
4276 if (TREE_CODE (exp) == CONSTRUCTOR)
4278 int elts = 0, zeros = 0;
4279 tree elt = CONSTRUCTOR_ELTS (exp);
4280 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4282 /* If there are no ranges of true bits, it is all zero. */
4283 return elt == NULL_TREE;
4285 for (; elt; elt = TREE_CHAIN (elt))
4287 /* We do not handle the case where the index is a RANGE_EXPR,
4288 so the statistic will be somewhat inaccurate.
4289 We do make a more accurate count in store_constructor itself,
4290 so since this function is only used for nested array elements,
4291 this should be close enough. */
4292 if (mostly_zeros_p (TREE_VALUE (elt)))
4297 return 4 * zeros >= 3 * elts;
4300 return is_zeros_p (exp);
4303 /* Helper function for store_constructor.
4304 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4305 TYPE is the type of the CONSTRUCTOR, not the element type.
4306 CLEARED is as for store_constructor.
4307 ALIAS_SET is the alias set to use for any stores.
4309 This provides a recursive shortcut back to store_constructor when it isn't
4310 necessary to go through store_field. This is so that we can pass through
4311 the cleared field to let store_constructor know that we may not have to
4312 clear a substructure if the outer structure has already been cleared. */
4315 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4318 unsigned HOST_WIDE_INT bitsize;
4319 HOST_WIDE_INT bitpos;
4320 enum machine_mode mode;
4325 if (TREE_CODE (exp) == CONSTRUCTOR
4326 && bitpos % BITS_PER_UNIT == 0
4327 /* If we have a non-zero bitpos for a register target, then we just
4328 let store_field do the bitfield handling. This is unlikely to
4329 generate unnecessary clear instructions anyways. */
4330 && (bitpos == 0 || GET_CODE (target) == MEM))
4332 if (GET_CODE (target) == MEM)
4334 = adjust_address (target,
4335 GET_MODE (target) == BLKmode
4337 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4338 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4341 /* Update the alias set, if required. */
4342 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4343 && MEM_ALIAS_SET (target) != 0)
4345 target = copy_rtx (target);
4346 set_mem_alias_set (target, alias_set);
4349 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4352 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4356 /* Store the value of constructor EXP into the rtx TARGET.
4357 TARGET is either a REG or a MEM; we know it cannot conflict, since
4358 safe_from_p has been called.
4359 CLEARED is true if TARGET is known to have been zero'd.
4360 SIZE is the number of bytes of TARGET we are allowed to modify: this
4361 may not be the same as the size of EXP if we are assigning to a field
4362 which has been packed to exclude padding bits. */
4365 store_constructor (exp, target, cleared, size)
4371 tree type = TREE_TYPE (exp);
4372 #ifdef WORD_REGISTER_OPERATIONS
4373 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4376 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4377 || TREE_CODE (type) == QUAL_UNION_TYPE)
4381 /* We either clear the aggregate or indicate the value is dead. */
4382 if ((TREE_CODE (type) == UNION_TYPE
4383 || TREE_CODE (type) == QUAL_UNION_TYPE)
4385 && ! CONSTRUCTOR_ELTS (exp))
4386 /* If the constructor is empty, clear the union. */
4388 clear_storage (target, expr_size (exp));
4392 /* If we are building a static constructor into a register,
4393 set the initial value as zero so we can fold the value into
4394 a constant. But if more than one register is involved,
4395 this probably loses. */
4396 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4397 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4399 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4403 /* If the constructor has fewer fields than the structure
4404 or if we are initializing the structure to mostly zeros,
4405 clear the whole structure first. Don't do this if TARGET is a
4406 register whose mode size isn't equal to SIZE since clear_storage
4407 can't handle this case. */
4408 else if (! cleared && size > 0
4409 && ((list_length (CONSTRUCTOR_ELTS (exp))
4410 != fields_length (type))
4411 || mostly_zeros_p (exp))
4412 && (GET_CODE (target) != REG
4413 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4416 clear_storage (target, GEN_INT (size));
4421 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4423 /* Store each element of the constructor into
4424 the corresponding field of TARGET. */
4426 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4428 tree field = TREE_PURPOSE (elt);
4429 tree value = TREE_VALUE (elt);
4430 enum machine_mode mode;
4431 HOST_WIDE_INT bitsize;
4432 HOST_WIDE_INT bitpos = 0;
4435 rtx to_rtx = target;
4437 /* Just ignore missing fields.
4438 We cleared the whole structure, above,
4439 if any fields are missing. */
4443 if (cleared && is_zeros_p (value))
4446 if (host_integerp (DECL_SIZE (field), 1))
4447 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4451 unsignedp = TREE_UNSIGNED (field);
4452 mode = DECL_MODE (field);
4453 if (DECL_BIT_FIELD (field))
4456 offset = DECL_FIELD_OFFSET (field);
4457 if (host_integerp (offset, 0)
4458 && host_integerp (bit_position (field), 0))
4460 bitpos = int_bit_position (field);
4464 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4470 if (contains_placeholder_p (offset))
4471 offset = build (WITH_RECORD_EXPR, sizetype,
4472 offset, make_tree (TREE_TYPE (exp), target));
4474 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4475 if (GET_CODE (to_rtx) != MEM)
4478 if (GET_MODE (offset_rtx) != ptr_mode)
4479 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4481 #ifdef POINTERS_EXTEND_UNSIGNED
4482 if (GET_MODE (offset_rtx) != Pmode)
4483 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4486 to_rtx = offset_address (to_rtx, offset_rtx,
4487 highest_pow2_factor (offset));
4490 if (TREE_READONLY (field))
4492 if (GET_CODE (to_rtx) == MEM)
4493 to_rtx = copy_rtx (to_rtx);
4495 RTX_UNCHANGING_P (to_rtx) = 1;
4498 #ifdef WORD_REGISTER_OPERATIONS
4499 /* If this initializes a field that is smaller than a word, at the
4500 start of a word, try to widen it to a full word.
4501 This special case allows us to output C++ member function
4502 initializations in a form that the optimizers can understand. */
4503 if (GET_CODE (target) == REG
4504 && bitsize < BITS_PER_WORD
4505 && bitpos % BITS_PER_WORD == 0
4506 && GET_MODE_CLASS (mode) == MODE_INT
4507 && TREE_CODE (value) == INTEGER_CST
4509 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4511 tree type = TREE_TYPE (value);
4513 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4515 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4516 value = convert (type, value);
4519 if (BYTES_BIG_ENDIAN)
4521 = fold (build (LSHIFT_EXPR, type, value,
4522 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4523 bitsize = BITS_PER_WORD;
4528 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4529 && DECL_NONADDRESSABLE_P (field))
4531 to_rtx = copy_rtx (to_rtx);
4532 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4535 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4536 value, type, cleared,
4537 get_alias_set (TREE_TYPE (field)));
4540 else if (TREE_CODE (type) == ARRAY_TYPE
4541 || TREE_CODE (type) == VECTOR_TYPE)
4546 tree domain = TYPE_DOMAIN (type);
4547 tree elttype = TREE_TYPE (type);
4549 HOST_WIDE_INT minelt = 0;
4550 HOST_WIDE_INT maxelt = 0;
4552 /* Vectors are like arrays, but the domain is stored via an array
4554 if (TREE_CODE (type) == VECTOR_TYPE)
4556 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4557 the same field as TYPE_DOMAIN, we are not guaranteed that
4559 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4560 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4563 const_bounds_p = (TYPE_MIN_VALUE (domain)
4564 && TYPE_MAX_VALUE (domain)
4565 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4566 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4568 /* If we have constant bounds for the range of the type, get them. */
4571 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4572 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4575 /* If the constructor has fewer elements than the array,
4576 clear the whole array first. Similarly if this is
4577 static constructor of a non-BLKmode object. */
4578 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4582 HOST_WIDE_INT count = 0, zero_count = 0;
4583 need_to_clear = ! const_bounds_p;
4585 /* This loop is a more accurate version of the loop in
4586 mostly_zeros_p (it handles RANGE_EXPR in an index).
4587 It is also needed to check for missing elements. */
4588 for (elt = CONSTRUCTOR_ELTS (exp);
4589 elt != NULL_TREE && ! need_to_clear;
4590 elt = TREE_CHAIN (elt))
4592 tree index = TREE_PURPOSE (elt);
4593 HOST_WIDE_INT this_node_count;
4595 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4597 tree lo_index = TREE_OPERAND (index, 0);
4598 tree hi_index = TREE_OPERAND (index, 1);
4600 if (! host_integerp (lo_index, 1)
4601 || ! host_integerp (hi_index, 1))
4607 this_node_count = (tree_low_cst (hi_index, 1)
4608 - tree_low_cst (lo_index, 1) + 1);
4611 this_node_count = 1;
4613 count += this_node_count;
4614 if (mostly_zeros_p (TREE_VALUE (elt)))
4615 zero_count += this_node_count;
4618 /* Clear the entire array first if there are any missing elements,
4619 or if the incidence of zero elements is >= 75%. */
4621 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4625 if (need_to_clear && size > 0)
4630 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4632 clear_storage (target, GEN_INT (size));
4636 else if (REG_P (target))
4637 /* Inform later passes that the old value is dead. */
4638 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4640 /* Store each element of the constructor into
4641 the corresponding element of TARGET, determined
4642 by counting the elements. */
4643 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4645 elt = TREE_CHAIN (elt), i++)
4647 enum machine_mode mode;
4648 HOST_WIDE_INT bitsize;
4649 HOST_WIDE_INT bitpos;
4651 tree value = TREE_VALUE (elt);
4652 tree index = TREE_PURPOSE (elt);
4653 rtx xtarget = target;
4655 if (cleared && is_zeros_p (value))
4658 unsignedp = TREE_UNSIGNED (elttype);
4659 mode = TYPE_MODE (elttype);
4660 if (mode == BLKmode)
4661 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4662 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4665 bitsize = GET_MODE_BITSIZE (mode);
4667 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4669 tree lo_index = TREE_OPERAND (index, 0);
4670 tree hi_index = TREE_OPERAND (index, 1);
4671 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4672 struct nesting *loop;
4673 HOST_WIDE_INT lo, hi, count;
4676 /* If the range is constant and "small", unroll the loop. */
4678 && host_integerp (lo_index, 0)
4679 && host_integerp (hi_index, 0)
4680 && (lo = tree_low_cst (lo_index, 0),
4681 hi = tree_low_cst (hi_index, 0),
4682 count = hi - lo + 1,
4683 (GET_CODE (target) != MEM
4685 || (host_integerp (TYPE_SIZE (elttype), 1)
4686 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4689 lo -= minelt; hi -= minelt;
4690 for (; lo <= hi; lo++)
4692 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4694 if (GET_CODE (target) == MEM
4695 && !MEM_KEEP_ALIAS_SET_P (target)
4696 && TREE_CODE (type) == ARRAY_TYPE
4697 && TYPE_NONALIASED_COMPONENT (type))
4699 target = copy_rtx (target);
4700 MEM_KEEP_ALIAS_SET_P (target) = 1;
4703 store_constructor_field
4704 (target, bitsize, bitpos, mode, value, type, cleared,
4705 get_alias_set (elttype));
4710 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4711 loop_top = gen_label_rtx ();
4712 loop_end = gen_label_rtx ();
4714 unsignedp = TREE_UNSIGNED (domain);
4716 index = build_decl (VAR_DECL, NULL_TREE, domain);
4719 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4721 SET_DECL_RTL (index, index_r);
4722 if (TREE_CODE (value) == SAVE_EXPR
4723 && SAVE_EXPR_RTL (value) == 0)
4725 /* Make sure value gets expanded once before the
4727 expand_expr (value, const0_rtx, VOIDmode, 0);
4730 store_expr (lo_index, index_r, 0);
4731 loop = expand_start_loop (0);
4733 /* Assign value to element index. */
4735 = convert (ssizetype,
4736 fold (build (MINUS_EXPR, TREE_TYPE (index),
4737 index, TYPE_MIN_VALUE (domain))));
4738 position = size_binop (MULT_EXPR, position,
4740 TYPE_SIZE_UNIT (elttype)));
4742 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4743 xtarget = offset_address (target, pos_rtx,
4744 highest_pow2_factor (position));
4745 xtarget = adjust_address (xtarget, mode, 0);
4746 if (TREE_CODE (value) == CONSTRUCTOR)
4747 store_constructor (value, xtarget, cleared,
4748 bitsize / BITS_PER_UNIT);
4750 store_expr (value, xtarget, 0);
4752 expand_exit_loop_if_false (loop,
4753 build (LT_EXPR, integer_type_node,
4756 expand_increment (build (PREINCREMENT_EXPR,
4758 index, integer_one_node), 0, 0);
4760 emit_label (loop_end);
4763 else if ((index != 0 && ! host_integerp (index, 0))
4764 || ! host_integerp (TYPE_SIZE (elttype), 1))
4769 index = ssize_int (1);
4772 index = convert (ssizetype,
4773 fold (build (MINUS_EXPR, index,
4774 TYPE_MIN_VALUE (domain))));
4776 position = size_binop (MULT_EXPR, index,
4778 TYPE_SIZE_UNIT (elttype)));
4779 xtarget = offset_address (target,
4780 expand_expr (position, 0, VOIDmode, 0),
4781 highest_pow2_factor (position));
4782 xtarget = adjust_address (xtarget, mode, 0);
4783 store_expr (value, xtarget, 0);
4788 bitpos = ((tree_low_cst (index, 0) - minelt)
4789 * tree_low_cst (TYPE_SIZE (elttype), 1));
4791 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4793 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4794 && TREE_CODE (type) == ARRAY_TYPE
4795 && TYPE_NONALIASED_COMPONENT (type))
4797 target = copy_rtx (target);
4798 MEM_KEEP_ALIAS_SET_P (target) = 1;
4801 store_constructor_field (target, bitsize, bitpos, mode, value,
4802 type, cleared, get_alias_set (elttype));
4808 /* Set constructor assignments. */
4809 else if (TREE_CODE (type) == SET_TYPE)
4811 tree elt = CONSTRUCTOR_ELTS (exp);
4812 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4813 tree domain = TYPE_DOMAIN (type);
4814 tree domain_min, domain_max, bitlength;
4816 /* The default implementation strategy is to extract the constant
4817 parts of the constructor, use that to initialize the target,
4818 and then "or" in whatever non-constant ranges we need in addition.
4820 If a large set is all zero or all ones, it is
4821 probably better to set it using memset (if available) or bzero.
4822 Also, if a large set has just a single range, it may also be
4823 better to first clear all the first clear the set (using
4824 bzero/memset), and set the bits we want. */
4826 /* Check for all zeros. */
4827 if (elt == NULL_TREE && size > 0)
4830 clear_storage (target, GEN_INT (size));
4834 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4835 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4836 bitlength = size_binop (PLUS_EXPR,
4837 size_diffop (domain_max, domain_min),
4840 nbits = tree_low_cst (bitlength, 1);
4842 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4843 are "complicated" (more than one range), initialize (the
4844 constant parts) by copying from a constant. */
4845 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4846 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4848 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4849 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4850 char *bit_buffer = (char *) alloca (nbits);
4851 HOST_WIDE_INT word = 0;
4852 unsigned int bit_pos = 0;
4853 unsigned int ibit = 0;
4854 unsigned int offset = 0; /* In bytes from beginning of set. */
4856 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4859 if (bit_buffer[ibit])
4861 if (BYTES_BIG_ENDIAN)
4862 word |= (1 << (set_word_size - 1 - bit_pos));
4864 word |= 1 << bit_pos;
4868 if (bit_pos >= set_word_size || ibit == nbits)
4870 if (word != 0 || ! cleared)
4872 rtx datum = GEN_INT (word);
4875 /* The assumption here is that it is safe to use
4876 XEXP if the set is multi-word, but not if
4877 it's single-word. */
4878 if (GET_CODE (target) == MEM)
4879 to_rtx = adjust_address (target, mode, offset);
4880 else if (offset == 0)
4884 emit_move_insn (to_rtx, datum);
4891 offset += set_word_size / BITS_PER_UNIT;
4896 /* Don't bother clearing storage if the set is all ones. */
4897 if (TREE_CHAIN (elt) != NULL_TREE
4898 || (TREE_PURPOSE (elt) == NULL_TREE
4900 : ( ! host_integerp (TREE_VALUE (elt), 0)
4901 || ! host_integerp (TREE_PURPOSE (elt), 0)
4902 || (tree_low_cst (TREE_VALUE (elt), 0)
4903 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4904 != (HOST_WIDE_INT) nbits))))
4905 clear_storage (target, expr_size (exp));
4907 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4909 /* Start of range of element or NULL. */
4910 tree startbit = TREE_PURPOSE (elt);
4911 /* End of range of element, or element value. */
4912 tree endbit = TREE_VALUE (elt);
4913 #ifdef TARGET_MEM_FUNCTIONS
4914 HOST_WIDE_INT startb, endb;
4916 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4918 bitlength_rtx = expand_expr (bitlength,
4919 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4921 /* Handle non-range tuple element like [ expr ]. */
4922 if (startbit == NULL_TREE)
4924 startbit = save_expr (endbit);
4928 startbit = convert (sizetype, startbit);
4929 endbit = convert (sizetype, endbit);
4930 if (! integer_zerop (domain_min))
4932 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4933 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4935 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4936 EXPAND_CONST_ADDRESS);
4937 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4938 EXPAND_CONST_ADDRESS);
4944 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4947 emit_move_insn (targetx, target);
4950 else if (GET_CODE (target) == MEM)
4955 #ifdef TARGET_MEM_FUNCTIONS
4956 /* Optimization: If startbit and endbit are
4957 constants divisible by BITS_PER_UNIT,
4958 call memset instead. */
4959 if (TREE_CODE (startbit) == INTEGER_CST
4960 && TREE_CODE (endbit) == INTEGER_CST
4961 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4962 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4964 emit_library_call (memset_libfunc, LCT_NORMAL,
4966 plus_constant (XEXP (targetx, 0),
4967 startb / BITS_PER_UNIT),
4969 constm1_rtx, TYPE_MODE (integer_type_node),
4970 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4971 TYPE_MODE (sizetype));
4975 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4976 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4977 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4978 startbit_rtx, TYPE_MODE (sizetype),
4979 endbit_rtx, TYPE_MODE (sizetype));
4982 emit_move_insn (target, targetx);
4990 /* Store the value of EXP (an expression tree)
4991 into a subfield of TARGET which has mode MODE and occupies
4992 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4993 If MODE is VOIDmode, it means that we are storing into a bit-field.
4995 If VALUE_MODE is VOIDmode, return nothing in particular.
4996 UNSIGNEDP is not used in this case.
4998 Otherwise, return an rtx for the value stored. This rtx
4999 has mode VALUE_MODE if that is convenient to do.
5000 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5002 TYPE is the type of the underlying object,
5004 ALIAS_SET is the alias set for the destination. This value will
5005 (in general) be different from that for TARGET, since TARGET is a
5006 reference to the containing structure. */
5009 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5012 HOST_WIDE_INT bitsize;
5013 HOST_WIDE_INT bitpos;
5014 enum machine_mode mode;
5016 enum machine_mode value_mode;
5021 HOST_WIDE_INT width_mask = 0;
5023 if (TREE_CODE (exp) == ERROR_MARK)
5026 /* If we have nothing to store, do nothing unless the expression has
5029 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5030 else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT)
5031 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5033 /* If we are storing into an unaligned field of an aligned union that is
5034 in a register, we may have the mode of TARGET being an integer mode but
5035 MODE == BLKmode. In that case, get an aligned object whose size and
5036 alignment are the same as TARGET and store TARGET into it (we can avoid
5037 the store if the field being stored is the entire width of TARGET). Then
5038 call ourselves recursively to store the field into a BLKmode version of
5039 that object. Finally, load from the object into TARGET. This is not
5040 very efficient in general, but should only be slightly more expensive
5041 than the otherwise-required unaligned accesses. Perhaps this can be
5042 cleaned up later. */
5045 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5049 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5051 rtx blk_object = adjust_address (object, BLKmode, 0);
5053 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5054 emit_move_insn (object, target);
5056 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5059 emit_move_insn (target, object);
5061 /* We want to return the BLKmode version of the data. */
5065 if (GET_CODE (target) == CONCAT)
5067 /* We're storing into a struct containing a single __complex. */
5071 return store_expr (exp, target, 0);
5074 /* If the structure is in a register or if the component
5075 is a bit field, we cannot use addressing to access it.
5076 Use bit-field techniques or SUBREG to store in it. */
5078 if (mode == VOIDmode
5079 || (mode != BLKmode && ! direct_store[(int) mode]
5080 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5081 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5082 || GET_CODE (target) == REG
5083 || GET_CODE (target) == SUBREG
5084 /* If the field isn't aligned enough to store as an ordinary memref,
5085 store it as a bit field. */
5086 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5087 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5088 || bitpos % GET_MODE_ALIGNMENT (mode)))
5089 /* If the RHS and field are a constant size and the size of the
5090 RHS isn't the same size as the bitfield, we must use bitfield
5093 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5094 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5096 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5098 /* If BITSIZE is narrower than the size of the type of EXP
5099 we will be narrowing TEMP. Normally, what's wanted are the
5100 low-order bits. However, if EXP's type is a record and this is
5101 big-endian machine, we want the upper BITSIZE bits. */
5102 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5103 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5104 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5105 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5106 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5110 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5112 if (mode != VOIDmode && mode != BLKmode
5113 && mode != TYPE_MODE (TREE_TYPE (exp)))
5114 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5116 /* If the modes of TARGET and TEMP are both BLKmode, both
5117 must be in memory and BITPOS must be aligned on a byte
5118 boundary. If so, we simply do a block copy. */
5119 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5121 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5122 || bitpos % BITS_PER_UNIT != 0)
5125 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5126 emit_block_move (target, temp,
5127 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5130 return value_mode == VOIDmode ? const0_rtx : target;
5133 /* Store the value in the bitfield. */
5134 store_bit_field (target, bitsize, bitpos, mode, temp,
5135 int_size_in_bytes (type));
5137 if (value_mode != VOIDmode)
5139 /* The caller wants an rtx for the value.
5140 If possible, avoid refetching from the bitfield itself. */
5142 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5145 enum machine_mode tmode;
5147 tmode = GET_MODE (temp);
5148 if (tmode == VOIDmode)
5152 return expand_and (tmode, temp,
5153 gen_int_mode (width_mask, tmode),
5156 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5157 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5158 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5161 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5162 NULL_RTX, value_mode, VOIDmode,
5163 int_size_in_bytes (type));
5169 rtx addr = XEXP (target, 0);
5170 rtx to_rtx = target;
5172 /* If a value is wanted, it must be the lhs;
5173 so make the address stable for multiple use. */
5175 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5176 && ! CONSTANT_ADDRESS_P (addr)
5177 /* A frame-pointer reference is already stable. */
5178 && ! (GET_CODE (addr) == PLUS
5179 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5180 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5181 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5182 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5184 /* Now build a reference to just the desired component. */
5186 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5188 if (to_rtx == target)
5189 to_rtx = copy_rtx (to_rtx);
5191 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5192 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5193 set_mem_alias_set (to_rtx, alias_set);
5195 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5199 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5200 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5201 codes and find the ultimate containing object, which we return.
5203 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5204 bit position, and *PUNSIGNEDP to the signedness of the field.
5205 If the position of the field is variable, we store a tree
5206 giving the variable offset (in units) in *POFFSET.
5207 This offset is in addition to the bit position.
5208 If the position is not variable, we store 0 in *POFFSET.
5210 If any of the extraction expressions is volatile,
5211 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5213 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5214 is a mode that can be used to access the field. In that case, *PBITSIZE
5217 If the field describes a variable-sized object, *PMODE is set to
5218 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5219 this case, but the address of the object can be found. */
5222 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5223 punsignedp, pvolatilep)
5225 HOST_WIDE_INT *pbitsize;
5226 HOST_WIDE_INT *pbitpos;
5228 enum machine_mode *pmode;
5233 enum machine_mode mode = VOIDmode;
5234 tree offset = size_zero_node;
5235 tree bit_offset = bitsize_zero_node;
5236 tree placeholder_ptr = 0;
5239 /* First get the mode, signedness, and size. We do this from just the
5240 outermost expression. */
5241 if (TREE_CODE (exp) == COMPONENT_REF)
5243 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5244 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5245 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5247 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5249 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5251 size_tree = TREE_OPERAND (exp, 1);
5252 *punsignedp = TREE_UNSIGNED (exp);
5256 mode = TYPE_MODE (TREE_TYPE (exp));
5257 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5259 if (mode == BLKmode)
5260 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5262 *pbitsize = GET_MODE_BITSIZE (mode);
5267 if (! host_integerp (size_tree, 1))
5268 mode = BLKmode, *pbitsize = -1;
5270 *pbitsize = tree_low_cst (size_tree, 1);
5273 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5274 and find the ultimate containing object. */
5277 if (TREE_CODE (exp) == BIT_FIELD_REF)
5278 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5279 else if (TREE_CODE (exp) == COMPONENT_REF)
5281 tree field = TREE_OPERAND (exp, 1);
5282 tree this_offset = DECL_FIELD_OFFSET (field);
5284 /* If this field hasn't been filled in yet, don't go
5285 past it. This should only happen when folding expressions
5286 made during type construction. */
5287 if (this_offset == 0)
5289 else if (! TREE_CONSTANT (this_offset)
5290 && contains_placeholder_p (this_offset))
5291 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5293 offset = size_binop (PLUS_EXPR, offset, this_offset);
5294 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5295 DECL_FIELD_BIT_OFFSET (field));
5297 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5300 else if (TREE_CODE (exp) == ARRAY_REF
5301 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5303 tree index = TREE_OPERAND (exp, 1);
5304 tree array = TREE_OPERAND (exp, 0);
5305 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5306 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5307 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5309 /* We assume all arrays have sizes that are a multiple of a byte.
5310 First subtract the lower bound, if any, in the type of the
5311 index, then convert to sizetype and multiply by the size of the
5313 if (low_bound != 0 && ! integer_zerop (low_bound))
5314 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5317 /* If the index has a self-referential type, pass it to a
5318 WITH_RECORD_EXPR; if the component size is, pass our
5319 component to one. */
5320 if (! TREE_CONSTANT (index)
5321 && contains_placeholder_p (index))
5322 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5323 if (! TREE_CONSTANT (unit_size)
5324 && contains_placeholder_p (unit_size))
5325 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5327 offset = size_binop (PLUS_EXPR, offset,
5328 size_binop (MULT_EXPR,
5329 convert (sizetype, index),
5333 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5335 tree new = find_placeholder (exp, &placeholder_ptr);
5337 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5338 We might have been called from tree optimization where we
5339 haven't set up an object yet. */
5347 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5348 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5349 && ! ((TREE_CODE (exp) == NOP_EXPR
5350 || TREE_CODE (exp) == CONVERT_EXPR)
5351 && (TYPE_MODE (TREE_TYPE (exp))
5352 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5355 /* If any reference in the chain is volatile, the effect is volatile. */
5356 if (TREE_THIS_VOLATILE (exp))
5359 exp = TREE_OPERAND (exp, 0);
5362 /* If OFFSET is constant, see if we can return the whole thing as a
5363 constant bit position. Otherwise, split it up. */
5364 if (host_integerp (offset, 0)
5365 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5367 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5368 && host_integerp (tem, 0))
5369 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5371 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5377 /* Return 1 if T is an expression that get_inner_reference handles. */
5380 handled_component_p (t)
5383 switch (TREE_CODE (t))
5388 case ARRAY_RANGE_REF:
5389 case NON_LVALUE_EXPR:
5390 case VIEW_CONVERT_EXPR:
5395 return (TYPE_MODE (TREE_TYPE (t))
5396 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5403 /* Given an rtx VALUE that may contain additions and multiplications, return
5404 an equivalent value that just refers to a register, memory, or constant.
5405 This is done by generating instructions to perform the arithmetic and
5406 returning a pseudo-register containing the value.
5408 The returned value may be a REG, SUBREG, MEM or constant. */
5411 force_operand (value, target)
5415 /* Use a temporary to force order of execution of calls to
5419 /* Use subtarget as the target for operand 0 of a binary operation. */
5420 rtx subtarget = get_subtarget (target);
5422 /* Check for a PIC address load. */
5423 if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5424 && XEXP (value, 0) == pic_offset_table_rtx
5425 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5426 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5427 || GET_CODE (XEXP (value, 1)) == CONST))
5430 subtarget = gen_reg_rtx (GET_MODE (value));
5431 emit_move_insn (subtarget, value);
5435 if (GET_CODE (value) == PLUS)
5436 binoptab = add_optab;
5437 else if (GET_CODE (value) == MINUS)
5438 binoptab = sub_optab;
5439 else if (GET_CODE (value) == MULT)
5441 op2 = XEXP (value, 1);
5442 if (!CONSTANT_P (op2)
5443 && !(GET_CODE (op2) == REG && op2 != subtarget))
5445 tmp = force_operand (XEXP (value, 0), subtarget);
5446 return expand_mult (GET_MODE (value), tmp,
5447 force_operand (op2, NULL_RTX),
5453 op2 = XEXP (value, 1);
5454 if (!CONSTANT_P (op2)
5455 && !(GET_CODE (op2) == REG && op2 != subtarget))
5457 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5459 binoptab = add_optab;
5460 op2 = negate_rtx (GET_MODE (value), op2);
5463 /* Check for an addition with OP2 a constant integer and our first
5464 operand a PLUS of a virtual register and something else. In that
5465 case, we want to emit the sum of the virtual register and the
5466 constant first and then add the other value. This allows virtual
5467 register instantiation to simply modify the constant rather than
5468 creating another one around this addition. */
5469 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5470 && GET_CODE (XEXP (value, 0)) == PLUS
5471 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5472 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5473 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5475 rtx temp = expand_binop (GET_MODE (value), binoptab,
5476 XEXP (XEXP (value, 0), 0), op2,
5477 subtarget, 0, OPTAB_LIB_WIDEN);
5478 return expand_binop (GET_MODE (value), binoptab, temp,
5479 force_operand (XEXP (XEXP (value, 0), 1), 0),
5480 target, 0, OPTAB_LIB_WIDEN);
5483 tmp = force_operand (XEXP (value, 0), subtarget);
5484 return expand_binop (GET_MODE (value), binoptab, tmp,
5485 force_operand (op2, NULL_RTX),
5486 target, 0, OPTAB_LIB_WIDEN);
5487 /* We give UNSIGNEDP = 0 to expand_binop
5488 because the only operations we are expanding here are signed ones. */
5491 #ifdef INSN_SCHEDULING
5492 /* On machines that have insn scheduling, we want all memory reference to be
5493 explicit, so we need to deal with such paradoxical SUBREGs. */
5494 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5495 && (GET_MODE_SIZE (GET_MODE (value))
5496 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5498 = simplify_gen_subreg (GET_MODE (value),
5499 force_reg (GET_MODE (SUBREG_REG (value)),
5500 force_operand (SUBREG_REG (value),
5502 GET_MODE (SUBREG_REG (value)),
5503 SUBREG_BYTE (value));
5509 /* Subroutine of expand_expr: return nonzero iff there is no way that
5510 EXP can reference X, which is being modified. TOP_P is nonzero if this
5511 call is going to be used to determine whether we need a temporary
5512 for EXP, as opposed to a recursive call to this function.
5514 It is always safe for this routine to return zero since it merely
5515 searches for optimization opportunities. */
5518 safe_from_p (x, exp, top_p)
5525 static tree save_expr_list;
5528 /* If EXP has varying size, we MUST use a target since we currently
5529 have no way of allocating temporaries of variable size
5530 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5531 So we assume here that something at a higher level has prevented a
5532 clash. This is somewhat bogus, but the best we can do. Only
5533 do this when X is BLKmode and when we are at the top level. */
5534 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5535 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5536 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5537 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5538 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5540 && GET_MODE (x) == BLKmode)
5541 /* If X is in the outgoing argument area, it is always safe. */
5542 || (GET_CODE (x) == MEM
5543 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5544 || (GET_CODE (XEXP (x, 0)) == PLUS
5545 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5548 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5549 find the underlying pseudo. */
5550 if (GET_CODE (x) == SUBREG)
5553 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5557 /* A SAVE_EXPR might appear many times in the expression passed to the
5558 top-level safe_from_p call, and if it has a complex subexpression,
5559 examining it multiple times could result in a combinatorial explosion.
5560 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5561 with optimization took about 28 minutes to compile -- even though it was
5562 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5563 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5564 we have processed. Note that the only test of top_p was above. */
5573 rtn = safe_from_p (x, exp, 0);
5575 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5576 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5581 /* Now look at our tree code and possibly recurse. */
5582 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5585 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5592 if (TREE_CODE (exp) == TREE_LIST)
5593 return ((TREE_VALUE (exp) == 0
5594 || safe_from_p (x, TREE_VALUE (exp), 0))
5595 && (TREE_CHAIN (exp) == 0
5596 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5597 else if (TREE_CODE (exp) == ERROR_MARK)
5598 return 1; /* An already-visited SAVE_EXPR? */
5603 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5607 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5608 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5612 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5613 the expression. If it is set, we conflict iff we are that rtx or
5614 both are in memory. Otherwise, we check all operands of the
5615 expression recursively. */
5617 switch (TREE_CODE (exp))
5620 /* If the operand is static or we are static, we can't conflict.
5621 Likewise if we don't conflict with the operand at all. */
5622 if (staticp (TREE_OPERAND (exp, 0))
5623 || TREE_STATIC (exp)
5624 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5627 /* Otherwise, the only way this can conflict is if we are taking
5628 the address of a DECL a that address if part of X, which is
5630 exp = TREE_OPERAND (exp, 0);
5633 if (!DECL_RTL_SET_P (exp)
5634 || GET_CODE (DECL_RTL (exp)) != MEM)
5637 exp_rtl = XEXP (DECL_RTL (exp), 0);
5642 if (GET_CODE (x) == MEM
5643 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5644 get_alias_set (exp)))
5649 /* Assume that the call will clobber all hard registers and
5651 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5652 || GET_CODE (x) == MEM)
5657 /* If a sequence exists, we would have to scan every instruction
5658 in the sequence to see if it was safe. This is probably not
5660 if (RTL_EXPR_SEQUENCE (exp))
5663 exp_rtl = RTL_EXPR_RTL (exp);
5666 case WITH_CLEANUP_EXPR:
5667 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5670 case CLEANUP_POINT_EXPR:
5671 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5674 exp_rtl = SAVE_EXPR_RTL (exp);
5678 /* If we've already scanned this, don't do it again. Otherwise,
5679 show we've scanned it and record for clearing the flag if we're
5681 if (TREE_PRIVATE (exp))
5684 TREE_PRIVATE (exp) = 1;
5685 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5687 TREE_PRIVATE (exp) = 0;
5691 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5695 /* The only operand we look at is operand 1. The rest aren't
5696 part of the expression. */
5697 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5699 case METHOD_CALL_EXPR:
5700 /* This takes an rtx argument, but shouldn't appear here. */
5707 /* If we have an rtx, we do not need to scan our operands. */
5711 nops = first_rtl_op (TREE_CODE (exp));
5712 for (i = 0; i < nops; i++)
5713 if (TREE_OPERAND (exp, i) != 0
5714 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5717 /* If this is a language-specific tree code, it may require
5718 special handling. */
5719 if ((unsigned int) TREE_CODE (exp)
5720 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5721 && !(*lang_hooks.safe_from_p) (x, exp))
5725 /* If we have an rtl, find any enclosed object. Then see if we conflict
5729 if (GET_CODE (exp_rtl) == SUBREG)
5731 exp_rtl = SUBREG_REG (exp_rtl);
5732 if (GET_CODE (exp_rtl) == REG
5733 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5737 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5738 are memory and they conflict. */
5739 return ! (rtx_equal_p (x, exp_rtl)
5740 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5741 && true_dependence (exp_rtl, VOIDmode, x,
5742 rtx_addr_varies_p)));
5745 /* If we reach here, it is safe. */
5749 /* Subroutine of expand_expr: return rtx if EXP is a
5750 variable or parameter; else return 0. */
5757 switch (TREE_CODE (exp))
5761 return DECL_RTL (exp);
5767 #ifdef MAX_INTEGER_COMPUTATION_MODE
5770 check_max_integer_computation_mode (exp)
5773 enum tree_code code;
5774 enum machine_mode mode;
5776 /* Strip any NOPs that don't change the mode. */
5778 code = TREE_CODE (exp);
5780 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5781 if (code == NOP_EXPR
5782 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5785 /* First check the type of the overall operation. We need only look at
5786 unary, binary and relational operations. */
5787 if (TREE_CODE_CLASS (code) == '1'
5788 || TREE_CODE_CLASS (code) == '2'
5789 || TREE_CODE_CLASS (code) == '<')
5791 mode = TYPE_MODE (TREE_TYPE (exp));
5792 if (GET_MODE_CLASS (mode) == MODE_INT
5793 && mode > MAX_INTEGER_COMPUTATION_MODE)
5794 internal_error ("unsupported wide integer operation");
5797 /* Check operand of a unary op. */
5798 if (TREE_CODE_CLASS (code) == '1')
5800 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5801 if (GET_MODE_CLASS (mode) == MODE_INT
5802 && mode > MAX_INTEGER_COMPUTATION_MODE)
5803 internal_error ("unsupported wide integer operation");
5806 /* Check operands of a binary/comparison op. */
5807 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5809 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5810 if (GET_MODE_CLASS (mode) == MODE_INT
5811 && mode > MAX_INTEGER_COMPUTATION_MODE)
5812 internal_error ("unsupported wide integer operation");
5814 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5815 if (GET_MODE_CLASS (mode) == MODE_INT
5816 && mode > MAX_INTEGER_COMPUTATION_MODE)
5817 internal_error ("unsupported wide integer operation");
5822 /* Return the highest power of two that EXP is known to be a multiple of.
5823 This is used in updating alignment of MEMs in array references. */
5825 static HOST_WIDE_INT
5826 highest_pow2_factor (exp)
5829 HOST_WIDE_INT c0, c1;
5831 switch (TREE_CODE (exp))
5834 /* We can find the lowest bit that's a one. If the low
5835 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5836 We need to handle this case since we can find it in a COND_EXPR,
5837 a MIN_EXPR, or a MAX_EXPR. If the constant overlows, we have an
5838 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5840 if (TREE_CONSTANT_OVERFLOW (exp))
5841 return BIGGEST_ALIGNMENT;
5844 /* Note: tree_low_cst is intentionally not used here,
5845 we don't care about the upper bits. */
5846 c0 = TREE_INT_CST_LOW (exp);
5848 return c0 ? c0 : BIGGEST_ALIGNMENT;
5852 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5853 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5854 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5855 return MIN (c0, c1);
5858 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5859 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5862 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5864 if (integer_pow2p (TREE_OPERAND (exp, 1))
5865 && host_integerp (TREE_OPERAND (exp, 1), 1))
5867 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5868 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5869 return MAX (1, c0 / c1);
5873 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5874 case SAVE_EXPR: case WITH_RECORD_EXPR:
5875 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5878 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5881 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5882 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5883 return MIN (c0, c1);
5892 /* Return an object on the placeholder list that matches EXP, a
5893 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5894 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5895 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5896 is a location which initially points to a starting location in the
5897 placeholder list (zero means start of the list) and where a pointer into
5898 the placeholder list at which the object is found is placed. */
5901 find_placeholder (exp, plist)
5905 tree type = TREE_TYPE (exp);
5906 tree placeholder_expr;
5908 for (placeholder_expr
5909 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5910 placeholder_expr != 0;
5911 placeholder_expr = TREE_CHAIN (placeholder_expr))
5913 tree need_type = TYPE_MAIN_VARIANT (type);
5916 /* Find the outermost reference that is of the type we want. If none,
5917 see if any object has a type that is a pointer to the type we
5919 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5920 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5921 || TREE_CODE (elt) == COND_EXPR)
5922 ? TREE_OPERAND (elt, 1)
5923 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5924 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5925 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5926 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5927 ? TREE_OPERAND (elt, 0) : 0))
5928 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5931 *plist = placeholder_expr;
5935 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5937 = ((TREE_CODE (elt) == COMPOUND_EXPR
5938 || TREE_CODE (elt) == COND_EXPR)
5939 ? TREE_OPERAND (elt, 1)
5940 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5941 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5942 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5943 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5944 ? TREE_OPERAND (elt, 0) : 0))
5945 if (POINTER_TYPE_P (TREE_TYPE (elt))
5946 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5950 *plist = placeholder_expr;
5951 return build1 (INDIRECT_REF, need_type, elt);
5958 /* expand_expr: generate code for computing expression EXP.
5959 An rtx for the computed value is returned. The value is never null.
5960 In the case of a void EXP, const0_rtx is returned.
5962 The value may be stored in TARGET if TARGET is nonzero.
5963 TARGET is just a suggestion; callers must assume that
5964 the rtx returned may not be the same as TARGET.
5966 If TARGET is CONST0_RTX, it means that the value will be ignored.
5968 If TMODE is not VOIDmode, it suggests generating the
5969 result in mode TMODE. But this is done only when convenient.
5970 Otherwise, TMODE is ignored and the value generated in its natural mode.
5971 TMODE is just a suggestion; callers must assume that
5972 the rtx returned may not have mode TMODE.
5974 Note that TARGET may have neither TMODE nor MODE. In that case, it
5975 probably will not be used.
5977 If MODIFIER is EXPAND_SUM then when EXP is an addition
5978 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5979 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5980 products as above, or REG or MEM, or constant.
5981 Ordinarily in such cases we would output mul or add instructions
5982 and then return a pseudo reg containing the sum.
5984 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5985 it also marks a label as absolutely required (it can't be dead).
5986 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5987 This is used for outputting expressions used in initializers.
5989 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5990 with a constant address even if that address is not normally legitimate.
5991 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5994 expand_expr (exp, target, tmode, modifier)
5997 enum machine_mode tmode;
5998 enum expand_modifier modifier;
6001 tree type = TREE_TYPE (exp);
6002 int unsignedp = TREE_UNSIGNED (type);
6003 enum machine_mode mode;
6004 enum tree_code code = TREE_CODE (exp);
6006 rtx subtarget, original_target;
6010 /* Handle ERROR_MARK before anybody tries to access its type. */
6011 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6013 op0 = CONST0_RTX (tmode);
6019 mode = TYPE_MODE (type);
6020 /* Use subtarget as the target for operand 0 of a binary operation. */
6021 subtarget = get_subtarget (target);
6022 original_target = target;
6023 ignore = (target == const0_rtx
6024 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6025 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6026 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6027 && TREE_CODE (type) == VOID_TYPE));
6029 /* If we are going to ignore this result, we need only do something
6030 if there is a side-effect somewhere in the expression. If there
6031 is, short-circuit the most common cases here. Note that we must
6032 not call expand_expr with anything but const0_rtx in case this
6033 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6037 if (! TREE_SIDE_EFFECTS (exp))
6040 /* Ensure we reference a volatile object even if value is ignored, but
6041 don't do this if all we are doing is taking its address. */
6042 if (TREE_THIS_VOLATILE (exp)
6043 && TREE_CODE (exp) != FUNCTION_DECL
6044 && mode != VOIDmode && mode != BLKmode
6045 && modifier != EXPAND_CONST_ADDRESS)
6047 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6048 if (GET_CODE (temp) == MEM)
6049 temp = copy_to_reg (temp);
6053 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6054 || code == INDIRECT_REF || code == BUFFER_REF)
6055 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6058 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6059 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6061 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6062 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6065 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6066 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6067 /* If the second operand has no side effects, just evaluate
6069 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6071 else if (code == BIT_FIELD_REF)
6073 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6074 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6075 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6082 #ifdef MAX_INTEGER_COMPUTATION_MODE
6083 /* Only check stuff here if the mode we want is different from the mode
6084 of the expression; if it's the same, check_max_integer_computiation_mode
6085 will handle it. Do we really need to check this stuff at all? */
6088 && GET_MODE (target) != mode
6089 && TREE_CODE (exp) != INTEGER_CST
6090 && TREE_CODE (exp) != PARM_DECL
6091 && TREE_CODE (exp) != ARRAY_REF
6092 && TREE_CODE (exp) != ARRAY_RANGE_REF
6093 && TREE_CODE (exp) != COMPONENT_REF
6094 && TREE_CODE (exp) != BIT_FIELD_REF
6095 && TREE_CODE (exp) != INDIRECT_REF
6096 && TREE_CODE (exp) != CALL_EXPR
6097 && TREE_CODE (exp) != VAR_DECL
6098 && TREE_CODE (exp) != RTL_EXPR)
6100 enum machine_mode mode = GET_MODE (target);
6102 if (GET_MODE_CLASS (mode) == MODE_INT
6103 && mode > MAX_INTEGER_COMPUTATION_MODE)
6104 internal_error ("unsupported wide integer operation");
6108 && TREE_CODE (exp) != INTEGER_CST
6109 && TREE_CODE (exp) != PARM_DECL
6110 && TREE_CODE (exp) != ARRAY_REF
6111 && TREE_CODE (exp) != ARRAY_RANGE_REF
6112 && TREE_CODE (exp) != COMPONENT_REF
6113 && TREE_CODE (exp) != BIT_FIELD_REF
6114 && TREE_CODE (exp) != INDIRECT_REF
6115 && TREE_CODE (exp) != VAR_DECL
6116 && TREE_CODE (exp) != CALL_EXPR
6117 && TREE_CODE (exp) != RTL_EXPR
6118 && GET_MODE_CLASS (tmode) == MODE_INT
6119 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6120 internal_error ("unsupported wide integer operation");
6122 check_max_integer_computation_mode (exp);
6125 /* If will do cse, generate all results into pseudo registers
6126 since 1) that allows cse to find more things
6127 and 2) otherwise cse could produce an insn the machine
6128 cannot support. And exception is a CONSTRUCTOR into a multi-word
6129 MEM: that's much more likely to be most efficient into the MEM. */
6131 if (! cse_not_expected && mode != BLKmode && target
6132 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6133 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6140 tree function = decl_function_context (exp);
6141 /* Handle using a label in a containing function. */
6142 if (function != current_function_decl
6143 && function != inline_function_decl && function != 0)
6145 struct function *p = find_function_data (function);
6146 p->expr->x_forced_labels
6147 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6148 p->expr->x_forced_labels);
6152 if (modifier == EXPAND_INITIALIZER)
6153 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6158 temp = gen_rtx_MEM (FUNCTION_MODE,
6159 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6160 if (function != current_function_decl
6161 && function != inline_function_decl && function != 0)
6162 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6167 if (DECL_RTL (exp) == 0)
6169 error_with_decl (exp, "prior parameter's size depends on `%s'");
6170 return CONST0_RTX (mode);
6173 /* ... fall through ... */
6176 /* If a static var's type was incomplete when the decl was written,
6177 but the type is complete now, lay out the decl now. */
6178 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6179 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6181 rtx value = DECL_RTL_IF_SET (exp);
6183 layout_decl (exp, 0);
6185 /* If the RTL was already set, update its mode and memory
6189 PUT_MODE (value, DECL_MODE (exp));
6190 SET_DECL_RTL (exp, 0);
6191 set_mem_attributes (value, exp, 1);
6192 SET_DECL_RTL (exp, value);
6196 /* ... fall through ... */
6200 if (DECL_RTL (exp) == 0)
6203 /* Ensure variable marked as used even if it doesn't go through
6204 a parser. If it hasn't be used yet, write out an external
6206 if (! TREE_USED (exp))
6208 assemble_external (exp);
6209 TREE_USED (exp) = 1;
6212 /* Show we haven't gotten RTL for this yet. */
6215 /* Handle variables inherited from containing functions. */
6216 context = decl_function_context (exp);
6218 /* We treat inline_function_decl as an alias for the current function
6219 because that is the inline function whose vars, types, etc.
6220 are being merged into the current function.
6221 See expand_inline_function. */
6223 if (context != 0 && context != current_function_decl
6224 && context != inline_function_decl
6225 /* If var is static, we don't need a static chain to access it. */
6226 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6227 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6231 /* Mark as non-local and addressable. */
6232 DECL_NONLOCAL (exp) = 1;
6233 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6235 mark_addressable (exp);
6236 if (GET_CODE (DECL_RTL (exp)) != MEM)
6238 addr = XEXP (DECL_RTL (exp), 0);
6239 if (GET_CODE (addr) == MEM)
6241 = replace_equiv_address (addr,
6242 fix_lexical_addr (XEXP (addr, 0), exp));
6244 addr = fix_lexical_addr (addr, exp);
6246 temp = replace_equiv_address (DECL_RTL (exp), addr);
6249 /* This is the case of an array whose size is to be determined
6250 from its initializer, while the initializer is still being parsed.
6253 else if (GET_CODE (DECL_RTL (exp)) == MEM
6254 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6255 temp = validize_mem (DECL_RTL (exp));
6257 /* If DECL_RTL is memory, we are in the normal case and either
6258 the address is not valid or it is not a register and -fforce-addr
6259 is specified, get the address into a register. */
6261 else if (GET_CODE (DECL_RTL (exp)) == MEM
6262 && modifier != EXPAND_CONST_ADDRESS
6263 && modifier != EXPAND_SUM
6264 && modifier != EXPAND_INITIALIZER
6265 && (! memory_address_p (DECL_MODE (exp),
6266 XEXP (DECL_RTL (exp), 0))
6268 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6269 temp = replace_equiv_address (DECL_RTL (exp),
6270 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6272 /* If we got something, return it. But first, set the alignment
6273 if the address is a register. */
6276 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6277 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6282 /* If the mode of DECL_RTL does not match that of the decl, it
6283 must be a promoted value. We return a SUBREG of the wanted mode,
6284 but mark it so that we know that it was already extended. */
6286 if (GET_CODE (DECL_RTL (exp)) == REG
6287 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6289 /* Get the signedness used for this variable. Ensure we get the
6290 same mode we got when the variable was declared. */
6291 if (GET_MODE (DECL_RTL (exp))
6292 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6295 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6296 SUBREG_PROMOTED_VAR_P (temp) = 1;
6297 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6301 return DECL_RTL (exp);
6304 return immed_double_const (TREE_INT_CST_LOW (exp),
6305 TREE_INT_CST_HIGH (exp), mode);
6308 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6311 /* If optimized, generate immediate CONST_DOUBLE
6312 which will be turned into memory by reload if necessary.
6314 We used to force a register so that loop.c could see it. But
6315 this does not allow gen_* patterns to perform optimizations with
6316 the constants. It also produces two insns in cases like "x = 1.0;".
6317 On most machines, floating-point constants are not permitted in
6318 many insns, so we'd end up copying it to a register in any case.
6320 Now, we do the copying in expand_binop, if appropriate. */
6321 return immed_real_const (exp);
6325 if (! TREE_CST_RTL (exp))
6326 output_constant_def (exp, 1);
6328 /* TREE_CST_RTL probably contains a constant address.
6329 On RISC machines where a constant address isn't valid,
6330 make some insns to get that address into a register. */
6331 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6332 && modifier != EXPAND_CONST_ADDRESS
6333 && modifier != EXPAND_INITIALIZER
6334 && modifier != EXPAND_SUM
6335 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6337 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6338 return replace_equiv_address (TREE_CST_RTL (exp),
6339 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6340 return TREE_CST_RTL (exp);
6342 case EXPR_WITH_FILE_LOCATION:
6345 const char *saved_input_filename = input_filename;
6346 int saved_lineno = lineno;
6347 input_filename = EXPR_WFL_FILENAME (exp);
6348 lineno = EXPR_WFL_LINENO (exp);
6349 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6350 emit_line_note (input_filename, lineno);
6351 /* Possibly avoid switching back and forth here. */
6352 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6353 input_filename = saved_input_filename;
6354 lineno = saved_lineno;
6359 context = decl_function_context (exp);
6361 /* If this SAVE_EXPR was at global context, assume we are an
6362 initialization function and move it into our context. */
6364 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6366 /* We treat inline_function_decl as an alias for the current function
6367 because that is the inline function whose vars, types, etc.
6368 are being merged into the current function.
6369 See expand_inline_function. */
6370 if (context == current_function_decl || context == inline_function_decl)
6373 /* If this is non-local, handle it. */
6376 /* The following call just exists to abort if the context is
6377 not of a containing function. */
6378 find_function_data (context);
6380 temp = SAVE_EXPR_RTL (exp);
6381 if (temp && GET_CODE (temp) == REG)
6383 put_var_into_stack (exp);
6384 temp = SAVE_EXPR_RTL (exp);
6386 if (temp == 0 || GET_CODE (temp) != MEM)
6389 replace_equiv_address (temp,
6390 fix_lexical_addr (XEXP (temp, 0), exp));
6392 if (SAVE_EXPR_RTL (exp) == 0)
6394 if (mode == VOIDmode)
6397 temp = assign_temp (build_qualified_type (type,
6399 | TYPE_QUAL_CONST)),
6402 SAVE_EXPR_RTL (exp) = temp;
6403 if (!optimize && GET_CODE (temp) == REG)
6404 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6407 /* If the mode of TEMP does not match that of the expression, it
6408 must be a promoted value. We pass store_expr a SUBREG of the
6409 wanted mode but mark it so that we know that it was already
6410 extended. Note that `unsignedp' was modified above in
6413 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6415 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6416 SUBREG_PROMOTED_VAR_P (temp) = 1;
6417 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6420 if (temp == const0_rtx)
6421 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6423 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6425 TREE_USED (exp) = 1;
6428 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6429 must be a promoted value. We return a SUBREG of the wanted mode,
6430 but mark it so that we know that it was already extended. */
6432 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6433 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6435 /* Compute the signedness and make the proper SUBREG. */
6436 promote_mode (type, mode, &unsignedp, 0);
6437 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6438 SUBREG_PROMOTED_VAR_P (temp) = 1;
6439 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6443 return SAVE_EXPR_RTL (exp);
6448 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6449 TREE_OPERAND (exp, 0)
6450 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6454 case PLACEHOLDER_EXPR:
6456 tree old_list = placeholder_list;
6457 tree placeholder_expr = 0;
6459 exp = find_placeholder (exp, &placeholder_expr);
6463 placeholder_list = TREE_CHAIN (placeholder_expr);
6464 temp = expand_expr (exp, original_target, tmode, modifier);
6465 placeholder_list = old_list;
6469 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6472 case WITH_RECORD_EXPR:
6473 /* Put the object on the placeholder list, expand our first operand,
6474 and pop the list. */
6475 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6477 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6479 placeholder_list = TREE_CHAIN (placeholder_list);
6483 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6484 expand_goto (TREE_OPERAND (exp, 0));
6486 expand_computed_goto (TREE_OPERAND (exp, 0));
6490 expand_exit_loop_if_false (NULL,
6491 invert_truthvalue (TREE_OPERAND (exp, 0)));
6494 case LABELED_BLOCK_EXPR:
6495 if (LABELED_BLOCK_BODY (exp))
6496 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6497 /* Should perhaps use expand_label, but this is simpler and safer. */
6498 do_pending_stack_adjust ();
6499 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6502 case EXIT_BLOCK_EXPR:
6503 if (EXIT_BLOCK_RETURN (exp))
6504 sorry ("returned value in block_exit_expr");
6505 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6510 expand_start_loop (1);
6511 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6519 tree vars = TREE_OPERAND (exp, 0);
6520 int vars_need_expansion = 0;
6522 /* Need to open a binding contour here because
6523 if there are any cleanups they must be contained here. */
6524 expand_start_bindings (2);
6526 /* Mark the corresponding BLOCK for output in its proper place. */
6527 if (TREE_OPERAND (exp, 2) != 0
6528 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6529 insert_block (TREE_OPERAND (exp, 2));
6531 /* If VARS have not yet been expanded, expand them now. */
6534 if (!DECL_RTL_SET_P (vars))
6536 vars_need_expansion = 1;
6539 expand_decl_init (vars);
6540 vars = TREE_CHAIN (vars);
6543 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6545 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6551 if (RTL_EXPR_SEQUENCE (exp))
6553 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6555 emit_insns (RTL_EXPR_SEQUENCE (exp));
6556 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6558 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6559 free_temps_for_rtl_expr (exp);
6560 return RTL_EXPR_RTL (exp);
6563 /* If we don't need the result, just ensure we evaluate any
6569 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6570 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6575 /* All elts simple constants => refer to a constant in memory. But
6576 if this is a non-BLKmode mode, let it store a field at a time
6577 since that should make a CONST_INT or CONST_DOUBLE when we
6578 fold. Likewise, if we have a target we can use, it is best to
6579 store directly into the target unless the type is large enough
6580 that memcpy will be used. If we are making an initializer and
6581 all operands are constant, put it in memory as well. */
6582 else if ((TREE_STATIC (exp)
6583 && ((mode == BLKmode
6584 && ! (target != 0 && safe_from_p (target, exp, 1)))
6585 || TREE_ADDRESSABLE (exp)
6586 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6587 && (! MOVE_BY_PIECES_P
6588 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6590 && ! mostly_zeros_p (exp))))
6591 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6593 rtx constructor = output_constant_def (exp, 1);
6595 if (modifier != EXPAND_CONST_ADDRESS
6596 && modifier != EXPAND_INITIALIZER
6597 && modifier != EXPAND_SUM)
6598 constructor = validize_mem (constructor);
6604 /* Handle calls that pass values in multiple non-contiguous
6605 locations. The Irix 6 ABI has examples of this. */
6606 if (target == 0 || ! safe_from_p (target, exp, 1)
6607 || GET_CODE (target) == PARALLEL)
6609 = assign_temp (build_qualified_type (type,
6611 | (TREE_READONLY (exp)
6612 * TYPE_QUAL_CONST))),
6613 0, TREE_ADDRESSABLE (exp), 1);
6615 store_constructor (exp, target, 0,
6616 int_size_in_bytes (TREE_TYPE (exp)));
6622 tree exp1 = TREE_OPERAND (exp, 0);
6624 tree string = string_constant (exp1, &index);
6626 /* Try to optimize reads from const strings. */
6628 && TREE_CODE (string) == STRING_CST
6629 && TREE_CODE (index) == INTEGER_CST
6630 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6631 && GET_MODE_CLASS (mode) == MODE_INT
6632 && GET_MODE_SIZE (mode) == 1
6633 && modifier != EXPAND_WRITE)
6634 return gen_int_mode (TREE_STRING_POINTER (string)
6635 [TREE_INT_CST_LOW (index)], mode);
6637 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6638 op0 = memory_address (mode, op0);
6639 temp = gen_rtx_MEM (mode, op0);
6640 set_mem_attributes (temp, exp, 0);
6642 /* If we are writing to this object and its type is a record with
6643 readonly fields, we must mark it as readonly so it will
6644 conflict with readonly references to those fields. */
6645 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6646 RTX_UNCHANGING_P (temp) = 1;
6652 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6656 tree array = TREE_OPERAND (exp, 0);
6657 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6658 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6659 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6662 /* Optimize the special-case of a zero lower bound.
6664 We convert the low_bound to sizetype to avoid some problems
6665 with constant folding. (E.g. suppose the lower bound is 1,
6666 and its mode is QI. Without the conversion, (ARRAY
6667 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6668 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6670 if (! integer_zerop (low_bound))
6671 index = size_diffop (index, convert (sizetype, low_bound));
6673 /* Fold an expression like: "foo"[2].
6674 This is not done in fold so it won't happen inside &.
6675 Don't fold if this is for wide characters since it's too
6676 difficult to do correctly and this is a very rare case. */
6678 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6679 && TREE_CODE (array) == STRING_CST
6680 && TREE_CODE (index) == INTEGER_CST
6681 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6682 && GET_MODE_CLASS (mode) == MODE_INT
6683 && GET_MODE_SIZE (mode) == 1)
6684 return gen_int_mode (TREE_STRING_POINTER (array)
6685 [TREE_INT_CST_LOW (index)], mode);
6687 /* If this is a constant index into a constant array,
6688 just get the value from the array. Handle both the cases when
6689 we have an explicit constructor and when our operand is a variable
6690 that was declared const. */
6692 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6693 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6694 && TREE_CODE (index) == INTEGER_CST
6695 && 0 > compare_tree_int (index,
6696 list_length (CONSTRUCTOR_ELTS
6697 (TREE_OPERAND (exp, 0)))))
6701 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6702 i = TREE_INT_CST_LOW (index);
6703 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6707 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6711 else if (optimize >= 1
6712 && modifier != EXPAND_CONST_ADDRESS
6713 && modifier != EXPAND_INITIALIZER
6714 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6715 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6716 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6718 if (TREE_CODE (index) == INTEGER_CST)
6720 tree init = DECL_INITIAL (array);
6722 if (TREE_CODE (init) == CONSTRUCTOR)
6726 for (elem = CONSTRUCTOR_ELTS (init);
6728 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6729 elem = TREE_CHAIN (elem))
6732 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6733 return expand_expr (fold (TREE_VALUE (elem)), target,
6736 else if (TREE_CODE (init) == STRING_CST
6737 && 0 > compare_tree_int (index,
6738 TREE_STRING_LENGTH (init)))
6740 tree type = TREE_TYPE (TREE_TYPE (init));
6741 enum machine_mode mode = TYPE_MODE (type);
6743 if (GET_MODE_CLASS (mode) == MODE_INT
6744 && GET_MODE_SIZE (mode) == 1)
6745 return gen_int_mode (TREE_STRING_POINTER (init)
6746 [TREE_INT_CST_LOW (index)], mode);
6755 case ARRAY_RANGE_REF:
6756 /* If the operand is a CONSTRUCTOR, we can just extract the
6757 appropriate field if it is present. Don't do this if we have
6758 already written the data since we want to refer to that copy
6759 and varasm.c assumes that's what we'll do. */
6760 if (code == COMPONENT_REF
6761 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6762 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6766 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6767 elt = TREE_CHAIN (elt))
6768 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6769 /* We can normally use the value of the field in the
6770 CONSTRUCTOR. However, if this is a bitfield in
6771 an integral mode that we can fit in a HOST_WIDE_INT,
6772 we must mask only the number of bits in the bitfield,
6773 since this is done implicitly by the constructor. If
6774 the bitfield does not meet either of those conditions,
6775 we can't do this optimization. */
6776 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6777 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6779 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6780 <= HOST_BITS_PER_WIDE_INT))))
6782 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6783 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6785 HOST_WIDE_INT bitsize
6786 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6787 enum machine_mode imode
6788 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6790 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6792 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6793 op0 = expand_and (imode, op0, op1, target);
6798 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6801 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6803 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6813 enum machine_mode mode1;
6814 HOST_WIDE_INT bitsize, bitpos;
6817 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6818 &mode1, &unsignedp, &volatilep);
6821 /* If we got back the original object, something is wrong. Perhaps
6822 we are evaluating an expression too early. In any event, don't
6823 infinitely recurse. */
6827 /* If TEM's type is a union of variable size, pass TARGET to the inner
6828 computation, since it will need a temporary and TARGET is known
6829 to have to do. This occurs in unchecked conversion in Ada. */
6833 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6834 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6836 ? target : NULL_RTX),
6838 (modifier == EXPAND_INITIALIZER
6839 || modifier == EXPAND_CONST_ADDRESS)
6840 ? modifier : EXPAND_NORMAL);
6842 /* If this is a constant, put it into a register if it is a
6843 legitimate constant and OFFSET is 0 and memory if it isn't. */
6844 if (CONSTANT_P (op0))
6846 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6847 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6849 op0 = force_reg (mode, op0);
6851 op0 = validize_mem (force_const_mem (mode, op0));
6856 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
6858 /* If this object is in a register, put it into memory.
6859 This case can't occur in C, but can in Ada if we have
6860 unchecked conversion of an expression from a scalar type to
6861 an array or record type. */
6862 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6863 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6865 /* If the operand is a SAVE_EXPR, we can deal with this by
6866 forcing the SAVE_EXPR into memory. */
6867 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6869 put_var_into_stack (TREE_OPERAND (exp, 0));
6870 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6875 = build_qualified_type (TREE_TYPE (tem),
6876 (TYPE_QUALS (TREE_TYPE (tem))
6877 | TYPE_QUAL_CONST));
6878 rtx memloc = assign_temp (nt, 1, 1, 1);
6880 emit_move_insn (memloc, op0);
6885 if (GET_CODE (op0) != MEM)
6888 if (GET_MODE (offset_rtx) != ptr_mode)
6889 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6891 #ifdef POINTERS_EXTEND_UNSIGNED
6892 if (GET_MODE (offset_rtx) != Pmode)
6893 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6896 /* A constant address in OP0 can have VOIDmode, we must not try
6897 to call force_reg for that case. Avoid that case. */
6898 if (GET_CODE (op0) == MEM
6899 && GET_MODE (op0) == BLKmode
6900 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6902 && (bitpos % bitsize) == 0
6903 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6904 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6906 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6910 op0 = offset_address (op0, offset_rtx,
6911 highest_pow2_factor (offset));
6914 /* Don't forget about volatility even if this is a bitfield. */
6915 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6917 if (op0 == orig_op0)
6918 op0 = copy_rtx (op0);
6920 MEM_VOLATILE_P (op0) = 1;
6923 /* In cases where an aligned union has an unaligned object
6924 as a field, we might be extracting a BLKmode value from
6925 an integer-mode (e.g., SImode) object. Handle this case
6926 by doing the extract into an object as wide as the field
6927 (which we know to be the width of a basic mode), then
6928 storing into memory, and changing the mode to BLKmode. */
6929 if (mode1 == VOIDmode
6930 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6931 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6932 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6933 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6934 && modifier != EXPAND_CONST_ADDRESS
6935 && modifier != EXPAND_INITIALIZER)
6936 /* If the field isn't aligned enough to fetch as a memref,
6937 fetch it as a bit field. */
6938 || (mode1 != BLKmode
6939 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6940 && ((TYPE_ALIGN (TREE_TYPE (tem))
6941 < GET_MODE_ALIGNMENT (mode))
6942 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6943 /* If the type and the field are a constant size and the
6944 size of the type isn't the same size as the bitfield,
6945 we must use bitfield operations. */
6947 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6949 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6952 enum machine_mode ext_mode = mode;
6954 if (ext_mode == BLKmode
6955 && ! (target != 0 && GET_CODE (op0) == MEM
6956 && GET_CODE (target) == MEM
6957 && bitpos % BITS_PER_UNIT == 0))
6958 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6960 if (ext_mode == BLKmode)
6962 /* In this case, BITPOS must start at a byte boundary and
6963 TARGET, if specified, must be a MEM. */
6964 if (GET_CODE (op0) != MEM
6965 || (target != 0 && GET_CODE (target) != MEM)
6966 || bitpos % BITS_PER_UNIT != 0)
6969 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6971 target = assign_temp (type, 0, 1, 1);
6973 emit_block_move (target, op0,
6974 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6980 op0 = validize_mem (op0);
6982 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6983 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6985 op0 = extract_bit_field (op0, bitsize, bitpos,
6986 unsignedp, target, ext_mode, ext_mode,
6987 int_size_in_bytes (TREE_TYPE (tem)));
6989 /* If the result is a record type and BITSIZE is narrower than
6990 the mode of OP0, an integral mode, and this is a big endian
6991 machine, we must put the field into the high-order bits. */
6992 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6993 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6994 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6995 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6996 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7000 if (mode == BLKmode)
7002 rtx new = assign_temp (build_qualified_type
7003 (type_for_mode (ext_mode, 0),
7004 TYPE_QUAL_CONST), 0, 1, 1);
7006 emit_move_insn (new, op0);
7007 op0 = copy_rtx (new);
7008 PUT_MODE (op0, BLKmode);
7009 set_mem_attributes (op0, exp, 1);
7015 /* If the result is BLKmode, use that to access the object
7017 if (mode == BLKmode)
7020 /* Get a reference to just this component. */
7021 if (modifier == EXPAND_CONST_ADDRESS
7022 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7023 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7025 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7027 if (op0 == orig_op0)
7028 op0 = copy_rtx (op0);
7030 set_mem_attributes (op0, exp, 0);
7031 if (GET_CODE (XEXP (op0, 0)) == REG)
7032 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7034 MEM_VOLATILE_P (op0) |= volatilep;
7035 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7036 || modifier == EXPAND_CONST_ADDRESS
7037 || modifier == EXPAND_INITIALIZER)
7039 else if (target == 0)
7040 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7042 convert_move (target, op0, unsignedp);
7048 rtx insn, before = get_last_insn (), vtbl_ref;
7050 /* Evaluate the interior expression. */
7051 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7054 /* Get or create an instruction off which to hang a note. */
7055 if (REG_P (subtarget))
7058 insn = get_last_insn ();
7061 if (! INSN_P (insn))
7062 insn = prev_nonnote_insn (insn);
7066 target = gen_reg_rtx (GET_MODE (subtarget));
7067 insn = emit_move_insn (target, subtarget);
7070 /* Collect the data for the note. */
7071 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7072 vtbl_ref = plus_constant (vtbl_ref,
7073 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7074 /* Discard the initial CONST that was added. */
7075 vtbl_ref = XEXP (vtbl_ref, 0);
7078 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7083 /* Intended for a reference to a buffer of a file-object in Pascal.
7084 But it's not certain that a special tree code will really be
7085 necessary for these. INDIRECT_REF might work for them. */
7091 /* Pascal set IN expression.
7094 rlo = set_low - (set_low%bits_per_word);
7095 the_word = set [ (index - rlo)/bits_per_word ];
7096 bit_index = index % bits_per_word;
7097 bitmask = 1 << bit_index;
7098 return !!(the_word & bitmask); */
7100 tree set = TREE_OPERAND (exp, 0);
7101 tree index = TREE_OPERAND (exp, 1);
7102 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7103 tree set_type = TREE_TYPE (set);
7104 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7105 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7106 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7107 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7108 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7109 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7110 rtx setaddr = XEXP (setval, 0);
7111 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7113 rtx diff, quo, rem, addr, bit, result;
7115 /* If domain is empty, answer is no. Likewise if index is constant
7116 and out of bounds. */
7117 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7118 && TREE_CODE (set_low_bound) == INTEGER_CST
7119 && tree_int_cst_lt (set_high_bound, set_low_bound))
7120 || (TREE_CODE (index) == INTEGER_CST
7121 && TREE_CODE (set_low_bound) == INTEGER_CST
7122 && tree_int_cst_lt (index, set_low_bound))
7123 || (TREE_CODE (set_high_bound) == INTEGER_CST
7124 && TREE_CODE (index) == INTEGER_CST
7125 && tree_int_cst_lt (set_high_bound, index))))
7129 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7131 /* If we get here, we have to generate the code for both cases
7132 (in range and out of range). */
7134 op0 = gen_label_rtx ();
7135 op1 = gen_label_rtx ();
7137 if (! (GET_CODE (index_val) == CONST_INT
7138 && GET_CODE (lo_r) == CONST_INT))
7139 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7140 GET_MODE (index_val), iunsignedp, op1);
7142 if (! (GET_CODE (index_val) == CONST_INT
7143 && GET_CODE (hi_r) == CONST_INT))
7144 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7145 GET_MODE (index_val), iunsignedp, op1);
7147 /* Calculate the element number of bit zero in the first word
7149 if (GET_CODE (lo_r) == CONST_INT)
7150 rlow = GEN_INT (INTVAL (lo_r)
7151 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7153 rlow = expand_binop (index_mode, and_optab, lo_r,
7154 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7155 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7157 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7158 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7160 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7161 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7162 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7163 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7165 addr = memory_address (byte_mode,
7166 expand_binop (index_mode, add_optab, diff,
7167 setaddr, NULL_RTX, iunsignedp,
7170 /* Extract the bit we want to examine. */
7171 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7172 gen_rtx_MEM (byte_mode, addr),
7173 make_tree (TREE_TYPE (index), rem),
7175 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7176 GET_MODE (target) == byte_mode ? target : 0,
7177 1, OPTAB_LIB_WIDEN);
7179 if (result != target)
7180 convert_move (target, result, 1);
7182 /* Output the code to handle the out-of-range case. */
7185 emit_move_insn (target, const0_rtx);
7190 case WITH_CLEANUP_EXPR:
7191 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7193 WITH_CLEANUP_EXPR_RTL (exp)
7194 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7195 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7197 /* That's it for this cleanup. */
7198 TREE_OPERAND (exp, 1) = 0;
7200 return WITH_CLEANUP_EXPR_RTL (exp);
7202 case CLEANUP_POINT_EXPR:
7204 /* Start a new binding layer that will keep track of all cleanup
7205 actions to be performed. */
7206 expand_start_bindings (2);
7208 target_temp_slot_level = temp_slot_level;
7210 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7211 /* If we're going to use this value, load it up now. */
7213 op0 = force_not_mem (op0);
7214 preserve_temp_slots (op0);
7215 expand_end_bindings (NULL_TREE, 0, 0);
7220 /* Check for a built-in function. */
7221 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7222 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7224 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7226 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7227 == BUILT_IN_FRONTEND)
7228 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7230 return expand_builtin (exp, target, subtarget, tmode, ignore);
7233 return expand_call (exp, target, ignore);
7235 case NON_LVALUE_EXPR:
7238 case REFERENCE_EXPR:
7239 if (TREE_OPERAND (exp, 0) == error_mark_node)
7242 if (TREE_CODE (type) == UNION_TYPE)
7244 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7246 /* If both input and output are BLKmode, this conversion isn't doing
7247 anything except possibly changing memory attribute. */
7248 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7250 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7253 result = copy_rtx (result);
7254 set_mem_attributes (result, exp, 0);
7259 target = assign_temp (type, 0, 1, 1);
7261 if (GET_CODE (target) == MEM)
7262 /* Store data into beginning of memory target. */
7263 store_expr (TREE_OPERAND (exp, 0),
7264 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7266 else if (GET_CODE (target) == REG)
7267 /* Store this field into a union of the proper type. */
7268 store_field (target,
7269 MIN ((int_size_in_bytes (TREE_TYPE
7270 (TREE_OPERAND (exp, 0)))
7272 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7273 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7274 VOIDmode, 0, type, 0);
7278 /* Return the entire union. */
7282 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7284 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7287 /* If the signedness of the conversion differs and OP0 is
7288 a promoted SUBREG, clear that indication since we now
7289 have to do the proper extension. */
7290 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7291 && GET_CODE (op0) == SUBREG)
7292 SUBREG_PROMOTED_VAR_P (op0) = 0;
7297 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7298 if (GET_MODE (op0) == mode)
7301 /* If OP0 is a constant, just convert it into the proper mode. */
7302 if (CONSTANT_P (op0))
7304 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7305 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7307 if (modifier == EXPAND_INITIALIZER)
7308 return simplify_gen_subreg (mode, op0, inner_mode,
7309 subreg_lowpart_offset (mode,
7312 return convert_modes (mode, inner_mode, op0,
7313 TREE_UNSIGNED (inner_type));
7316 if (modifier == EXPAND_INITIALIZER)
7317 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7321 convert_to_mode (mode, op0,
7322 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7324 convert_move (target, op0,
7325 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7328 case VIEW_CONVERT_EXPR:
7329 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7331 /* If the input and output modes are both the same, we are done.
7332 Otherwise, if neither mode is BLKmode and both are within a word, we
7333 can use gen_lowpart. If neither is true, make sure the operand is
7334 in memory and convert the MEM to the new mode. */
7335 if (TYPE_MODE (type) == GET_MODE (op0))
7337 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7338 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7339 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7340 op0 = gen_lowpart (TYPE_MODE (type), op0);
7341 else if (GET_CODE (op0) != MEM)
7343 /* If the operand is not a MEM, force it into memory. Since we
7344 are going to be be changing the mode of the MEM, don't call
7345 force_const_mem for constants because we don't allow pool
7346 constants to change mode. */
7347 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7349 if (TREE_ADDRESSABLE (exp))
7352 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7354 = assign_stack_temp_for_type
7355 (TYPE_MODE (inner_type),
7356 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7358 emit_move_insn (target, op0);
7362 /* At this point, OP0 is in the correct mode. If the output type is such
7363 that the operand is known to be aligned, indicate that it is.
7364 Otherwise, we need only be concerned about alignment for non-BLKmode
7366 if (GET_CODE (op0) == MEM)
7368 op0 = copy_rtx (op0);
7370 if (TYPE_ALIGN_OK (type))
7371 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7372 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7373 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7375 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7376 HOST_WIDE_INT temp_size
7377 = MAX (int_size_in_bytes (inner_type),
7378 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7379 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7380 temp_size, 0, type);
7381 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7383 if (TREE_ADDRESSABLE (exp))
7386 if (GET_MODE (op0) == BLKmode)
7387 emit_block_move (new_with_op0_mode, op0,
7388 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7390 emit_move_insn (new_with_op0_mode, op0);
7395 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7401 /* We come here from MINUS_EXPR when the second operand is a
7404 this_optab = ! unsignedp && flag_trapv
7405 && (GET_MODE_CLASS (mode) == MODE_INT)
7406 ? addv_optab : add_optab;
7408 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7409 something else, make sure we add the register to the constant and
7410 then to the other thing. This case can occur during strength
7411 reduction and doing it this way will produce better code if the
7412 frame pointer or argument pointer is eliminated.
7414 fold-const.c will ensure that the constant is always in the inner
7415 PLUS_EXPR, so the only case we need to do anything about is if
7416 sp, ap, or fp is our second argument, in which case we must swap
7417 the innermost first argument and our second argument. */
7419 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7420 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7421 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7422 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7423 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7424 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7426 tree t = TREE_OPERAND (exp, 1);
7428 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7429 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7432 /* If the result is to be ptr_mode and we are adding an integer to
7433 something, we might be forming a constant. So try to use
7434 plus_constant. If it produces a sum and we can't accept it,
7435 use force_operand. This allows P = &ARR[const] to generate
7436 efficient code on machines where a SYMBOL_REF is not a valid
7439 If this is an EXPAND_SUM call, always return the sum. */
7440 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7441 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7443 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7444 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7445 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7449 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7451 /* Use immed_double_const to ensure that the constant is
7452 truncated according to the mode of OP1, then sign extended
7453 to a HOST_WIDE_INT. Using the constant directly can result
7454 in non-canonical RTL in a 64x32 cross compile. */
7456 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7458 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7459 op1 = plus_constant (op1, INTVAL (constant_part));
7460 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7461 op1 = force_operand (op1, target);
7465 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7466 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7467 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7471 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7473 if (! CONSTANT_P (op0))
7475 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7476 VOIDmode, modifier);
7477 /* Don't go to both_summands if modifier
7478 says it's not right to return a PLUS. */
7479 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7483 /* Use immed_double_const to ensure that the constant is
7484 truncated according to the mode of OP1, then sign extended
7485 to a HOST_WIDE_INT. Using the constant directly can result
7486 in non-canonical RTL in a 64x32 cross compile. */
7488 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7490 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7491 op0 = plus_constant (op0, INTVAL (constant_part));
7492 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7493 op0 = force_operand (op0, target);
7498 /* No sense saving up arithmetic to be done
7499 if it's all in the wrong mode to form part of an address.
7500 And force_operand won't know whether to sign-extend or
7502 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7503 || mode != ptr_mode)
7506 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7509 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7510 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7513 /* Make sure any term that's a sum with a constant comes last. */
7514 if (GET_CODE (op0) == PLUS
7515 && CONSTANT_P (XEXP (op0, 1)))
7521 /* If adding to a sum including a constant,
7522 associate it to put the constant outside. */
7523 if (GET_CODE (op1) == PLUS
7524 && CONSTANT_P (XEXP (op1, 1)))
7526 rtx constant_term = const0_rtx;
7528 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7531 /* Ensure that MULT comes first if there is one. */
7532 else if (GET_CODE (op0) == MULT)
7533 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7535 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7537 /* Let's also eliminate constants from op0 if possible. */
7538 op0 = eliminate_constant_term (op0, &constant_term);
7540 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7541 their sum should be a constant. Form it into OP1, since the
7542 result we want will then be OP0 + OP1. */
7544 temp = simplify_binary_operation (PLUS, mode, constant_term,
7549 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7552 /* Put a constant term last and put a multiplication first. */
7553 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7554 temp = op1, op1 = op0, op0 = temp;
7556 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7557 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7560 /* For initializers, we are allowed to return a MINUS of two
7561 symbolic constants. Here we handle all cases when both operands
7563 /* Handle difference of two symbolic constants,
7564 for the sake of an initializer. */
7565 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7566 && really_constant_p (TREE_OPERAND (exp, 0))
7567 && really_constant_p (TREE_OPERAND (exp, 1)))
7569 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7571 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7574 /* If the last operand is a CONST_INT, use plus_constant of
7575 the negated constant. Else make the MINUS. */
7576 if (GET_CODE (op1) == CONST_INT)
7577 return plus_constant (op0, - INTVAL (op1));
7579 return gen_rtx_MINUS (mode, op0, op1);
7581 /* Convert A - const to A + (-const). */
7582 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7584 tree negated = fold (build1 (NEGATE_EXPR, type,
7585 TREE_OPERAND (exp, 1)));
7587 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7588 /* If we can't negate the constant in TYPE, leave it alone and
7589 expand_binop will negate it for us. We used to try to do it
7590 here in the signed version of TYPE, but that doesn't work
7591 on POINTER_TYPEs. */;
7594 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7598 this_optab = ! unsignedp && flag_trapv
7599 && (GET_MODE_CLASS(mode) == MODE_INT)
7600 ? subv_optab : sub_optab;
7604 /* If first operand is constant, swap them.
7605 Thus the following special case checks need only
7606 check the second operand. */
7607 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7609 tree t1 = TREE_OPERAND (exp, 0);
7610 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7611 TREE_OPERAND (exp, 1) = t1;
7614 /* Attempt to return something suitable for generating an
7615 indexed address, for machines that support that. */
7617 if (modifier == EXPAND_SUM && mode == ptr_mode
7618 && host_integerp (TREE_OPERAND (exp, 1), 0))
7620 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7623 /* If we knew for certain that this is arithmetic for an array
7624 reference, and we knew the bounds of the array, then we could
7625 apply the distributive law across (PLUS X C) for constant C.
7626 Without such knowledge, we risk overflowing the computation
7627 when both X and C are large, but X+C isn't. */
7628 /* ??? Could perhaps special-case EXP being unsigned and C being
7629 positive. In that case we are certain that X+C is no smaller
7630 than X and so the transformed expression will overflow iff the
7631 original would have. */
7633 if (GET_CODE (op0) != REG)
7634 op0 = force_operand (op0, NULL_RTX);
7635 if (GET_CODE (op0) != REG)
7636 op0 = copy_to_mode_reg (mode, op0);
7639 gen_rtx_MULT (mode, op0,
7640 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7643 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7646 /* Check for multiplying things that have been extended
7647 from a narrower type. If this machine supports multiplying
7648 in that narrower type with a result in the desired type,
7649 do it that way, and avoid the explicit type-conversion. */
7650 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7651 && TREE_CODE (type) == INTEGER_TYPE
7652 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7653 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7654 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7655 && int_fits_type_p (TREE_OPERAND (exp, 1),
7656 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7657 /* Don't use a widening multiply if a shift will do. */
7658 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7659 > HOST_BITS_PER_WIDE_INT)
7660 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7662 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7663 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7665 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7666 /* If both operands are extended, they must either both
7667 be zero-extended or both be sign-extended. */
7668 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7670 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7672 enum machine_mode innermode
7673 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7674 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7675 ? smul_widen_optab : umul_widen_optab);
7676 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7677 ? umul_widen_optab : smul_widen_optab);
7678 if (mode == GET_MODE_WIDER_MODE (innermode))
7680 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7682 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7683 NULL_RTX, VOIDmode, 0);
7684 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7685 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7688 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7689 NULL_RTX, VOIDmode, 0);
7692 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7693 && innermode == word_mode)
7696 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7697 NULL_RTX, VOIDmode, 0);
7698 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7699 op1 = convert_modes (innermode, mode,
7700 expand_expr (TREE_OPERAND (exp, 1),
7701 NULL_RTX, VOIDmode, 0),
7704 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7705 NULL_RTX, VOIDmode, 0);
7706 temp = expand_binop (mode, other_optab, op0, op1, target,
7707 unsignedp, OPTAB_LIB_WIDEN);
7708 htem = expand_mult_highpart_adjust (innermode,
7709 gen_highpart (innermode, temp),
7711 gen_highpart (innermode, temp),
7713 emit_move_insn (gen_highpart (innermode, temp), htem);
7718 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7719 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7720 return expand_mult (mode, op0, op1, target, unsignedp);
7722 case TRUNC_DIV_EXPR:
7723 case FLOOR_DIV_EXPR:
7725 case ROUND_DIV_EXPR:
7726 case EXACT_DIV_EXPR:
7727 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7729 /* Possible optimization: compute the dividend with EXPAND_SUM
7730 then if the divisor is constant can optimize the case
7731 where some terms of the dividend have coeffs divisible by it. */
7732 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7733 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7734 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7737 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7738 expensive divide. If not, combine will rebuild the original
7740 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7741 && !real_onep (TREE_OPERAND (exp, 0)))
7742 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7743 build (RDIV_EXPR, type,
7744 build_real (type, dconst1),
7745 TREE_OPERAND (exp, 1))),
7746 target, tmode, unsignedp);
7747 this_optab = sdiv_optab;
7750 case TRUNC_MOD_EXPR:
7751 case FLOOR_MOD_EXPR:
7753 case ROUND_MOD_EXPR:
7754 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7756 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7757 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7758 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7760 case FIX_ROUND_EXPR:
7761 case FIX_FLOOR_EXPR:
7763 abort (); /* Not used for C. */
7765 case FIX_TRUNC_EXPR:
7766 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7768 target = gen_reg_rtx (mode);
7769 expand_fix (target, op0, unsignedp);
7773 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7775 target = gen_reg_rtx (mode);
7776 /* expand_float can't figure out what to do if FROM has VOIDmode.
7777 So give it the correct mode. With -O, cse will optimize this. */
7778 if (GET_MODE (op0) == VOIDmode)
7779 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7781 expand_float (target, op0,
7782 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7786 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7787 temp = expand_unop (mode,
7788 ! unsignedp && flag_trapv
7789 && (GET_MODE_CLASS(mode) == MODE_INT)
7790 ? negv_optab : neg_optab, op0, target, 0);
7796 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7798 /* Handle complex values specially. */
7799 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7800 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7801 return expand_complex_abs (mode, op0, target, unsignedp);
7803 /* Unsigned abs is simply the operand. Testing here means we don't
7804 risk generating incorrect code below. */
7805 if (TREE_UNSIGNED (type))
7808 return expand_abs (mode, op0, target, unsignedp,
7809 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7813 target = original_target;
7814 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7815 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7816 || GET_MODE (target) != mode
7817 || (GET_CODE (target) == REG
7818 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7819 target = gen_reg_rtx (mode);
7820 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7821 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7823 /* First try to do it with a special MIN or MAX instruction.
7824 If that does not win, use a conditional jump to select the proper
7826 this_optab = (TREE_UNSIGNED (type)
7827 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7828 : (code == MIN_EXPR ? smin_optab : smax_optab));
7830 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7835 /* At this point, a MEM target is no longer useful; we will get better
7838 if (GET_CODE (target) == MEM)
7839 target = gen_reg_rtx (mode);
7842 emit_move_insn (target, op0);
7844 op0 = gen_label_rtx ();
7846 /* If this mode is an integer too wide to compare properly,
7847 compare word by word. Rely on cse to optimize constant cases. */
7848 if (GET_MODE_CLASS (mode) == MODE_INT
7849 && ! can_compare_p (GE, mode, ccp_jump))
7851 if (code == MAX_EXPR)
7852 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7853 target, op1, NULL_RTX, op0);
7855 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7856 op1, target, NULL_RTX, op0);
7860 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7861 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7862 unsignedp, mode, NULL_RTX, NULL_RTX,
7865 emit_move_insn (target, op1);
7870 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7871 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7877 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7878 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7883 /* ??? Can optimize bitwise operations with one arg constant.
7884 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7885 and (a bitwise1 b) bitwise2 b (etc)
7886 but that is probably not worth while. */
7888 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7889 boolean values when we want in all cases to compute both of them. In
7890 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7891 as actual zero-or-1 values and then bitwise anding. In cases where
7892 there cannot be any side effects, better code would be made by
7893 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7894 how to recognize those cases. */
7896 case TRUTH_AND_EXPR:
7898 this_optab = and_optab;
7903 this_optab = ior_optab;
7906 case TRUTH_XOR_EXPR:
7908 this_optab = xor_optab;
7915 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7917 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7918 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7921 /* Could determine the answer when only additive constants differ. Also,
7922 the addition of one can be handled by changing the condition. */
7929 case UNORDERED_EXPR:
7936 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7940 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7941 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7943 && GET_CODE (original_target) == REG
7944 && (GET_MODE (original_target)
7945 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7947 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7950 /* If temp is constant, we can just compute the result. */
7951 if (GET_CODE (temp) == CONST_INT)
7953 if (INTVAL (temp) != 0)
7954 emit_move_insn (target, const1_rtx);
7956 emit_move_insn (target, const0_rtx);
7961 if (temp != original_target)
7963 enum machine_mode mode1 = GET_MODE (temp);
7964 if (mode1 == VOIDmode)
7965 mode1 = tmode != VOIDmode ? tmode : mode;
7967 temp = copy_to_mode_reg (mode1, temp);
7970 op1 = gen_label_rtx ();
7971 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7972 GET_MODE (temp), unsignedp, op1);
7973 emit_move_insn (temp, const1_rtx);
7978 /* If no set-flag instruction, must generate a conditional
7979 store into a temporary variable. Drop through
7980 and handle this like && and ||. */
7982 case TRUTH_ANDIF_EXPR:
7983 case TRUTH_ORIF_EXPR:
7985 && (target == 0 || ! safe_from_p (target, exp, 1)
7986 /* Make sure we don't have a hard reg (such as function's return
7987 value) live across basic blocks, if not optimizing. */
7988 || (!optimize && GET_CODE (target) == REG
7989 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7990 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7993 emit_clr_insn (target);
7995 op1 = gen_label_rtx ();
7996 jumpifnot (exp, op1);
7999 emit_0_to_1_insn (target);
8002 return ignore ? const0_rtx : target;
8004 case TRUTH_NOT_EXPR:
8005 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8006 /* The parser is careful to generate TRUTH_NOT_EXPR
8007 only with operands that are always zero or one. */
8008 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8009 target, 1, OPTAB_LIB_WIDEN);
8015 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8017 return expand_expr (TREE_OPERAND (exp, 1),
8018 (ignore ? const0_rtx : target),
8022 /* If we would have a "singleton" (see below) were it not for a
8023 conversion in each arm, bring that conversion back out. */
8024 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8025 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8026 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8027 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8029 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8030 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8032 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8033 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8034 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8035 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8036 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8037 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8038 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8039 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8040 return expand_expr (build1 (NOP_EXPR, type,
8041 build (COND_EXPR, TREE_TYPE (iftrue),
8042 TREE_OPERAND (exp, 0),
8044 target, tmode, modifier);
8048 /* Note that COND_EXPRs whose type is a structure or union
8049 are required to be constructed to contain assignments of
8050 a temporary variable, so that we can evaluate them here
8051 for side effect only. If type is void, we must do likewise. */
8053 /* If an arm of the branch requires a cleanup,
8054 only that cleanup is performed. */
8057 tree binary_op = 0, unary_op = 0;
8059 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8060 convert it to our mode, if necessary. */
8061 if (integer_onep (TREE_OPERAND (exp, 1))
8062 && integer_zerop (TREE_OPERAND (exp, 2))
8063 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8067 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8072 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8073 if (GET_MODE (op0) == mode)
8077 target = gen_reg_rtx (mode);
8078 convert_move (target, op0, unsignedp);
8082 /* Check for X ? A + B : A. If we have this, we can copy A to the
8083 output and conditionally add B. Similarly for unary operations.
8084 Don't do this if X has side-effects because those side effects
8085 might affect A or B and the "?" operation is a sequence point in
8086 ANSI. (operand_equal_p tests for side effects.) */
8088 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8089 && operand_equal_p (TREE_OPERAND (exp, 2),
8090 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8091 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8092 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8093 && operand_equal_p (TREE_OPERAND (exp, 1),
8094 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8095 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8096 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8097 && operand_equal_p (TREE_OPERAND (exp, 2),
8098 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8099 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8100 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8101 && operand_equal_p (TREE_OPERAND (exp, 1),
8102 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8103 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8105 /* If we are not to produce a result, we have no target. Otherwise,
8106 if a target was specified use it; it will not be used as an
8107 intermediate target unless it is safe. If no target, use a
8112 else if (original_target
8113 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8114 || (singleton && GET_CODE (original_target) == REG
8115 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8116 && original_target == var_rtx (singleton)))
8117 && GET_MODE (original_target) == mode
8118 #ifdef HAVE_conditional_move
8119 && (! can_conditionally_move_p (mode)
8120 || GET_CODE (original_target) == REG
8121 || TREE_ADDRESSABLE (type))
8123 && (GET_CODE (original_target) != MEM
8124 || TREE_ADDRESSABLE (type)))
8125 temp = original_target;
8126 else if (TREE_ADDRESSABLE (type))
8129 temp = assign_temp (type, 0, 0, 1);
8131 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8132 do the test of X as a store-flag operation, do this as
8133 A + ((X != 0) << log C). Similarly for other simple binary
8134 operators. Only do for C == 1 if BRANCH_COST is low. */
8135 if (temp && singleton && binary_op
8136 && (TREE_CODE (binary_op) == PLUS_EXPR
8137 || TREE_CODE (binary_op) == MINUS_EXPR
8138 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8139 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8140 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8141 : integer_onep (TREE_OPERAND (binary_op, 1)))
8142 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8145 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8146 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8147 ? addv_optab : add_optab)
8148 : TREE_CODE (binary_op) == MINUS_EXPR
8149 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8150 ? subv_optab : sub_optab)
8151 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8154 /* If we had X ? A : A + 1, do this as A + (X == 0).
8156 We have to invert the truth value here and then put it
8157 back later if do_store_flag fails. We cannot simply copy
8158 TREE_OPERAND (exp, 0) to another variable and modify that
8159 because invert_truthvalue can modify the tree pointed to
8161 if (singleton == TREE_OPERAND (exp, 1))
8162 TREE_OPERAND (exp, 0)
8163 = invert_truthvalue (TREE_OPERAND (exp, 0));
8165 result = do_store_flag (TREE_OPERAND (exp, 0),
8166 (safe_from_p (temp, singleton, 1)
8168 mode, BRANCH_COST <= 1);
8170 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8171 result = expand_shift (LSHIFT_EXPR, mode, result,
8172 build_int_2 (tree_log2
8176 (safe_from_p (temp, singleton, 1)
8177 ? temp : NULL_RTX), 0);
8181 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8182 return expand_binop (mode, boptab, op1, result, temp,
8183 unsignedp, OPTAB_LIB_WIDEN);
8185 else if (singleton == TREE_OPERAND (exp, 1))
8186 TREE_OPERAND (exp, 0)
8187 = invert_truthvalue (TREE_OPERAND (exp, 0));
8190 do_pending_stack_adjust ();
8192 op0 = gen_label_rtx ();
8194 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8198 /* If the target conflicts with the other operand of the
8199 binary op, we can't use it. Also, we can't use the target
8200 if it is a hard register, because evaluating the condition
8201 might clobber it. */
8203 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8204 || (GET_CODE (temp) == REG
8205 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8206 temp = gen_reg_rtx (mode);
8207 store_expr (singleton, temp, 0);
8210 expand_expr (singleton,
8211 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8212 if (singleton == TREE_OPERAND (exp, 1))
8213 jumpif (TREE_OPERAND (exp, 0), op0);
8215 jumpifnot (TREE_OPERAND (exp, 0), op0);
8217 start_cleanup_deferral ();
8218 if (binary_op && temp == 0)
8219 /* Just touch the other operand. */
8220 expand_expr (TREE_OPERAND (binary_op, 1),
8221 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8223 store_expr (build (TREE_CODE (binary_op), type,
8224 make_tree (type, temp),
8225 TREE_OPERAND (binary_op, 1)),
8228 store_expr (build1 (TREE_CODE (unary_op), type,
8229 make_tree (type, temp)),
8233 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8234 comparison operator. If we have one of these cases, set the
8235 output to A, branch on A (cse will merge these two references),
8236 then set the output to FOO. */
8238 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8239 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8240 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8241 TREE_OPERAND (exp, 1), 0)
8242 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8243 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8244 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8246 if (GET_CODE (temp) == REG
8247 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8248 temp = gen_reg_rtx (mode);
8249 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8250 jumpif (TREE_OPERAND (exp, 0), op0);
8252 start_cleanup_deferral ();
8253 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8257 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8258 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8259 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8260 TREE_OPERAND (exp, 2), 0)
8261 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8262 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8263 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8265 if (GET_CODE (temp) == REG
8266 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8267 temp = gen_reg_rtx (mode);
8268 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8269 jumpifnot (TREE_OPERAND (exp, 0), op0);
8271 start_cleanup_deferral ();
8272 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8277 op1 = gen_label_rtx ();
8278 jumpifnot (TREE_OPERAND (exp, 0), op0);
8280 start_cleanup_deferral ();
8282 /* One branch of the cond can be void, if it never returns. For
8283 example A ? throw : E */
8285 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8286 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8288 expand_expr (TREE_OPERAND (exp, 1),
8289 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8290 end_cleanup_deferral ();
8292 emit_jump_insn (gen_jump (op1));
8295 start_cleanup_deferral ();
8297 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8298 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8300 expand_expr (TREE_OPERAND (exp, 2),
8301 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8304 end_cleanup_deferral ();
8315 /* Something needs to be initialized, but we didn't know
8316 where that thing was when building the tree. For example,
8317 it could be the return value of a function, or a parameter
8318 to a function which lays down in the stack, or a temporary
8319 variable which must be passed by reference.
8321 We guarantee that the expression will either be constructed
8322 or copied into our original target. */
8324 tree slot = TREE_OPERAND (exp, 0);
8325 tree cleanups = NULL_TREE;
8328 if (TREE_CODE (slot) != VAR_DECL)
8332 target = original_target;
8334 /* Set this here so that if we get a target that refers to a
8335 register variable that's already been used, put_reg_into_stack
8336 knows that it should fix up those uses. */
8337 TREE_USED (slot) = 1;
8341 if (DECL_RTL_SET_P (slot))
8343 target = DECL_RTL (slot);
8344 /* If we have already expanded the slot, so don't do
8346 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8351 target = assign_temp (type, 2, 0, 1);
8352 /* All temp slots at this level must not conflict. */
8353 preserve_temp_slots (target);
8354 SET_DECL_RTL (slot, target);
8355 if (TREE_ADDRESSABLE (slot))
8356 put_var_into_stack (slot);
8358 /* Since SLOT is not known to the called function
8359 to belong to its stack frame, we must build an explicit
8360 cleanup. This case occurs when we must build up a reference
8361 to pass the reference as an argument. In this case,
8362 it is very likely that such a reference need not be
8365 if (TREE_OPERAND (exp, 2) == 0)
8366 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8367 cleanups = TREE_OPERAND (exp, 2);
8372 /* This case does occur, when expanding a parameter which
8373 needs to be constructed on the stack. The target
8374 is the actual stack address that we want to initialize.
8375 The function we call will perform the cleanup in this case. */
8377 /* If we have already assigned it space, use that space,
8378 not target that we were passed in, as our target
8379 parameter is only a hint. */
8380 if (DECL_RTL_SET_P (slot))
8382 target = DECL_RTL (slot);
8383 /* If we have already expanded the slot, so don't do
8385 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8390 SET_DECL_RTL (slot, target);
8391 /* If we must have an addressable slot, then make sure that
8392 the RTL that we just stored in slot is OK. */
8393 if (TREE_ADDRESSABLE (slot))
8394 put_var_into_stack (slot);
8398 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8399 /* Mark it as expanded. */
8400 TREE_OPERAND (exp, 1) = NULL_TREE;
8402 store_expr (exp1, target, 0);
8404 expand_decl_cleanup (NULL_TREE, cleanups);
8411 tree lhs = TREE_OPERAND (exp, 0);
8412 tree rhs = TREE_OPERAND (exp, 1);
8414 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8420 /* If lhs is complex, expand calls in rhs before computing it.
8421 That's so we don't compute a pointer and save it over a
8422 call. If lhs is simple, compute it first so we can give it
8423 as a target if the rhs is just a call. This avoids an
8424 extra temp and copy and that prevents a partial-subsumption
8425 which makes bad code. Actually we could treat
8426 component_ref's of vars like vars. */
8428 tree lhs = TREE_OPERAND (exp, 0);
8429 tree rhs = TREE_OPERAND (exp, 1);
8433 /* Check for |= or &= of a bitfield of size one into another bitfield
8434 of size 1. In this case, (unless we need the result of the
8435 assignment) we can do this more efficiently with a
8436 test followed by an assignment, if necessary.
8438 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8439 things change so we do, this code should be enhanced to
8442 && TREE_CODE (lhs) == COMPONENT_REF
8443 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8444 || TREE_CODE (rhs) == BIT_AND_EXPR)
8445 && TREE_OPERAND (rhs, 0) == lhs
8446 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8447 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8448 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8450 rtx label = gen_label_rtx ();
8452 do_jump (TREE_OPERAND (rhs, 1),
8453 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8454 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8455 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8456 (TREE_CODE (rhs) == BIT_IOR_EXPR
8458 : integer_zero_node)),
8460 do_pending_stack_adjust ();
8465 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8471 if (!TREE_OPERAND (exp, 0))
8472 expand_null_return ();
8474 expand_return (TREE_OPERAND (exp, 0));
8477 case PREINCREMENT_EXPR:
8478 case PREDECREMENT_EXPR:
8479 return expand_increment (exp, 0, ignore);
8481 case POSTINCREMENT_EXPR:
8482 case POSTDECREMENT_EXPR:
8483 /* Faster to treat as pre-increment if result is not used. */
8484 return expand_increment (exp, ! ignore, ignore);
8487 /* Are we taking the address of a nested function? */
8488 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8489 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8490 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8491 && ! TREE_STATIC (exp))
8493 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8494 op0 = force_operand (op0, target);
8496 /* If we are taking the address of something erroneous, just
8498 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8500 /* If we are taking the address of a constant and are at the
8501 top level, we have to use output_constant_def since we can't
8502 call force_const_mem at top level. */
8504 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8505 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8507 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8510 /* We make sure to pass const0_rtx down if we came in with
8511 ignore set, to avoid doing the cleanups twice for something. */
8512 op0 = expand_expr (TREE_OPERAND (exp, 0),
8513 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8514 (modifier == EXPAND_INITIALIZER
8515 ? modifier : EXPAND_CONST_ADDRESS));
8517 /* If we are going to ignore the result, OP0 will have been set
8518 to const0_rtx, so just return it. Don't get confused and
8519 think we are taking the address of the constant. */
8523 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8524 clever and returns a REG when given a MEM. */
8525 op0 = protect_from_queue (op0, 1);
8527 /* We would like the object in memory. If it is a constant, we can
8528 have it be statically allocated into memory. For a non-constant,
8529 we need to allocate some memory and store the value into it. */
8531 if (CONSTANT_P (op0))
8532 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8534 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8535 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8536 || GET_CODE (op0) == PARALLEL)
8538 /* If the operand is a SAVE_EXPR, we can deal with this by
8539 forcing the SAVE_EXPR into memory. */
8540 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8542 put_var_into_stack (TREE_OPERAND (exp, 0));
8543 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8547 /* If this object is in a register, it can't be BLKmode. */
8548 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8549 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8551 if (GET_CODE (op0) == PARALLEL)
8552 /* Handle calls that pass values in multiple
8553 non-contiguous locations. The Irix 6 ABI has examples
8555 emit_group_store (memloc, op0,
8556 int_size_in_bytes (inner_type));
8558 emit_move_insn (memloc, op0);
8564 if (GET_CODE (op0) != MEM)
8567 mark_temp_addr_taken (op0);
8568 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8570 op0 = XEXP (op0, 0);
8571 #ifdef POINTERS_EXTEND_UNSIGNED
8572 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8573 && mode == ptr_mode)
8574 op0 = convert_memory_address (ptr_mode, op0);
8579 /* If OP0 is not aligned as least as much as the type requires, we
8580 need to make a temporary, copy OP0 to it, and take the address of
8581 the temporary. We want to use the alignment of the type, not of
8582 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8583 the test for BLKmode means that can't happen. The test for
8584 BLKmode is because we never make mis-aligned MEMs with
8587 We don't need to do this at all if the machine doesn't have
8588 strict alignment. */
8589 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8590 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8592 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8594 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8596 = assign_stack_temp_for_type
8597 (TYPE_MODE (inner_type),
8598 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8599 : int_size_in_bytes (inner_type),
8600 1, build_qualified_type (inner_type,
8601 (TYPE_QUALS (inner_type)
8602 | TYPE_QUAL_CONST)));
8604 if (TYPE_ALIGN_OK (inner_type))
8607 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8611 op0 = force_operand (XEXP (op0, 0), target);
8615 && GET_CODE (op0) != REG
8616 && modifier != EXPAND_CONST_ADDRESS
8617 && modifier != EXPAND_INITIALIZER
8618 && modifier != EXPAND_SUM)
8619 op0 = force_reg (Pmode, op0);
8621 if (GET_CODE (op0) == REG
8622 && ! REG_USERVAR_P (op0))
8623 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8625 #ifdef POINTERS_EXTEND_UNSIGNED
8626 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8627 && mode == ptr_mode)
8628 op0 = convert_memory_address (ptr_mode, op0);
8633 case ENTRY_VALUE_EXPR:
8636 /* COMPLEX type for Extended Pascal & Fortran */
8639 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8642 /* Get the rtx code of the operands. */
8643 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8644 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8647 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8651 /* Move the real (op0) and imaginary (op1) parts to their location. */
8652 emit_move_insn (gen_realpart (mode, target), op0);
8653 emit_move_insn (gen_imagpart (mode, target), op1);
8655 insns = get_insns ();
8658 /* Complex construction should appear as a single unit. */
8659 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8660 each with a separate pseudo as destination.
8661 It's not correct for flow to treat them as a unit. */
8662 if (GET_CODE (target) != CONCAT)
8663 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8671 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8672 return gen_realpart (mode, op0);
8675 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8676 return gen_imagpart (mode, op0);
8680 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8684 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8687 target = gen_reg_rtx (mode);
8691 /* Store the realpart and the negated imagpart to target. */
8692 emit_move_insn (gen_realpart (partmode, target),
8693 gen_realpart (partmode, op0));
8695 imag_t = gen_imagpart (partmode, target);
8696 temp = expand_unop (partmode,
8697 ! unsignedp && flag_trapv
8698 && (GET_MODE_CLASS(partmode) == MODE_INT)
8699 ? negv_optab : neg_optab,
8700 gen_imagpart (partmode, op0), imag_t, 0);
8702 emit_move_insn (imag_t, temp);
8704 insns = get_insns ();
8707 /* Conjugate should appear as a single unit
8708 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8709 each with a separate pseudo as destination.
8710 It's not correct for flow to treat them as a unit. */
8711 if (GET_CODE (target) != CONCAT)
8712 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8719 case TRY_CATCH_EXPR:
8721 tree handler = TREE_OPERAND (exp, 1);
8723 expand_eh_region_start ();
8725 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8727 expand_eh_region_end_cleanup (handler);
8732 case TRY_FINALLY_EXPR:
8734 tree try_block = TREE_OPERAND (exp, 0);
8735 tree finally_block = TREE_OPERAND (exp, 1);
8736 rtx finally_label = gen_label_rtx ();
8737 rtx done_label = gen_label_rtx ();
8738 rtx return_link = gen_reg_rtx (Pmode);
8739 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8740 (tree) finally_label, (tree) return_link);
8741 TREE_SIDE_EFFECTS (cleanup) = 1;
8743 /* Start a new binding layer that will keep track of all cleanup
8744 actions to be performed. */
8745 expand_start_bindings (2);
8747 target_temp_slot_level = temp_slot_level;
8749 expand_decl_cleanup (NULL_TREE, cleanup);
8750 op0 = expand_expr (try_block, target, tmode, modifier);
8752 preserve_temp_slots (op0);
8753 expand_end_bindings (NULL_TREE, 0, 0);
8754 emit_jump (done_label);
8755 emit_label (finally_label);
8756 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8757 emit_indirect_jump (return_link);
8758 emit_label (done_label);
8762 case GOTO_SUBROUTINE_EXPR:
8764 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8765 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8766 rtx return_address = gen_label_rtx ();
8767 emit_move_insn (return_link,
8768 gen_rtx_LABEL_REF (Pmode, return_address));
8770 emit_label (return_address);
8775 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8778 return get_exception_pointer (cfun);
8781 /* Function descriptors are not valid except for as
8782 initialization constants, and should not be expanded. */
8786 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8789 /* Here to do an ordinary binary operator, generating an instruction
8790 from the optab already placed in `this_optab'. */
8792 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8794 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8795 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8797 temp = expand_binop (mode, this_optab, op0, op1, target,
8798 unsignedp, OPTAB_LIB_WIDEN);
8804 /* Return the tree node if a ARG corresponds to a string constant or zero
8805 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8806 in bytes within the string that ARG is accessing. The type of the
8807 offset will be `sizetype'. */
8810 string_constant (arg, ptr_offset)
8816 if (TREE_CODE (arg) == ADDR_EXPR
8817 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8819 *ptr_offset = size_zero_node;
8820 return TREE_OPERAND (arg, 0);
8822 else if (TREE_CODE (arg) == PLUS_EXPR)
8824 tree arg0 = TREE_OPERAND (arg, 0);
8825 tree arg1 = TREE_OPERAND (arg, 1);
8830 if (TREE_CODE (arg0) == ADDR_EXPR
8831 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8833 *ptr_offset = convert (sizetype, arg1);
8834 return TREE_OPERAND (arg0, 0);
8836 else if (TREE_CODE (arg1) == ADDR_EXPR
8837 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8839 *ptr_offset = convert (sizetype, arg0);
8840 return TREE_OPERAND (arg1, 0);
8847 /* Expand code for a post- or pre- increment or decrement
8848 and return the RTX for the result.
8849 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8852 expand_increment (exp, post, ignore)
8858 tree incremented = TREE_OPERAND (exp, 0);
8859 optab this_optab = add_optab;
8861 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8862 int op0_is_copy = 0;
8863 int single_insn = 0;
8864 /* 1 means we can't store into OP0 directly,
8865 because it is a subreg narrower than a word,
8866 and we don't dare clobber the rest of the word. */
8869 /* Stabilize any component ref that might need to be
8870 evaluated more than once below. */
8872 || TREE_CODE (incremented) == BIT_FIELD_REF
8873 || (TREE_CODE (incremented) == COMPONENT_REF
8874 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8875 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8876 incremented = stabilize_reference (incremented);
8877 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8878 ones into save exprs so that they don't accidentally get evaluated
8879 more than once by the code below. */
8880 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8881 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8882 incremented = save_expr (incremented);
8884 /* Compute the operands as RTX.
8885 Note whether OP0 is the actual lvalue or a copy of it:
8886 I believe it is a copy iff it is a register or subreg
8887 and insns were generated in computing it. */
8889 temp = get_last_insn ();
8890 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8892 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8893 in place but instead must do sign- or zero-extension during assignment,
8894 so we copy it into a new register and let the code below use it as
8897 Note that we can safely modify this SUBREG since it is know not to be
8898 shared (it was made by the expand_expr call above). */
8900 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8903 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8907 else if (GET_CODE (op0) == SUBREG
8908 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8910 /* We cannot increment this SUBREG in place. If we are
8911 post-incrementing, get a copy of the old value. Otherwise,
8912 just mark that we cannot increment in place. */
8914 op0 = copy_to_reg (op0);
8919 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8920 && temp != get_last_insn ());
8921 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8923 /* Decide whether incrementing or decrementing. */
8924 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8925 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8926 this_optab = sub_optab;
8928 /* Convert decrement by a constant into a negative increment. */
8929 if (this_optab == sub_optab
8930 && GET_CODE (op1) == CONST_INT)
8932 op1 = GEN_INT (-INTVAL (op1));
8933 this_optab = add_optab;
8936 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8937 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8939 /* For a preincrement, see if we can do this with a single instruction. */
8942 icode = (int) this_optab->handlers[(int) mode].insn_code;
8943 if (icode != (int) CODE_FOR_nothing
8944 /* Make sure that OP0 is valid for operands 0 and 1
8945 of the insn we want to queue. */
8946 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8947 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8948 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8952 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8953 then we cannot just increment OP0. We must therefore contrive to
8954 increment the original value. Then, for postincrement, we can return
8955 OP0 since it is a copy of the old value. For preincrement, expand here
8956 unless we can do it with a single insn.
8958 Likewise if storing directly into OP0 would clobber high bits
8959 we need to preserve (bad_subreg). */
8960 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8962 /* This is the easiest way to increment the value wherever it is.
8963 Problems with multiple evaluation of INCREMENTED are prevented
8964 because either (1) it is a component_ref or preincrement,
8965 in which case it was stabilized above, or (2) it is an array_ref
8966 with constant index in an array in a register, which is
8967 safe to reevaluate. */
8968 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8969 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8970 ? MINUS_EXPR : PLUS_EXPR),
8973 TREE_OPERAND (exp, 1));
8975 while (TREE_CODE (incremented) == NOP_EXPR
8976 || TREE_CODE (incremented) == CONVERT_EXPR)
8978 newexp = convert (TREE_TYPE (incremented), newexp);
8979 incremented = TREE_OPERAND (incremented, 0);
8982 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8983 return post ? op0 : temp;
8988 /* We have a true reference to the value in OP0.
8989 If there is an insn to add or subtract in this mode, queue it.
8990 Queueing the increment insn avoids the register shuffling
8991 that often results if we must increment now and first save
8992 the old value for subsequent use. */
8994 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8995 op0 = stabilize (op0);
8998 icode = (int) this_optab->handlers[(int) mode].insn_code;
8999 if (icode != (int) CODE_FOR_nothing
9000 /* Make sure that OP0 is valid for operands 0 and 1
9001 of the insn we want to queue. */
9002 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9003 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9005 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9006 op1 = force_reg (mode, op1);
9008 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9010 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9012 rtx addr = (general_operand (XEXP (op0, 0), mode)
9013 ? force_reg (Pmode, XEXP (op0, 0))
9014 : copy_to_reg (XEXP (op0, 0)));
9017 op0 = replace_equiv_address (op0, addr);
9018 temp = force_reg (GET_MODE (op0), op0);
9019 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9020 op1 = force_reg (mode, op1);
9022 /* The increment queue is LIFO, thus we have to `queue'
9023 the instructions in reverse order. */
9024 enqueue_insn (op0, gen_move_insn (op0, temp));
9025 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9030 /* Preincrement, or we can't increment with one simple insn. */
9032 /* Save a copy of the value before inc or dec, to return it later. */
9033 temp = value = copy_to_reg (op0);
9035 /* Arrange to return the incremented value. */
9036 /* Copy the rtx because expand_binop will protect from the queue,
9037 and the results of that would be invalid for us to return
9038 if our caller does emit_queue before using our result. */
9039 temp = copy_rtx (value = op0);
9041 /* Increment however we can. */
9042 op1 = expand_binop (mode, this_optab, value, op1, op0,
9043 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9045 /* Make sure the value is stored into OP0. */
9047 emit_move_insn (op0, op1);
9052 /* At the start of a function, record that we have no previously-pushed
9053 arguments waiting to be popped. */
9056 init_pending_stack_adjust ()
9058 pending_stack_adjust = 0;
9061 /* When exiting from function, if safe, clear out any pending stack adjust
9062 so the adjustment won't get done.
9064 Note, if the current function calls alloca, then it must have a
9065 frame pointer regardless of the value of flag_omit_frame_pointer. */
9068 clear_pending_stack_adjust ()
9070 #ifdef EXIT_IGNORE_STACK
9072 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9073 && EXIT_IGNORE_STACK
9074 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9075 && ! flag_inline_functions)
9077 stack_pointer_delta -= pending_stack_adjust,
9078 pending_stack_adjust = 0;
9083 /* Pop any previously-pushed arguments that have not been popped yet. */
9086 do_pending_stack_adjust ()
9088 if (inhibit_defer_pop == 0)
9090 if (pending_stack_adjust != 0)
9091 adjust_stack (GEN_INT (pending_stack_adjust));
9092 pending_stack_adjust = 0;
9096 /* Expand conditional expressions. */
9098 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9099 LABEL is an rtx of code CODE_LABEL, in this function and all the
9103 jumpifnot (exp, label)
9107 do_jump (exp, label, NULL_RTX);
9110 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9117 do_jump (exp, NULL_RTX, label);
9120 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9121 the result is zero, or IF_TRUE_LABEL if the result is one.
9122 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9123 meaning fall through in that case.
9125 do_jump always does any pending stack adjust except when it does not
9126 actually perform a jump. An example where there is no jump
9127 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9129 This function is responsible for optimizing cases such as
9130 &&, || and comparison operators in EXP. */
9133 do_jump (exp, if_false_label, if_true_label)
9135 rtx if_false_label, if_true_label;
9137 enum tree_code code = TREE_CODE (exp);
9138 /* Some cases need to create a label to jump to
9139 in order to properly fall through.
9140 These cases set DROP_THROUGH_LABEL nonzero. */
9141 rtx drop_through_label = 0;
9145 enum machine_mode mode;
9147 #ifdef MAX_INTEGER_COMPUTATION_MODE
9148 check_max_integer_computation_mode (exp);
9159 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9165 /* This is not true with #pragma weak */
9167 /* The address of something can never be zero. */
9169 emit_jump (if_true_label);
9174 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9175 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9176 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9177 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9180 /* If we are narrowing the operand, we have to do the compare in the
9182 if ((TYPE_PRECISION (TREE_TYPE (exp))
9183 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9185 case NON_LVALUE_EXPR:
9186 case REFERENCE_EXPR:
9191 /* These cannot change zero->non-zero or vice versa. */
9192 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9195 case WITH_RECORD_EXPR:
9196 /* Put the object on the placeholder list, recurse through our first
9197 operand, and pop the list. */
9198 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9200 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9201 placeholder_list = TREE_CHAIN (placeholder_list);
9205 /* This is never less insns than evaluating the PLUS_EXPR followed by
9206 a test and can be longer if the test is eliminated. */
9208 /* Reduce to minus. */
9209 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9210 TREE_OPERAND (exp, 0),
9211 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9212 TREE_OPERAND (exp, 1))));
9213 /* Process as MINUS. */
9217 /* Non-zero iff operands of minus differ. */
9218 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9219 TREE_OPERAND (exp, 0),
9220 TREE_OPERAND (exp, 1)),
9221 NE, NE, if_false_label, if_true_label);
9225 /* If we are AND'ing with a small constant, do this comparison in the
9226 smallest type that fits. If the machine doesn't have comparisons
9227 that small, it will be converted back to the wider comparison.
9228 This helps if we are testing the sign bit of a narrower object.
9229 combine can't do this for us because it can't know whether a
9230 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9232 if (! SLOW_BYTE_ACCESS
9233 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9234 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9235 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9236 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9237 && (type = type_for_mode (mode, 1)) != 0
9238 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9239 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9240 != CODE_FOR_nothing))
9242 do_jump (convert (type, exp), if_false_label, if_true_label);
9247 case TRUTH_NOT_EXPR:
9248 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9251 case TRUTH_ANDIF_EXPR:
9252 if (if_false_label == 0)
9253 if_false_label = drop_through_label = gen_label_rtx ();
9254 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9255 start_cleanup_deferral ();
9256 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9257 end_cleanup_deferral ();
9260 case TRUTH_ORIF_EXPR:
9261 if (if_true_label == 0)
9262 if_true_label = drop_through_label = gen_label_rtx ();
9263 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9264 start_cleanup_deferral ();
9265 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9266 end_cleanup_deferral ();
9271 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9272 preserve_temp_slots (NULL_RTX);
9276 do_pending_stack_adjust ();
9277 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9283 case ARRAY_RANGE_REF:
9285 HOST_WIDE_INT bitsize, bitpos;
9287 enum machine_mode mode;
9292 /* Get description of this reference. We don't actually care
9293 about the underlying object here. */
9294 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9295 &unsignedp, &volatilep);
9297 type = type_for_size (bitsize, unsignedp);
9298 if (! SLOW_BYTE_ACCESS
9299 && type != 0 && bitsize >= 0
9300 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9301 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9302 != CODE_FOR_nothing))
9304 do_jump (convert (type, exp), if_false_label, if_true_label);
9311 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9312 if (integer_onep (TREE_OPERAND (exp, 1))
9313 && integer_zerop (TREE_OPERAND (exp, 2)))
9314 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9316 else if (integer_zerop (TREE_OPERAND (exp, 1))
9317 && integer_onep (TREE_OPERAND (exp, 2)))
9318 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9322 rtx label1 = gen_label_rtx ();
9323 drop_through_label = gen_label_rtx ();
9325 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9327 start_cleanup_deferral ();
9328 /* Now the THEN-expression. */
9329 do_jump (TREE_OPERAND (exp, 1),
9330 if_false_label ? if_false_label : drop_through_label,
9331 if_true_label ? if_true_label : drop_through_label);
9332 /* In case the do_jump just above never jumps. */
9333 do_pending_stack_adjust ();
9334 emit_label (label1);
9336 /* Now the ELSE-expression. */
9337 do_jump (TREE_OPERAND (exp, 2),
9338 if_false_label ? if_false_label : drop_through_label,
9339 if_true_label ? if_true_label : drop_through_label);
9340 end_cleanup_deferral ();
9346 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9348 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9349 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9351 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9352 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9355 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9356 fold (build (EQ_EXPR, TREE_TYPE (exp),
9357 fold (build1 (REALPART_EXPR,
9358 TREE_TYPE (inner_type),
9360 fold (build1 (REALPART_EXPR,
9361 TREE_TYPE (inner_type),
9363 fold (build (EQ_EXPR, TREE_TYPE (exp),
9364 fold (build1 (IMAGPART_EXPR,
9365 TREE_TYPE (inner_type),
9367 fold (build1 (IMAGPART_EXPR,
9368 TREE_TYPE (inner_type),
9370 if_false_label, if_true_label);
9373 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9374 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9376 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9377 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9378 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9380 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9386 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9388 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9389 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9391 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9392 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9395 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9396 fold (build (NE_EXPR, TREE_TYPE (exp),
9397 fold (build1 (REALPART_EXPR,
9398 TREE_TYPE (inner_type),
9400 fold (build1 (REALPART_EXPR,
9401 TREE_TYPE (inner_type),
9403 fold (build (NE_EXPR, TREE_TYPE (exp),
9404 fold (build1 (IMAGPART_EXPR,
9405 TREE_TYPE (inner_type),
9407 fold (build1 (IMAGPART_EXPR,
9408 TREE_TYPE (inner_type),
9410 if_false_label, if_true_label);
9413 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9414 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9416 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9417 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9418 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9420 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9425 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9426 if (GET_MODE_CLASS (mode) == MODE_INT
9427 && ! can_compare_p (LT, mode, ccp_jump))
9428 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9430 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9434 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9435 if (GET_MODE_CLASS (mode) == MODE_INT
9436 && ! can_compare_p (LE, mode, ccp_jump))
9437 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9439 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9443 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9444 if (GET_MODE_CLASS (mode) == MODE_INT
9445 && ! can_compare_p (GT, mode, ccp_jump))
9446 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9448 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9452 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9453 if (GET_MODE_CLASS (mode) == MODE_INT
9454 && ! can_compare_p (GE, mode, ccp_jump))
9455 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9457 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9460 case UNORDERED_EXPR:
9463 enum rtx_code cmp, rcmp;
9466 if (code == UNORDERED_EXPR)
9467 cmp = UNORDERED, rcmp = ORDERED;
9469 cmp = ORDERED, rcmp = UNORDERED;
9470 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9473 if (! can_compare_p (cmp, mode, ccp_jump)
9474 && (can_compare_p (rcmp, mode, ccp_jump)
9475 /* If the target doesn't provide either UNORDERED or ORDERED
9476 comparisons, canonicalize on UNORDERED for the library. */
9477 || rcmp == UNORDERED))
9481 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9483 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9488 enum rtx_code rcode1;
9489 enum tree_code tcode2;
9513 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9514 if (can_compare_p (rcode1, mode, ccp_jump))
9515 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9519 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9520 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9523 /* If the target doesn't support combined unordered
9524 compares, decompose into UNORDERED + comparison. */
9525 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9526 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9527 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9528 do_jump (exp, if_false_label, if_true_label);
9534 __builtin_expect (<test>, 0) and
9535 __builtin_expect (<test>, 1)
9537 We need to do this here, so that <test> is not converted to a SCC
9538 operation on machines that use condition code registers and COMPARE
9539 like the PowerPC, and then the jump is done based on whether the SCC
9540 operation produced a 1 or 0. */
9542 /* Check for a built-in function. */
9543 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9545 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9546 tree arglist = TREE_OPERAND (exp, 1);
9548 if (TREE_CODE (fndecl) == FUNCTION_DECL
9549 && DECL_BUILT_IN (fndecl)
9550 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9551 && arglist != NULL_TREE
9552 && TREE_CHAIN (arglist) != NULL_TREE)
9554 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9557 if (seq != NULL_RTX)
9564 /* fall through and generate the normal code. */
9568 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9570 /* This is not needed any more and causes poor code since it causes
9571 comparisons and tests from non-SI objects to have different code
9573 /* Copy to register to avoid generating bad insns by cse
9574 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9575 if (!cse_not_expected && GET_CODE (temp) == MEM)
9576 temp = copy_to_reg (temp);
9578 do_pending_stack_adjust ();
9579 /* Do any postincrements in the expression that was tested. */
9582 if (GET_CODE (temp) == CONST_INT
9583 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9584 || GET_CODE (temp) == LABEL_REF)
9586 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9590 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9591 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9592 /* Note swapping the labels gives us not-equal. */
9593 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9594 else if (GET_MODE (temp) != VOIDmode)
9595 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9596 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9597 GET_MODE (temp), NULL_RTX,
9598 if_false_label, if_true_label);
9603 if (drop_through_label)
9605 /* If do_jump produces code that might be jumped around,
9606 do any stack adjusts from that code, before the place
9607 where control merges in. */
9608 do_pending_stack_adjust ();
9609 emit_label (drop_through_label);
9613 /* Given a comparison expression EXP for values too wide to be compared
9614 with one insn, test the comparison and jump to the appropriate label.
9615 The code of EXP is ignored; we always test GT if SWAP is 0,
9616 and LT if SWAP is 1. */
9619 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9622 rtx if_false_label, if_true_label;
9624 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9625 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9626 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9627 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9629 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9632 /* Compare OP0 with OP1, word at a time, in mode MODE.
9633 UNSIGNEDP says to do unsigned comparison.
9634 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9637 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9638 enum machine_mode mode;
9641 rtx if_false_label, if_true_label;
9643 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9644 rtx drop_through_label = 0;
9647 if (! if_true_label || ! if_false_label)
9648 drop_through_label = gen_label_rtx ();
9649 if (! if_true_label)
9650 if_true_label = drop_through_label;
9651 if (! if_false_label)
9652 if_false_label = drop_through_label;
9654 /* Compare a word at a time, high order first. */
9655 for (i = 0; i < nwords; i++)
9657 rtx op0_word, op1_word;
9659 if (WORDS_BIG_ENDIAN)
9661 op0_word = operand_subword_force (op0, i, mode);
9662 op1_word = operand_subword_force (op1, i, mode);
9666 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9667 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9670 /* All but high-order word must be compared as unsigned. */
9671 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9672 (unsignedp || i > 0), word_mode, NULL_RTX,
9673 NULL_RTX, if_true_label);
9675 /* Consider lower words only if these are equal. */
9676 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9677 NULL_RTX, NULL_RTX, if_false_label);
9681 emit_jump (if_false_label);
9682 if (drop_through_label)
9683 emit_label (drop_through_label);
9686 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9687 with one insn, test the comparison and jump to the appropriate label. */
9690 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9692 rtx if_false_label, if_true_label;
9694 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9695 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9696 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9697 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9699 rtx drop_through_label = 0;
9701 if (! if_false_label)
9702 drop_through_label = if_false_label = gen_label_rtx ();
9704 for (i = 0; i < nwords; i++)
9705 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9706 operand_subword_force (op1, i, mode),
9707 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9708 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9711 emit_jump (if_true_label);
9712 if (drop_through_label)
9713 emit_label (drop_through_label);
9716 /* Jump according to whether OP0 is 0.
9717 We assume that OP0 has an integer mode that is too wide
9718 for the available compare insns. */
9721 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9723 rtx if_false_label, if_true_label;
9725 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9728 rtx drop_through_label = 0;
9730 /* The fastest way of doing this comparison on almost any machine is to
9731 "or" all the words and compare the result. If all have to be loaded
9732 from memory and this is a very wide item, it's possible this may
9733 be slower, but that's highly unlikely. */
9735 part = gen_reg_rtx (word_mode);
9736 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9737 for (i = 1; i < nwords && part != 0; i++)
9738 part = expand_binop (word_mode, ior_optab, part,
9739 operand_subword_force (op0, i, GET_MODE (op0)),
9740 part, 1, OPTAB_WIDEN);
9744 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9745 NULL_RTX, if_false_label, if_true_label);
9750 /* If we couldn't do the "or" simply, do this with a series of compares. */
9751 if (! if_false_label)
9752 drop_through_label = if_false_label = gen_label_rtx ();
9754 for (i = 0; i < nwords; i++)
9755 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9756 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9757 if_false_label, NULL_RTX);
9760 emit_jump (if_true_label);
9762 if (drop_through_label)
9763 emit_label (drop_through_label);
9766 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9767 (including code to compute the values to be compared)
9768 and set (CC0) according to the result.
9769 The decision as to signed or unsigned comparison must be made by the caller.
9771 We force a stack adjustment unless there are currently
9772 things pushed on the stack that aren't yet used.
9774 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9778 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9782 enum machine_mode mode;
9787 /* If one operand is constant, make it the second one. Only do this
9788 if the other operand is not constant as well. */
9790 if (swap_commutative_operands_p (op0, op1))
9795 code = swap_condition (code);
9800 op0 = force_not_mem (op0);
9801 op1 = force_not_mem (op1);
9804 do_pending_stack_adjust ();
9806 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9807 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9811 /* There's no need to do this now that combine.c can eliminate lots of
9812 sign extensions. This can be less efficient in certain cases on other
9815 /* If this is a signed equality comparison, we can do it as an
9816 unsigned comparison since zero-extension is cheaper than sign
9817 extension and comparisons with zero are done as unsigned. This is
9818 the case even on machines that can do fast sign extension, since
9819 zero-extension is easier to combine with other operations than
9820 sign-extension is. If we are comparing against a constant, we must
9821 convert it to what it would look like unsigned. */
9822 if ((code == EQ || code == NE) && ! unsignedp
9823 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9825 if (GET_CODE (op1) == CONST_INT
9826 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9827 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9832 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9834 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9837 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9838 The decision as to signed or unsigned comparison must be made by the caller.
9840 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9844 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9845 if_false_label, if_true_label)
9849 enum machine_mode mode;
9851 rtx if_false_label, if_true_label;
9854 int dummy_true_label = 0;
9856 /* Reverse the comparison if that is safe and we want to jump if it is
9858 if (! if_true_label && ! FLOAT_MODE_P (mode))
9860 if_true_label = if_false_label;
9862 code = reverse_condition (code);
9865 /* If one operand is constant, make it the second one. Only do this
9866 if the other operand is not constant as well. */
9868 if (swap_commutative_operands_p (op0, op1))
9873 code = swap_condition (code);
9878 op0 = force_not_mem (op0);
9879 op1 = force_not_mem (op1);
9882 do_pending_stack_adjust ();
9884 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9885 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9887 if (tem == const_true_rtx)
9890 emit_jump (if_true_label);
9895 emit_jump (if_false_label);
9901 /* There's no need to do this now that combine.c can eliminate lots of
9902 sign extensions. This can be less efficient in certain cases on other
9905 /* If this is a signed equality comparison, we can do it as an
9906 unsigned comparison since zero-extension is cheaper than sign
9907 extension and comparisons with zero are done as unsigned. This is
9908 the case even on machines that can do fast sign extension, since
9909 zero-extension is easier to combine with other operations than
9910 sign-extension is. If we are comparing against a constant, we must
9911 convert it to what it would look like unsigned. */
9912 if ((code == EQ || code == NE) && ! unsignedp
9913 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9915 if (GET_CODE (op1) == CONST_INT
9916 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9917 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9922 if (! if_true_label)
9924 dummy_true_label = 1;
9925 if_true_label = gen_label_rtx ();
9928 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9932 emit_jump (if_false_label);
9933 if (dummy_true_label)
9934 emit_label (if_true_label);
9937 /* Generate code for a comparison expression EXP (including code to compute
9938 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9939 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9940 generated code will drop through.
9941 SIGNED_CODE should be the rtx operation for this comparison for
9942 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9944 We force a stack adjustment unless there are currently
9945 things pushed on the stack that aren't yet used. */
9948 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9951 enum rtx_code signed_code, unsigned_code;
9952 rtx if_false_label, if_true_label;
9956 enum machine_mode mode;
9960 /* Don't crash if the comparison was erroneous. */
9961 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9962 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9965 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9966 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9969 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9970 mode = TYPE_MODE (type);
9971 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9972 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9973 || (GET_MODE_BITSIZE (mode)
9974 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9977 /* op0 might have been replaced by promoted constant, in which
9978 case the type of second argument should be used. */
9979 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9980 mode = TYPE_MODE (type);
9982 unsignedp = TREE_UNSIGNED (type);
9983 code = unsignedp ? unsigned_code : signed_code;
9985 #ifdef HAVE_canonicalize_funcptr_for_compare
9986 /* If function pointers need to be "canonicalized" before they can
9987 be reliably compared, then canonicalize them. */
9988 if (HAVE_canonicalize_funcptr_for_compare
9989 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9990 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9993 rtx new_op0 = gen_reg_rtx (mode);
9995 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9999 if (HAVE_canonicalize_funcptr_for_compare
10000 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10001 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10004 rtx new_op1 = gen_reg_rtx (mode);
10006 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10011 /* Do any postincrements in the expression that was tested. */
10014 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10016 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10017 if_false_label, if_true_label);
10020 /* Generate code to calculate EXP using a store-flag instruction
10021 and return an rtx for the result. EXP is either a comparison
10022 or a TRUTH_NOT_EXPR whose operand is a comparison.
10024 If TARGET is nonzero, store the result there if convenient.
10026 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10029 Return zero if there is no suitable set-flag instruction
10030 available on this machine.
10032 Once expand_expr has been called on the arguments of the comparison,
10033 we are committed to doing the store flag, since it is not safe to
10034 re-evaluate the expression. We emit the store-flag insn by calling
10035 emit_store_flag, but only expand the arguments if we have a reason
10036 to believe that emit_store_flag will be successful. If we think that
10037 it will, but it isn't, we have to simulate the store-flag with a
10038 set/jump/set sequence. */
10041 do_store_flag (exp, target, mode, only_cheap)
10044 enum machine_mode mode;
10047 enum rtx_code code;
10048 tree arg0, arg1, type;
10050 enum machine_mode operand_mode;
10054 enum insn_code icode;
10055 rtx subtarget = target;
10058 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10059 result at the end. We can't simply invert the test since it would
10060 have already been inverted if it were valid. This case occurs for
10061 some floating-point comparisons. */
10063 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10064 invert = 1, exp = TREE_OPERAND (exp, 0);
10066 arg0 = TREE_OPERAND (exp, 0);
10067 arg1 = TREE_OPERAND (exp, 1);
10069 /* Don't crash if the comparison was erroneous. */
10070 if (arg0 == error_mark_node || arg1 == error_mark_node)
10073 type = TREE_TYPE (arg0);
10074 operand_mode = TYPE_MODE (type);
10075 unsignedp = TREE_UNSIGNED (type);
10077 /* We won't bother with BLKmode store-flag operations because it would mean
10078 passing a lot of information to emit_store_flag. */
10079 if (operand_mode == BLKmode)
10082 /* We won't bother with store-flag operations involving function pointers
10083 when function pointers must be canonicalized before comparisons. */
10084 #ifdef HAVE_canonicalize_funcptr_for_compare
10085 if (HAVE_canonicalize_funcptr_for_compare
10086 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10087 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10089 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10090 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10091 == FUNCTION_TYPE))))
10098 /* Get the rtx comparison code to use. We know that EXP is a comparison
10099 operation of some type. Some comparisons against 1 and -1 can be
10100 converted to comparisons with zero. Do so here so that the tests
10101 below will be aware that we have a comparison with zero. These
10102 tests will not catch constants in the first operand, but constants
10103 are rarely passed as the first operand. */
10105 switch (TREE_CODE (exp))
10114 if (integer_onep (arg1))
10115 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10117 code = unsignedp ? LTU : LT;
10120 if (! unsignedp && integer_all_onesp (arg1))
10121 arg1 = integer_zero_node, code = LT;
10123 code = unsignedp ? LEU : LE;
10126 if (! unsignedp && integer_all_onesp (arg1))
10127 arg1 = integer_zero_node, code = GE;
10129 code = unsignedp ? GTU : GT;
10132 if (integer_onep (arg1))
10133 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10135 code = unsignedp ? GEU : GE;
10138 case UNORDERED_EXPR:
10164 /* Put a constant second. */
10165 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10167 tem = arg0; arg0 = arg1; arg1 = tem;
10168 code = swap_condition (code);
10171 /* If this is an equality or inequality test of a single bit, we can
10172 do this by shifting the bit being tested to the low-order bit and
10173 masking the result with the constant 1. If the condition was EQ,
10174 we xor it with 1. This does not require an scc insn and is faster
10175 than an scc insn even if we have it. */
10177 if ((code == NE || code == EQ)
10178 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10179 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10181 tree inner = TREE_OPERAND (arg0, 0);
10182 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10185 /* If INNER is a right shift of a constant and it plus BITNUM does
10186 not overflow, adjust BITNUM and INNER. */
10188 if (TREE_CODE (inner) == RSHIFT_EXPR
10189 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10190 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10191 && bitnum < TYPE_PRECISION (type)
10192 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10193 bitnum - TYPE_PRECISION (type)))
10195 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10196 inner = TREE_OPERAND (inner, 0);
10199 /* If we are going to be able to omit the AND below, we must do our
10200 operations as unsigned. If we must use the AND, we have a choice.
10201 Normally unsigned is faster, but for some machines signed is. */
10202 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10203 #ifdef LOAD_EXTEND_OP
10204 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10210 if (! get_subtarget (subtarget)
10211 || GET_MODE (subtarget) != operand_mode
10212 || ! safe_from_p (subtarget, inner, 1))
10215 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10218 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10219 size_int (bitnum), subtarget, ops_unsignedp);
10221 if (GET_MODE (op0) != mode)
10222 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10224 if ((code == EQ && ! invert) || (code == NE && invert))
10225 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10226 ops_unsignedp, OPTAB_LIB_WIDEN);
10228 /* Put the AND last so it can combine with more things. */
10229 if (bitnum != TYPE_PRECISION (type) - 1)
10230 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10235 /* Now see if we are likely to be able to do this. Return if not. */
10236 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10239 icode = setcc_gen_code[(int) code];
10240 if (icode == CODE_FOR_nothing
10241 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10243 /* We can only do this if it is one of the special cases that
10244 can be handled without an scc insn. */
10245 if ((code == LT && integer_zerop (arg1))
10246 || (! only_cheap && code == GE && integer_zerop (arg1)))
10248 else if (BRANCH_COST >= 0
10249 && ! only_cheap && (code == NE || code == EQ)
10250 && TREE_CODE (type) != REAL_TYPE
10251 && ((abs_optab->handlers[(int) operand_mode].insn_code
10252 != CODE_FOR_nothing)
10253 || (ffs_optab->handlers[(int) operand_mode].insn_code
10254 != CODE_FOR_nothing)))
10260 if (! get_subtarget (target)
10261 || GET_MODE (subtarget) != operand_mode
10262 || ! safe_from_p (subtarget, arg1, 1))
10265 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10266 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10269 target = gen_reg_rtx (mode);
10271 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10272 because, if the emit_store_flag does anything it will succeed and
10273 OP0 and OP1 will not be used subsequently. */
10275 result = emit_store_flag (target, code,
10276 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10277 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10278 operand_mode, unsignedp, 1);
10283 result = expand_binop (mode, xor_optab, result, const1_rtx,
10284 result, 0, OPTAB_LIB_WIDEN);
10288 /* If this failed, we have to do this with set/compare/jump/set code. */
10289 if (GET_CODE (target) != REG
10290 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10291 target = gen_reg_rtx (GET_MODE (target));
10293 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10294 result = compare_from_rtx (op0, op1, code, unsignedp,
10295 operand_mode, NULL_RTX);
10296 if (GET_CODE (result) == CONST_INT)
10297 return (((result == const0_rtx && ! invert)
10298 || (result != const0_rtx && invert))
10299 ? const0_rtx : const1_rtx);
10301 /* The code of RESULT may not match CODE if compare_from_rtx
10302 decided to swap its operands and reverse the original code.
10304 We know that compare_from_rtx returns either a CONST_INT or
10305 a new comparison code, so it is safe to just extract the
10306 code from RESULT. */
10307 code = GET_CODE (result);
10309 label = gen_label_rtx ();
10310 if (bcc_gen_fctn[(int) code] == 0)
10313 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10314 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10315 emit_label (label);
10321 /* Stubs in case we haven't got a casesi insn. */
10322 #ifndef HAVE_casesi
10323 # define HAVE_casesi 0
10324 # define gen_casesi(a, b, c, d, e) (0)
10325 # define CODE_FOR_casesi CODE_FOR_nothing
10328 /* If the machine does not have a case insn that compares the bounds,
10329 this means extra overhead for dispatch tables, which raises the
10330 threshold for using them. */
10331 #ifndef CASE_VALUES_THRESHOLD
10332 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10333 #endif /* CASE_VALUES_THRESHOLD */
10336 case_values_threshold ()
10338 return CASE_VALUES_THRESHOLD;
10341 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10342 0 otherwise (i.e. if there is no casesi instruction). */
10344 try_casesi (index_type, index_expr, minval, range,
10345 table_label, default_label)
10346 tree index_type, index_expr, minval, range;
10347 rtx table_label ATTRIBUTE_UNUSED;
10350 enum machine_mode index_mode = SImode;
10351 int index_bits = GET_MODE_BITSIZE (index_mode);
10352 rtx op1, op2, index;
10353 enum machine_mode op_mode;
10358 /* Convert the index to SImode. */
10359 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10361 enum machine_mode omode = TYPE_MODE (index_type);
10362 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10364 /* We must handle the endpoints in the original mode. */
10365 index_expr = build (MINUS_EXPR, index_type,
10366 index_expr, minval);
10367 minval = integer_zero_node;
10368 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10369 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10370 omode, 1, default_label);
10371 /* Now we can safely truncate. */
10372 index = convert_to_mode (index_mode, index, 0);
10376 if (TYPE_MODE (index_type) != index_mode)
10378 index_expr = convert (type_for_size (index_bits, 0),
10380 index_type = TREE_TYPE (index_expr);
10383 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10386 index = protect_from_queue (index, 0);
10387 do_pending_stack_adjust ();
10389 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10390 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10392 index = copy_to_mode_reg (op_mode, index);
10394 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10396 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10397 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10398 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10399 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10401 op1 = copy_to_mode_reg (op_mode, op1);
10403 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10405 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10406 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10407 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10408 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10410 op2 = copy_to_mode_reg (op_mode, op2);
10412 emit_jump_insn (gen_casesi (index, op1, op2,
10413 table_label, default_label));
10417 /* Attempt to generate a tablejump instruction; same concept. */
10418 #ifndef HAVE_tablejump
10419 #define HAVE_tablejump 0
10420 #define gen_tablejump(x, y) (0)
10423 /* Subroutine of the next function.
10425 INDEX is the value being switched on, with the lowest value
10426 in the table already subtracted.
10427 MODE is its expected mode (needed if INDEX is constant).
10428 RANGE is the length of the jump table.
10429 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10431 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10432 index value is out of range. */
10435 do_tablejump (index, mode, range, table_label, default_label)
10436 rtx index, range, table_label, default_label;
10437 enum machine_mode mode;
10441 /* Do an unsigned comparison (in the proper mode) between the index
10442 expression and the value which represents the length of the range.
10443 Since we just finished subtracting the lower bound of the range
10444 from the index expression, this comparison allows us to simultaneously
10445 check that the original index expression value is both greater than
10446 or equal to the minimum value of the range and less than or equal to
10447 the maximum value of the range. */
10449 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10452 /* If index is in range, it must fit in Pmode.
10453 Convert to Pmode so we can index with it. */
10455 index = convert_to_mode (Pmode, index, 1);
10457 /* Don't let a MEM slip thru, because then INDEX that comes
10458 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10459 and break_out_memory_refs will go to work on it and mess it up. */
10460 #ifdef PIC_CASE_VECTOR_ADDRESS
10461 if (flag_pic && GET_CODE (index) != REG)
10462 index = copy_to_mode_reg (Pmode, index);
10465 /* If flag_force_addr were to affect this address
10466 it could interfere with the tricky assumptions made
10467 about addresses that contain label-refs,
10468 which may be valid only very near the tablejump itself. */
10469 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10470 GET_MODE_SIZE, because this indicates how large insns are. The other
10471 uses should all be Pmode, because they are addresses. This code
10472 could fail if addresses and insns are not the same size. */
10473 index = gen_rtx_PLUS (Pmode,
10474 gen_rtx_MULT (Pmode, index,
10475 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10476 gen_rtx_LABEL_REF (Pmode, table_label));
10477 #ifdef PIC_CASE_VECTOR_ADDRESS
10479 index = PIC_CASE_VECTOR_ADDRESS (index);
10482 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10483 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10484 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10485 RTX_UNCHANGING_P (vector) = 1;
10486 convert_move (temp, vector, 0);
10488 emit_jump_insn (gen_tablejump (temp, table_label));
10490 /* If we are generating PIC code or if the table is PC-relative, the
10491 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10492 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10497 try_tablejump (index_type, index_expr, minval, range,
10498 table_label, default_label)
10499 tree index_type, index_expr, minval, range;
10500 rtx table_label, default_label;
10504 if (! HAVE_tablejump)
10507 index_expr = fold (build (MINUS_EXPR, index_type,
10508 convert (index_type, index_expr),
10509 convert (index_type, minval)));
10510 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10512 index = protect_from_queue (index, 0);
10513 do_pending_stack_adjust ();
10515 do_tablejump (index, TYPE_MODE (index_type),
10516 convert_modes (TYPE_MODE (index_type),
10517 TYPE_MODE (TREE_TYPE (range)),
10518 expand_expr (range, NULL_RTX,
10520 TREE_UNSIGNED (TREE_TYPE (range))),
10521 table_label, default_label);