1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
42 #include "typeclass.h"
45 #include "langhooks.h"
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
84 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
85 static tree placeholder_list = 0;
87 /* This structure is used by move_by_pieces to describe the move to
98 int explicit_inc_from;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
104 /* This structure is used by store_by_pieces to describe the clear to
107 struct store_by_pieces
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
120 extern struct obstack permanent_obstack;
122 static rtx enqueue_insn PARAMS ((rtx, rtx));
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124 PARAMS ((unsigned HOST_WIDE_INT,
126 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *));
128 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
130 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
132 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
134 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
136 struct store_by_pieces *));
137 static rtx get_subtarget PARAMS ((rtx));
138 static int is_zeros_p PARAMS ((tree));
139 static int mostly_zeros_p PARAMS ((tree));
140 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int));
143 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
145 HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree,
148 static rtx var_rtx PARAMS ((tree));
149 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150 static rtx expand_increment PARAMS ((tree, int, int));
151 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
152 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
153 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
155 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
157 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
159 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* If a memory-to-memory move would take MOVE_RATIO or more simple
169 move-instruction sequences, we will do a movstr or libcall instead. */
172 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
175 /* If we are optimizing for space (-Os), cut down the default move ratio. */
176 #define MOVE_RATIO (optimize_size ? 3 : 15)
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
187 /* This array records the insn_code of insns to perform block moves. */
188 enum insn_code movstr_optab[NUM_MACHINE_MODES];
190 /* This array records the insn_code of insns to perform block clears. */
191 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
193 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
195 #ifndef SLOW_UNALIGNED_ACCESS
196 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
199 /* This is run once per compilation to set up which modes can be used
200 directly in memory and to initialize the block move optab. */
206 enum machine_mode mode;
212 /* Try indexing by frame ptr and try by stack ptr.
213 It is known that on the Convex the stack ptr isn't a valid index.
214 With luck, one or the other is valid on any machine. */
215 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
216 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
218 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
219 pat = PATTERN (insn);
221 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
222 mode = (enum machine_mode) ((int) mode + 1))
227 direct_load[(int) mode] = direct_store[(int) mode] = 0;
228 PUT_MODE (mem, mode);
229 PUT_MODE (mem1, mode);
231 /* See if there is some register that can be used in this mode and
232 directly loaded or stored from memory. */
234 if (mode != VOIDmode && mode != BLKmode)
235 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
236 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
239 if (! HARD_REGNO_MODE_OK (regno, mode))
242 reg = gen_rtx_REG (mode, regno);
245 SET_DEST (pat) = reg;
246 if (recog (pat, insn, &num_clobbers) >= 0)
247 direct_load[(int) mode] = 1;
249 SET_SRC (pat) = mem1;
250 SET_DEST (pat) = reg;
251 if (recog (pat, insn, &num_clobbers) >= 0)
252 direct_load[(int) mode] = 1;
255 SET_DEST (pat) = mem;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_store[(int) mode] = 1;
260 SET_DEST (pat) = mem1;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_store[(int) mode] = 1;
269 /* This is run at the start of compiling a function. */
274 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
277 pending_stack_adjust = 0;
278 stack_pointer_delta = 0;
279 inhibit_defer_pop = 0;
281 apply_args_value = 0;
287 struct expr_status *p;
292 ggc_mark_rtx (p->x_saveregs_value);
293 ggc_mark_rtx (p->x_apply_args_value);
294 ggc_mark_rtx (p->x_forced_labels);
305 /* Small sanity check that the queue is empty at the end of a function. */
308 finish_expr_for_function ()
314 /* Manage the queue of increment instructions to be output
315 for POSTINCREMENT_EXPR expressions, etc. */
317 /* Queue up to increment (or change) VAR later. BODY says how:
318 BODY should be the same thing you would pass to emit_insn
319 to increment right away. It will go to emit_insn later on.
321 The value is a QUEUED expression to be used in place of VAR
322 where you want to guarantee the pre-incrementation value of VAR. */
325 enqueue_insn (var, body)
328 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
329 body, pending_chain);
330 return pending_chain;
333 /* Use protect_from_queue to convert a QUEUED expression
334 into something that you can put immediately into an instruction.
335 If the queued incrementation has not happened yet,
336 protect_from_queue returns the variable itself.
337 If the incrementation has happened, protect_from_queue returns a temp
338 that contains a copy of the old value of the variable.
340 Any time an rtx which might possibly be a QUEUED is to be put
341 into an instruction, it must be passed through protect_from_queue first.
342 QUEUED expressions are not meaningful in instructions.
344 Do not pass a value through protect_from_queue and then hold
345 on to it for a while before putting it in an instruction!
346 If the queue is flushed in between, incorrect code will result. */
349 protect_from_queue (x, modify)
353 RTX_CODE code = GET_CODE (x);
355 #if 0 /* A QUEUED can hang around after the queue is forced out. */
356 /* Shortcut for most common case. */
357 if (pending_chain == 0)
363 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
364 use of autoincrement. Make a copy of the contents of the memory
365 location rather than a copy of the address, but not if the value is
366 of mode BLKmode. Don't modify X in place since it might be
368 if (code == MEM && GET_MODE (x) != BLKmode
369 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
372 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
376 rtx temp = gen_reg_rtx (GET_MODE (x));
378 emit_insn_before (gen_move_insn (temp, new),
383 /* Copy the address into a pseudo, so that the returned value
384 remains correct across calls to emit_queue. */
385 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
388 /* Otherwise, recursively protect the subexpressions of all
389 the kinds of rtx's that can contain a QUEUED. */
392 rtx tem = protect_from_queue (XEXP (x, 0), 0);
393 if (tem != XEXP (x, 0))
399 else if (code == PLUS || code == MULT)
401 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
402 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
403 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
412 /* If the increment has not happened, use the variable itself. Copy it
413 into a new pseudo so that the value remains correct across calls to
415 if (QUEUED_INSN (x) == 0)
416 return copy_to_reg (QUEUED_VAR (x));
417 /* If the increment has happened and a pre-increment copy exists,
419 if (QUEUED_COPY (x) != 0)
420 return QUEUED_COPY (x);
421 /* The increment has happened but we haven't set up a pre-increment copy.
422 Set one up now, and use it. */
423 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
424 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
426 return QUEUED_COPY (x);
429 /* Return nonzero if X contains a QUEUED expression:
430 if it contains anything that will be altered by a queued increment.
431 We handle only combinations of MEM, PLUS, MINUS and MULT operators
432 since memory addresses generally contain only those. */
438 enum rtx_code code = GET_CODE (x);
444 return queued_subexp_p (XEXP (x, 0));
448 return (queued_subexp_p (XEXP (x, 0))
449 || queued_subexp_p (XEXP (x, 1)));
455 /* Perform all the pending incrementations. */
461 while ((p = pending_chain))
463 rtx body = QUEUED_BODY (p);
465 if (GET_CODE (body) == SEQUENCE)
467 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
468 emit_insn (QUEUED_BODY (p));
471 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
472 pending_chain = QUEUED_NEXT (p);
476 /* Copy data from FROM to TO, where the machine modes are not the same.
477 Both modes may be integer, or both may be floating.
478 UNSIGNEDP should be nonzero if FROM is an unsigned type.
479 This causes zero-extension instead of sign-extension. */
482 convert_move (to, from, unsignedp)
486 enum machine_mode to_mode = GET_MODE (to);
487 enum machine_mode from_mode = GET_MODE (from);
488 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
489 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
493 /* rtx code for making an equivalent value. */
494 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
496 to = protect_from_queue (to, 1);
497 from = protect_from_queue (from, 0);
499 if (to_real != from_real)
502 /* If FROM is a SUBREG that indicates that we have already done at least
503 the required extension, strip it. We don't handle such SUBREGs as
506 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
507 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
508 >= GET_MODE_SIZE (to_mode))
509 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
510 from = gen_lowpart (to_mode, from), from_mode = to_mode;
512 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
515 if (to_mode == from_mode
516 || (from_mode == VOIDmode && CONSTANT_P (from)))
518 emit_move_insn (to, from);
522 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
524 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
527 if (VECTOR_MODE_P (to_mode))
528 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
530 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
532 emit_move_insn (to, from);
536 if (to_real != from_real)
543 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
545 /* Try converting directly if the insn is supported. */
546 if ((code = can_extend_p (to_mode, from_mode, 0))
549 emit_unop_insn (code, to, from, UNKNOWN);
554 #ifdef HAVE_trunchfqf2
555 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
557 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
561 #ifdef HAVE_trunctqfqf2
562 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
564 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
568 #ifdef HAVE_truncsfqf2
569 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
571 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
575 #ifdef HAVE_truncdfqf2
576 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
578 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
582 #ifdef HAVE_truncxfqf2
583 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
589 #ifdef HAVE_trunctfqf2
590 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
597 #ifdef HAVE_trunctqfhf2
598 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
600 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
604 #ifdef HAVE_truncsfhf2
605 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
607 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
611 #ifdef HAVE_truncdfhf2
612 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
614 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
618 #ifdef HAVE_truncxfhf2
619 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
621 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
625 #ifdef HAVE_trunctfhf2
626 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncsftqf2
634 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
636 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
640 #ifdef HAVE_truncdftqf2
641 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
643 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxftqf2
648 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
650 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctftqf2
655 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
657 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
662 #ifdef HAVE_truncdfsf2
663 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
665 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
669 #ifdef HAVE_truncxfsf2
670 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
672 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
676 #ifdef HAVE_trunctfsf2
677 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
679 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
683 #ifdef HAVE_truncxfdf2
684 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
686 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
690 #ifdef HAVE_trunctfdf2
691 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
693 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
705 libcall = extendsfdf2_libfunc;
709 libcall = extendsfxf2_libfunc;
713 libcall = extendsftf2_libfunc;
725 libcall = truncdfsf2_libfunc;
729 libcall = extenddfxf2_libfunc;
733 libcall = extenddftf2_libfunc;
745 libcall = truncxfsf2_libfunc;
749 libcall = truncxfdf2_libfunc;
761 libcall = trunctfsf2_libfunc;
765 libcall = trunctfdf2_libfunc;
777 if (libcall == (rtx) 0)
778 /* This conversion is not implemented yet. */
782 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
784 insns = get_insns ();
786 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
791 /* Now both modes are integers. */
793 /* Handle expanding beyond a word. */
794 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
795 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
802 enum machine_mode lowpart_mode;
803 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
805 /* Try converting directly if the insn is supported. */
806 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
809 /* If FROM is a SUBREG, put it into a register. Do this
810 so that we always generate the same set of insns for
811 better cse'ing; if an intermediate assignment occurred,
812 we won't be doing the operation directly on the SUBREG. */
813 if (optimize > 0 && GET_CODE (from) == SUBREG)
814 from = force_reg (from_mode, from);
815 emit_unop_insn (code, to, from, equiv_code);
818 /* Next, try converting via full word. */
819 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
820 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
821 != CODE_FOR_nothing))
823 if (GET_CODE (to) == REG)
824 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
825 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
826 emit_unop_insn (code, to,
827 gen_lowpart (word_mode, to), equiv_code);
831 /* No special multiword conversion insn; do it by hand. */
834 /* Since we will turn this into a no conflict block, we must ensure
835 that the source does not overlap the target. */
837 if (reg_overlap_mentioned_p (to, from))
838 from = force_reg (from_mode, from);
840 /* Get a copy of FROM widened to a word, if necessary. */
841 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
842 lowpart_mode = word_mode;
844 lowpart_mode = from_mode;
846 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
848 lowpart = gen_lowpart (lowpart_mode, to);
849 emit_move_insn (lowpart, lowfrom);
851 /* Compute the value to put in each remaining word. */
853 fill_value = const0_rtx;
858 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
859 && STORE_FLAG_VALUE == -1)
861 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
863 fill_value = gen_reg_rtx (word_mode);
864 emit_insn (gen_slt (fill_value));
870 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
871 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
873 fill_value = convert_to_mode (word_mode, fill_value, 1);
877 /* Fill the remaining words. */
878 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
880 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
881 rtx subword = operand_subword (to, index, 1, to_mode);
886 if (fill_value != subword)
887 emit_move_insn (subword, fill_value);
890 insns = get_insns ();
893 emit_no_conflict_block (insns, to, from, NULL_RTX,
894 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
898 /* Truncating multi-word to a word or less. */
899 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
900 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
902 if (!((GET_CODE (from) == MEM
903 && ! MEM_VOLATILE_P (from)
904 && direct_load[(int) to_mode]
905 && ! mode_dependent_address_p (XEXP (from, 0)))
906 || GET_CODE (from) == REG
907 || GET_CODE (from) == SUBREG))
908 from = force_reg (from_mode, from);
909 convert_move (to, gen_lowpart (word_mode, from), 0);
913 /* Handle pointer conversion. */ /* SPEE 900220. */
914 if (to_mode == PQImode)
916 if (from_mode != QImode)
917 from = convert_to_mode (QImode, from, unsignedp);
919 #ifdef HAVE_truncqipqi2
920 if (HAVE_truncqipqi2)
922 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
925 #endif /* HAVE_truncqipqi2 */
929 if (from_mode == PQImode)
931 if (to_mode != QImode)
933 from = convert_to_mode (QImode, from, unsignedp);
938 #ifdef HAVE_extendpqiqi2
939 if (HAVE_extendpqiqi2)
941 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
944 #endif /* HAVE_extendpqiqi2 */
949 if (to_mode == PSImode)
951 if (from_mode != SImode)
952 from = convert_to_mode (SImode, from, unsignedp);
954 #ifdef HAVE_truncsipsi2
955 if (HAVE_truncsipsi2)
957 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
960 #endif /* HAVE_truncsipsi2 */
964 if (from_mode == PSImode)
966 if (to_mode != SImode)
968 from = convert_to_mode (SImode, from, unsignedp);
973 #ifdef HAVE_extendpsisi2
974 if (! unsignedp && HAVE_extendpsisi2)
976 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
979 #endif /* HAVE_extendpsisi2 */
980 #ifdef HAVE_zero_extendpsisi2
981 if (unsignedp && HAVE_zero_extendpsisi2)
983 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
986 #endif /* HAVE_zero_extendpsisi2 */
991 if (to_mode == PDImode)
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1002 #endif /* HAVE_truncdipdi2 */
1006 if (from_mode == PDImode)
1008 if (to_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1021 #endif /* HAVE_extendpdidi2 */
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1056 from = force_not_mem (from);
1058 emit_unop_insn (code, to, from, equiv_code);
1063 enum machine_mode intermediate;
1067 /* Search for a mode to convert via. */
1068 for (intermediate = from_mode; intermediate != VOIDmode;
1069 intermediate = GET_MODE_WIDER_MODE (intermediate))
1070 if (((can_extend_p (to_mode, intermediate, unsignedp)
1071 != CODE_FOR_nothing)
1072 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1073 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1074 GET_MODE_BITSIZE (intermediate))))
1075 && (can_extend_p (intermediate, from_mode, unsignedp)
1076 != CODE_FOR_nothing))
1078 convert_move (to, convert_to_mode (intermediate, from,
1079 unsignedp), unsignedp);
1083 /* No suitable intermediate mode.
1084 Generate what we need with shifts. */
1085 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1086 - GET_MODE_BITSIZE (from_mode), 0);
1087 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1088 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1090 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1093 emit_move_insn (to, tmp);
1098 /* Support special truncate insns for certain modes. */
1100 if (from_mode == DImode && to_mode == SImode)
1102 #ifdef HAVE_truncdisi2
1103 if (HAVE_truncdisi2)
1105 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1113 if (from_mode == DImode && to_mode == HImode)
1115 #ifdef HAVE_truncdihi2
1116 if (HAVE_truncdihi2)
1118 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 if (from_mode == DImode && to_mode == QImode)
1128 #ifdef HAVE_truncdiqi2
1129 if (HAVE_truncdiqi2)
1131 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 if (from_mode == SImode && to_mode == HImode)
1141 #ifdef HAVE_truncsihi2
1142 if (HAVE_truncsihi2)
1144 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 if (from_mode == SImode && to_mode == QImode)
1154 #ifdef HAVE_truncsiqi2
1155 if (HAVE_truncsiqi2)
1157 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 if (from_mode == HImode && to_mode == QImode)
1167 #ifdef HAVE_trunchiqi2
1168 if (HAVE_trunchiqi2)
1170 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 if (from_mode == TImode && to_mode == DImode)
1180 #ifdef HAVE_trunctidi2
1181 if (HAVE_trunctidi2)
1183 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 if (from_mode == TImode && to_mode == SImode)
1193 #ifdef HAVE_trunctisi2
1194 if (HAVE_trunctisi2)
1196 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 if (from_mode == TImode && to_mode == HImode)
1206 #ifdef HAVE_trunctihi2
1207 if (HAVE_trunctihi2)
1209 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 if (from_mode == TImode && to_mode == QImode)
1219 #ifdef HAVE_trunctiqi2
1220 if (HAVE_trunctiqi2)
1222 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1230 /* Handle truncation of volatile memrefs, and so on;
1231 the things that couldn't be truncated directly,
1232 and for which there was no special instruction. */
1233 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1235 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1236 emit_move_insn (to, temp);
1240 /* Mode combination is not recognized. */
1244 /* Return an rtx for a value that would result
1245 from converting X to mode MODE.
1246 Both X and MODE may be floating, or both integer.
1247 UNSIGNEDP is nonzero if X is an unsigned value.
1248 This can be done by referring to a part of X in place
1249 or by copying to a new temporary with conversion.
1251 This function *must not* call protect_from_queue
1252 except when putting X into an insn (in which case convert_move does it). */
1255 convert_to_mode (mode, x, unsignedp)
1256 enum machine_mode mode;
1260 return convert_modes (mode, VOIDmode, x, unsignedp);
1263 /* Return an rtx for a value that would result
1264 from converting X from mode OLDMODE to mode MODE.
1265 Both modes may be floating, or both integer.
1266 UNSIGNEDP is nonzero if X is an unsigned value.
1268 This can be done by referring to a part of X in place
1269 or by copying to a new temporary with conversion.
1271 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1273 This function *must not* call protect_from_queue
1274 except when putting X into an insn (in which case convert_move does it). */
1277 convert_modes (mode, oldmode, x, unsignedp)
1278 enum machine_mode mode, oldmode;
1284 /* If FROM is a SUBREG that indicates that we have already done at least
1285 the required extension, strip it. */
1287 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1288 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1289 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1290 x = gen_lowpart (mode, x);
1292 if (GET_MODE (x) != VOIDmode)
1293 oldmode = GET_MODE (x);
1295 if (mode == oldmode)
1298 /* There is one case that we must handle specially: If we are converting
1299 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1300 we are to interpret the constant as unsigned, gen_lowpart will do
1301 the wrong if the constant appears negative. What we want to do is
1302 make the high-order word of the constant zero, not all ones. */
1304 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1305 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1306 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1308 HOST_WIDE_INT val = INTVAL (x);
1310 if (oldmode != VOIDmode
1311 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1313 int width = GET_MODE_BITSIZE (oldmode);
1315 /* We need to zero extend VAL. */
1316 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1319 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1322 /* We can do this with a gen_lowpart if both desired and current modes
1323 are integer, and this is either a constant integer, a register, or a
1324 non-volatile MEM. Except for the constant case where MODE is no
1325 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1327 if ((GET_CODE (x) == CONST_INT
1328 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1329 || (GET_MODE_CLASS (mode) == MODE_INT
1330 && GET_MODE_CLASS (oldmode) == MODE_INT
1331 && (GET_CODE (x) == CONST_DOUBLE
1332 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1333 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1334 && direct_load[(int) mode])
1335 || (GET_CODE (x) == REG
1336 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1337 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1339 /* ?? If we don't know OLDMODE, we have to assume here that
1340 X does not need sign- or zero-extension. This may not be
1341 the case, but it's the best we can do. */
1342 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1343 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1345 HOST_WIDE_INT val = INTVAL (x);
1346 int width = GET_MODE_BITSIZE (oldmode);
1348 /* We must sign or zero-extend in this case. Start by
1349 zero-extending, then sign extend if we need to. */
1350 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1352 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1353 val |= (HOST_WIDE_INT) (-1) << width;
1355 return GEN_INT (trunc_int_for_mode (val, mode));
1358 return gen_lowpart (mode, x);
1361 temp = gen_reg_rtx (mode);
1362 convert_move (temp, x, unsignedp);
1366 /* This macro is used to determine what the largest unit size that
1367 move_by_pieces can use is. */
1369 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1370 move efficiently, as opposed to MOVE_MAX which is the maximum
1371 number of bytes we can move with a single instruction. */
1373 #ifndef MOVE_MAX_PIECES
1374 #define MOVE_MAX_PIECES MOVE_MAX
1377 /* Generate several move instructions to copy LEN bytes from block FROM to
1378 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1379 and TO through protect_from_queue before calling.
1381 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1382 used to push FROM to the stack.
1384 ALIGN is maximum alignment we can assume. */
1387 move_by_pieces (to, from, len, align)
1389 unsigned HOST_WIDE_INT len;
1392 struct move_by_pieces data;
1393 rtx to_addr, from_addr = XEXP (from, 0);
1394 unsigned int max_size = MOVE_MAX_PIECES + 1;
1395 enum machine_mode mode = VOIDmode, tmode;
1396 enum insn_code icode;
1399 data.from_addr = from_addr;
1402 to_addr = XEXP (to, 0);
1405 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1406 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1408 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1415 #ifdef STACK_GROWS_DOWNWARD
1421 data.to_addr = to_addr;
1424 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1425 || GET_CODE (from_addr) == POST_INC
1426 || GET_CODE (from_addr) == POST_DEC);
1428 data.explicit_inc_from = 0;
1429 data.explicit_inc_to = 0;
1430 if (data.reverse) data.offset = len;
1433 /* If copying requires more than two move insns,
1434 copy addresses to registers (to make displacements shorter)
1435 and use post-increment if available. */
1436 if (!(data.autinc_from && data.autinc_to)
1437 && move_by_pieces_ninsns (len, align) > 2)
1439 /* Find the mode of the largest move... */
1440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1442 if (GET_MODE_SIZE (tmode) < max_size)
1445 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1447 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = -1;
1451 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1453 data.from_addr = copy_addr_to_reg (from_addr);
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = 1;
1457 if (!data.autinc_from && CONSTANT_P (from_addr))
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1461 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1463 data.explicit_inc_to = -1;
1465 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1467 data.to_addr = copy_addr_to_reg (to_addr);
1469 data.explicit_inc_to = 1;
1471 if (!data.autinc_to && CONSTANT_P (to_addr))
1472 data.to_addr = copy_addr_to_reg (to_addr);
1475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1477 align = MOVE_MAX * BITS_PER_UNIT;
1479 /* First move what we can in the largest integer mode, then go to
1480 successively smaller modes. */
1482 while (max_size > 1)
1484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1486 if (GET_MODE_SIZE (tmode) < max_size)
1489 if (mode == VOIDmode)
1492 icode = mov_optab->handlers[(int) mode].insn_code;
1493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1494 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1496 max_size = GET_MODE_SIZE (mode);
1499 /* The code above should have handled everything. */
1504 /* Return number of insns required to move L bytes by pieces.
1505 ALIGN (in bits) is maximum alignment we can assume. */
1507 static unsigned HOST_WIDE_INT
1508 move_by_pieces_ninsns (l, align)
1509 unsigned HOST_WIDE_INT l;
1512 unsigned HOST_WIDE_INT n_insns = 0;
1513 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1515 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1516 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1517 align = MOVE_MAX * BITS_PER_UNIT;
1519 while (max_size > 1)
1521 enum machine_mode mode = VOIDmode, tmode;
1522 enum insn_code icode;
1524 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1525 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1526 if (GET_MODE_SIZE (tmode) < max_size)
1529 if (mode == VOIDmode)
1532 icode = mov_optab->handlers[(int) mode].insn_code;
1533 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1534 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1536 max_size = GET_MODE_SIZE (mode);
1544 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1545 with move instructions for mode MODE. GENFUN is the gen_... function
1546 to make a move insn for that mode. DATA has all the other info. */
1549 move_by_pieces_1 (genfun, mode, data)
1550 rtx (*genfun) PARAMS ((rtx, ...));
1551 enum machine_mode mode;
1552 struct move_by_pieces *data;
1554 unsigned int size = GET_MODE_SIZE (mode);
1555 rtx to1 = NULL_RTX, from1;
1557 while (data->len >= size)
1560 data->offset -= size;
1564 if (data->autinc_to)
1565 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1568 to1 = adjust_address (data->to, mode, data->offset);
1571 if (data->autinc_from)
1572 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1575 from1 = adjust_address (data->from, mode, data->offset);
1577 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1578 emit_insn (gen_add2_insn (data->to_addr,
1579 GEN_INT (-(HOST_WIDE_INT)size)));
1580 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1581 emit_insn (gen_add2_insn (data->from_addr,
1582 GEN_INT (-(HOST_WIDE_INT)size)));
1585 emit_insn ((*genfun) (to1, from1));
1588 #ifdef PUSH_ROUNDING
1589 emit_single_push_insn (mode, from1, NULL);
1595 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1596 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1597 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1598 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1600 if (! data->reverse)
1601 data->offset += size;
1607 /* Emit code to move a block Y to a block X.
1608 This may be done with string-move instructions,
1609 with multiple scalar move instructions, or with a library call.
1611 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1613 SIZE is an rtx that says how long they are.
1614 ALIGN is the maximum alignment we can assume they have.
1616 Return the address of the new block, if memcpy is called and returns it,
1620 emit_block_move (x, y, size)
1625 #ifdef TARGET_MEM_FUNCTIONS
1627 tree call_expr, arg_list;
1629 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1631 if (GET_MODE (x) != BLKmode)
1634 if (GET_MODE (y) != BLKmode)
1637 x = protect_from_queue (x, 1);
1638 y = protect_from_queue (y, 0);
1639 size = protect_from_queue (size, 0);
1641 if (GET_CODE (x) != MEM)
1643 if (GET_CODE (y) != MEM)
1648 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1649 move_by_pieces (x, y, INTVAL (size), align);
1652 /* Try the most limited insn first, because there's no point
1653 including more than one in the machine description unless
1654 the more limited one has some advantage. */
1656 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1657 enum machine_mode mode;
1659 /* Since this is a move insn, we don't care about volatility. */
1662 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1663 mode = GET_MODE_WIDER_MODE (mode))
1665 enum insn_code code = movstr_optab[(int) mode];
1666 insn_operand_predicate_fn pred;
1668 if (code != CODE_FOR_nothing
1669 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1670 here because if SIZE is less than the mode mask, as it is
1671 returned by the macro, it will definitely be less than the
1672 actual mode mask. */
1673 && ((GET_CODE (size) == CONST_INT
1674 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1675 <= (GET_MODE_MASK (mode) >> 1)))
1676 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1677 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1678 || (*pred) (x, BLKmode))
1679 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1680 || (*pred) (y, BLKmode))
1681 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1682 || (*pred) (opalign, VOIDmode)))
1685 rtx last = get_last_insn ();
1688 op2 = convert_to_mode (mode, size, 1);
1689 pred = insn_data[(int) code].operand[2].predicate;
1690 if (pred != 0 && ! (*pred) (op2, mode))
1691 op2 = copy_to_mode_reg (mode, op2);
1693 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1701 delete_insns_since (last);
1707 /* X, Y, or SIZE may have been passed through protect_from_queue.
1709 It is unsafe to save the value generated by protect_from_queue
1710 and reuse it later. Consider what happens if emit_queue is
1711 called before the return value from protect_from_queue is used.
1713 Expansion of the CALL_EXPR below will call emit_queue before
1714 we are finished emitting RTL for argument setup. So if we are
1715 not careful we could get the wrong value for an argument.
1717 To avoid this problem we go ahead and emit code to copy X, Y &
1718 SIZE into new pseudos. We can then place those new pseudos
1719 into an RTL_EXPR and use them later, even after a call to
1722 Note this is not strictly needed for library calls since they
1723 do not call emit_queue before loading their arguments. However,
1724 we may need to have library calls call emit_queue in the future
1725 since failing to do so could cause problems for targets which
1726 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1727 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1728 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1730 #ifdef TARGET_MEM_FUNCTIONS
1731 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1733 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1734 TREE_UNSIGNED (integer_type_node));
1735 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1738 #ifdef TARGET_MEM_FUNCTIONS
1739 /* It is incorrect to use the libcall calling conventions to call
1740 memcpy in this context.
1742 This could be a user call to memcpy and the user may wish to
1743 examine the return value from memcpy.
1745 For targets where libcalls and normal calls have different conventions
1746 for returning pointers, we could end up generating incorrect code.
1748 So instead of using a libcall sequence we build up a suitable
1749 CALL_EXPR and expand the call in the normal fashion. */
1750 if (fn == NULL_TREE)
1754 /* This was copied from except.c, I don't know if all this is
1755 necessary in this context or not. */
1756 fn = get_identifier ("memcpy");
1757 fntype = build_pointer_type (void_type_node);
1758 fntype = build_function_type (fntype, NULL_TREE);
1759 fn = build_decl (FUNCTION_DECL, fn, fntype);
1760 ggc_add_tree_root (&fn, 1);
1761 DECL_EXTERNAL (fn) = 1;
1762 TREE_PUBLIC (fn) = 1;
1763 DECL_ARTIFICIAL (fn) = 1;
1764 TREE_NOTHROW (fn) = 1;
1765 make_decl_rtl (fn, NULL);
1766 assemble_external (fn);
1769 /* We need to make an argument list for the function call.
1771 memcpy has three arguments, the first two are void * addresses and
1772 the last is a size_t byte count for the copy. */
1774 = build_tree_list (NULL_TREE,
1775 make_tree (build_pointer_type (void_type_node), x));
1776 TREE_CHAIN (arg_list)
1777 = build_tree_list (NULL_TREE,
1778 make_tree (build_pointer_type (void_type_node), y));
1779 TREE_CHAIN (TREE_CHAIN (arg_list))
1780 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1781 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1783 /* Now we have to build up the CALL_EXPR itself. */
1784 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1785 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1786 call_expr, arg_list, NULL_TREE);
1787 TREE_SIDE_EFFECTS (call_expr) = 1;
1789 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1791 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1792 VOIDmode, 3, y, Pmode, x, Pmode,
1793 convert_to_mode (TYPE_MODE (integer_type_node), size,
1794 TREE_UNSIGNED (integer_type_node)),
1795 TYPE_MODE (integer_type_node));
1798 /* If we are initializing a readonly value, show the above call
1799 clobbered it. Otherwise, a load from it may erroneously be hoisted
1801 if (RTX_UNCHANGING_P (x))
1802 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1808 /* Copy all or part of a value X into registers starting at REGNO.
1809 The number of registers to be filled is NREGS. */
1812 move_block_to_reg (regno, x, nregs, mode)
1816 enum machine_mode mode;
1819 #ifdef HAVE_load_multiple
1827 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1828 x = validize_mem (force_const_mem (mode, x));
1830 /* See if the machine can do this with a load multiple insn. */
1831 #ifdef HAVE_load_multiple
1832 if (HAVE_load_multiple)
1834 last = get_last_insn ();
1835 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1843 delete_insns_since (last);
1847 for (i = 0; i < nregs; i++)
1848 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1849 operand_subword_force (x, i, mode));
1852 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1853 The number of registers to be filled is NREGS. SIZE indicates the number
1854 of bytes in the object X. */
1857 move_block_from_reg (regno, x, nregs, size)
1864 #ifdef HAVE_store_multiple
1868 enum machine_mode mode;
1873 /* If SIZE is that of a mode no bigger than a word, just use that
1874 mode's store operation. */
1875 if (size <= UNITS_PER_WORD
1876 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1877 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1879 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1883 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1884 to the left before storing to memory. Note that the previous test
1885 doesn't handle all cases (e.g. SIZE == 3). */
1886 if (size < UNITS_PER_WORD
1888 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1890 rtx tem = operand_subword (x, 0, 1, BLKmode);
1896 shift = expand_shift (LSHIFT_EXPR, word_mode,
1897 gen_rtx_REG (word_mode, regno),
1898 build_int_2 ((UNITS_PER_WORD - size)
1899 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1900 emit_move_insn (tem, shift);
1904 /* See if the machine can do this with a store multiple insn. */
1905 #ifdef HAVE_store_multiple
1906 if (HAVE_store_multiple)
1908 last = get_last_insn ();
1909 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1917 delete_insns_since (last);
1921 for (i = 0; i < nregs; i++)
1923 rtx tem = operand_subword (x, i, 1, BLKmode);
1928 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1932 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1933 registers represented by a PARALLEL. SSIZE represents the total size of
1934 block SRC in bytes, or -1 if not known. */
1935 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1936 the balance will be in what would be the low-order memory addresses, i.e.
1937 left justified for big endian, right justified for little endian. This
1938 happens to be true for the targets currently using this support. If this
1939 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1943 emit_group_load (dst, orig_src, ssize)
1950 if (GET_CODE (dst) != PARALLEL)
1953 /* Check for a NULL entry, used to indicate that the parameter goes
1954 both on the stack and in registers. */
1955 if (XEXP (XVECEXP (dst, 0, 0), 0))
1960 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1962 /* Process the pieces. */
1963 for (i = start; i < XVECLEN (dst, 0); i++)
1965 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1966 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1967 unsigned int bytelen = GET_MODE_SIZE (mode);
1970 /* Handle trailing fragments that run over the size of the struct. */
1971 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1973 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1974 bytelen = ssize - bytepos;
1979 /* If we won't be loading directly from memory, protect the real source
1980 from strange tricks we might play; but make sure that the source can
1981 be loaded directly into the destination. */
1983 if (GET_CODE (orig_src) != MEM
1984 && (!CONSTANT_P (orig_src)
1985 || (GET_MODE (orig_src) != mode
1986 && GET_MODE (orig_src) != VOIDmode)))
1988 if (GET_MODE (orig_src) == VOIDmode)
1989 src = gen_reg_rtx (mode);
1991 src = gen_reg_rtx (GET_MODE (orig_src));
1993 emit_move_insn (src, orig_src);
1996 /* Optimize the access just a bit. */
1997 if (GET_CODE (src) == MEM
1998 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
1999 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2000 && bytelen == GET_MODE_SIZE (mode))
2002 tmps[i] = gen_reg_rtx (mode);
2003 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2005 else if (GET_CODE (src) == CONCAT)
2008 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2009 tmps[i] = XEXP (src, 0);
2010 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2011 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2012 tmps[i] = XEXP (src, 1);
2013 else if (bytepos == 0)
2015 rtx mem = assign_stack_temp (GET_MODE (src),
2016 GET_MODE_SIZE (GET_MODE (src)), 0);
2017 emit_move_insn (mem, src);
2018 tmps[i] = adjust_address (mem, mode, 0);
2023 else if (CONSTANT_P (src)
2024 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2027 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2028 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2031 if (BYTES_BIG_ENDIAN && shift)
2032 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2033 tmps[i], 0, OPTAB_WIDEN);
2038 /* Copy the extracted pieces into the proper (probable) hard regs. */
2039 for (i = start; i < XVECLEN (dst, 0); i++)
2040 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2043 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2044 registers represented by a PARALLEL. SSIZE represents the total size of
2045 block DST, or -1 if not known. */
2048 emit_group_store (orig_dst, src, ssize)
2055 if (GET_CODE (src) != PARALLEL)
2058 /* Check for a NULL entry, used to indicate that the parameter goes
2059 both on the stack and in registers. */
2060 if (XEXP (XVECEXP (src, 0, 0), 0))
2065 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2067 /* Copy the (probable) hard regs into pseudos. */
2068 for (i = start; i < XVECLEN (src, 0); i++)
2070 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2071 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2072 emit_move_insn (tmps[i], reg);
2076 /* If we won't be storing directly into memory, protect the real destination
2077 from strange tricks we might play. */
2079 if (GET_CODE (dst) == PARALLEL)
2083 /* We can get a PARALLEL dst if there is a conditional expression in
2084 a return statement. In that case, the dst and src are the same,
2085 so no action is necessary. */
2086 if (rtx_equal_p (dst, src))
2089 /* It is unclear if we can ever reach here, but we may as well handle
2090 it. Allocate a temporary, and split this into a store/load to/from
2093 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2094 emit_group_store (temp, src, ssize);
2095 emit_group_load (dst, temp, ssize);
2098 else if (GET_CODE (dst) != MEM)
2100 dst = gen_reg_rtx (GET_MODE (orig_dst));
2101 /* Make life a bit easier for combine. */
2102 emit_move_insn (dst, const0_rtx);
2105 /* Process the pieces. */
2106 for (i = start; i < XVECLEN (src, 0); i++)
2108 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2109 enum machine_mode mode = GET_MODE (tmps[i]);
2110 unsigned int bytelen = GET_MODE_SIZE (mode);
2112 /* Handle trailing fragments that run over the size of the struct. */
2113 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2115 if (BYTES_BIG_ENDIAN)
2117 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2118 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2119 tmps[i], 0, OPTAB_WIDEN);
2121 bytelen = ssize - bytepos;
2124 /* Optimize the access just a bit. */
2125 if (GET_CODE (dst) == MEM
2126 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2127 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2128 && bytelen == GET_MODE_SIZE (mode))
2129 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2131 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2132 mode, tmps[i], ssize);
2137 /* Copy from the pseudo into the (probable) hard reg. */
2138 if (GET_CODE (dst) == REG)
2139 emit_move_insn (orig_dst, dst);
2142 /* Generate code to copy a BLKmode object of TYPE out of a
2143 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2144 is null, a stack temporary is created. TGTBLK is returned.
2146 The primary purpose of this routine is to handle functions
2147 that return BLKmode structures in registers. Some machines
2148 (the PA for example) want to return all small structures
2149 in registers regardless of the structure's alignment. */
2152 copy_blkmode_from_reg (tgtblk, srcreg, type)
2157 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2158 rtx src = NULL, dst = NULL;
2159 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2160 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2164 tgtblk = assign_temp (build_qualified_type (type,
2166 | TYPE_QUAL_CONST)),
2168 preserve_temp_slots (tgtblk);
2171 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2172 into a new pseudo which is a full word.
2174 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2175 the wrong part of the register gets copied so we fake a type conversion
2177 if (GET_MODE (srcreg) != BLKmode
2178 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2180 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2181 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2183 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2186 /* Structures whose size is not a multiple of a word are aligned
2187 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2188 machine, this means we must skip the empty high order bytes when
2189 calculating the bit offset. */
2190 if (BYTES_BIG_ENDIAN
2191 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2192 && bytes % UNITS_PER_WORD)
2193 big_endian_correction
2194 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2196 /* Copy the structure BITSIZE bites at a time.
2198 We could probably emit more efficient code for machines which do not use
2199 strict alignment, but it doesn't seem worth the effort at the current
2201 for (bitpos = 0, xbitpos = big_endian_correction;
2202 bitpos < bytes * BITS_PER_UNIT;
2203 bitpos += bitsize, xbitpos += bitsize)
2205 /* We need a new source operand each time xbitpos is on a
2206 word boundary and when xbitpos == big_endian_correction
2207 (the first time through). */
2208 if (xbitpos % BITS_PER_WORD == 0
2209 || xbitpos == big_endian_correction)
2210 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2213 /* We need a new destination operand each time bitpos is on
2215 if (bitpos % BITS_PER_WORD == 0)
2216 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2218 /* Use xbitpos for the source extraction (right justified) and
2219 xbitpos for the destination store (left justified). */
2220 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2221 extract_bit_field (src, bitsize,
2222 xbitpos % BITS_PER_WORD, 1,
2223 NULL_RTX, word_mode, word_mode,
2231 /* Add a USE expression for REG to the (possibly empty) list pointed
2232 to by CALL_FUSAGE. REG must denote a hard register. */
2235 use_reg (call_fusage, reg)
2236 rtx *call_fusage, reg;
2238 if (GET_CODE (reg) != REG
2239 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2243 = gen_rtx_EXPR_LIST (VOIDmode,
2244 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2247 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2248 starting at REGNO. All of these registers must be hard registers. */
2251 use_regs (call_fusage, regno, nregs)
2258 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2261 for (i = 0; i < nregs; i++)
2262 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2265 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2266 PARALLEL REGS. This is for calls that pass values in multiple
2267 non-contiguous locations. The Irix 6 ABI has examples of this. */
2270 use_group_regs (call_fusage, regs)
2276 for (i = 0; i < XVECLEN (regs, 0); i++)
2278 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2280 /* A NULL entry means the parameter goes both on the stack and in
2281 registers. This can also be a MEM for targets that pass values
2282 partially on the stack and partially in registers. */
2283 if (reg != 0 && GET_CODE (reg) == REG)
2284 use_reg (call_fusage, reg);
2290 can_store_by_pieces (len, constfun, constfundata, align)
2291 unsigned HOST_WIDE_INT len;
2292 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2296 unsigned HOST_WIDE_INT max_size, l;
2297 HOST_WIDE_INT offset = 0;
2298 enum machine_mode mode, tmode;
2299 enum insn_code icode;
2303 if (! MOVE_BY_PIECES_P (len, align))
2306 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2307 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2308 align = MOVE_MAX * BITS_PER_UNIT;
2310 /* We would first store what we can in the largest integer mode, then go to
2311 successively smaller modes. */
2314 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2319 max_size = MOVE_MAX_PIECES + 1;
2320 while (max_size > 1)
2322 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2323 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2324 if (GET_MODE_SIZE (tmode) < max_size)
2327 if (mode == VOIDmode)
2330 icode = mov_optab->handlers[(int) mode].insn_code;
2331 if (icode != CODE_FOR_nothing
2332 && align >= GET_MODE_ALIGNMENT (mode))
2334 unsigned int size = GET_MODE_SIZE (mode);
2341 cst = (*constfun) (constfundata, offset, mode);
2342 if (!LEGITIMATE_CONSTANT_P (cst))
2352 max_size = GET_MODE_SIZE (mode);
2355 /* The code above should have handled everything. */
2363 /* Generate several move instructions to store LEN bytes generated by
2364 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2365 pointer which will be passed as argument in every CONSTFUN call.
2366 ALIGN is maximum alignment we can assume. */
2369 store_by_pieces (to, len, constfun, constfundata, align)
2371 unsigned HOST_WIDE_INT len;
2372 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2376 struct store_by_pieces data;
2378 if (! MOVE_BY_PIECES_P (len, align))
2380 to = protect_from_queue (to, 1);
2381 data.constfun = constfun;
2382 data.constfundata = constfundata;
2385 store_by_pieces_1 (&data, align);
2388 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2389 rtx with BLKmode). The caller must pass TO through protect_from_queue
2390 before calling. ALIGN is maximum alignment we can assume. */
2393 clear_by_pieces (to, len, align)
2395 unsigned HOST_WIDE_INT len;
2398 struct store_by_pieces data;
2400 data.constfun = clear_by_pieces_1;
2401 data.constfundata = NULL;
2404 store_by_pieces_1 (&data, align);
2407 /* Callback routine for clear_by_pieces.
2408 Return const0_rtx unconditionally. */
2411 clear_by_pieces_1 (data, offset, mode)
2412 PTR data ATTRIBUTE_UNUSED;
2413 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2414 enum machine_mode mode ATTRIBUTE_UNUSED;
2419 /* Subroutine of clear_by_pieces and store_by_pieces.
2420 Generate several move instructions to store LEN bytes of block TO. (A MEM
2421 rtx with BLKmode). The caller must pass TO through protect_from_queue
2422 before calling. ALIGN is maximum alignment we can assume. */
2425 store_by_pieces_1 (data, align)
2426 struct store_by_pieces *data;
2429 rtx to_addr = XEXP (data->to, 0);
2430 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2431 enum machine_mode mode = VOIDmode, tmode;
2432 enum insn_code icode;
2435 data->to_addr = to_addr;
2437 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2438 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2440 data->explicit_inc_to = 0;
2442 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2444 data->offset = data->len;
2446 /* If storing requires more than two move insns,
2447 copy addresses to registers (to make displacements shorter)
2448 and use post-increment if available. */
2449 if (!data->autinc_to
2450 && move_by_pieces_ninsns (data->len, align) > 2)
2452 /* Determine the main mode we'll be using. */
2453 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2454 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2455 if (GET_MODE_SIZE (tmode) < max_size)
2458 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2460 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = -1;
2465 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2466 && ! data->autinc_to)
2468 data->to_addr = copy_addr_to_reg (to_addr);
2469 data->autinc_to = 1;
2470 data->explicit_inc_to = 1;
2473 if ( !data->autinc_to && CONSTANT_P (to_addr))
2474 data->to_addr = copy_addr_to_reg (to_addr);
2477 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2478 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2479 align = MOVE_MAX * BITS_PER_UNIT;
2481 /* First store what we can in the largest integer mode, then go to
2482 successively smaller modes. */
2484 while (max_size > 1)
2486 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2487 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2488 if (GET_MODE_SIZE (tmode) < max_size)
2491 if (mode == VOIDmode)
2494 icode = mov_optab->handlers[(int) mode].insn_code;
2495 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2496 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2498 max_size = GET_MODE_SIZE (mode);
2501 /* The code above should have handled everything. */
2506 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2507 with move instructions for mode MODE. GENFUN is the gen_... function
2508 to make a move insn for that mode. DATA has all the other info. */
2511 store_by_pieces_2 (genfun, mode, data)
2512 rtx (*genfun) PARAMS ((rtx, ...));
2513 enum machine_mode mode;
2514 struct store_by_pieces *data;
2516 unsigned int size = GET_MODE_SIZE (mode);
2519 while (data->len >= size)
2522 data->offset -= size;
2524 if (data->autinc_to)
2525 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2528 to1 = adjust_address (data->to, mode, data->offset);
2530 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2531 emit_insn (gen_add2_insn (data->to_addr,
2532 GEN_INT (-(HOST_WIDE_INT) size)));
2534 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2535 emit_insn ((*genfun) (to1, cst));
2537 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2538 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2540 if (! data->reverse)
2541 data->offset += size;
2547 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2548 its length in bytes. */
2551 clear_storage (object, size)
2555 #ifdef TARGET_MEM_FUNCTIONS
2557 tree call_expr, arg_list;
2560 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2561 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2563 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2564 just move a zero. Otherwise, do this a piece at a time. */
2565 if (GET_MODE (object) != BLKmode
2566 && GET_CODE (size) == CONST_INT
2567 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2568 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2571 object = protect_from_queue (object, 1);
2572 size = protect_from_queue (size, 0);
2574 if (GET_CODE (size) == CONST_INT
2575 && MOVE_BY_PIECES_P (INTVAL (size), align))
2576 clear_by_pieces (object, INTVAL (size), align);
2579 /* Try the most limited insn first, because there's no point
2580 including more than one in the machine description unless
2581 the more limited one has some advantage. */
2583 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2584 enum machine_mode mode;
2586 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2587 mode = GET_MODE_WIDER_MODE (mode))
2589 enum insn_code code = clrstr_optab[(int) mode];
2590 insn_operand_predicate_fn pred;
2592 if (code != CODE_FOR_nothing
2593 /* We don't need MODE to be narrower than
2594 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2595 the mode mask, as it is returned by the macro, it will
2596 definitely be less than the actual mode mask. */
2597 && ((GET_CODE (size) == CONST_INT
2598 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2599 <= (GET_MODE_MASK (mode) >> 1)))
2600 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2601 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2602 || (*pred) (object, BLKmode))
2603 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2604 || (*pred) (opalign, VOIDmode)))
2607 rtx last = get_last_insn ();
2610 op1 = convert_to_mode (mode, size, 1);
2611 pred = insn_data[(int) code].operand[1].predicate;
2612 if (pred != 0 && ! (*pred) (op1, mode))
2613 op1 = copy_to_mode_reg (mode, op1);
2615 pat = GEN_FCN ((int) code) (object, op1, opalign);
2622 delete_insns_since (last);
2626 /* OBJECT or SIZE may have been passed through protect_from_queue.
2628 It is unsafe to save the value generated by protect_from_queue
2629 and reuse it later. Consider what happens if emit_queue is
2630 called before the return value from protect_from_queue is used.
2632 Expansion of the CALL_EXPR below will call emit_queue before
2633 we are finished emitting RTL for argument setup. So if we are
2634 not careful we could get the wrong value for an argument.
2636 To avoid this problem we go ahead and emit code to copy OBJECT
2637 and SIZE into new pseudos. We can then place those new pseudos
2638 into an RTL_EXPR and use them later, even after a call to
2641 Note this is not strictly needed for library calls since they
2642 do not call emit_queue before loading their arguments. However,
2643 we may need to have library calls call emit_queue in the future
2644 since failing to do so could cause problems for targets which
2645 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2646 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2648 #ifdef TARGET_MEM_FUNCTIONS
2649 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2651 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2652 TREE_UNSIGNED (integer_type_node));
2653 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2656 #ifdef TARGET_MEM_FUNCTIONS
2657 /* It is incorrect to use the libcall calling conventions to call
2658 memset in this context.
2660 This could be a user call to memset and the user may wish to
2661 examine the return value from memset.
2663 For targets where libcalls and normal calls have different
2664 conventions for returning pointers, we could end up generating
2667 So instead of using a libcall sequence we build up a suitable
2668 CALL_EXPR and expand the call in the normal fashion. */
2669 if (fn == NULL_TREE)
2673 /* This was copied from except.c, I don't know if all this is
2674 necessary in this context or not. */
2675 fn = get_identifier ("memset");
2676 fntype = build_pointer_type (void_type_node);
2677 fntype = build_function_type (fntype, NULL_TREE);
2678 fn = build_decl (FUNCTION_DECL, fn, fntype);
2679 ggc_add_tree_root (&fn, 1);
2680 DECL_EXTERNAL (fn) = 1;
2681 TREE_PUBLIC (fn) = 1;
2682 DECL_ARTIFICIAL (fn) = 1;
2683 TREE_NOTHROW (fn) = 1;
2684 make_decl_rtl (fn, NULL);
2685 assemble_external (fn);
2688 /* We need to make an argument list for the function call.
2690 memset has three arguments, the first is a void * addresses, the
2691 second an integer with the initialization value, the last is a
2692 size_t byte count for the copy. */
2694 = build_tree_list (NULL_TREE,
2695 make_tree (build_pointer_type (void_type_node),
2697 TREE_CHAIN (arg_list)
2698 = build_tree_list (NULL_TREE,
2699 make_tree (integer_type_node, const0_rtx));
2700 TREE_CHAIN (TREE_CHAIN (arg_list))
2701 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2702 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2704 /* Now we have to build up the CALL_EXPR itself. */
2705 call_expr = build1 (ADDR_EXPR,
2706 build_pointer_type (TREE_TYPE (fn)), fn);
2707 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708 call_expr, arg_list, NULL_TREE);
2709 TREE_SIDE_EFFECTS (call_expr) = 1;
2711 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2713 emit_library_call (bzero_libfunc, LCT_NORMAL,
2714 VOIDmode, 2, object, Pmode, size,
2715 TYPE_MODE (integer_type_node));
2718 /* If we are initializing a readonly value, show the above call
2719 clobbered it. Otherwise, a load from it may erroneously be
2720 hoisted from a loop. */
2721 if (RTX_UNCHANGING_P (object))
2722 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2729 /* Generate code to copy Y into X.
2730 Both Y and X must have the same mode, except that
2731 Y can be a constant with VOIDmode.
2732 This mode cannot be BLKmode; use emit_block_move for that.
2734 Return the last instruction emitted. */
2737 emit_move_insn (x, y)
2740 enum machine_mode mode = GET_MODE (x);
2741 rtx y_cst = NULL_RTX;
2744 x = protect_from_queue (x, 1);
2745 y = protect_from_queue (y, 0);
2747 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2750 /* Never force constant_p_rtx to memory. */
2751 if (GET_CODE (y) == CONSTANT_P_RTX)
2753 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2756 y = force_const_mem (mode, y);
2759 /* If X or Y are memory references, verify that their addresses are valid
2761 if (GET_CODE (x) == MEM
2762 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2763 && ! push_operand (x, GET_MODE (x)))
2765 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2766 x = validize_mem (x);
2768 if (GET_CODE (y) == MEM
2769 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2771 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2772 y = validize_mem (y);
2774 if (mode == BLKmode)
2777 last_insn = emit_move_insn_1 (x, y);
2779 if (y_cst && GET_CODE (x) == REG)
2780 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2785 /* Low level part of emit_move_insn.
2786 Called just like emit_move_insn, but assumes X and Y
2787 are basically valid. */
2790 emit_move_insn_1 (x, y)
2793 enum machine_mode mode = GET_MODE (x);
2794 enum machine_mode submode;
2795 enum mode_class class = GET_MODE_CLASS (mode);
2797 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2800 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2802 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2804 /* Expand complex moves by moving real part and imag part, if possible. */
2805 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2806 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2808 (class == MODE_COMPLEX_INT
2809 ? MODE_INT : MODE_FLOAT),
2811 && (mov_optab->handlers[(int) submode].insn_code
2812 != CODE_FOR_nothing))
2814 /* Don't split destination if it is a stack push. */
2815 int stack = push_operand (x, GET_MODE (x));
2817 #ifdef PUSH_ROUNDING
2818 /* In case we output to the stack, but the size is smaller machine can
2819 push exactly, we need to use move instructions. */
2821 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2822 != GET_MODE_SIZE (submode)))
2825 HOST_WIDE_INT offset1, offset2;
2827 /* Do not use anti_adjust_stack, since we don't want to update
2828 stack_pointer_delta. */
2829 temp = expand_binop (Pmode,
2830 #ifdef STACK_GROWS_DOWNWARD
2838 (GET_MODE_SIZE (GET_MODE (x)))),
2839 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2841 if (temp != stack_pointer_rtx)
2842 emit_move_insn (stack_pointer_rtx, temp);
2844 #ifdef STACK_GROWS_DOWNWARD
2846 offset2 = GET_MODE_SIZE (submode);
2848 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2849 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2850 + GET_MODE_SIZE (submode));
2853 emit_move_insn (change_address (x, submode,
2854 gen_rtx_PLUS (Pmode,
2856 GEN_INT (offset1))),
2857 gen_realpart (submode, y));
2858 emit_move_insn (change_address (x, submode,
2859 gen_rtx_PLUS (Pmode,
2861 GEN_INT (offset2))),
2862 gen_imagpart (submode, y));
2866 /* If this is a stack, push the highpart first, so it
2867 will be in the argument order.
2869 In that case, change_address is used only to convert
2870 the mode, not to change the address. */
2873 /* Note that the real part always precedes the imag part in memory
2874 regardless of machine's endianness. */
2875 #ifdef STACK_GROWS_DOWNWARD
2876 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2877 (gen_rtx_MEM (submode, XEXP (x, 0)),
2878 gen_imagpart (submode, y)));
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (gen_rtx_MEM (submode, XEXP (x, 0)),
2881 gen_realpart (submode, y)));
2883 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2884 (gen_rtx_MEM (submode, XEXP (x, 0)),
2885 gen_realpart (submode, y)));
2886 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2887 (gen_rtx_MEM (submode, XEXP (x, 0)),
2888 gen_imagpart (submode, y)));
2893 rtx realpart_x, realpart_y;
2894 rtx imagpart_x, imagpart_y;
2896 /* If this is a complex value with each part being smaller than a
2897 word, the usual calling sequence will likely pack the pieces into
2898 a single register. Unfortunately, SUBREG of hard registers only
2899 deals in terms of words, so we have a problem converting input
2900 arguments to the CONCAT of two registers that is used elsewhere
2901 for complex values. If this is before reload, we can copy it into
2902 memory and reload. FIXME, we should see about using extract and
2903 insert on integer registers, but complex short and complex char
2904 variables should be rarely used. */
2905 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2906 && (reload_in_progress | reload_completed) == 0)
2909 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2911 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2913 if (packed_dest_p || packed_src_p)
2915 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2916 ? MODE_FLOAT : MODE_INT);
2918 enum machine_mode reg_mode
2919 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2921 if (reg_mode != BLKmode)
2923 rtx mem = assign_stack_temp (reg_mode,
2924 GET_MODE_SIZE (mode), 0);
2925 rtx cmem = adjust_address (mem, mode, 0);
2928 = N_("function using short complex types cannot be inline");
2932 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2934 emit_move_insn_1 (cmem, y);
2935 return emit_move_insn_1 (sreg, mem);
2939 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2941 emit_move_insn_1 (mem, sreg);
2942 return emit_move_insn_1 (x, cmem);
2948 realpart_x = gen_realpart (submode, x);
2949 realpart_y = gen_realpart (submode, y);
2950 imagpart_x = gen_imagpart (submode, x);
2951 imagpart_y = gen_imagpart (submode, y);
2953 /* Show the output dies here. This is necessary for SUBREGs
2954 of pseudos since we cannot track their lifetimes correctly;
2955 hard regs shouldn't appear here except as return values.
2956 We never want to emit such a clobber after reload. */
2958 && ! (reload_in_progress || reload_completed)
2959 && (GET_CODE (realpart_x) == SUBREG
2960 || GET_CODE (imagpart_x) == SUBREG))
2961 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2963 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2964 (realpart_x, realpart_y));
2965 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2966 (imagpart_x, imagpart_y));
2969 return get_last_insn ();
2972 /* This will handle any multi-word mode that lacks a move_insn pattern.
2973 However, you will get better code if you define such patterns,
2974 even if they must turn into multiple assembler instructions. */
2975 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2982 #ifdef PUSH_ROUNDING
2984 /* If X is a push on the stack, do the push now and replace
2985 X with a reference to the stack pointer. */
2986 if (push_operand (x, GET_MODE (x)))
2991 /* Do not use anti_adjust_stack, since we don't want to update
2992 stack_pointer_delta. */
2993 temp = expand_binop (Pmode,
2994 #ifdef STACK_GROWS_DOWNWARD
3002 (GET_MODE_SIZE (GET_MODE (x)))),
3003 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3005 if (temp != stack_pointer_rtx)
3006 emit_move_insn (stack_pointer_rtx, temp);
3008 code = GET_CODE (XEXP (x, 0));
3010 /* Just hope that small offsets off SP are OK. */
3011 if (code == POST_INC)
3012 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3013 GEN_INT (-((HOST_WIDE_INT)
3014 GET_MODE_SIZE (GET_MODE (x)))));
3015 else if (code == POST_DEC)
3016 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3017 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3019 temp = stack_pointer_rtx;
3021 x = change_address (x, VOIDmode, temp);
3025 /* If we are in reload, see if either operand is a MEM whose address
3026 is scheduled for replacement. */
3027 if (reload_in_progress && GET_CODE (x) == MEM
3028 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3029 x = replace_equiv_address_nv (x, inner);
3030 if (reload_in_progress && GET_CODE (y) == MEM
3031 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3032 y = replace_equiv_address_nv (y, inner);
3038 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3041 rtx xpart = operand_subword (x, i, 1, mode);
3042 rtx ypart = operand_subword (y, i, 1, mode);
3044 /* If we can't get a part of Y, put Y into memory if it is a
3045 constant. Otherwise, force it into a register. If we still
3046 can't get a part of Y, abort. */
3047 if (ypart == 0 && CONSTANT_P (y))
3049 y = force_const_mem (mode, y);
3050 ypart = operand_subword (y, i, 1, mode);
3052 else if (ypart == 0)
3053 ypart = operand_subword_force (y, i, mode);
3055 if (xpart == 0 || ypart == 0)
3058 need_clobber |= (GET_CODE (xpart) == SUBREG);
3060 last_insn = emit_move_insn (xpart, ypart);
3063 seq = gen_sequence ();
3066 /* Show the output dies here. This is necessary for SUBREGs
3067 of pseudos since we cannot track their lifetimes correctly;
3068 hard regs shouldn't appear here except as return values.
3069 We never want to emit such a clobber after reload. */
3071 && ! (reload_in_progress || reload_completed)
3072 && need_clobber != 0)
3073 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3083 /* Pushing data onto the stack. */
3085 /* Push a block of length SIZE (perhaps variable)
3086 and return an rtx to address the beginning of the block.
3087 Note that it is not possible for the value returned to be a QUEUED.
3088 The value may be virtual_outgoing_args_rtx.
3090 EXTRA is the number of bytes of padding to push in addition to SIZE.
3091 BELOW nonzero means this padding comes at low addresses;
3092 otherwise, the padding comes at high addresses. */
3095 push_block (size, extra, below)
3101 size = convert_modes (Pmode, ptr_mode, size, 1);
3102 if (CONSTANT_P (size))
3103 anti_adjust_stack (plus_constant (size, extra));
3104 else if (GET_CODE (size) == REG && extra == 0)
3105 anti_adjust_stack (size);
3108 temp = copy_to_mode_reg (Pmode, size);
3110 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3111 temp, 0, OPTAB_LIB_WIDEN);
3112 anti_adjust_stack (temp);
3115 #ifndef STACK_GROWS_DOWNWARD
3121 temp = virtual_outgoing_args_rtx;
3122 if (extra != 0 && below)
3123 temp = plus_constant (temp, extra);
3127 if (GET_CODE (size) == CONST_INT)
3128 temp = plus_constant (virtual_outgoing_args_rtx,
3129 -INTVAL (size) - (below ? 0 : extra));
3130 else if (extra != 0 && !below)
3131 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3132 negate_rtx (Pmode, plus_constant (size, extra)));
3134 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3135 negate_rtx (Pmode, size));
3138 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3141 #ifdef PUSH_ROUNDING
3143 /* Emit single push insn. */
3146 emit_single_push_insn (mode, x, type)
3148 enum machine_mode mode;
3152 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3154 enum insn_code icode;
3155 insn_operand_predicate_fn pred;
3157 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3158 /* If there is push pattern, use it. Otherwise try old way of throwing
3159 MEM representing push operation to move expander. */
3160 icode = push_optab->handlers[(int) mode].insn_code;
3161 if (icode != CODE_FOR_nothing)
3163 if (((pred = insn_data[(int) icode].operand[0].predicate)
3164 && !((*pred) (x, mode))))
3165 x = force_reg (mode, x);
3166 emit_insn (GEN_FCN (icode) (x));
3169 if (GET_MODE_SIZE (mode) == rounded_size)
3170 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3173 #ifdef STACK_GROWS_DOWNWARD
3174 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3175 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3177 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3178 GEN_INT (rounded_size));
3180 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3183 dest = gen_rtx_MEM (mode, dest_addr);
3187 set_mem_attributes (dest, type, 1);
3189 if (flag_optimize_sibling_calls)
3190 /* Function incoming arguments may overlap with sibling call
3191 outgoing arguments and we cannot allow reordering of reads
3192 from function arguments with stores to outgoing arguments
3193 of sibling calls. */
3194 set_mem_alias_set (dest, 0);
3196 emit_move_insn (dest, x);
3200 /* Generate code to push X onto the stack, assuming it has mode MODE and
3202 MODE is redundant except when X is a CONST_INT (since they don't
3204 SIZE is an rtx for the size of data to be copied (in bytes),
3205 needed only if X is BLKmode.
3207 ALIGN (in bits) is maximum alignment we can assume.
3209 If PARTIAL and REG are both nonzero, then copy that many of the first
3210 words of X into registers starting with REG, and push the rest of X.
3211 The amount of space pushed is decreased by PARTIAL words,
3212 rounded *down* to a multiple of PARM_BOUNDARY.
3213 REG must be a hard register in this case.
3214 If REG is zero but PARTIAL is not, take any all others actions for an
3215 argument partially in registers, but do not actually load any
3218 EXTRA is the amount in bytes of extra space to leave next to this arg.
3219 This is ignored if an argument block has already been allocated.
3221 On a machine that lacks real push insns, ARGS_ADDR is the address of
3222 the bottom of the argument block for this call. We use indexing off there
3223 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3224 argument block has not been preallocated.
3226 ARGS_SO_FAR is the size of args previously pushed for this call.
3228 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3229 for arguments passed in registers. If nonzero, it will be the number
3230 of bytes required. */
3233 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3234 args_addr, args_so_far, reg_parm_stack_space,
3237 enum machine_mode mode;
3246 int reg_parm_stack_space;
3250 enum direction stack_direction
3251 #ifdef STACK_GROWS_DOWNWARD
3257 /* Decide where to pad the argument: `downward' for below,
3258 `upward' for above, or `none' for don't pad it.
3259 Default is below for small data on big-endian machines; else above. */
3260 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3262 /* Invert direction if stack is post-decrement.
3264 if (STACK_PUSH_CODE == POST_DEC)
3265 if (where_pad != none)
3266 where_pad = (where_pad == downward ? upward : downward);
3268 xinner = x = protect_from_queue (x, 0);
3270 if (mode == BLKmode)
3272 /* Copy a block into the stack, entirely or partially. */
3275 int used = partial * UNITS_PER_WORD;
3276 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3284 /* USED is now the # of bytes we need not copy to the stack
3285 because registers will take care of them. */
3288 xinner = adjust_address (xinner, BLKmode, used);
3290 /* If the partial register-part of the arg counts in its stack size,
3291 skip the part of stack space corresponding to the registers.
3292 Otherwise, start copying to the beginning of the stack space,
3293 by setting SKIP to 0. */
3294 skip = (reg_parm_stack_space == 0) ? 0 : used;
3296 #ifdef PUSH_ROUNDING
3297 /* Do it with several push insns if that doesn't take lots of insns
3298 and if there is no difficulty with push insns that skip bytes
3299 on the stack for alignment purposes. */
3302 && GET_CODE (size) == CONST_INT
3304 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3305 /* Here we avoid the case of a structure whose weak alignment
3306 forces many pushes of a small amount of data,
3307 and such small pushes do rounding that causes trouble. */
3308 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3309 || align >= BIGGEST_ALIGNMENT
3310 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3311 == (align / BITS_PER_UNIT)))
3312 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3314 /* Push padding now if padding above and stack grows down,
3315 or if padding below and stack grows up.
3316 But if space already allocated, this has already been done. */
3317 if (extra && args_addr == 0
3318 && where_pad != none && where_pad != stack_direction)
3319 anti_adjust_stack (GEN_INT (extra));
3321 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3324 #endif /* PUSH_ROUNDING */
3328 /* Otherwise make space on the stack and copy the data
3329 to the address of that space. */
3331 /* Deduct words put into registers from the size we must copy. */
3334 if (GET_CODE (size) == CONST_INT)
3335 size = GEN_INT (INTVAL (size) - used);
3337 size = expand_binop (GET_MODE (size), sub_optab, size,
3338 GEN_INT (used), NULL_RTX, 0,
3342 /* Get the address of the stack space.
3343 In this case, we do not deal with EXTRA separately.
3344 A single stack adjust will do. */
3347 temp = push_block (size, extra, where_pad == downward);
3350 else if (GET_CODE (args_so_far) == CONST_INT)
3351 temp = memory_address (BLKmode,
3352 plus_constant (args_addr,
3353 skip + INTVAL (args_so_far)));
3355 temp = memory_address (BLKmode,
3356 plus_constant (gen_rtx_PLUS (Pmode,
3360 target = gen_rtx_MEM (BLKmode, temp);
3364 set_mem_attributes (target, type, 1);
3365 /* Function incoming arguments may overlap with sibling call
3366 outgoing arguments and we cannot allow reordering of reads
3367 from function arguments with stores to outgoing arguments
3368 of sibling calls. */
3369 set_mem_alias_set (target, 0);
3372 set_mem_align (target, align);
3374 /* TEMP is the address of the block. Copy the data there. */
3375 if (GET_CODE (size) == CONST_INT
3376 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3378 move_by_pieces (target, xinner, INTVAL (size), align);
3383 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3384 enum machine_mode mode;
3386 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3388 mode = GET_MODE_WIDER_MODE (mode))
3390 enum insn_code code = movstr_optab[(int) mode];
3391 insn_operand_predicate_fn pred;
3393 if (code != CODE_FOR_nothing
3394 && ((GET_CODE (size) == CONST_INT
3395 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3396 <= (GET_MODE_MASK (mode) >> 1)))
3397 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3398 && (!(pred = insn_data[(int) code].operand[0].predicate)
3399 || ((*pred) (target, BLKmode)))
3400 && (!(pred = insn_data[(int) code].operand[1].predicate)
3401 || ((*pred) (xinner, BLKmode)))
3402 && (!(pred = insn_data[(int) code].operand[3].predicate)
3403 || ((*pred) (opalign, VOIDmode))))
3405 rtx op2 = convert_to_mode (mode, size, 1);
3406 rtx last = get_last_insn ();
3409 pred = insn_data[(int) code].operand[2].predicate;
3410 if (pred != 0 && ! (*pred) (op2, mode))
3411 op2 = copy_to_mode_reg (mode, op2);
3413 pat = GEN_FCN ((int) code) (target, xinner,
3421 delete_insns_since (last);
3426 if (!ACCUMULATE_OUTGOING_ARGS)
3428 /* If the source is referenced relative to the stack pointer,
3429 copy it to another register to stabilize it. We do not need
3430 to do this if we know that we won't be changing sp. */
3432 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3433 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3434 temp = copy_to_reg (temp);
3437 /* Make inhibit_defer_pop nonzero around the library call
3438 to force it to pop the bcopy-arguments right away. */
3440 #ifdef TARGET_MEM_FUNCTIONS
3441 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3442 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3443 convert_to_mode (TYPE_MODE (sizetype),
3444 size, TREE_UNSIGNED (sizetype)),
3445 TYPE_MODE (sizetype));
3447 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3448 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3449 convert_to_mode (TYPE_MODE (integer_type_node),
3451 TREE_UNSIGNED (integer_type_node)),
3452 TYPE_MODE (integer_type_node));
3457 else if (partial > 0)
3459 /* Scalar partly in registers. */
3461 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3464 /* # words of start of argument
3465 that we must make space for but need not store. */
3466 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3467 int args_offset = INTVAL (args_so_far);
3470 /* Push padding now if padding above and stack grows down,
3471 or if padding below and stack grows up.
3472 But if space already allocated, this has already been done. */
3473 if (extra && args_addr == 0
3474 && where_pad != none && where_pad != stack_direction)
3475 anti_adjust_stack (GEN_INT (extra));
3477 /* If we make space by pushing it, we might as well push
3478 the real data. Otherwise, we can leave OFFSET nonzero
3479 and leave the space uninitialized. */
3483 /* Now NOT_STACK gets the number of words that we don't need to
3484 allocate on the stack. */
3485 not_stack = partial - offset;
3487 /* If the partial register-part of the arg counts in its stack size,
3488 skip the part of stack space corresponding to the registers.
3489 Otherwise, start copying to the beginning of the stack space,
3490 by setting SKIP to 0. */
3491 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3493 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3494 x = validize_mem (force_const_mem (mode, x));
3496 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3497 SUBREGs of such registers are not allowed. */
3498 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3499 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3500 x = copy_to_reg (x);
3502 /* Loop over all the words allocated on the stack for this arg. */
3503 /* We can do it by words, because any scalar bigger than a word
3504 has a size a multiple of a word. */
3505 #ifndef PUSH_ARGS_REVERSED
3506 for (i = not_stack; i < size; i++)
3508 for (i = size - 1; i >= not_stack; i--)
3510 if (i >= not_stack + offset)
3511 emit_push_insn (operand_subword_force (x, i, mode),
3512 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3514 GEN_INT (args_offset + ((i - not_stack + skip)
3516 reg_parm_stack_space, alignment_pad);
3521 rtx target = NULL_RTX;
3524 /* Push padding now if padding above and stack grows down,
3525 or if padding below and stack grows up.
3526 But if space already allocated, this has already been done. */
3527 if (extra && args_addr == 0
3528 && where_pad != none && where_pad != stack_direction)
3529 anti_adjust_stack (GEN_INT (extra));
3531 #ifdef PUSH_ROUNDING
3532 if (args_addr == 0 && PUSH_ARGS)
3533 emit_single_push_insn (mode, x, type);
3537 if (GET_CODE (args_so_far) == CONST_INT)
3539 = memory_address (mode,
3540 plus_constant (args_addr,
3541 INTVAL (args_so_far)));
3543 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3546 dest = gen_rtx_MEM (mode, addr);
3549 set_mem_attributes (dest, type, 1);
3550 /* Function incoming arguments may overlap with sibling call
3551 outgoing arguments and we cannot allow reordering of reads
3552 from function arguments with stores to outgoing arguments
3553 of sibling calls. */
3554 set_mem_alias_set (dest, 0);
3557 emit_move_insn (dest, x);
3563 /* If part should go in registers, copy that part
3564 into the appropriate registers. Do this now, at the end,
3565 since mem-to-mem copies above may do function calls. */
3566 if (partial > 0 && reg != 0)
3568 /* Handle calls that pass values in multiple non-contiguous locations.
3569 The Irix 6 ABI has examples of this. */
3570 if (GET_CODE (reg) == PARALLEL)
3571 emit_group_load (reg, x, -1); /* ??? size? */
3573 move_block_to_reg (REGNO (reg), x, partial, mode);
3576 if (extra && args_addr == 0 && where_pad == stack_direction)
3577 anti_adjust_stack (GEN_INT (extra));
3579 if (alignment_pad && args_addr == 0)
3580 anti_adjust_stack (alignment_pad);
3583 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3591 /* Only registers can be subtargets. */
3592 || GET_CODE (x) != REG
3593 /* If the register is readonly, it can't be set more than once. */
3594 || RTX_UNCHANGING_P (x)
3595 /* Don't use hard regs to avoid extending their life. */
3596 || REGNO (x) < FIRST_PSEUDO_REGISTER
3597 /* Avoid subtargets inside loops,
3598 since they hide some invariant expressions. */
3599 || preserve_subexpressions_p ())
3603 /* Expand an assignment that stores the value of FROM into TO.
3604 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3605 (This may contain a QUEUED rtx;
3606 if the value is constant, this rtx is a constant.)
3607 Otherwise, the returned value is NULL_RTX.
3609 SUGGEST_REG is no longer actually used.
3610 It used to mean, copy the value through a register
3611 and return that register, if that is possible.
3612 We now use WANT_VALUE to decide whether to do this. */
3615 expand_assignment (to, from, want_value, suggest_reg)
3618 int suggest_reg ATTRIBUTE_UNUSED;
3623 /* Don't crash if the lhs of the assignment was erroneous. */
3625 if (TREE_CODE (to) == ERROR_MARK)
3627 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3628 return want_value ? result : NULL_RTX;
3631 /* Assignment of a structure component needs special treatment
3632 if the structure component's rtx is not simply a MEM.
3633 Assignment of an array element at a constant index, and assignment of
3634 an array element in an unaligned packed structure field, has the same
3637 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3638 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3640 enum machine_mode mode1;
3641 HOST_WIDE_INT bitsize, bitpos;
3649 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3650 &unsignedp, &volatilep);
3652 /* If we are going to use store_bit_field and extract_bit_field,
3653 make sure to_rtx will be safe for multiple use. */
3655 if (mode1 == VOIDmode && want_value)
3656 tem = stabilize_reference (tem);
3658 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3662 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3664 if (GET_CODE (to_rtx) != MEM)
3667 if (GET_MODE (offset_rtx) != ptr_mode)
3668 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3670 #ifdef POINTERS_EXTEND_UNSIGNED
3671 if (GET_MODE (offset_rtx) != Pmode)
3672 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3675 /* A constant address in TO_RTX can have VOIDmode, we must not try
3676 to call force_reg for that case. Avoid that case. */
3677 if (GET_CODE (to_rtx) == MEM
3678 && GET_MODE (to_rtx) == BLKmode
3679 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3681 && (bitpos % bitsize) == 0
3682 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3683 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3686 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3688 if (GET_CODE (XEXP (temp, 0)) == REG)
3691 to_rtx = (replace_equiv_address
3692 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3697 to_rtx = offset_address (to_rtx, offset_rtx,
3698 highest_pow2_factor (offset));
3701 if (GET_CODE (to_rtx) == MEM)
3703 tree old_expr = MEM_EXPR (to_rtx);
3705 /* If the field is at offset zero, we could have been given the
3706 DECL_RTX of the parent struct. Don't munge it. */
3707 to_rtx = shallow_copy_rtx (to_rtx);
3709 set_mem_attributes (to_rtx, to, 0);
3711 /* If we changed MEM_EXPR, that means we're now referencing
3712 the COMPONENT_REF, which means that MEM_OFFSET must be
3713 relative to that field. But we've not yet reflected BITPOS
3714 in TO_RTX. This will be done in store_field. Adjust for
3715 that by biasing MEM_OFFSET by -bitpos. */
3716 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3717 && (bitpos / BITS_PER_UNIT) != 0)
3718 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3719 - (bitpos / BITS_PER_UNIT)));
3722 /* Deal with volatile and readonly fields. The former is only done
3723 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3724 if (volatilep && GET_CODE (to_rtx) == MEM)
3726 if (to_rtx == orig_to_rtx)
3727 to_rtx = copy_rtx (to_rtx);
3728 MEM_VOLATILE_P (to_rtx) = 1;
3731 if (TREE_CODE (to) == COMPONENT_REF
3732 && TREE_READONLY (TREE_OPERAND (to, 1)))
3734 if (to_rtx == orig_to_rtx)
3735 to_rtx = copy_rtx (to_rtx);
3736 RTX_UNCHANGING_P (to_rtx) = 1;
3739 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3741 if (to_rtx == orig_to_rtx)
3742 to_rtx = copy_rtx (to_rtx);
3743 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3746 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3748 /* Spurious cast for HPUX compiler. */
3749 ? ((enum machine_mode)
3750 TYPE_MODE (TREE_TYPE (to)))
3752 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3754 preserve_temp_slots (result);
3758 /* If the value is meaningful, convert RESULT to the proper mode.
3759 Otherwise, return nothing. */
3760 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3761 TYPE_MODE (TREE_TYPE (from)),
3763 TREE_UNSIGNED (TREE_TYPE (to)))
3767 /* If the rhs is a function call and its value is not an aggregate,
3768 call the function before we start to compute the lhs.
3769 This is needed for correct code for cases such as
3770 val = setjmp (buf) on machines where reference to val
3771 requires loading up part of an address in a separate insn.
3773 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3774 since it might be a promoted variable where the zero- or sign- extension
3775 needs to be done. Handling this in the normal way is safe because no
3776 computation is done before the call. */
3777 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3778 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3779 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3780 && GET_CODE (DECL_RTL (to)) == REG))
3785 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3787 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3789 /* Handle calls that return values in multiple non-contiguous locations.
3790 The Irix 6 ABI has examples of this. */
3791 if (GET_CODE (to_rtx) == PARALLEL)
3792 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3793 else if (GET_MODE (to_rtx) == BLKmode)
3794 emit_block_move (to_rtx, value, expr_size (from));
3797 #ifdef POINTERS_EXTEND_UNSIGNED
3798 if (POINTER_TYPE_P (TREE_TYPE (to))
3799 && GET_MODE (to_rtx) != GET_MODE (value))
3800 value = convert_memory_address (GET_MODE (to_rtx), value);
3802 emit_move_insn (to_rtx, value);
3804 preserve_temp_slots (to_rtx);
3807 return want_value ? to_rtx : NULL_RTX;
3810 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3811 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3814 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3816 /* Don't move directly into a return register. */
3817 if (TREE_CODE (to) == RESULT_DECL
3818 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3823 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3825 if (GET_CODE (to_rtx) == PARALLEL)
3826 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3828 emit_move_insn (to_rtx, temp);
3830 preserve_temp_slots (to_rtx);
3833 return want_value ? to_rtx : NULL_RTX;
3836 /* In case we are returning the contents of an object which overlaps
3837 the place the value is being stored, use a safe function when copying
3838 a value through a pointer into a structure value return block. */
3839 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3840 && current_function_returns_struct
3841 && !current_function_returns_pcc_struct)
3846 size = expr_size (from);
3847 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3849 #ifdef TARGET_MEM_FUNCTIONS
3850 emit_library_call (memmove_libfunc, LCT_NORMAL,
3851 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3852 XEXP (from_rtx, 0), Pmode,
3853 convert_to_mode (TYPE_MODE (sizetype),
3854 size, TREE_UNSIGNED (sizetype)),
3855 TYPE_MODE (sizetype));
3857 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3858 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3859 XEXP (to_rtx, 0), Pmode,
3860 convert_to_mode (TYPE_MODE (integer_type_node),
3861 size, TREE_UNSIGNED (integer_type_node)),
3862 TYPE_MODE (integer_type_node));
3865 preserve_temp_slots (to_rtx);
3868 return want_value ? to_rtx : NULL_RTX;
3871 /* Compute FROM and store the value in the rtx we got. */
3874 result = store_expr (from, to_rtx, want_value);
3875 preserve_temp_slots (result);
3878 return want_value ? result : NULL_RTX;
3881 /* Generate code for computing expression EXP,
3882 and storing the value into TARGET.
3883 TARGET may contain a QUEUED rtx.
3885 If WANT_VALUE is nonzero, return a copy of the value
3886 not in TARGET, so that we can be sure to use the proper
3887 value in a containing expression even if TARGET has something
3888 else stored in it. If possible, we copy the value through a pseudo
3889 and return that pseudo. Or, if the value is constant, we try to
3890 return the constant. In some cases, we return a pseudo
3891 copied *from* TARGET.
3893 If the mode is BLKmode then we may return TARGET itself.
3894 It turns out that in BLKmode it doesn't cause a problem.
3895 because C has no operators that could combine two different
3896 assignments into the same BLKmode object with different values
3897 with no sequence point. Will other languages need this to
3900 If WANT_VALUE is 0, we return NULL, to make sure
3901 to catch quickly any cases where the caller uses the value
3902 and fails to set WANT_VALUE. */
3905 store_expr (exp, target, want_value)
3911 int dont_return_target = 0;
3912 int dont_store_target = 0;
3914 if (TREE_CODE (exp) == COMPOUND_EXPR)
3916 /* Perform first part of compound expression, then assign from second
3918 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3920 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3922 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3924 /* For conditional expression, get safe form of the target. Then
3925 test the condition, doing the appropriate assignment on either
3926 side. This avoids the creation of unnecessary temporaries.
3927 For non-BLKmode, it is more efficient not to do this. */
3929 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3932 target = protect_from_queue (target, 1);
3934 do_pending_stack_adjust ();
3936 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3937 start_cleanup_deferral ();
3938 store_expr (TREE_OPERAND (exp, 1), target, 0);
3939 end_cleanup_deferral ();
3941 emit_jump_insn (gen_jump (lab2));
3944 start_cleanup_deferral ();
3945 store_expr (TREE_OPERAND (exp, 2), target, 0);
3946 end_cleanup_deferral ();
3951 return want_value ? target : NULL_RTX;
3953 else if (queued_subexp_p (target))
3954 /* If target contains a postincrement, let's not risk
3955 using it as the place to generate the rhs. */
3957 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3959 /* Expand EXP into a new pseudo. */
3960 temp = gen_reg_rtx (GET_MODE (target));
3961 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3964 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3966 /* If target is volatile, ANSI requires accessing the value
3967 *from* the target, if it is accessed. So make that happen.
3968 In no case return the target itself. */
3969 if (! MEM_VOLATILE_P (target) && want_value)
3970 dont_return_target = 1;
3972 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3973 && GET_MODE (target) != BLKmode)
3974 /* If target is in memory and caller wants value in a register instead,
3975 arrange that. Pass TARGET as target for expand_expr so that,
3976 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3977 We know expand_expr will not use the target in that case.
3978 Don't do this if TARGET is volatile because we are supposed
3979 to write it and then read it. */
3981 temp = expand_expr (exp, target, GET_MODE (target), 0);
3982 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3984 /* If TEMP is already in the desired TARGET, only copy it from
3985 memory and don't store it there again. */
3987 || (rtx_equal_p (temp, target)
3988 && ! side_effects_p (temp) && ! side_effects_p (target)))
3989 dont_store_target = 1;
3990 temp = copy_to_reg (temp);
3992 dont_return_target = 1;
3994 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3995 /* If this is an scalar in a register that is stored in a wider mode
3996 than the declared mode, compute the result into its declared mode
3997 and then convert to the wider mode. Our value is the computed
4000 /* If we don't want a value, we can do the conversion inside EXP,
4001 which will often result in some optimizations. Do the conversion
4002 in two steps: first change the signedness, if needed, then
4003 the extend. But don't do this if the type of EXP is a subtype
4004 of something else since then the conversion might involve
4005 more than just converting modes. */
4006 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4007 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4009 if (TREE_UNSIGNED (TREE_TYPE (exp))
4010 != SUBREG_PROMOTED_UNSIGNED_P (target))
4013 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4017 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4018 SUBREG_PROMOTED_UNSIGNED_P (target)),
4022 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4024 /* If TEMP is a volatile MEM and we want a result value, make
4025 the access now so it gets done only once. Likewise if
4026 it contains TARGET. */
4027 if (GET_CODE (temp) == MEM && want_value
4028 && (MEM_VOLATILE_P (temp)
4029 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4030 temp = copy_to_reg (temp);
4032 /* If TEMP is a VOIDmode constant, use convert_modes to make
4033 sure that we properly convert it. */
4034 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4036 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4037 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4038 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4039 GET_MODE (target), temp,
4040 SUBREG_PROMOTED_UNSIGNED_P (target));
4043 convert_move (SUBREG_REG (target), temp,
4044 SUBREG_PROMOTED_UNSIGNED_P (target));
4046 /* If we promoted a constant, change the mode back down to match
4047 target. Otherwise, the caller might get confused by a result whose
4048 mode is larger than expected. */
4050 if (want_value && GET_MODE (temp) != GET_MODE (target))
4052 if (GET_MODE (temp) != VOIDmode)
4054 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4055 SUBREG_PROMOTED_VAR_P (temp) = 1;
4056 SUBREG_PROMOTED_UNSIGNED_P (temp)
4057 = SUBREG_PROMOTED_UNSIGNED_P (target);
4060 temp = convert_modes (GET_MODE (target),
4061 GET_MODE (SUBREG_REG (target)),
4062 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4065 return want_value ? temp : NULL_RTX;
4069 temp = expand_expr (exp, target, GET_MODE (target), 0);
4070 /* Return TARGET if it's a specified hardware register.
4071 If TARGET is a volatile mem ref, either return TARGET
4072 or return a reg copied *from* TARGET; ANSI requires this.
4074 Otherwise, if TEMP is not TARGET, return TEMP
4075 if it is constant (for efficiency),
4076 or if we really want the correct value. */
4077 if (!(target && GET_CODE (target) == REG
4078 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4079 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4080 && ! rtx_equal_p (temp, target)
4081 && (CONSTANT_P (temp) || want_value))
4082 dont_return_target = 1;
4085 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4086 the same as that of TARGET, adjust the constant. This is needed, for
4087 example, in case it is a CONST_DOUBLE and we want only a word-sized
4089 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4090 && TREE_CODE (exp) != ERROR_MARK
4091 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4092 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4093 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4095 /* If value was not generated in the target, store it there.
4096 Convert the value to TARGET's type first if necessary.
4097 If TEMP and TARGET compare equal according to rtx_equal_p, but
4098 one or both of them are volatile memory refs, we have to distinguish
4100 - expand_expr has used TARGET. In this case, we must not generate
4101 another copy. This can be detected by TARGET being equal according
4103 - expand_expr has not used TARGET - that means that the source just
4104 happens to have the same RTX form. Since temp will have been created
4105 by expand_expr, it will compare unequal according to == .
4106 We must generate a copy in this case, to reach the correct number
4107 of volatile memory references. */
4109 if ((! rtx_equal_p (temp, target)
4110 || (temp != target && (side_effects_p (temp)
4111 || side_effects_p (target))))
4112 && TREE_CODE (exp) != ERROR_MARK
4113 && ! dont_store_target)
4115 target = protect_from_queue (target, 1);
4116 if (GET_MODE (temp) != GET_MODE (target)
4117 && GET_MODE (temp) != VOIDmode)
4119 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4120 if (dont_return_target)
4122 /* In this case, we will return TEMP,
4123 so make sure it has the proper mode.
4124 But don't forget to store the value into TARGET. */
4125 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4126 emit_move_insn (target, temp);
4129 convert_move (target, temp, unsignedp);
4132 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4134 /* Handle copying a string constant into an array. The string
4135 constant may be shorter than the array. So copy just the string's
4136 actual length, and clear the rest. First get the size of the data
4137 type of the string, which is actually the size of the target. */
4138 rtx size = expr_size (exp);
4140 if (GET_CODE (size) == CONST_INT
4141 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4142 emit_block_move (target, temp, size);
4145 /* Compute the size of the data to copy from the string. */
4147 = size_binop (MIN_EXPR,
4148 make_tree (sizetype, size),
4149 size_int (TREE_STRING_LENGTH (exp)));
4150 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4154 /* Copy that much. */
4155 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4156 emit_block_move (target, temp, copy_size_rtx);
4158 /* Figure out how much is left in TARGET that we have to clear.
4159 Do all calculations in ptr_mode. */
4160 if (GET_CODE (copy_size_rtx) == CONST_INT)
4162 size = plus_constant (size, -INTVAL (copy_size_rtx));
4163 target = adjust_address (target, BLKmode,
4164 INTVAL (copy_size_rtx));
4168 size = expand_binop (ptr_mode, sub_optab, size,
4169 copy_size_rtx, NULL_RTX, 0,
4172 #ifdef POINTERS_EXTEND_UNSIGNED
4173 if (GET_MODE (copy_size_rtx) != Pmode)
4174 copy_size_rtx = convert_memory_address (Pmode,
4178 target = offset_address (target, copy_size_rtx,
4179 highest_pow2_factor (copy_size));
4180 label = gen_label_rtx ();
4181 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4182 GET_MODE (size), 0, label);
4185 if (size != const0_rtx)
4186 clear_storage (target, size);
4192 /* Handle calls that return values in multiple non-contiguous locations.
4193 The Irix 6 ABI has examples of this. */
4194 else if (GET_CODE (target) == PARALLEL)
4195 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4196 else if (GET_MODE (temp) == BLKmode)
4197 emit_block_move (target, temp, expr_size (exp));
4199 emit_move_insn (target, temp);
4202 /* If we don't want a value, return NULL_RTX. */
4206 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4207 ??? The latter test doesn't seem to make sense. */
4208 else if (dont_return_target && GET_CODE (temp) != MEM)
4211 /* Return TARGET itself if it is a hard register. */
4212 else if (want_value && GET_MODE (target) != BLKmode
4213 && ! (GET_CODE (target) == REG
4214 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4215 return copy_to_reg (target);
4221 /* Return 1 if EXP just contains zeros. */
4229 switch (TREE_CODE (exp))
4233 case NON_LVALUE_EXPR:
4234 case VIEW_CONVERT_EXPR:
4235 return is_zeros_p (TREE_OPERAND (exp, 0));
4238 return integer_zerop (exp);
4242 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4245 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4248 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4249 elt = TREE_CHAIN (elt))
4250 if (!is_zeros_p (TREE_VALUE (elt)))
4256 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4257 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4258 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4259 if (! is_zeros_p (TREE_VALUE (elt)))
4269 /* Return 1 if EXP contains mostly (3/4) zeros. */
4272 mostly_zeros_p (exp)
4275 if (TREE_CODE (exp) == CONSTRUCTOR)
4277 int elts = 0, zeros = 0;
4278 tree elt = CONSTRUCTOR_ELTS (exp);
4279 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4281 /* If there are no ranges of true bits, it is all zero. */
4282 return elt == NULL_TREE;
4284 for (; elt; elt = TREE_CHAIN (elt))
4286 /* We do not handle the case where the index is a RANGE_EXPR,
4287 so the statistic will be somewhat inaccurate.
4288 We do make a more accurate count in store_constructor itself,
4289 so since this function is only used for nested array elements,
4290 this should be close enough. */
4291 if (mostly_zeros_p (TREE_VALUE (elt)))
4296 return 4 * zeros >= 3 * elts;
4299 return is_zeros_p (exp);
4302 /* Helper function for store_constructor.
4303 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4304 TYPE is the type of the CONSTRUCTOR, not the element type.
4305 CLEARED is as for store_constructor.
4306 ALIAS_SET is the alias set to use for any stores.
4308 This provides a recursive shortcut back to store_constructor when it isn't
4309 necessary to go through store_field. This is so that we can pass through
4310 the cleared field to let store_constructor know that we may not have to
4311 clear a substructure if the outer structure has already been cleared. */
4314 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4317 unsigned HOST_WIDE_INT bitsize;
4318 HOST_WIDE_INT bitpos;
4319 enum machine_mode mode;
4324 if (TREE_CODE (exp) == CONSTRUCTOR
4325 && bitpos % BITS_PER_UNIT == 0
4326 /* If we have a non-zero bitpos for a register target, then we just
4327 let store_field do the bitfield handling. This is unlikely to
4328 generate unnecessary clear instructions anyways. */
4329 && (bitpos == 0 || GET_CODE (target) == MEM))
4331 if (GET_CODE (target) == MEM)
4333 = adjust_address (target,
4334 GET_MODE (target) == BLKmode
4336 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4337 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4340 /* Update the alias set, if required. */
4341 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4342 && MEM_ALIAS_SET (target) != 0)
4344 target = copy_rtx (target);
4345 set_mem_alias_set (target, alias_set);
4348 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4351 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4355 /* Store the value of constructor EXP into the rtx TARGET.
4356 TARGET is either a REG or a MEM; we know it cannot conflict, since
4357 safe_from_p has been called.
4358 CLEARED is true if TARGET is known to have been zero'd.
4359 SIZE is the number of bytes of TARGET we are allowed to modify: this
4360 may not be the same as the size of EXP if we are assigning to a field
4361 which has been packed to exclude padding bits. */
4364 store_constructor (exp, target, cleared, size)
4370 tree type = TREE_TYPE (exp);
4371 #ifdef WORD_REGISTER_OPERATIONS
4372 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4375 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4376 || TREE_CODE (type) == QUAL_UNION_TYPE)
4380 /* We either clear the aggregate or indicate the value is dead. */
4381 if ((TREE_CODE (type) == UNION_TYPE
4382 || TREE_CODE (type) == QUAL_UNION_TYPE)
4384 && ! CONSTRUCTOR_ELTS (exp))
4385 /* If the constructor is empty, clear the union. */
4387 clear_storage (target, expr_size (exp));
4391 /* If we are building a static constructor into a register,
4392 set the initial value as zero so we can fold the value into
4393 a constant. But if more than one register is involved,
4394 this probably loses. */
4395 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4396 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4398 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4402 /* If the constructor has fewer fields than the structure
4403 or if we are initializing the structure to mostly zeros,
4404 clear the whole structure first. Don't do this if TARGET is a
4405 register whose mode size isn't equal to SIZE since clear_storage
4406 can't handle this case. */
4407 else if (! cleared && size > 0
4408 && ((list_length (CONSTRUCTOR_ELTS (exp))
4409 != fields_length (type))
4410 || mostly_zeros_p (exp))
4411 && (GET_CODE (target) != REG
4412 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4415 clear_storage (target, GEN_INT (size));
4420 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4422 /* Store each element of the constructor into
4423 the corresponding field of TARGET. */
4425 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4427 tree field = TREE_PURPOSE (elt);
4428 tree value = TREE_VALUE (elt);
4429 enum machine_mode mode;
4430 HOST_WIDE_INT bitsize;
4431 HOST_WIDE_INT bitpos = 0;
4434 rtx to_rtx = target;
4436 /* Just ignore missing fields.
4437 We cleared the whole structure, above,
4438 if any fields are missing. */
4442 if (cleared && is_zeros_p (value))
4445 if (host_integerp (DECL_SIZE (field), 1))
4446 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4450 unsignedp = TREE_UNSIGNED (field);
4451 mode = DECL_MODE (field);
4452 if (DECL_BIT_FIELD (field))
4455 offset = DECL_FIELD_OFFSET (field);
4456 if (host_integerp (offset, 0)
4457 && host_integerp (bit_position (field), 0))
4459 bitpos = int_bit_position (field);
4463 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4469 if (contains_placeholder_p (offset))
4470 offset = build (WITH_RECORD_EXPR, sizetype,
4471 offset, make_tree (TREE_TYPE (exp), target));
4473 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4474 if (GET_CODE (to_rtx) != MEM)
4477 if (GET_MODE (offset_rtx) != ptr_mode)
4478 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4480 #ifdef POINTERS_EXTEND_UNSIGNED
4481 if (GET_MODE (offset_rtx) != Pmode)
4482 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4485 to_rtx = offset_address (to_rtx, offset_rtx,
4486 highest_pow2_factor (offset));
4489 if (TREE_READONLY (field))
4491 if (GET_CODE (to_rtx) == MEM)
4492 to_rtx = copy_rtx (to_rtx);
4494 RTX_UNCHANGING_P (to_rtx) = 1;
4497 #ifdef WORD_REGISTER_OPERATIONS
4498 /* If this initializes a field that is smaller than a word, at the
4499 start of a word, try to widen it to a full word.
4500 This special case allows us to output C++ member function
4501 initializations in a form that the optimizers can understand. */
4502 if (GET_CODE (target) == REG
4503 && bitsize < BITS_PER_WORD
4504 && bitpos % BITS_PER_WORD == 0
4505 && GET_MODE_CLASS (mode) == MODE_INT
4506 && TREE_CODE (value) == INTEGER_CST
4508 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4510 tree type = TREE_TYPE (value);
4512 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4514 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4515 value = convert (type, value);
4518 if (BYTES_BIG_ENDIAN)
4520 = fold (build (LSHIFT_EXPR, type, value,
4521 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4522 bitsize = BITS_PER_WORD;
4527 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4528 && DECL_NONADDRESSABLE_P (field))
4530 to_rtx = copy_rtx (to_rtx);
4531 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4534 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4535 value, type, cleared,
4536 get_alias_set (TREE_TYPE (field)));
4539 else if (TREE_CODE (type) == ARRAY_TYPE
4540 || TREE_CODE (type) == VECTOR_TYPE)
4545 tree domain = TYPE_DOMAIN (type);
4546 tree elttype = TREE_TYPE (type);
4548 HOST_WIDE_INT minelt = 0;
4549 HOST_WIDE_INT maxelt = 0;
4551 /* Vectors are like arrays, but the domain is stored via an array
4553 if (TREE_CODE (type) == VECTOR_TYPE)
4555 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4556 the same field as TYPE_DOMAIN, we are not guaranteed that
4558 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4559 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4562 const_bounds_p = (TYPE_MIN_VALUE (domain)
4563 && TYPE_MAX_VALUE (domain)
4564 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4565 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4567 /* If we have constant bounds for the range of the type, get them. */
4570 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4571 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4574 /* If the constructor has fewer elements than the array,
4575 clear the whole array first. Similarly if this is
4576 static constructor of a non-BLKmode object. */
4577 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4581 HOST_WIDE_INT count = 0, zero_count = 0;
4582 need_to_clear = ! const_bounds_p;
4584 /* This loop is a more accurate version of the loop in
4585 mostly_zeros_p (it handles RANGE_EXPR in an index).
4586 It is also needed to check for missing elements. */
4587 for (elt = CONSTRUCTOR_ELTS (exp);
4588 elt != NULL_TREE && ! need_to_clear;
4589 elt = TREE_CHAIN (elt))
4591 tree index = TREE_PURPOSE (elt);
4592 HOST_WIDE_INT this_node_count;
4594 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4596 tree lo_index = TREE_OPERAND (index, 0);
4597 tree hi_index = TREE_OPERAND (index, 1);
4599 if (! host_integerp (lo_index, 1)
4600 || ! host_integerp (hi_index, 1))
4606 this_node_count = (tree_low_cst (hi_index, 1)
4607 - tree_low_cst (lo_index, 1) + 1);
4610 this_node_count = 1;
4612 count += this_node_count;
4613 if (mostly_zeros_p (TREE_VALUE (elt)))
4614 zero_count += this_node_count;
4617 /* Clear the entire array first if there are any missing elements,
4618 or if the incidence of zero elements is >= 75%. */
4620 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4624 if (need_to_clear && size > 0)
4629 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4631 clear_storage (target, GEN_INT (size));
4635 else if (REG_P (target))
4636 /* Inform later passes that the old value is dead. */
4637 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4639 /* Store each element of the constructor into
4640 the corresponding element of TARGET, determined
4641 by counting the elements. */
4642 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4644 elt = TREE_CHAIN (elt), i++)
4646 enum machine_mode mode;
4647 HOST_WIDE_INT bitsize;
4648 HOST_WIDE_INT bitpos;
4650 tree value = TREE_VALUE (elt);
4651 tree index = TREE_PURPOSE (elt);
4652 rtx xtarget = target;
4654 if (cleared && is_zeros_p (value))
4657 unsignedp = TREE_UNSIGNED (elttype);
4658 mode = TYPE_MODE (elttype);
4659 if (mode == BLKmode)
4660 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4661 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4664 bitsize = GET_MODE_BITSIZE (mode);
4666 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4668 tree lo_index = TREE_OPERAND (index, 0);
4669 tree hi_index = TREE_OPERAND (index, 1);
4670 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4671 struct nesting *loop;
4672 HOST_WIDE_INT lo, hi, count;
4675 /* If the range is constant and "small", unroll the loop. */
4677 && host_integerp (lo_index, 0)
4678 && host_integerp (hi_index, 0)
4679 && (lo = tree_low_cst (lo_index, 0),
4680 hi = tree_low_cst (hi_index, 0),
4681 count = hi - lo + 1,
4682 (GET_CODE (target) != MEM
4684 || (host_integerp (TYPE_SIZE (elttype), 1)
4685 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4688 lo -= minelt; hi -= minelt;
4689 for (; lo <= hi; lo++)
4691 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4693 if (GET_CODE (target) == MEM
4694 && !MEM_KEEP_ALIAS_SET_P (target)
4695 && TREE_CODE (type) == ARRAY_TYPE
4696 && TYPE_NONALIASED_COMPONENT (type))
4698 target = copy_rtx (target);
4699 MEM_KEEP_ALIAS_SET_P (target) = 1;
4702 store_constructor_field
4703 (target, bitsize, bitpos, mode, value, type, cleared,
4704 get_alias_set (elttype));
4709 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4710 loop_top = gen_label_rtx ();
4711 loop_end = gen_label_rtx ();
4713 unsignedp = TREE_UNSIGNED (domain);
4715 index = build_decl (VAR_DECL, NULL_TREE, domain);
4718 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4720 SET_DECL_RTL (index, index_r);
4721 if (TREE_CODE (value) == SAVE_EXPR
4722 && SAVE_EXPR_RTL (value) == 0)
4724 /* Make sure value gets expanded once before the
4726 expand_expr (value, const0_rtx, VOIDmode, 0);
4729 store_expr (lo_index, index_r, 0);
4730 loop = expand_start_loop (0);
4732 /* Assign value to element index. */
4734 = convert (ssizetype,
4735 fold (build (MINUS_EXPR, TREE_TYPE (index),
4736 index, TYPE_MIN_VALUE (domain))));
4737 position = size_binop (MULT_EXPR, position,
4739 TYPE_SIZE_UNIT (elttype)));
4741 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4742 xtarget = offset_address (target, pos_rtx,
4743 highest_pow2_factor (position));
4744 xtarget = adjust_address (xtarget, mode, 0);
4745 if (TREE_CODE (value) == CONSTRUCTOR)
4746 store_constructor (value, xtarget, cleared,
4747 bitsize / BITS_PER_UNIT);
4749 store_expr (value, xtarget, 0);
4751 expand_exit_loop_if_false (loop,
4752 build (LT_EXPR, integer_type_node,
4755 expand_increment (build (PREINCREMENT_EXPR,
4757 index, integer_one_node), 0, 0);
4759 emit_label (loop_end);
4762 else if ((index != 0 && ! host_integerp (index, 0))
4763 || ! host_integerp (TYPE_SIZE (elttype), 1))
4768 index = ssize_int (1);
4771 index = convert (ssizetype,
4772 fold (build (MINUS_EXPR, index,
4773 TYPE_MIN_VALUE (domain))));
4775 position = size_binop (MULT_EXPR, index,
4777 TYPE_SIZE_UNIT (elttype)));
4778 xtarget = offset_address (target,
4779 expand_expr (position, 0, VOIDmode, 0),
4780 highest_pow2_factor (position));
4781 xtarget = adjust_address (xtarget, mode, 0);
4782 store_expr (value, xtarget, 0);
4787 bitpos = ((tree_low_cst (index, 0) - minelt)
4788 * tree_low_cst (TYPE_SIZE (elttype), 1));
4790 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4792 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4793 && TREE_CODE (type) == ARRAY_TYPE
4794 && TYPE_NONALIASED_COMPONENT (type))
4796 target = copy_rtx (target);
4797 MEM_KEEP_ALIAS_SET_P (target) = 1;
4800 store_constructor_field (target, bitsize, bitpos, mode, value,
4801 type, cleared, get_alias_set (elttype));
4807 /* Set constructor assignments. */
4808 else if (TREE_CODE (type) == SET_TYPE)
4810 tree elt = CONSTRUCTOR_ELTS (exp);
4811 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4812 tree domain = TYPE_DOMAIN (type);
4813 tree domain_min, domain_max, bitlength;
4815 /* The default implementation strategy is to extract the constant
4816 parts of the constructor, use that to initialize the target,
4817 and then "or" in whatever non-constant ranges we need in addition.
4819 If a large set is all zero or all ones, it is
4820 probably better to set it using memset (if available) or bzero.
4821 Also, if a large set has just a single range, it may also be
4822 better to first clear all the first clear the set (using
4823 bzero/memset), and set the bits we want. */
4825 /* Check for all zeros. */
4826 if (elt == NULL_TREE && size > 0)
4829 clear_storage (target, GEN_INT (size));
4833 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4834 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4835 bitlength = size_binop (PLUS_EXPR,
4836 size_diffop (domain_max, domain_min),
4839 nbits = tree_low_cst (bitlength, 1);
4841 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4842 are "complicated" (more than one range), initialize (the
4843 constant parts) by copying from a constant. */
4844 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4845 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4847 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4848 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4849 char *bit_buffer = (char *) alloca (nbits);
4850 HOST_WIDE_INT word = 0;
4851 unsigned int bit_pos = 0;
4852 unsigned int ibit = 0;
4853 unsigned int offset = 0; /* In bytes from beginning of set. */
4855 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4858 if (bit_buffer[ibit])
4860 if (BYTES_BIG_ENDIAN)
4861 word |= (1 << (set_word_size - 1 - bit_pos));
4863 word |= 1 << bit_pos;
4867 if (bit_pos >= set_word_size || ibit == nbits)
4869 if (word != 0 || ! cleared)
4871 rtx datum = GEN_INT (word);
4874 /* The assumption here is that it is safe to use
4875 XEXP if the set is multi-word, but not if
4876 it's single-word. */
4877 if (GET_CODE (target) == MEM)
4878 to_rtx = adjust_address (target, mode, offset);
4879 else if (offset == 0)
4883 emit_move_insn (to_rtx, datum);
4890 offset += set_word_size / BITS_PER_UNIT;
4895 /* Don't bother clearing storage if the set is all ones. */
4896 if (TREE_CHAIN (elt) != NULL_TREE
4897 || (TREE_PURPOSE (elt) == NULL_TREE
4899 : ( ! host_integerp (TREE_VALUE (elt), 0)
4900 || ! host_integerp (TREE_PURPOSE (elt), 0)
4901 || (tree_low_cst (TREE_VALUE (elt), 0)
4902 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4903 != (HOST_WIDE_INT) nbits))))
4904 clear_storage (target, expr_size (exp));
4906 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4908 /* Start of range of element or NULL. */
4909 tree startbit = TREE_PURPOSE (elt);
4910 /* End of range of element, or element value. */
4911 tree endbit = TREE_VALUE (elt);
4912 #ifdef TARGET_MEM_FUNCTIONS
4913 HOST_WIDE_INT startb, endb;
4915 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4917 bitlength_rtx = expand_expr (bitlength,
4918 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4920 /* Handle non-range tuple element like [ expr ]. */
4921 if (startbit == NULL_TREE)
4923 startbit = save_expr (endbit);
4927 startbit = convert (sizetype, startbit);
4928 endbit = convert (sizetype, endbit);
4929 if (! integer_zerop (domain_min))
4931 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4932 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4934 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4935 EXPAND_CONST_ADDRESS);
4936 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4937 EXPAND_CONST_ADDRESS);
4943 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4946 emit_move_insn (targetx, target);
4949 else if (GET_CODE (target) == MEM)
4954 #ifdef TARGET_MEM_FUNCTIONS
4955 /* Optimization: If startbit and endbit are
4956 constants divisible by BITS_PER_UNIT,
4957 call memset instead. */
4958 if (TREE_CODE (startbit) == INTEGER_CST
4959 && TREE_CODE (endbit) == INTEGER_CST
4960 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4961 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4963 emit_library_call (memset_libfunc, LCT_NORMAL,
4965 plus_constant (XEXP (targetx, 0),
4966 startb / BITS_PER_UNIT),
4968 constm1_rtx, TYPE_MODE (integer_type_node),
4969 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4970 TYPE_MODE (sizetype));
4974 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4975 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4976 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4977 startbit_rtx, TYPE_MODE (sizetype),
4978 endbit_rtx, TYPE_MODE (sizetype));
4981 emit_move_insn (target, targetx);
4989 /* Store the value of EXP (an expression tree)
4990 into a subfield of TARGET which has mode MODE and occupies
4991 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4992 If MODE is VOIDmode, it means that we are storing into a bit-field.
4994 If VALUE_MODE is VOIDmode, return nothing in particular.
4995 UNSIGNEDP is not used in this case.
4997 Otherwise, return an rtx for the value stored. This rtx
4998 has mode VALUE_MODE if that is convenient to do.
4999 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5001 TYPE is the type of the underlying object,
5003 ALIAS_SET is the alias set for the destination. This value will
5004 (in general) be different from that for TARGET, since TARGET is a
5005 reference to the containing structure. */
5008 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5011 HOST_WIDE_INT bitsize;
5012 HOST_WIDE_INT bitpos;
5013 enum machine_mode mode;
5015 enum machine_mode value_mode;
5020 HOST_WIDE_INT width_mask = 0;
5022 if (TREE_CODE (exp) == ERROR_MARK)
5025 /* If we have nothing to store, do nothing unless the expression has
5028 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5029 else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT)
5030 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5032 /* If we are storing into an unaligned field of an aligned union that is
5033 in a register, we may have the mode of TARGET being an integer mode but
5034 MODE == BLKmode. In that case, get an aligned object whose size and
5035 alignment are the same as TARGET and store TARGET into it (we can avoid
5036 the store if the field being stored is the entire width of TARGET). Then
5037 call ourselves recursively to store the field into a BLKmode version of
5038 that object. Finally, load from the object into TARGET. This is not
5039 very efficient in general, but should only be slightly more expensive
5040 than the otherwise-required unaligned accesses. Perhaps this can be
5041 cleaned up later. */
5044 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5048 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5050 rtx blk_object = adjust_address (object, BLKmode, 0);
5052 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5053 emit_move_insn (object, target);
5055 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5058 emit_move_insn (target, object);
5060 /* We want to return the BLKmode version of the data. */
5064 if (GET_CODE (target) == CONCAT)
5066 /* We're storing into a struct containing a single __complex. */
5070 return store_expr (exp, target, 0);
5073 /* If the structure is in a register or if the component
5074 is a bit field, we cannot use addressing to access it.
5075 Use bit-field techniques or SUBREG to store in it. */
5077 if (mode == VOIDmode
5078 || (mode != BLKmode && ! direct_store[(int) mode]
5079 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5080 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5081 || GET_CODE (target) == REG
5082 || GET_CODE (target) == SUBREG
5083 /* If the field isn't aligned enough to store as an ordinary memref,
5084 store it as a bit field. */
5085 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5086 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5087 || bitpos % GET_MODE_ALIGNMENT (mode)))
5088 /* If the RHS and field are a constant size and the size of the
5089 RHS isn't the same size as the bitfield, we must use bitfield
5092 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5093 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5095 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5097 /* If BITSIZE is narrower than the size of the type of EXP
5098 we will be narrowing TEMP. Normally, what's wanted are the
5099 low-order bits. However, if EXP's type is a record and this is
5100 big-endian machine, we want the upper BITSIZE bits. */
5101 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5102 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5103 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5104 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5105 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5109 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5111 if (mode != VOIDmode && mode != BLKmode
5112 && mode != TYPE_MODE (TREE_TYPE (exp)))
5113 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5115 /* If the modes of TARGET and TEMP are both BLKmode, both
5116 must be in memory and BITPOS must be aligned on a byte
5117 boundary. If so, we simply do a block copy. */
5118 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5120 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5121 || bitpos % BITS_PER_UNIT != 0)
5124 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5125 emit_block_move (target, temp,
5126 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5129 return value_mode == VOIDmode ? const0_rtx : target;
5132 /* Store the value in the bitfield. */
5133 store_bit_field (target, bitsize, bitpos, mode, temp,
5134 int_size_in_bytes (type));
5136 if (value_mode != VOIDmode)
5138 /* The caller wants an rtx for the value.
5139 If possible, avoid refetching from the bitfield itself. */
5141 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5144 enum machine_mode tmode;
5147 return expand_and (temp,
5151 GET_MODE (temp) == VOIDmode
5153 : GET_MODE (temp))), NULL_RTX);
5155 tmode = GET_MODE (temp);
5156 if (tmode == VOIDmode)
5158 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5159 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5160 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5163 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5164 NULL_RTX, value_mode, VOIDmode,
5165 int_size_in_bytes (type));
5171 rtx addr = XEXP (target, 0);
5172 rtx to_rtx = target;
5174 /* If a value is wanted, it must be the lhs;
5175 so make the address stable for multiple use. */
5177 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5178 && ! CONSTANT_ADDRESS_P (addr)
5179 /* A frame-pointer reference is already stable. */
5180 && ! (GET_CODE (addr) == PLUS
5181 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5182 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5183 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5184 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5186 /* Now build a reference to just the desired component. */
5188 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5190 if (to_rtx == target)
5191 to_rtx = copy_rtx (to_rtx);
5193 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5194 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5195 set_mem_alias_set (to_rtx, alias_set);
5197 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5201 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5202 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5203 codes and find the ultimate containing object, which we return.
5205 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5206 bit position, and *PUNSIGNEDP to the signedness of the field.
5207 If the position of the field is variable, we store a tree
5208 giving the variable offset (in units) in *POFFSET.
5209 This offset is in addition to the bit position.
5210 If the position is not variable, we store 0 in *POFFSET.
5212 If any of the extraction expressions is volatile,
5213 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5215 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5216 is a mode that can be used to access the field. In that case, *PBITSIZE
5219 If the field describes a variable-sized object, *PMODE is set to
5220 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5221 this case, but the address of the object can be found. */
5224 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5225 punsignedp, pvolatilep)
5227 HOST_WIDE_INT *pbitsize;
5228 HOST_WIDE_INT *pbitpos;
5230 enum machine_mode *pmode;
5235 enum machine_mode mode = VOIDmode;
5236 tree offset = size_zero_node;
5237 tree bit_offset = bitsize_zero_node;
5238 tree placeholder_ptr = 0;
5241 /* First get the mode, signedness, and size. We do this from just the
5242 outermost expression. */
5243 if (TREE_CODE (exp) == COMPONENT_REF)
5245 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5246 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5247 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5249 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5251 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5253 size_tree = TREE_OPERAND (exp, 1);
5254 *punsignedp = TREE_UNSIGNED (exp);
5258 mode = TYPE_MODE (TREE_TYPE (exp));
5259 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5261 if (mode == BLKmode)
5262 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5264 *pbitsize = GET_MODE_BITSIZE (mode);
5269 if (! host_integerp (size_tree, 1))
5270 mode = BLKmode, *pbitsize = -1;
5272 *pbitsize = tree_low_cst (size_tree, 1);
5275 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5276 and find the ultimate containing object. */
5279 if (TREE_CODE (exp) == BIT_FIELD_REF)
5280 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5281 else if (TREE_CODE (exp) == COMPONENT_REF)
5283 tree field = TREE_OPERAND (exp, 1);
5284 tree this_offset = DECL_FIELD_OFFSET (field);
5286 /* If this field hasn't been filled in yet, don't go
5287 past it. This should only happen when folding expressions
5288 made during type construction. */
5289 if (this_offset == 0)
5291 else if (! TREE_CONSTANT (this_offset)
5292 && contains_placeholder_p (this_offset))
5293 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5295 offset = size_binop (PLUS_EXPR, offset, this_offset);
5296 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5297 DECL_FIELD_BIT_OFFSET (field));
5299 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5302 else if (TREE_CODE (exp) == ARRAY_REF
5303 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5305 tree index = TREE_OPERAND (exp, 1);
5306 tree array = TREE_OPERAND (exp, 0);
5307 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5308 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5309 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5311 /* We assume all arrays have sizes that are a multiple of a byte.
5312 First subtract the lower bound, if any, in the type of the
5313 index, then convert to sizetype and multiply by the size of the
5315 if (low_bound != 0 && ! integer_zerop (low_bound))
5316 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5319 /* If the index has a self-referential type, pass it to a
5320 WITH_RECORD_EXPR; if the component size is, pass our
5321 component to one. */
5322 if (! TREE_CONSTANT (index)
5323 && contains_placeholder_p (index))
5324 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5325 if (! TREE_CONSTANT (unit_size)
5326 && contains_placeholder_p (unit_size))
5327 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5329 offset = size_binop (PLUS_EXPR, offset,
5330 size_binop (MULT_EXPR,
5331 convert (sizetype, index),
5335 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5337 tree new = find_placeholder (exp, &placeholder_ptr);
5339 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5340 We might have been called from tree optimization where we
5341 haven't set up an object yet. */
5349 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5350 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5351 && ! ((TREE_CODE (exp) == NOP_EXPR
5352 || TREE_CODE (exp) == CONVERT_EXPR)
5353 && (TYPE_MODE (TREE_TYPE (exp))
5354 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5357 /* If any reference in the chain is volatile, the effect is volatile. */
5358 if (TREE_THIS_VOLATILE (exp))
5361 exp = TREE_OPERAND (exp, 0);
5364 /* If OFFSET is constant, see if we can return the whole thing as a
5365 constant bit position. Otherwise, split it up. */
5366 if (host_integerp (offset, 0)
5367 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5369 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5370 && host_integerp (tem, 0))
5371 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5373 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5379 /* Return 1 if T is an expression that get_inner_reference handles. */
5382 handled_component_p (t)
5385 switch (TREE_CODE (t))
5390 case ARRAY_RANGE_REF:
5391 case NON_LVALUE_EXPR:
5392 case VIEW_CONVERT_EXPR:
5397 return (TYPE_MODE (TREE_TYPE (t))
5398 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5405 /* Given an rtx VALUE that may contain additions and multiplications, return
5406 an equivalent value that just refers to a register, memory, or constant.
5407 This is done by generating instructions to perform the arithmetic and
5408 returning a pseudo-register containing the value.
5410 The returned value may be a REG, SUBREG, MEM or constant. */
5413 force_operand (value, target)
5417 /* Use a temporary to force order of execution of calls to
5421 /* Use subtarget as the target for operand 0 of a binary operation. */
5422 rtx subtarget = get_subtarget (target);
5424 /* Check for a PIC address load. */
5425 if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5426 && XEXP (value, 0) == pic_offset_table_rtx
5427 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5428 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5429 || GET_CODE (XEXP (value, 1)) == CONST))
5432 subtarget = gen_reg_rtx (GET_MODE (value));
5433 emit_move_insn (subtarget, value);
5437 if (GET_CODE (value) == PLUS)
5438 binoptab = add_optab;
5439 else if (GET_CODE (value) == MINUS)
5440 binoptab = sub_optab;
5441 else if (GET_CODE (value) == MULT)
5443 op2 = XEXP (value, 1);
5444 if (!CONSTANT_P (op2)
5445 && !(GET_CODE (op2) == REG && op2 != subtarget))
5447 tmp = force_operand (XEXP (value, 0), subtarget);
5448 return expand_mult (GET_MODE (value), tmp,
5449 force_operand (op2, NULL_RTX),
5455 op2 = XEXP (value, 1);
5456 if (!CONSTANT_P (op2)
5457 && !(GET_CODE (op2) == REG && op2 != subtarget))
5459 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5461 binoptab = add_optab;
5462 op2 = negate_rtx (GET_MODE (value), op2);
5465 /* Check for an addition with OP2 a constant integer and our first
5466 operand a PLUS of a virtual register and something else. In that
5467 case, we want to emit the sum of the virtual register and the
5468 constant first and then add the other value. This allows virtual
5469 register instantiation to simply modify the constant rather than
5470 creating another one around this addition. */
5471 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5472 && GET_CODE (XEXP (value, 0)) == PLUS
5473 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5474 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5475 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5477 rtx temp = expand_binop (GET_MODE (value), binoptab,
5478 XEXP (XEXP (value, 0), 0), op2,
5479 subtarget, 0, OPTAB_LIB_WIDEN);
5480 return expand_binop (GET_MODE (value), binoptab, temp,
5481 force_operand (XEXP (XEXP (value, 0), 1), 0),
5482 target, 0, OPTAB_LIB_WIDEN);
5485 tmp = force_operand (XEXP (value, 0), subtarget);
5486 return expand_binop (GET_MODE (value), binoptab, tmp,
5487 force_operand (op2, NULL_RTX),
5488 target, 0, OPTAB_LIB_WIDEN);
5489 /* We give UNSIGNEDP = 0 to expand_binop
5490 because the only operations we are expanding here are signed ones. */
5493 #ifdef INSN_SCHEDULING
5494 /* On machines that have insn scheduling, we want all memory reference to be
5495 explicit, so we need to deal with such paradoxical SUBREGs. */
5496 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5497 && (GET_MODE_SIZE (GET_MODE (value))
5498 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5500 = simplify_gen_subreg (GET_MODE (value),
5501 force_reg (GET_MODE (SUBREG_REG (value)),
5502 force_operand (SUBREG_REG (value),
5504 GET_MODE (SUBREG_REG (value)),
5505 SUBREG_BYTE (value));
5511 /* Subroutine of expand_expr: return nonzero iff there is no way that
5512 EXP can reference X, which is being modified. TOP_P is nonzero if this
5513 call is going to be used to determine whether we need a temporary
5514 for EXP, as opposed to a recursive call to this function.
5516 It is always safe for this routine to return zero since it merely
5517 searches for optimization opportunities. */
5520 safe_from_p (x, exp, top_p)
5527 static tree save_expr_list;
5530 /* If EXP has varying size, we MUST use a target since we currently
5531 have no way of allocating temporaries of variable size
5532 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5533 So we assume here that something at a higher level has prevented a
5534 clash. This is somewhat bogus, but the best we can do. Only
5535 do this when X is BLKmode and when we are at the top level. */
5536 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5537 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5538 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5539 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5540 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5542 && GET_MODE (x) == BLKmode)
5543 /* If X is in the outgoing argument area, it is always safe. */
5544 || (GET_CODE (x) == MEM
5545 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5546 || (GET_CODE (XEXP (x, 0)) == PLUS
5547 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5550 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5551 find the underlying pseudo. */
5552 if (GET_CODE (x) == SUBREG)
5555 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5559 /* A SAVE_EXPR might appear many times in the expression passed to the
5560 top-level safe_from_p call, and if it has a complex subexpression,
5561 examining it multiple times could result in a combinatorial explosion.
5562 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5563 with optimization took about 28 minutes to compile -- even though it was
5564 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5565 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5566 we have processed. Note that the only test of top_p was above. */
5575 rtn = safe_from_p (x, exp, 0);
5577 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5578 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5583 /* Now look at our tree code and possibly recurse. */
5584 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5587 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5594 if (TREE_CODE (exp) == TREE_LIST)
5595 return ((TREE_VALUE (exp) == 0
5596 || safe_from_p (x, TREE_VALUE (exp), 0))
5597 && (TREE_CHAIN (exp) == 0
5598 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5599 else if (TREE_CODE (exp) == ERROR_MARK)
5600 return 1; /* An already-visited SAVE_EXPR? */
5605 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5609 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5610 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5614 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5615 the expression. If it is set, we conflict iff we are that rtx or
5616 both are in memory. Otherwise, we check all operands of the
5617 expression recursively. */
5619 switch (TREE_CODE (exp))
5622 /* If the operand is static or we are static, we can't conflict.
5623 Likewise if we don't conflict with the operand at all. */
5624 if (staticp (TREE_OPERAND (exp, 0))
5625 || TREE_STATIC (exp)
5626 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5629 /* Otherwise, the only way this can conflict is if we are taking
5630 the address of a DECL a that address if part of X, which is
5632 exp = TREE_OPERAND (exp, 0);
5635 if (!DECL_RTL_SET_P (exp)
5636 || GET_CODE (DECL_RTL (exp)) != MEM)
5639 exp_rtl = XEXP (DECL_RTL (exp), 0);
5644 if (GET_CODE (x) == MEM
5645 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5646 get_alias_set (exp)))
5651 /* Assume that the call will clobber all hard registers and
5653 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5654 || GET_CODE (x) == MEM)
5659 /* If a sequence exists, we would have to scan every instruction
5660 in the sequence to see if it was safe. This is probably not
5662 if (RTL_EXPR_SEQUENCE (exp))
5665 exp_rtl = RTL_EXPR_RTL (exp);
5668 case WITH_CLEANUP_EXPR:
5669 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5672 case CLEANUP_POINT_EXPR:
5673 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5676 exp_rtl = SAVE_EXPR_RTL (exp);
5680 /* If we've already scanned this, don't do it again. Otherwise,
5681 show we've scanned it and record for clearing the flag if we're
5683 if (TREE_PRIVATE (exp))
5686 TREE_PRIVATE (exp) = 1;
5687 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5689 TREE_PRIVATE (exp) = 0;
5693 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5697 /* The only operand we look at is operand 1. The rest aren't
5698 part of the expression. */
5699 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5701 case METHOD_CALL_EXPR:
5702 /* This takes an rtx argument, but shouldn't appear here. */
5709 /* If we have an rtx, we do not need to scan our operands. */
5713 nops = first_rtl_op (TREE_CODE (exp));
5714 for (i = 0; i < nops; i++)
5715 if (TREE_OPERAND (exp, i) != 0
5716 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5719 /* If this is a language-specific tree code, it may require
5720 special handling. */
5721 if ((unsigned int) TREE_CODE (exp)
5722 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5723 && !(*lang_hooks.safe_from_p) (x, exp))
5727 /* If we have an rtl, find any enclosed object. Then see if we conflict
5731 if (GET_CODE (exp_rtl) == SUBREG)
5733 exp_rtl = SUBREG_REG (exp_rtl);
5734 if (GET_CODE (exp_rtl) == REG
5735 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5739 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5740 are memory and they conflict. */
5741 return ! (rtx_equal_p (x, exp_rtl)
5742 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5743 && true_dependence (exp_rtl, VOIDmode, x,
5744 rtx_addr_varies_p)));
5747 /* If we reach here, it is safe. */
5751 /* Subroutine of expand_expr: return rtx if EXP is a
5752 variable or parameter; else return 0. */
5759 switch (TREE_CODE (exp))
5763 return DECL_RTL (exp);
5769 #ifdef MAX_INTEGER_COMPUTATION_MODE
5772 check_max_integer_computation_mode (exp)
5775 enum tree_code code;
5776 enum machine_mode mode;
5778 /* Strip any NOPs that don't change the mode. */
5780 code = TREE_CODE (exp);
5782 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5783 if (code == NOP_EXPR
5784 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5787 /* First check the type of the overall operation. We need only look at
5788 unary, binary and relational operations. */
5789 if (TREE_CODE_CLASS (code) == '1'
5790 || TREE_CODE_CLASS (code) == '2'
5791 || TREE_CODE_CLASS (code) == '<')
5793 mode = TYPE_MODE (TREE_TYPE (exp));
5794 if (GET_MODE_CLASS (mode) == MODE_INT
5795 && mode > MAX_INTEGER_COMPUTATION_MODE)
5796 internal_error ("unsupported wide integer operation");
5799 /* Check operand of a unary op. */
5800 if (TREE_CODE_CLASS (code) == '1')
5802 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5803 if (GET_MODE_CLASS (mode) == MODE_INT
5804 && mode > MAX_INTEGER_COMPUTATION_MODE)
5805 internal_error ("unsupported wide integer operation");
5808 /* Check operands of a binary/comparison op. */
5809 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5811 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5812 if (GET_MODE_CLASS (mode) == MODE_INT
5813 && mode > MAX_INTEGER_COMPUTATION_MODE)
5814 internal_error ("unsupported wide integer operation");
5816 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5817 if (GET_MODE_CLASS (mode) == MODE_INT
5818 && mode > MAX_INTEGER_COMPUTATION_MODE)
5819 internal_error ("unsupported wide integer operation");
5824 /* Return the highest power of two that EXP is known to be a multiple of.
5825 This is used in updating alignment of MEMs in array references. */
5827 static HOST_WIDE_INT
5828 highest_pow2_factor (exp)
5831 HOST_WIDE_INT c0, c1;
5833 switch (TREE_CODE (exp))
5836 /* If the integer is expressable in a HOST_WIDE_INT, we can find the
5837 lowest bit that's a one. If the result is zero, return
5838 BIGGEST_ALIGNMENT. We need to handle this case since we can find it
5839 in a COND_EXPR, a MIN_EXPR, or a MAX_EXPR. If the constant overlows,
5840 we have an erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5842 if (TREE_CONSTANT_OVERFLOW (exp)
5843 || integer_zerop (exp))
5844 return BIGGEST_ALIGNMENT;
5845 else if (host_integerp (exp, 0))
5847 c0 = tree_low_cst (exp, 0);
5848 c0 = c0 < 0 ? - c0 : c0;
5853 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5854 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5855 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5856 return MIN (c0, c1);
5859 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5860 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5863 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5865 if (integer_pow2p (TREE_OPERAND (exp, 1))
5866 && host_integerp (TREE_OPERAND (exp, 1), 1))
5868 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5869 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5870 return MAX (1, c0 / c1);
5874 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5875 case SAVE_EXPR: case WITH_RECORD_EXPR:
5876 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5879 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5882 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5883 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5884 return MIN (c0, c1);
5893 /* Return an object on the placeholder list that matches EXP, a
5894 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5895 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5896 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5897 is a location which initially points to a starting location in the
5898 placeholder list (zero means start of the list) and where a pointer into
5899 the placeholder list at which the object is found is placed. */
5902 find_placeholder (exp, plist)
5906 tree type = TREE_TYPE (exp);
5907 tree placeholder_expr;
5909 for (placeholder_expr
5910 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5911 placeholder_expr != 0;
5912 placeholder_expr = TREE_CHAIN (placeholder_expr))
5914 tree need_type = TYPE_MAIN_VARIANT (type);
5917 /* Find the outermost reference that is of the type we want. If none,
5918 see if any object has a type that is a pointer to the type we
5920 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5921 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5922 || TREE_CODE (elt) == COND_EXPR)
5923 ? TREE_OPERAND (elt, 1)
5924 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5925 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5926 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5927 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5928 ? TREE_OPERAND (elt, 0) : 0))
5929 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5932 *plist = placeholder_expr;
5936 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5938 = ((TREE_CODE (elt) == COMPOUND_EXPR
5939 || TREE_CODE (elt) == COND_EXPR)
5940 ? TREE_OPERAND (elt, 1)
5941 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5942 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5943 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5944 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5945 ? TREE_OPERAND (elt, 0) : 0))
5946 if (POINTER_TYPE_P (TREE_TYPE (elt))
5947 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5951 *plist = placeholder_expr;
5952 return build1 (INDIRECT_REF, need_type, elt);
5959 /* expand_expr: generate code for computing expression EXP.
5960 An rtx for the computed value is returned. The value is never null.
5961 In the case of a void EXP, const0_rtx is returned.
5963 The value may be stored in TARGET if TARGET is nonzero.
5964 TARGET is just a suggestion; callers must assume that
5965 the rtx returned may not be the same as TARGET.
5967 If TARGET is CONST0_RTX, it means that the value will be ignored.
5969 If TMODE is not VOIDmode, it suggests generating the
5970 result in mode TMODE. But this is done only when convenient.
5971 Otherwise, TMODE is ignored and the value generated in its natural mode.
5972 TMODE is just a suggestion; callers must assume that
5973 the rtx returned may not have mode TMODE.
5975 Note that TARGET may have neither TMODE nor MODE. In that case, it
5976 probably will not be used.
5978 If MODIFIER is EXPAND_SUM then when EXP is an addition
5979 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5980 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5981 products as above, or REG or MEM, or constant.
5982 Ordinarily in such cases we would output mul or add instructions
5983 and then return a pseudo reg containing the sum.
5985 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5986 it also marks a label as absolutely required (it can't be dead).
5987 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5988 This is used for outputting expressions used in initializers.
5990 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5991 with a constant address even if that address is not normally legitimate.
5992 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5995 expand_expr (exp, target, tmode, modifier)
5998 enum machine_mode tmode;
5999 enum expand_modifier modifier;
6002 tree type = TREE_TYPE (exp);
6003 int unsignedp = TREE_UNSIGNED (type);
6004 enum machine_mode mode;
6005 enum tree_code code = TREE_CODE (exp);
6007 rtx subtarget, original_target;
6011 /* Handle ERROR_MARK before anybody tries to access its type. */
6012 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6014 op0 = CONST0_RTX (tmode);
6020 mode = TYPE_MODE (type);
6021 /* Use subtarget as the target for operand 0 of a binary operation. */
6022 subtarget = get_subtarget (target);
6023 original_target = target;
6024 ignore = (target == const0_rtx
6025 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6026 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6027 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6028 && TREE_CODE (type) == VOID_TYPE));
6030 /* If we are going to ignore this result, we need only do something
6031 if there is a side-effect somewhere in the expression. If there
6032 is, short-circuit the most common cases here. Note that we must
6033 not call expand_expr with anything but const0_rtx in case this
6034 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6038 if (! TREE_SIDE_EFFECTS (exp))
6041 /* Ensure we reference a volatile object even if value is ignored, but
6042 don't do this if all we are doing is taking its address. */
6043 if (TREE_THIS_VOLATILE (exp)
6044 && TREE_CODE (exp) != FUNCTION_DECL
6045 && mode != VOIDmode && mode != BLKmode
6046 && modifier != EXPAND_CONST_ADDRESS)
6048 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6049 if (GET_CODE (temp) == MEM)
6050 temp = copy_to_reg (temp);
6054 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6055 || code == INDIRECT_REF || code == BUFFER_REF)
6056 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6059 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6060 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6062 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6063 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6066 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6067 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6068 /* If the second operand has no side effects, just evaluate
6070 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6072 else if (code == BIT_FIELD_REF)
6074 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6075 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6076 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6083 #ifdef MAX_INTEGER_COMPUTATION_MODE
6084 /* Only check stuff here if the mode we want is different from the mode
6085 of the expression; if it's the same, check_max_integer_computiation_mode
6086 will handle it. Do we really need to check this stuff at all? */
6089 && GET_MODE (target) != mode
6090 && TREE_CODE (exp) != INTEGER_CST
6091 && TREE_CODE (exp) != PARM_DECL
6092 && TREE_CODE (exp) != ARRAY_REF
6093 && TREE_CODE (exp) != ARRAY_RANGE_REF
6094 && TREE_CODE (exp) != COMPONENT_REF
6095 && TREE_CODE (exp) != BIT_FIELD_REF
6096 && TREE_CODE (exp) != INDIRECT_REF
6097 && TREE_CODE (exp) != CALL_EXPR
6098 && TREE_CODE (exp) != VAR_DECL
6099 && TREE_CODE (exp) != RTL_EXPR)
6101 enum machine_mode mode = GET_MODE (target);
6103 if (GET_MODE_CLASS (mode) == MODE_INT
6104 && mode > MAX_INTEGER_COMPUTATION_MODE)
6105 internal_error ("unsupported wide integer operation");
6109 && TREE_CODE (exp) != INTEGER_CST
6110 && TREE_CODE (exp) != PARM_DECL
6111 && TREE_CODE (exp) != ARRAY_REF
6112 && TREE_CODE (exp) != ARRAY_RANGE_REF
6113 && TREE_CODE (exp) != COMPONENT_REF
6114 && TREE_CODE (exp) != BIT_FIELD_REF
6115 && TREE_CODE (exp) != INDIRECT_REF
6116 && TREE_CODE (exp) != VAR_DECL
6117 && TREE_CODE (exp) != CALL_EXPR
6118 && TREE_CODE (exp) != RTL_EXPR
6119 && GET_MODE_CLASS (tmode) == MODE_INT
6120 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6121 internal_error ("unsupported wide integer operation");
6123 check_max_integer_computation_mode (exp);
6126 /* If will do cse, generate all results into pseudo registers
6127 since 1) that allows cse to find more things
6128 and 2) otherwise cse could produce an insn the machine
6129 cannot support. And exception is a CONSTRUCTOR into a multi-word
6130 MEM: that's much more likely to be most efficient into the MEM. */
6132 if (! cse_not_expected && mode != BLKmode && target
6133 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6134 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6141 tree function = decl_function_context (exp);
6142 /* Handle using a label in a containing function. */
6143 if (function != current_function_decl
6144 && function != inline_function_decl && function != 0)
6146 struct function *p = find_function_data (function);
6147 p->expr->x_forced_labels
6148 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6149 p->expr->x_forced_labels);
6153 if (modifier == EXPAND_INITIALIZER)
6154 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6159 temp = gen_rtx_MEM (FUNCTION_MODE,
6160 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6161 if (function != current_function_decl
6162 && function != inline_function_decl && function != 0)
6163 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6168 if (DECL_RTL (exp) == 0)
6170 error_with_decl (exp, "prior parameter's size depends on `%s'");
6171 return CONST0_RTX (mode);
6174 /* ... fall through ... */
6177 /* If a static var's type was incomplete when the decl was written,
6178 but the type is complete now, lay out the decl now. */
6179 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6180 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6182 rtx value = DECL_RTL_IF_SET (exp);
6184 layout_decl (exp, 0);
6186 /* If the RTL was already set, update its mode and memory
6190 PUT_MODE (value, DECL_MODE (exp));
6191 SET_DECL_RTL (exp, 0);
6192 set_mem_attributes (value, exp, 1);
6193 SET_DECL_RTL (exp, value);
6197 /* ... fall through ... */
6201 if (DECL_RTL (exp) == 0)
6204 /* Ensure variable marked as used even if it doesn't go through
6205 a parser. If it hasn't be used yet, write out an external
6207 if (! TREE_USED (exp))
6209 assemble_external (exp);
6210 TREE_USED (exp) = 1;
6213 /* Show we haven't gotten RTL for this yet. */
6216 /* Handle variables inherited from containing functions. */
6217 context = decl_function_context (exp);
6219 /* We treat inline_function_decl as an alias for the current function
6220 because that is the inline function whose vars, types, etc.
6221 are being merged into the current function.
6222 See expand_inline_function. */
6224 if (context != 0 && context != current_function_decl
6225 && context != inline_function_decl
6226 /* If var is static, we don't need a static chain to access it. */
6227 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6228 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6232 /* Mark as non-local and addressable. */
6233 DECL_NONLOCAL (exp) = 1;
6234 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6236 mark_addressable (exp);
6237 if (GET_CODE (DECL_RTL (exp)) != MEM)
6239 addr = XEXP (DECL_RTL (exp), 0);
6240 if (GET_CODE (addr) == MEM)
6242 = replace_equiv_address (addr,
6243 fix_lexical_addr (XEXP (addr, 0), exp));
6245 addr = fix_lexical_addr (addr, exp);
6247 temp = replace_equiv_address (DECL_RTL (exp), addr);
6250 /* This is the case of an array whose size is to be determined
6251 from its initializer, while the initializer is still being parsed.
6254 else if (GET_CODE (DECL_RTL (exp)) == MEM
6255 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6256 temp = validize_mem (DECL_RTL (exp));
6258 /* If DECL_RTL is memory, we are in the normal case and either
6259 the address is not valid or it is not a register and -fforce-addr
6260 is specified, get the address into a register. */
6262 else if (GET_CODE (DECL_RTL (exp)) == MEM
6263 && modifier != EXPAND_CONST_ADDRESS
6264 && modifier != EXPAND_SUM
6265 && modifier != EXPAND_INITIALIZER
6266 && (! memory_address_p (DECL_MODE (exp),
6267 XEXP (DECL_RTL (exp), 0))
6269 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6270 temp = replace_equiv_address (DECL_RTL (exp),
6271 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6273 /* If we got something, return it. But first, set the alignment
6274 if the address is a register. */
6277 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6278 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6283 /* If the mode of DECL_RTL does not match that of the decl, it
6284 must be a promoted value. We return a SUBREG of the wanted mode,
6285 but mark it so that we know that it was already extended. */
6287 if (GET_CODE (DECL_RTL (exp)) == REG
6288 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6290 /* Get the signedness used for this variable. Ensure we get the
6291 same mode we got when the variable was declared. */
6292 if (GET_MODE (DECL_RTL (exp))
6293 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6296 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6297 SUBREG_PROMOTED_VAR_P (temp) = 1;
6298 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6302 return DECL_RTL (exp);
6305 return immed_double_const (TREE_INT_CST_LOW (exp),
6306 TREE_INT_CST_HIGH (exp), mode);
6309 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6312 /* If optimized, generate immediate CONST_DOUBLE
6313 which will be turned into memory by reload if necessary.
6315 We used to force a register so that loop.c could see it. But
6316 this does not allow gen_* patterns to perform optimizations with
6317 the constants. It also produces two insns in cases like "x = 1.0;".
6318 On most machines, floating-point constants are not permitted in
6319 many insns, so we'd end up copying it to a register in any case.
6321 Now, we do the copying in expand_binop, if appropriate. */
6322 return immed_real_const (exp);
6326 if (! TREE_CST_RTL (exp))
6327 output_constant_def (exp, 1);
6329 /* TREE_CST_RTL probably contains a constant address.
6330 On RISC machines where a constant address isn't valid,
6331 make some insns to get that address into a register. */
6332 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6333 && modifier != EXPAND_CONST_ADDRESS
6334 && modifier != EXPAND_INITIALIZER
6335 && modifier != EXPAND_SUM
6336 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6338 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6339 return replace_equiv_address (TREE_CST_RTL (exp),
6340 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6341 return TREE_CST_RTL (exp);
6343 case EXPR_WITH_FILE_LOCATION:
6346 const char *saved_input_filename = input_filename;
6347 int saved_lineno = lineno;
6348 input_filename = EXPR_WFL_FILENAME (exp);
6349 lineno = EXPR_WFL_LINENO (exp);
6350 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6351 emit_line_note (input_filename, lineno);
6352 /* Possibly avoid switching back and forth here. */
6353 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6354 input_filename = saved_input_filename;
6355 lineno = saved_lineno;
6360 context = decl_function_context (exp);
6362 /* If this SAVE_EXPR was at global context, assume we are an
6363 initialization function and move it into our context. */
6365 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6367 /* We treat inline_function_decl as an alias for the current function
6368 because that is the inline function whose vars, types, etc.
6369 are being merged into the current function.
6370 See expand_inline_function. */
6371 if (context == current_function_decl || context == inline_function_decl)
6374 /* If this is non-local, handle it. */
6377 /* The following call just exists to abort if the context is
6378 not of a containing function. */
6379 find_function_data (context);
6381 temp = SAVE_EXPR_RTL (exp);
6382 if (temp && GET_CODE (temp) == REG)
6384 put_var_into_stack (exp);
6385 temp = SAVE_EXPR_RTL (exp);
6387 if (temp == 0 || GET_CODE (temp) != MEM)
6390 replace_equiv_address (temp,
6391 fix_lexical_addr (XEXP (temp, 0), exp));
6393 if (SAVE_EXPR_RTL (exp) == 0)
6395 if (mode == VOIDmode)
6398 temp = assign_temp (build_qualified_type (type,
6400 | TYPE_QUAL_CONST)),
6403 SAVE_EXPR_RTL (exp) = temp;
6404 if (!optimize && GET_CODE (temp) == REG)
6405 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6408 /* If the mode of TEMP does not match that of the expression, it
6409 must be a promoted value. We pass store_expr a SUBREG of the
6410 wanted mode but mark it so that we know that it was already
6411 extended. Note that `unsignedp' was modified above in
6414 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6416 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6417 SUBREG_PROMOTED_VAR_P (temp) = 1;
6418 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6421 if (temp == const0_rtx)
6422 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6424 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6426 TREE_USED (exp) = 1;
6429 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6430 must be a promoted value. We return a SUBREG of the wanted mode,
6431 but mark it so that we know that it was already extended. */
6433 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6434 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6436 /* Compute the signedness and make the proper SUBREG. */
6437 promote_mode (type, mode, &unsignedp, 0);
6438 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6439 SUBREG_PROMOTED_VAR_P (temp) = 1;
6440 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6444 return SAVE_EXPR_RTL (exp);
6449 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6450 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6454 case PLACEHOLDER_EXPR:
6456 tree old_list = placeholder_list;
6457 tree placeholder_expr = 0;
6459 exp = find_placeholder (exp, &placeholder_expr);
6463 placeholder_list = TREE_CHAIN (placeholder_expr);
6464 temp = expand_expr (exp, original_target, tmode, modifier);
6465 placeholder_list = old_list;
6469 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6472 case WITH_RECORD_EXPR:
6473 /* Put the object on the placeholder list, expand our first operand,
6474 and pop the list. */
6475 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6477 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6479 placeholder_list = TREE_CHAIN (placeholder_list);
6483 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6484 expand_goto (TREE_OPERAND (exp, 0));
6486 expand_computed_goto (TREE_OPERAND (exp, 0));
6490 expand_exit_loop_if_false (NULL,
6491 invert_truthvalue (TREE_OPERAND (exp, 0)));
6494 case LABELED_BLOCK_EXPR:
6495 if (LABELED_BLOCK_BODY (exp))
6496 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6497 /* Should perhaps use expand_label, but this is simpler and safer. */
6498 do_pending_stack_adjust ();
6499 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6502 case EXIT_BLOCK_EXPR:
6503 if (EXIT_BLOCK_RETURN (exp))
6504 sorry ("returned value in block_exit_expr");
6505 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6510 expand_start_loop (1);
6511 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6519 tree vars = TREE_OPERAND (exp, 0);
6520 int vars_need_expansion = 0;
6522 /* Need to open a binding contour here because
6523 if there are any cleanups they must be contained here. */
6524 expand_start_bindings (2);
6526 /* Mark the corresponding BLOCK for output in its proper place. */
6527 if (TREE_OPERAND (exp, 2) != 0
6528 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6529 insert_block (TREE_OPERAND (exp, 2));
6531 /* If VARS have not yet been expanded, expand them now. */
6534 if (!DECL_RTL_SET_P (vars))
6536 vars_need_expansion = 1;
6539 expand_decl_init (vars);
6540 vars = TREE_CHAIN (vars);
6543 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6545 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6551 if (RTL_EXPR_SEQUENCE (exp))
6553 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6555 emit_insns (RTL_EXPR_SEQUENCE (exp));
6556 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6558 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6559 free_temps_for_rtl_expr (exp);
6560 return RTL_EXPR_RTL (exp);
6563 /* If we don't need the result, just ensure we evaluate any
6569 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6570 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6575 /* All elts simple constants => refer to a constant in memory. But
6576 if this is a non-BLKmode mode, let it store a field at a time
6577 since that should make a CONST_INT or CONST_DOUBLE when we
6578 fold. Likewise, if we have a target we can use, it is best to
6579 store directly into the target unless the type is large enough
6580 that memcpy will be used. If we are making an initializer and
6581 all operands are constant, put it in memory as well. */
6582 else if ((TREE_STATIC (exp)
6583 && ((mode == BLKmode
6584 && ! (target != 0 && safe_from_p (target, exp, 1)))
6585 || TREE_ADDRESSABLE (exp)
6586 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6587 && (! MOVE_BY_PIECES_P
6588 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6590 && ! mostly_zeros_p (exp))))
6591 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6593 rtx constructor = output_constant_def (exp, 1);
6595 if (modifier != EXPAND_CONST_ADDRESS
6596 && modifier != EXPAND_INITIALIZER
6597 && modifier != EXPAND_SUM)
6598 constructor = validize_mem (constructor);
6604 /* Handle calls that pass values in multiple non-contiguous
6605 locations. The Irix 6 ABI has examples of this. */
6606 if (target == 0 || ! safe_from_p (target, exp, 1)
6607 || GET_CODE (target) == PARALLEL)
6609 = assign_temp (build_qualified_type (type,
6611 | (TREE_READONLY (exp)
6612 * TYPE_QUAL_CONST))),
6613 0, TREE_ADDRESSABLE (exp), 1);
6615 store_constructor (exp, target, 0,
6616 int_size_in_bytes (TREE_TYPE (exp)));
6622 tree exp1 = TREE_OPERAND (exp, 0);
6624 tree string = string_constant (exp1, &index);
6626 /* Try to optimize reads from const strings. */
6628 && TREE_CODE (string) == STRING_CST
6629 && TREE_CODE (index) == INTEGER_CST
6630 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6631 && GET_MODE_CLASS (mode) == MODE_INT
6632 && GET_MODE_SIZE (mode) == 1
6633 && modifier != EXPAND_WRITE)
6635 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6637 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6638 op0 = memory_address (mode, op0);
6639 temp = gen_rtx_MEM (mode, op0);
6640 set_mem_attributes (temp, exp, 0);
6642 /* If we are writing to this object and its type is a record with
6643 readonly fields, we must mark it as readonly so it will
6644 conflict with readonly references to those fields. */
6645 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6646 RTX_UNCHANGING_P (temp) = 1;
6652 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6656 tree array = TREE_OPERAND (exp, 0);
6657 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6658 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6659 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6662 /* Optimize the special-case of a zero lower bound.
6664 We convert the low_bound to sizetype to avoid some problems
6665 with constant folding. (E.g. suppose the lower bound is 1,
6666 and its mode is QI. Without the conversion, (ARRAY
6667 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6668 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6670 if (! integer_zerop (low_bound))
6671 index = size_diffop (index, convert (sizetype, low_bound));
6673 /* Fold an expression like: "foo"[2].
6674 This is not done in fold so it won't happen inside &.
6675 Don't fold if this is for wide characters since it's too
6676 difficult to do correctly and this is a very rare case. */
6678 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6679 && TREE_CODE (array) == STRING_CST
6680 && TREE_CODE (index) == INTEGER_CST
6681 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6682 && GET_MODE_CLASS (mode) == MODE_INT
6683 && GET_MODE_SIZE (mode) == 1)
6685 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6687 /* If this is a constant index into a constant array,
6688 just get the value from the array. Handle both the cases when
6689 we have an explicit constructor and when our operand is a variable
6690 that was declared const. */
6692 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6693 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6694 && TREE_CODE (index) == INTEGER_CST
6695 && 0 > compare_tree_int (index,
6696 list_length (CONSTRUCTOR_ELTS
6697 (TREE_OPERAND (exp, 0)))))
6701 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6702 i = TREE_INT_CST_LOW (index);
6703 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6707 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6711 else if (optimize >= 1
6712 && modifier != EXPAND_CONST_ADDRESS
6713 && modifier != EXPAND_INITIALIZER
6714 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6715 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6716 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6718 if (TREE_CODE (index) == INTEGER_CST)
6720 tree init = DECL_INITIAL (array);
6722 if (TREE_CODE (init) == CONSTRUCTOR)
6726 for (elem = CONSTRUCTOR_ELTS (init);
6728 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6729 elem = TREE_CHAIN (elem))
6732 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6733 return expand_expr (fold (TREE_VALUE (elem)), target,
6736 else if (TREE_CODE (init) == STRING_CST
6737 && 0 > compare_tree_int (index,
6738 TREE_STRING_LENGTH (init)))
6740 tree type = TREE_TYPE (TREE_TYPE (init));
6741 enum machine_mode mode = TYPE_MODE (type);
6743 if (GET_MODE_CLASS (mode) == MODE_INT
6744 && GET_MODE_SIZE (mode) == 1)
6746 (TREE_STRING_POINTER
6747 (init)[TREE_INT_CST_LOW (index)]));
6756 case ARRAY_RANGE_REF:
6757 /* If the operand is a CONSTRUCTOR, we can just extract the
6758 appropriate field if it is present. Don't do this if we have
6759 already written the data since we want to refer to that copy
6760 and varasm.c assumes that's what we'll do. */
6761 if (code == COMPONENT_REF
6762 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6763 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6767 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6768 elt = TREE_CHAIN (elt))
6769 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6770 /* We can normally use the value of the field in the
6771 CONSTRUCTOR. However, if this is a bitfield in
6772 an integral mode that we can fit in a HOST_WIDE_INT,
6773 we must mask only the number of bits in the bitfield,
6774 since this is done implicitly by the constructor. If
6775 the bitfield does not meet either of those conditions,
6776 we can't do this optimization. */
6777 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6778 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6780 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6781 <= HOST_BITS_PER_WIDE_INT))))
6783 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6784 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6786 HOST_WIDE_INT bitsize
6787 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6789 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6791 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6792 op0 = expand_and (op0, op1, target);
6796 enum machine_mode imode
6797 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6799 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6802 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6804 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6814 enum machine_mode mode1;
6815 HOST_WIDE_INT bitsize, bitpos;
6818 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6819 &mode1, &unsignedp, &volatilep);
6822 /* If we got back the original object, something is wrong. Perhaps
6823 we are evaluating an expression too early. In any event, don't
6824 infinitely recurse. */
6828 /* If TEM's type is a union of variable size, pass TARGET to the inner
6829 computation, since it will need a temporary and TARGET is known
6830 to have to do. This occurs in unchecked conversion in Ada. */
6834 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6835 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6837 ? target : NULL_RTX),
6839 (modifier == EXPAND_INITIALIZER
6840 || modifier == EXPAND_CONST_ADDRESS)
6841 ? modifier : EXPAND_NORMAL);
6843 /* If this is a constant, put it into a register if it is a
6844 legitimate constant and OFFSET is 0 and memory if it isn't. */
6845 if (CONSTANT_P (op0))
6847 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6848 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6850 op0 = force_reg (mode, op0);
6852 op0 = validize_mem (force_const_mem (mode, op0));
6857 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6859 /* If this object is in a register, put it into memory.
6860 This case can't occur in C, but can in Ada if we have
6861 unchecked conversion of an expression from a scalar type to
6862 an array or record type. */
6863 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6864 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6866 /* If the operand is a SAVE_EXPR, we can deal with this by
6867 forcing the SAVE_EXPR into memory. */
6868 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6870 put_var_into_stack (TREE_OPERAND (exp, 0));
6871 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6876 = build_qualified_type (TREE_TYPE (tem),
6877 (TYPE_QUALS (TREE_TYPE (tem))
6878 | TYPE_QUAL_CONST));
6879 rtx memloc = assign_temp (nt, 1, 1, 1);
6881 emit_move_insn (memloc, op0);
6886 if (GET_CODE (op0) != MEM)
6889 if (GET_MODE (offset_rtx) != ptr_mode)
6890 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6892 #ifdef POINTERS_EXTEND_UNSIGNED
6893 if (GET_MODE (offset_rtx) != Pmode)
6894 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6897 /* A constant address in OP0 can have VOIDmode, we must not try
6898 to call force_reg for that case. Avoid that case. */
6899 if (GET_CODE (op0) == MEM
6900 && GET_MODE (op0) == BLKmode
6901 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6903 && (bitpos % bitsize) == 0
6904 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6905 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6907 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6909 if (GET_CODE (XEXP (temp, 0)) == REG)
6912 op0 = (replace_equiv_address
6914 force_reg (GET_MODE (XEXP (temp, 0)),
6919 op0 = offset_address (op0, offset_rtx,
6920 highest_pow2_factor (offset));
6923 /* Don't forget about volatility even if this is a bitfield. */
6924 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6926 if (op0 == orig_op0)
6927 op0 = copy_rtx (op0);
6929 MEM_VOLATILE_P (op0) = 1;
6932 /* In cases where an aligned union has an unaligned object
6933 as a field, we might be extracting a BLKmode value from
6934 an integer-mode (e.g., SImode) object. Handle this case
6935 by doing the extract into an object as wide as the field
6936 (which we know to be the width of a basic mode), then
6937 storing into memory, and changing the mode to BLKmode. */
6938 if (mode1 == VOIDmode
6939 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6940 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6941 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6942 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6943 && modifier != EXPAND_CONST_ADDRESS
6944 && modifier != EXPAND_INITIALIZER)
6945 /* If the field isn't aligned enough to fetch as a memref,
6946 fetch it as a bit field. */
6947 || (mode1 != BLKmode
6948 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6949 && ((TYPE_ALIGN (TREE_TYPE (tem))
6950 < GET_MODE_ALIGNMENT (mode))
6951 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6952 /* If the type and the field are a constant size and the
6953 size of the type isn't the same size as the bitfield,
6954 we must use bitfield operations. */
6956 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6958 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6961 enum machine_mode ext_mode = mode;
6963 if (ext_mode == BLKmode
6964 && ! (target != 0 && GET_CODE (op0) == MEM
6965 && GET_CODE (target) == MEM
6966 && bitpos % BITS_PER_UNIT == 0))
6967 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6969 if (ext_mode == BLKmode)
6971 /* In this case, BITPOS must start at a byte boundary and
6972 TARGET, if specified, must be a MEM. */
6973 if (GET_CODE (op0) != MEM
6974 || (target != 0 && GET_CODE (target) != MEM)
6975 || bitpos % BITS_PER_UNIT != 0)
6978 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6980 target = assign_temp (type, 0, 1, 1);
6982 emit_block_move (target, op0,
6983 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6989 op0 = validize_mem (op0);
6991 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6992 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6994 op0 = extract_bit_field (op0, bitsize, bitpos,
6995 unsignedp, target, ext_mode, ext_mode,
6996 int_size_in_bytes (TREE_TYPE (tem)));
6998 /* If the result is a record type and BITSIZE is narrower than
6999 the mode of OP0, an integral mode, and this is a big endian
7000 machine, we must put the field into the high-order bits. */
7001 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7002 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7003 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7004 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7005 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7009 if (mode == BLKmode)
7011 rtx new = assign_temp (build_qualified_type
7012 (type_for_mode (ext_mode, 0),
7013 TYPE_QUAL_CONST), 0, 1, 1);
7015 emit_move_insn (new, op0);
7016 op0 = copy_rtx (new);
7017 PUT_MODE (op0, BLKmode);
7018 set_mem_attributes (op0, exp, 1);
7024 /* If the result is BLKmode, use that to access the object
7026 if (mode == BLKmode)
7029 /* Get a reference to just this component. */
7030 if (modifier == EXPAND_CONST_ADDRESS
7031 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7032 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7034 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7036 if (op0 == orig_op0)
7037 op0 = copy_rtx (op0);
7039 set_mem_attributes (op0, exp, 0);
7040 if (GET_CODE (XEXP (op0, 0)) == REG)
7041 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7043 MEM_VOLATILE_P (op0) |= volatilep;
7044 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7045 || modifier == EXPAND_CONST_ADDRESS
7046 || modifier == EXPAND_INITIALIZER)
7048 else if (target == 0)
7049 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7051 convert_move (target, op0, unsignedp);
7057 rtx insn, before = get_last_insn (), vtbl_ref;
7059 /* Evaluate the interior expression. */
7060 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7063 /* Get or create an instruction off which to hang a note. */
7064 if (REG_P (subtarget))
7067 insn = get_last_insn ();
7070 if (! INSN_P (insn))
7071 insn = prev_nonnote_insn (insn);
7075 target = gen_reg_rtx (GET_MODE (subtarget));
7076 insn = emit_move_insn (target, subtarget);
7079 /* Collect the data for the note. */
7080 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7081 vtbl_ref = plus_constant (vtbl_ref,
7082 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7083 /* Discard the initial CONST that was added. */
7084 vtbl_ref = XEXP (vtbl_ref, 0);
7087 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7092 /* Intended for a reference to a buffer of a file-object in Pascal.
7093 But it's not certain that a special tree code will really be
7094 necessary for these. INDIRECT_REF might work for them. */
7100 /* Pascal set IN expression.
7103 rlo = set_low - (set_low%bits_per_word);
7104 the_word = set [ (index - rlo)/bits_per_word ];
7105 bit_index = index % bits_per_word;
7106 bitmask = 1 << bit_index;
7107 return !!(the_word & bitmask); */
7109 tree set = TREE_OPERAND (exp, 0);
7110 tree index = TREE_OPERAND (exp, 1);
7111 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7112 tree set_type = TREE_TYPE (set);
7113 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7114 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7115 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7116 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7117 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7118 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7119 rtx setaddr = XEXP (setval, 0);
7120 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7122 rtx diff, quo, rem, addr, bit, result;
7124 /* If domain is empty, answer is no. Likewise if index is constant
7125 and out of bounds. */
7126 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7127 && TREE_CODE (set_low_bound) == INTEGER_CST
7128 && tree_int_cst_lt (set_high_bound, set_low_bound))
7129 || (TREE_CODE (index) == INTEGER_CST
7130 && TREE_CODE (set_low_bound) == INTEGER_CST
7131 && tree_int_cst_lt (index, set_low_bound))
7132 || (TREE_CODE (set_high_bound) == INTEGER_CST
7133 && TREE_CODE (index) == INTEGER_CST
7134 && tree_int_cst_lt (set_high_bound, index))))
7138 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7140 /* If we get here, we have to generate the code for both cases
7141 (in range and out of range). */
7143 op0 = gen_label_rtx ();
7144 op1 = gen_label_rtx ();
7146 if (! (GET_CODE (index_val) == CONST_INT
7147 && GET_CODE (lo_r) == CONST_INT))
7148 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7149 GET_MODE (index_val), iunsignedp, op1);
7151 if (! (GET_CODE (index_val) == CONST_INT
7152 && GET_CODE (hi_r) == CONST_INT))
7153 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7154 GET_MODE (index_val), iunsignedp, op1);
7156 /* Calculate the element number of bit zero in the first word
7158 if (GET_CODE (lo_r) == CONST_INT)
7159 rlow = GEN_INT (INTVAL (lo_r)
7160 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7162 rlow = expand_binop (index_mode, and_optab, lo_r,
7163 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7164 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7166 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7167 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7169 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7170 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7171 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7172 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7174 addr = memory_address (byte_mode,
7175 expand_binop (index_mode, add_optab, diff,
7176 setaddr, NULL_RTX, iunsignedp,
7179 /* Extract the bit we want to examine. */
7180 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7181 gen_rtx_MEM (byte_mode, addr),
7182 make_tree (TREE_TYPE (index), rem),
7184 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7185 GET_MODE (target) == byte_mode ? target : 0,
7186 1, OPTAB_LIB_WIDEN);
7188 if (result != target)
7189 convert_move (target, result, 1);
7191 /* Output the code to handle the out-of-range case. */
7194 emit_move_insn (target, const0_rtx);
7199 case WITH_CLEANUP_EXPR:
7200 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7202 WITH_CLEANUP_EXPR_RTL (exp)
7203 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7204 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7206 /* That's it for this cleanup. */
7207 TREE_OPERAND (exp, 1) = 0;
7209 return WITH_CLEANUP_EXPR_RTL (exp);
7211 case CLEANUP_POINT_EXPR:
7213 /* Start a new binding layer that will keep track of all cleanup
7214 actions to be performed. */
7215 expand_start_bindings (2);
7217 target_temp_slot_level = temp_slot_level;
7219 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7220 /* If we're going to use this value, load it up now. */
7222 op0 = force_not_mem (op0);
7223 preserve_temp_slots (op0);
7224 expand_end_bindings (NULL_TREE, 0, 0);
7229 /* Check for a built-in function. */
7230 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7231 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7233 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7235 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7236 == BUILT_IN_FRONTEND)
7237 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7239 return expand_builtin (exp, target, subtarget, tmode, ignore);
7242 return expand_call (exp, target, ignore);
7244 case NON_LVALUE_EXPR:
7247 case REFERENCE_EXPR:
7248 if (TREE_OPERAND (exp, 0) == error_mark_node)
7251 if (TREE_CODE (type) == UNION_TYPE)
7253 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7255 /* If both input and output are BLKmode, this conversion isn't doing
7256 anything except possibly changing memory attribute. */
7257 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7259 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7262 result = copy_rtx (result);
7263 set_mem_attributes (result, exp, 0);
7268 target = assign_temp (type, 0, 1, 1);
7270 if (GET_CODE (target) == MEM)
7271 /* Store data into beginning of memory target. */
7272 store_expr (TREE_OPERAND (exp, 0),
7273 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7275 else if (GET_CODE (target) == REG)
7276 /* Store this field into a union of the proper type. */
7277 store_field (target,
7278 MIN ((int_size_in_bytes (TREE_TYPE
7279 (TREE_OPERAND (exp, 0)))
7281 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7282 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7283 VOIDmode, 0, type, 0);
7287 /* Return the entire union. */
7291 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7293 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7296 /* If the signedness of the conversion differs and OP0 is
7297 a promoted SUBREG, clear that indication since we now
7298 have to do the proper extension. */
7299 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7300 && GET_CODE (op0) == SUBREG)
7301 SUBREG_PROMOTED_VAR_P (op0) = 0;
7306 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7307 if (GET_MODE (op0) == mode)
7310 /* If OP0 is a constant, just convert it into the proper mode. */
7311 if (CONSTANT_P (op0))
7313 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7314 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7316 if (modifier == EXPAND_INITIALIZER)
7317 return simplify_gen_subreg (mode, op0, inner_mode,
7318 subreg_lowpart_offset (mode,
7321 return convert_modes (mode, inner_mode, op0,
7322 TREE_UNSIGNED (inner_type));
7325 if (modifier == EXPAND_INITIALIZER)
7326 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7330 convert_to_mode (mode, op0,
7331 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7333 convert_move (target, op0,
7334 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7337 case VIEW_CONVERT_EXPR:
7338 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7340 /* If the input and output modes are both the same, we are done.
7341 Otherwise, if neither mode is BLKmode and both are within a word, we
7342 can use gen_lowpart. If neither is true, make sure the operand is
7343 in memory and convert the MEM to the new mode. */
7344 if (TYPE_MODE (type) == GET_MODE (op0))
7346 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7347 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7348 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7349 op0 = gen_lowpart (TYPE_MODE (type), op0);
7350 else if (GET_CODE (op0) != MEM)
7352 /* If the operand is not a MEM, force it into memory. Since we
7353 are going to be be changing the mode of the MEM, don't call
7354 force_const_mem for constants because we don't allow pool
7355 constants to change mode. */
7356 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7358 if (TREE_ADDRESSABLE (exp))
7361 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7363 = assign_stack_temp_for_type
7364 (TYPE_MODE (inner_type),
7365 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7367 emit_move_insn (target, op0);
7371 /* At this point, OP0 is in the correct mode. If the output type is such
7372 that the operand is known to be aligned, indicate that it is.
7373 Otherwise, we need only be concerned about alignment for non-BLKmode
7375 if (GET_CODE (op0) == MEM)
7377 op0 = copy_rtx (op0);
7379 if (TYPE_ALIGN_OK (type))
7380 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7381 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7382 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7384 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7385 HOST_WIDE_INT temp_size
7386 = MAX (int_size_in_bytes (inner_type),
7387 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7388 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7389 temp_size, 0, type);
7390 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7392 if (TREE_ADDRESSABLE (exp))
7395 if (GET_MODE (op0) == BLKmode)
7396 emit_block_move (new_with_op0_mode, op0,
7397 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7399 emit_move_insn (new_with_op0_mode, op0);
7404 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7410 /* We come here from MINUS_EXPR when the second operand is a
7413 this_optab = ! unsignedp && flag_trapv
7414 && (GET_MODE_CLASS (mode) == MODE_INT)
7415 ? addv_optab : add_optab;
7417 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7418 something else, make sure we add the register to the constant and
7419 then to the other thing. This case can occur during strength
7420 reduction and doing it this way will produce better code if the
7421 frame pointer or argument pointer is eliminated.
7423 fold-const.c will ensure that the constant is always in the inner
7424 PLUS_EXPR, so the only case we need to do anything about is if
7425 sp, ap, or fp is our second argument, in which case we must swap
7426 the innermost first argument and our second argument. */
7428 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7429 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7430 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7431 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7432 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7433 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7435 tree t = TREE_OPERAND (exp, 1);
7437 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7438 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7441 /* If the result is to be ptr_mode and we are adding an integer to
7442 something, we might be forming a constant. So try to use
7443 plus_constant. If it produces a sum and we can't accept it,
7444 use force_operand. This allows P = &ARR[const] to generate
7445 efficient code on machines where a SYMBOL_REF is not a valid
7448 If this is an EXPAND_SUM call, always return the sum. */
7449 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7450 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7452 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7453 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7454 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7458 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7460 /* Use immed_double_const to ensure that the constant is
7461 truncated according to the mode of OP1, then sign extended
7462 to a HOST_WIDE_INT. Using the constant directly can result
7463 in non-canonical RTL in a 64x32 cross compile. */
7465 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7467 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7468 op1 = plus_constant (op1, INTVAL (constant_part));
7469 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7470 op1 = force_operand (op1, target);
7474 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7475 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7476 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7480 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7482 if (! CONSTANT_P (op0))
7484 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7485 VOIDmode, modifier);
7486 /* Don't go to both_summands if modifier
7487 says it's not right to return a PLUS. */
7488 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7492 /* Use immed_double_const to ensure that the constant is
7493 truncated according to the mode of OP1, then sign extended
7494 to a HOST_WIDE_INT. Using the constant directly can result
7495 in non-canonical RTL in a 64x32 cross compile. */
7497 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7499 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7500 op0 = plus_constant (op0, INTVAL (constant_part));
7501 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7502 op0 = force_operand (op0, target);
7507 /* No sense saving up arithmetic to be done
7508 if it's all in the wrong mode to form part of an address.
7509 And force_operand won't know whether to sign-extend or
7511 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7512 || mode != ptr_mode)
7515 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7518 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7519 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7522 /* Make sure any term that's a sum with a constant comes last. */
7523 if (GET_CODE (op0) == PLUS
7524 && CONSTANT_P (XEXP (op0, 1)))
7530 /* If adding to a sum including a constant,
7531 associate it to put the constant outside. */
7532 if (GET_CODE (op1) == PLUS
7533 && CONSTANT_P (XEXP (op1, 1)))
7535 rtx constant_term = const0_rtx;
7537 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7540 /* Ensure that MULT comes first if there is one. */
7541 else if (GET_CODE (op0) == MULT)
7542 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7544 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7546 /* Let's also eliminate constants from op0 if possible. */
7547 op0 = eliminate_constant_term (op0, &constant_term);
7549 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7550 their sum should be a constant. Form it into OP1, since the
7551 result we want will then be OP0 + OP1. */
7553 temp = simplify_binary_operation (PLUS, mode, constant_term,
7558 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7561 /* Put a constant term last and put a multiplication first. */
7562 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7563 temp = op1, op1 = op0, op0 = temp;
7565 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7566 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7569 /* For initializers, we are allowed to return a MINUS of two
7570 symbolic constants. Here we handle all cases when both operands
7572 /* Handle difference of two symbolic constants,
7573 for the sake of an initializer. */
7574 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7575 && really_constant_p (TREE_OPERAND (exp, 0))
7576 && really_constant_p (TREE_OPERAND (exp, 1)))
7578 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7580 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7583 /* If the last operand is a CONST_INT, use plus_constant of
7584 the negated constant. Else make the MINUS. */
7585 if (GET_CODE (op1) == CONST_INT)
7586 return plus_constant (op0, - INTVAL (op1));
7588 return gen_rtx_MINUS (mode, op0, op1);
7590 /* Convert A - const to A + (-const). */
7591 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7593 tree negated = fold (build1 (NEGATE_EXPR, type,
7594 TREE_OPERAND (exp, 1)));
7596 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7597 /* If we can't negate the constant in TYPE, leave it alone and
7598 expand_binop will negate it for us. We used to try to do it
7599 here in the signed version of TYPE, but that doesn't work
7600 on POINTER_TYPEs. */;
7603 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7607 this_optab = ! unsignedp && flag_trapv
7608 && (GET_MODE_CLASS(mode) == MODE_INT)
7609 ? subv_optab : sub_optab;
7613 /* If first operand is constant, swap them.
7614 Thus the following special case checks need only
7615 check the second operand. */
7616 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7618 tree t1 = TREE_OPERAND (exp, 0);
7619 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7620 TREE_OPERAND (exp, 1) = t1;
7623 /* Attempt to return something suitable for generating an
7624 indexed address, for machines that support that. */
7626 if (modifier == EXPAND_SUM && mode == ptr_mode
7627 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7628 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7630 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7633 /* Apply distributive law if OP0 is x+c. */
7634 if (GET_CODE (op0) == PLUS
7635 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7640 (mode, XEXP (op0, 0),
7641 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7642 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7643 * INTVAL (XEXP (op0, 1))));
7645 if (GET_CODE (op0) != REG)
7646 op0 = force_operand (op0, NULL_RTX);
7647 if (GET_CODE (op0) != REG)
7648 op0 = copy_to_mode_reg (mode, op0);
7651 gen_rtx_MULT (mode, op0,
7652 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7655 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7658 /* Check for multiplying things that have been extended
7659 from a narrower type. If this machine supports multiplying
7660 in that narrower type with a result in the desired type,
7661 do it that way, and avoid the explicit type-conversion. */
7662 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7663 && TREE_CODE (type) == INTEGER_TYPE
7664 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7665 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7666 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7667 && int_fits_type_p (TREE_OPERAND (exp, 1),
7668 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7669 /* Don't use a widening multiply if a shift will do. */
7670 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7671 > HOST_BITS_PER_WIDE_INT)
7672 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7674 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7675 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7677 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7678 /* If both operands are extended, they must either both
7679 be zero-extended or both be sign-extended. */
7680 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7682 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7684 enum machine_mode innermode
7685 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7686 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7687 ? smul_widen_optab : umul_widen_optab);
7688 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7689 ? umul_widen_optab : smul_widen_optab);
7690 if (mode == GET_MODE_WIDER_MODE (innermode))
7692 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7694 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7695 NULL_RTX, VOIDmode, 0);
7696 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7697 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7700 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7701 NULL_RTX, VOIDmode, 0);
7704 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7705 && innermode == word_mode)
7708 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7709 NULL_RTX, VOIDmode, 0);
7710 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7711 op1 = convert_modes (innermode, mode,
7712 expand_expr (TREE_OPERAND (exp, 1),
7713 NULL_RTX, VOIDmode, 0),
7716 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7717 NULL_RTX, VOIDmode, 0);
7718 temp = expand_binop (mode, other_optab, op0, op1, target,
7719 unsignedp, OPTAB_LIB_WIDEN);
7720 htem = expand_mult_highpart_adjust (innermode,
7721 gen_highpart (innermode, temp),
7723 gen_highpart (innermode, temp),
7725 emit_move_insn (gen_highpart (innermode, temp), htem);
7730 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7731 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7732 return expand_mult (mode, op0, op1, target, unsignedp);
7734 case TRUNC_DIV_EXPR:
7735 case FLOOR_DIV_EXPR:
7737 case ROUND_DIV_EXPR:
7738 case EXACT_DIV_EXPR:
7739 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7741 /* Possible optimization: compute the dividend with EXPAND_SUM
7742 then if the divisor is constant can optimize the case
7743 where some terms of the dividend have coeffs divisible by it. */
7744 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7745 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7746 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7749 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7750 expensive divide. If not, combine will rebuild the original
7752 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7753 && !real_onep (TREE_OPERAND (exp, 0)))
7754 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7755 build (RDIV_EXPR, type,
7756 build_real (type, dconst1),
7757 TREE_OPERAND (exp, 1))),
7758 target, tmode, unsignedp);
7759 this_optab = sdiv_optab;
7762 case TRUNC_MOD_EXPR:
7763 case FLOOR_MOD_EXPR:
7765 case ROUND_MOD_EXPR:
7766 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7768 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7769 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7770 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7772 case FIX_ROUND_EXPR:
7773 case FIX_FLOOR_EXPR:
7775 abort (); /* Not used for C. */
7777 case FIX_TRUNC_EXPR:
7778 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7780 target = gen_reg_rtx (mode);
7781 expand_fix (target, op0, unsignedp);
7785 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7787 target = gen_reg_rtx (mode);
7788 /* expand_float can't figure out what to do if FROM has VOIDmode.
7789 So give it the correct mode. With -O, cse will optimize this. */
7790 if (GET_MODE (op0) == VOIDmode)
7791 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7793 expand_float (target, op0,
7794 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7798 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7799 temp = expand_unop (mode,
7800 ! unsignedp && flag_trapv
7801 && (GET_MODE_CLASS(mode) == MODE_INT)
7802 ? negv_optab : neg_optab, op0, target, 0);
7808 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7810 /* Handle complex values specially. */
7811 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7812 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7813 return expand_complex_abs (mode, op0, target, unsignedp);
7815 /* Unsigned abs is simply the operand. Testing here means we don't
7816 risk generating incorrect code below. */
7817 if (TREE_UNSIGNED (type))
7820 return expand_abs (mode, op0, target, unsignedp,
7821 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7825 target = original_target;
7826 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7827 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7828 || GET_MODE (target) != mode
7829 || (GET_CODE (target) == REG
7830 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7831 target = gen_reg_rtx (mode);
7832 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7833 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7835 /* First try to do it with a special MIN or MAX instruction.
7836 If that does not win, use a conditional jump to select the proper
7838 this_optab = (TREE_UNSIGNED (type)
7839 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7840 : (code == MIN_EXPR ? smin_optab : smax_optab));
7842 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7847 /* At this point, a MEM target is no longer useful; we will get better
7850 if (GET_CODE (target) == MEM)
7851 target = gen_reg_rtx (mode);
7854 emit_move_insn (target, op0);
7856 op0 = gen_label_rtx ();
7858 /* If this mode is an integer too wide to compare properly,
7859 compare word by word. Rely on cse to optimize constant cases. */
7860 if (GET_MODE_CLASS (mode) == MODE_INT
7861 && ! can_compare_p (GE, mode, ccp_jump))
7863 if (code == MAX_EXPR)
7864 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7865 target, op1, NULL_RTX, op0);
7867 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7868 op1, target, NULL_RTX, op0);
7872 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7873 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7874 unsignedp, mode, NULL_RTX, NULL_RTX,
7877 emit_move_insn (target, op1);
7882 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7883 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7889 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7890 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7895 /* ??? Can optimize bitwise operations with one arg constant.
7896 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7897 and (a bitwise1 b) bitwise2 b (etc)
7898 but that is probably not worth while. */
7900 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7901 boolean values when we want in all cases to compute both of them. In
7902 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7903 as actual zero-or-1 values and then bitwise anding. In cases where
7904 there cannot be any side effects, better code would be made by
7905 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7906 how to recognize those cases. */
7908 case TRUTH_AND_EXPR:
7910 this_optab = and_optab;
7915 this_optab = ior_optab;
7918 case TRUTH_XOR_EXPR:
7920 this_optab = xor_optab;
7927 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7929 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7930 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7933 /* Could determine the answer when only additive constants differ. Also,
7934 the addition of one can be handled by changing the condition. */
7941 case UNORDERED_EXPR:
7948 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7952 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7953 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7955 && GET_CODE (original_target) == REG
7956 && (GET_MODE (original_target)
7957 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7959 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7962 if (temp != original_target)
7963 temp = copy_to_reg (temp);
7965 op1 = gen_label_rtx ();
7966 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7967 GET_MODE (temp), unsignedp, op1);
7968 emit_move_insn (temp, const1_rtx);
7973 /* If no set-flag instruction, must generate a conditional
7974 store into a temporary variable. Drop through
7975 and handle this like && and ||. */
7977 case TRUTH_ANDIF_EXPR:
7978 case TRUTH_ORIF_EXPR:
7980 && (target == 0 || ! safe_from_p (target, exp, 1)
7981 /* Make sure we don't have a hard reg (such as function's return
7982 value) live across basic blocks, if not optimizing. */
7983 || (!optimize && GET_CODE (target) == REG
7984 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7985 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7988 emit_clr_insn (target);
7990 op1 = gen_label_rtx ();
7991 jumpifnot (exp, op1);
7994 emit_0_to_1_insn (target);
7997 return ignore ? const0_rtx : target;
7999 case TRUTH_NOT_EXPR:
8000 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8001 /* The parser is careful to generate TRUTH_NOT_EXPR
8002 only with operands that are always zero or one. */
8003 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8004 target, 1, OPTAB_LIB_WIDEN);
8010 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8012 return expand_expr (TREE_OPERAND (exp, 1),
8013 (ignore ? const0_rtx : target),
8017 /* If we would have a "singleton" (see below) were it not for a
8018 conversion in each arm, bring that conversion back out. */
8019 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8020 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8021 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8022 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8024 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8025 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8027 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8028 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8029 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8030 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8031 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8032 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8033 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8034 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8035 return expand_expr (build1 (NOP_EXPR, type,
8036 build (COND_EXPR, TREE_TYPE (iftrue),
8037 TREE_OPERAND (exp, 0),
8039 target, tmode, modifier);
8043 /* Note that COND_EXPRs whose type is a structure or union
8044 are required to be constructed to contain assignments of
8045 a temporary variable, so that we can evaluate them here
8046 for side effect only. If type is void, we must do likewise. */
8048 /* If an arm of the branch requires a cleanup,
8049 only that cleanup is performed. */
8052 tree binary_op = 0, unary_op = 0;
8054 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8055 convert it to our mode, if necessary. */
8056 if (integer_onep (TREE_OPERAND (exp, 1))
8057 && integer_zerop (TREE_OPERAND (exp, 2))
8058 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8062 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8067 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8068 if (GET_MODE (op0) == mode)
8072 target = gen_reg_rtx (mode);
8073 convert_move (target, op0, unsignedp);
8077 /* Check for X ? A + B : A. If we have this, we can copy A to the
8078 output and conditionally add B. Similarly for unary operations.
8079 Don't do this if X has side-effects because those side effects
8080 might affect A or B and the "?" operation is a sequence point in
8081 ANSI. (operand_equal_p tests for side effects.) */
8083 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8084 && operand_equal_p (TREE_OPERAND (exp, 2),
8085 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8086 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8087 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8088 && operand_equal_p (TREE_OPERAND (exp, 1),
8089 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8090 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8091 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8092 && operand_equal_p (TREE_OPERAND (exp, 2),
8093 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8094 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8095 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8096 && operand_equal_p (TREE_OPERAND (exp, 1),
8097 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8098 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8100 /* If we are not to produce a result, we have no target. Otherwise,
8101 if a target was specified use it; it will not be used as an
8102 intermediate target unless it is safe. If no target, use a
8107 else if (original_target
8108 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8109 || (singleton && GET_CODE (original_target) == REG
8110 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8111 && original_target == var_rtx (singleton)))
8112 && GET_MODE (original_target) == mode
8113 #ifdef HAVE_conditional_move
8114 && (! can_conditionally_move_p (mode)
8115 || GET_CODE (original_target) == REG
8116 || TREE_ADDRESSABLE (type))
8118 && (GET_CODE (original_target) != MEM
8119 || TREE_ADDRESSABLE (type)))
8120 temp = original_target;
8121 else if (TREE_ADDRESSABLE (type))
8124 temp = assign_temp (type, 0, 0, 1);
8126 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8127 do the test of X as a store-flag operation, do this as
8128 A + ((X != 0) << log C). Similarly for other simple binary
8129 operators. Only do for C == 1 if BRANCH_COST is low. */
8130 if (temp && singleton && binary_op
8131 && (TREE_CODE (binary_op) == PLUS_EXPR
8132 || TREE_CODE (binary_op) == MINUS_EXPR
8133 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8134 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8135 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8136 : integer_onep (TREE_OPERAND (binary_op, 1)))
8137 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8140 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8141 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8142 ? addv_optab : add_optab)
8143 : TREE_CODE (binary_op) == MINUS_EXPR
8144 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8145 ? subv_optab : sub_optab)
8146 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8149 /* If we had X ? A : A + 1, do this as A + (X == 0).
8151 We have to invert the truth value here and then put it
8152 back later if do_store_flag fails. We cannot simply copy
8153 TREE_OPERAND (exp, 0) to another variable and modify that
8154 because invert_truthvalue can modify the tree pointed to
8156 if (singleton == TREE_OPERAND (exp, 1))
8157 TREE_OPERAND (exp, 0)
8158 = invert_truthvalue (TREE_OPERAND (exp, 0));
8160 result = do_store_flag (TREE_OPERAND (exp, 0),
8161 (safe_from_p (temp, singleton, 1)
8163 mode, BRANCH_COST <= 1);
8165 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8166 result = expand_shift (LSHIFT_EXPR, mode, result,
8167 build_int_2 (tree_log2
8171 (safe_from_p (temp, singleton, 1)
8172 ? temp : NULL_RTX), 0);
8176 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8177 return expand_binop (mode, boptab, op1, result, temp,
8178 unsignedp, OPTAB_LIB_WIDEN);
8180 else if (singleton == TREE_OPERAND (exp, 1))
8181 TREE_OPERAND (exp, 0)
8182 = invert_truthvalue (TREE_OPERAND (exp, 0));
8185 do_pending_stack_adjust ();
8187 op0 = gen_label_rtx ();
8189 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8193 /* If the target conflicts with the other operand of the
8194 binary op, we can't use it. Also, we can't use the target
8195 if it is a hard register, because evaluating the condition
8196 might clobber it. */
8198 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8199 || (GET_CODE (temp) == REG
8200 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8201 temp = gen_reg_rtx (mode);
8202 store_expr (singleton, temp, 0);
8205 expand_expr (singleton,
8206 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8207 if (singleton == TREE_OPERAND (exp, 1))
8208 jumpif (TREE_OPERAND (exp, 0), op0);
8210 jumpifnot (TREE_OPERAND (exp, 0), op0);
8212 start_cleanup_deferral ();
8213 if (binary_op && temp == 0)
8214 /* Just touch the other operand. */
8215 expand_expr (TREE_OPERAND (binary_op, 1),
8216 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8218 store_expr (build (TREE_CODE (binary_op), type,
8219 make_tree (type, temp),
8220 TREE_OPERAND (binary_op, 1)),
8223 store_expr (build1 (TREE_CODE (unary_op), type,
8224 make_tree (type, temp)),
8228 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8229 comparison operator. If we have one of these cases, set the
8230 output to A, branch on A (cse will merge these two references),
8231 then set the output to FOO. */
8233 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8234 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8235 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8236 TREE_OPERAND (exp, 1), 0)
8237 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8238 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8239 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8241 if (GET_CODE (temp) == REG
8242 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8243 temp = gen_reg_rtx (mode);
8244 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8245 jumpif (TREE_OPERAND (exp, 0), op0);
8247 start_cleanup_deferral ();
8248 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8252 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8253 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8254 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8255 TREE_OPERAND (exp, 2), 0)
8256 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8257 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8258 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8260 if (GET_CODE (temp) == REG
8261 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8262 temp = gen_reg_rtx (mode);
8263 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8264 jumpifnot (TREE_OPERAND (exp, 0), op0);
8266 start_cleanup_deferral ();
8267 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8272 op1 = gen_label_rtx ();
8273 jumpifnot (TREE_OPERAND (exp, 0), op0);
8275 start_cleanup_deferral ();
8277 /* One branch of the cond can be void, if it never returns. For
8278 example A ? throw : E */
8280 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8281 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8283 expand_expr (TREE_OPERAND (exp, 1),
8284 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8285 end_cleanup_deferral ();
8287 emit_jump_insn (gen_jump (op1));
8290 start_cleanup_deferral ();
8292 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8293 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8295 expand_expr (TREE_OPERAND (exp, 2),
8296 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8299 end_cleanup_deferral ();
8310 /* Something needs to be initialized, but we didn't know
8311 where that thing was when building the tree. For example,
8312 it could be the return value of a function, or a parameter
8313 to a function which lays down in the stack, or a temporary
8314 variable which must be passed by reference.
8316 We guarantee that the expression will either be constructed
8317 or copied into our original target. */
8319 tree slot = TREE_OPERAND (exp, 0);
8320 tree cleanups = NULL_TREE;
8323 if (TREE_CODE (slot) != VAR_DECL)
8327 target = original_target;
8329 /* Set this here so that if we get a target that refers to a
8330 register variable that's already been used, put_reg_into_stack
8331 knows that it should fix up those uses. */
8332 TREE_USED (slot) = 1;
8336 if (DECL_RTL_SET_P (slot))
8338 target = DECL_RTL (slot);
8339 /* If we have already expanded the slot, so don't do
8341 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8346 target = assign_temp (type, 2, 0, 1);
8347 /* All temp slots at this level must not conflict. */
8348 preserve_temp_slots (target);
8349 SET_DECL_RTL (slot, target);
8350 if (TREE_ADDRESSABLE (slot))
8351 put_var_into_stack (slot);
8353 /* Since SLOT is not known to the called function
8354 to belong to its stack frame, we must build an explicit
8355 cleanup. This case occurs when we must build up a reference
8356 to pass the reference as an argument. In this case,
8357 it is very likely that such a reference need not be
8360 if (TREE_OPERAND (exp, 2) == 0)
8361 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8362 cleanups = TREE_OPERAND (exp, 2);
8367 /* This case does occur, when expanding a parameter which
8368 needs to be constructed on the stack. The target
8369 is the actual stack address that we want to initialize.
8370 The function we call will perform the cleanup in this case. */
8372 /* If we have already assigned it space, use that space,
8373 not target that we were passed in, as our target
8374 parameter is only a hint. */
8375 if (DECL_RTL_SET_P (slot))
8377 target = DECL_RTL (slot);
8378 /* If we have already expanded the slot, so don't do
8380 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8385 SET_DECL_RTL (slot, target);
8386 /* If we must have an addressable slot, then make sure that
8387 the RTL that we just stored in slot is OK. */
8388 if (TREE_ADDRESSABLE (slot))
8389 put_var_into_stack (slot);
8393 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8394 /* Mark it as expanded. */
8395 TREE_OPERAND (exp, 1) = NULL_TREE;
8397 store_expr (exp1, target, 0);
8399 expand_decl_cleanup (NULL_TREE, cleanups);
8406 tree lhs = TREE_OPERAND (exp, 0);
8407 tree rhs = TREE_OPERAND (exp, 1);
8409 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8415 /* If lhs is complex, expand calls in rhs before computing it.
8416 That's so we don't compute a pointer and save it over a
8417 call. If lhs is simple, compute it first so we can give it
8418 as a target if the rhs is just a call. This avoids an
8419 extra temp and copy and that prevents a partial-subsumption
8420 which makes bad code. Actually we could treat
8421 component_ref's of vars like vars. */
8423 tree lhs = TREE_OPERAND (exp, 0);
8424 tree rhs = TREE_OPERAND (exp, 1);
8428 /* Check for |= or &= of a bitfield of size one into another bitfield
8429 of size 1. In this case, (unless we need the result of the
8430 assignment) we can do this more efficiently with a
8431 test followed by an assignment, if necessary.
8433 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8434 things change so we do, this code should be enhanced to
8437 && TREE_CODE (lhs) == COMPONENT_REF
8438 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8439 || TREE_CODE (rhs) == BIT_AND_EXPR)
8440 && TREE_OPERAND (rhs, 0) == lhs
8441 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8442 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8443 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8445 rtx label = gen_label_rtx ();
8447 do_jump (TREE_OPERAND (rhs, 1),
8448 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8449 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8450 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8451 (TREE_CODE (rhs) == BIT_IOR_EXPR
8453 : integer_zero_node)),
8455 do_pending_stack_adjust ();
8460 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8466 if (!TREE_OPERAND (exp, 0))
8467 expand_null_return ();
8469 expand_return (TREE_OPERAND (exp, 0));
8472 case PREINCREMENT_EXPR:
8473 case PREDECREMENT_EXPR:
8474 return expand_increment (exp, 0, ignore);
8476 case POSTINCREMENT_EXPR:
8477 case POSTDECREMENT_EXPR:
8478 /* Faster to treat as pre-increment if result is not used. */
8479 return expand_increment (exp, ! ignore, ignore);
8482 /* Are we taking the address of a nested function? */
8483 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8484 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8485 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8486 && ! TREE_STATIC (exp))
8488 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8489 op0 = force_operand (op0, target);
8491 /* If we are taking the address of something erroneous, just
8493 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8495 /* If we are taking the address of a constant and are at the
8496 top level, we have to use output_constant_def since we can't
8497 call force_const_mem at top level. */
8499 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8500 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8502 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8505 /* We make sure to pass const0_rtx down if we came in with
8506 ignore set, to avoid doing the cleanups twice for something. */
8507 op0 = expand_expr (TREE_OPERAND (exp, 0),
8508 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8509 (modifier == EXPAND_INITIALIZER
8510 ? modifier : EXPAND_CONST_ADDRESS));
8512 /* If we are going to ignore the result, OP0 will have been set
8513 to const0_rtx, so just return it. Don't get confused and
8514 think we are taking the address of the constant. */
8518 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8519 clever and returns a REG when given a MEM. */
8520 op0 = protect_from_queue (op0, 1);
8522 /* We would like the object in memory. If it is a constant, we can
8523 have it be statically allocated into memory. For a non-constant,
8524 we need to allocate some memory and store the value into it. */
8526 if (CONSTANT_P (op0))
8527 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8529 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8530 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8531 || GET_CODE (op0) == PARALLEL)
8533 /* If the operand is a SAVE_EXPR, we can deal with this by
8534 forcing the SAVE_EXPR into memory. */
8535 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8537 put_var_into_stack (TREE_OPERAND (exp, 0));
8538 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8542 /* If this object is in a register, it can't be BLKmode. */
8543 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8544 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8546 if (GET_CODE (op0) == PARALLEL)
8547 /* Handle calls that pass values in multiple
8548 non-contiguous locations. The Irix 6 ABI has examples
8550 emit_group_store (memloc, op0,
8551 int_size_in_bytes (inner_type));
8553 emit_move_insn (memloc, op0);
8559 if (GET_CODE (op0) != MEM)
8562 mark_temp_addr_taken (op0);
8563 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8565 op0 = XEXP (op0, 0);
8566 #ifdef POINTERS_EXTEND_UNSIGNED
8567 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8568 && mode == ptr_mode)
8569 op0 = convert_memory_address (ptr_mode, op0);
8574 /* If OP0 is not aligned as least as much as the type requires, we
8575 need to make a temporary, copy OP0 to it, and take the address of
8576 the temporary. We want to use the alignment of the type, not of
8577 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8578 the test for BLKmode means that can't happen. The test for
8579 BLKmode is because we never make mis-aligned MEMs with
8582 We don't need to do this at all if the machine doesn't have
8583 strict alignment. */
8584 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8585 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8587 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8589 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8591 = assign_stack_temp_for_type
8592 (TYPE_MODE (inner_type),
8593 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8594 : int_size_in_bytes (inner_type),
8595 1, build_qualified_type (inner_type,
8596 (TYPE_QUALS (inner_type)
8597 | TYPE_QUAL_CONST)));
8599 if (TYPE_ALIGN_OK (inner_type))
8602 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8606 op0 = force_operand (XEXP (op0, 0), target);
8610 && GET_CODE (op0) != REG
8611 && modifier != EXPAND_CONST_ADDRESS
8612 && modifier != EXPAND_INITIALIZER
8613 && modifier != EXPAND_SUM)
8614 op0 = force_reg (Pmode, op0);
8616 if (GET_CODE (op0) == REG
8617 && ! REG_USERVAR_P (op0))
8618 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8620 #ifdef POINTERS_EXTEND_UNSIGNED
8621 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8622 && mode == ptr_mode)
8623 op0 = convert_memory_address (ptr_mode, op0);
8628 case ENTRY_VALUE_EXPR:
8631 /* COMPLEX type for Extended Pascal & Fortran */
8634 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8637 /* Get the rtx code of the operands. */
8638 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8639 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8642 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8646 /* Move the real (op0) and imaginary (op1) parts to their location. */
8647 emit_move_insn (gen_realpart (mode, target), op0);
8648 emit_move_insn (gen_imagpart (mode, target), op1);
8650 insns = get_insns ();
8653 /* Complex construction should appear as a single unit. */
8654 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8655 each with a separate pseudo as destination.
8656 It's not correct for flow to treat them as a unit. */
8657 if (GET_CODE (target) != CONCAT)
8658 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8666 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8667 return gen_realpart (mode, op0);
8670 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8671 return gen_imagpart (mode, op0);
8675 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8679 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8682 target = gen_reg_rtx (mode);
8686 /* Store the realpart and the negated imagpart to target. */
8687 emit_move_insn (gen_realpart (partmode, target),
8688 gen_realpart (partmode, op0));
8690 imag_t = gen_imagpart (partmode, target);
8691 temp = expand_unop (partmode,
8692 ! unsignedp && flag_trapv
8693 && (GET_MODE_CLASS(partmode) == MODE_INT)
8694 ? negv_optab : neg_optab,
8695 gen_imagpart (partmode, op0), imag_t, 0);
8697 emit_move_insn (imag_t, temp);
8699 insns = get_insns ();
8702 /* Conjugate should appear as a single unit
8703 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8704 each with a separate pseudo as destination.
8705 It's not correct for flow to treat them as a unit. */
8706 if (GET_CODE (target) != CONCAT)
8707 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8714 case TRY_CATCH_EXPR:
8716 tree handler = TREE_OPERAND (exp, 1);
8718 expand_eh_region_start ();
8720 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8722 expand_eh_region_end_cleanup (handler);
8727 case TRY_FINALLY_EXPR:
8729 tree try_block = TREE_OPERAND (exp, 0);
8730 tree finally_block = TREE_OPERAND (exp, 1);
8731 rtx finally_label = gen_label_rtx ();
8732 rtx done_label = gen_label_rtx ();
8733 rtx return_link = gen_reg_rtx (Pmode);
8734 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8735 (tree) finally_label, (tree) return_link);
8736 TREE_SIDE_EFFECTS (cleanup) = 1;
8738 /* Start a new binding layer that will keep track of all cleanup
8739 actions to be performed. */
8740 expand_start_bindings (2);
8742 target_temp_slot_level = temp_slot_level;
8744 expand_decl_cleanup (NULL_TREE, cleanup);
8745 op0 = expand_expr (try_block, target, tmode, modifier);
8747 preserve_temp_slots (op0);
8748 expand_end_bindings (NULL_TREE, 0, 0);
8749 emit_jump (done_label);
8750 emit_label (finally_label);
8751 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8752 emit_indirect_jump (return_link);
8753 emit_label (done_label);
8757 case GOTO_SUBROUTINE_EXPR:
8759 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8760 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8761 rtx return_address = gen_label_rtx ();
8762 emit_move_insn (return_link,
8763 gen_rtx_LABEL_REF (Pmode, return_address));
8765 emit_label (return_address);
8770 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8773 return get_exception_pointer (cfun);
8776 /* Function descriptors are not valid except for as
8777 initialization constants, and should not be expanded. */
8781 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8784 /* Here to do an ordinary binary operator, generating an instruction
8785 from the optab already placed in `this_optab'. */
8787 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8789 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8790 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8792 temp = expand_binop (mode, this_optab, op0, op1, target,
8793 unsignedp, OPTAB_LIB_WIDEN);
8799 /* Return the tree node if a ARG corresponds to a string constant or zero
8800 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8801 in bytes within the string that ARG is accessing. The type of the
8802 offset will be `sizetype'. */
8805 string_constant (arg, ptr_offset)
8811 if (TREE_CODE (arg) == ADDR_EXPR
8812 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8814 *ptr_offset = size_zero_node;
8815 return TREE_OPERAND (arg, 0);
8817 else if (TREE_CODE (arg) == PLUS_EXPR)
8819 tree arg0 = TREE_OPERAND (arg, 0);
8820 tree arg1 = TREE_OPERAND (arg, 1);
8825 if (TREE_CODE (arg0) == ADDR_EXPR
8826 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8828 *ptr_offset = convert (sizetype, arg1);
8829 return TREE_OPERAND (arg0, 0);
8831 else if (TREE_CODE (arg1) == ADDR_EXPR
8832 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8834 *ptr_offset = convert (sizetype, arg0);
8835 return TREE_OPERAND (arg1, 0);
8842 /* Expand code for a post- or pre- increment or decrement
8843 and return the RTX for the result.
8844 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8847 expand_increment (exp, post, ignore)
8853 tree incremented = TREE_OPERAND (exp, 0);
8854 optab this_optab = add_optab;
8856 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8857 int op0_is_copy = 0;
8858 int single_insn = 0;
8859 /* 1 means we can't store into OP0 directly,
8860 because it is a subreg narrower than a word,
8861 and we don't dare clobber the rest of the word. */
8864 /* Stabilize any component ref that might need to be
8865 evaluated more than once below. */
8867 || TREE_CODE (incremented) == BIT_FIELD_REF
8868 || (TREE_CODE (incremented) == COMPONENT_REF
8869 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8870 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8871 incremented = stabilize_reference (incremented);
8872 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8873 ones into save exprs so that they don't accidentally get evaluated
8874 more than once by the code below. */
8875 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8876 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8877 incremented = save_expr (incremented);
8879 /* Compute the operands as RTX.
8880 Note whether OP0 is the actual lvalue or a copy of it:
8881 I believe it is a copy iff it is a register or subreg
8882 and insns were generated in computing it. */
8884 temp = get_last_insn ();
8885 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8887 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8888 in place but instead must do sign- or zero-extension during assignment,
8889 so we copy it into a new register and let the code below use it as
8892 Note that we can safely modify this SUBREG since it is know not to be
8893 shared (it was made by the expand_expr call above). */
8895 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8898 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8902 else if (GET_CODE (op0) == SUBREG
8903 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8905 /* We cannot increment this SUBREG in place. If we are
8906 post-incrementing, get a copy of the old value. Otherwise,
8907 just mark that we cannot increment in place. */
8909 op0 = copy_to_reg (op0);
8914 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8915 && temp != get_last_insn ());
8916 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8918 /* Decide whether incrementing or decrementing. */
8919 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8920 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8921 this_optab = sub_optab;
8923 /* Convert decrement by a constant into a negative increment. */
8924 if (this_optab == sub_optab
8925 && GET_CODE (op1) == CONST_INT)
8927 op1 = GEN_INT (-INTVAL (op1));
8928 this_optab = add_optab;
8931 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8932 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8934 /* For a preincrement, see if we can do this with a single instruction. */
8937 icode = (int) this_optab->handlers[(int) mode].insn_code;
8938 if (icode != (int) CODE_FOR_nothing
8939 /* Make sure that OP0 is valid for operands 0 and 1
8940 of the insn we want to queue. */
8941 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8942 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8943 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8947 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8948 then we cannot just increment OP0. We must therefore contrive to
8949 increment the original value. Then, for postincrement, we can return
8950 OP0 since it is a copy of the old value. For preincrement, expand here
8951 unless we can do it with a single insn.
8953 Likewise if storing directly into OP0 would clobber high bits
8954 we need to preserve (bad_subreg). */
8955 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8957 /* This is the easiest way to increment the value wherever it is.
8958 Problems with multiple evaluation of INCREMENTED are prevented
8959 because either (1) it is a component_ref or preincrement,
8960 in which case it was stabilized above, or (2) it is an array_ref
8961 with constant index in an array in a register, which is
8962 safe to reevaluate. */
8963 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8964 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8965 ? MINUS_EXPR : PLUS_EXPR),
8968 TREE_OPERAND (exp, 1));
8970 while (TREE_CODE (incremented) == NOP_EXPR
8971 || TREE_CODE (incremented) == CONVERT_EXPR)
8973 newexp = convert (TREE_TYPE (incremented), newexp);
8974 incremented = TREE_OPERAND (incremented, 0);
8977 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8978 return post ? op0 : temp;
8983 /* We have a true reference to the value in OP0.
8984 If there is an insn to add or subtract in this mode, queue it.
8985 Queueing the increment insn avoids the register shuffling
8986 that often results if we must increment now and first save
8987 the old value for subsequent use. */
8989 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8990 op0 = stabilize (op0);
8993 icode = (int) this_optab->handlers[(int) mode].insn_code;
8994 if (icode != (int) CODE_FOR_nothing
8995 /* Make sure that OP0 is valid for operands 0 and 1
8996 of the insn we want to queue. */
8997 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8998 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9000 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9001 op1 = force_reg (mode, op1);
9003 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9005 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9007 rtx addr = (general_operand (XEXP (op0, 0), mode)
9008 ? force_reg (Pmode, XEXP (op0, 0))
9009 : copy_to_reg (XEXP (op0, 0)));
9012 op0 = replace_equiv_address (op0, addr);
9013 temp = force_reg (GET_MODE (op0), op0);
9014 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9015 op1 = force_reg (mode, op1);
9017 /* The increment queue is LIFO, thus we have to `queue'
9018 the instructions in reverse order. */
9019 enqueue_insn (op0, gen_move_insn (op0, temp));
9020 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9025 /* Preincrement, or we can't increment with one simple insn. */
9027 /* Save a copy of the value before inc or dec, to return it later. */
9028 temp = value = copy_to_reg (op0);
9030 /* Arrange to return the incremented value. */
9031 /* Copy the rtx because expand_binop will protect from the queue,
9032 and the results of that would be invalid for us to return
9033 if our caller does emit_queue before using our result. */
9034 temp = copy_rtx (value = op0);
9036 /* Increment however we can. */
9037 op1 = expand_binop (mode, this_optab, value, op1, op0,
9038 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9040 /* Make sure the value is stored into OP0. */
9042 emit_move_insn (op0, op1);
9047 /* At the start of a function, record that we have no previously-pushed
9048 arguments waiting to be popped. */
9051 init_pending_stack_adjust ()
9053 pending_stack_adjust = 0;
9056 /* When exiting from function, if safe, clear out any pending stack adjust
9057 so the adjustment won't get done.
9059 Note, if the current function calls alloca, then it must have a
9060 frame pointer regardless of the value of flag_omit_frame_pointer. */
9063 clear_pending_stack_adjust ()
9065 #ifdef EXIT_IGNORE_STACK
9067 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9068 && EXIT_IGNORE_STACK
9069 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9070 && ! flag_inline_functions)
9072 stack_pointer_delta -= pending_stack_adjust,
9073 pending_stack_adjust = 0;
9078 /* Pop any previously-pushed arguments that have not been popped yet. */
9081 do_pending_stack_adjust ()
9083 if (inhibit_defer_pop == 0)
9085 if (pending_stack_adjust != 0)
9086 adjust_stack (GEN_INT (pending_stack_adjust));
9087 pending_stack_adjust = 0;
9091 /* Expand conditional expressions. */
9093 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9094 LABEL is an rtx of code CODE_LABEL, in this function and all the
9098 jumpifnot (exp, label)
9102 do_jump (exp, label, NULL_RTX);
9105 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9112 do_jump (exp, NULL_RTX, label);
9115 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9116 the result is zero, or IF_TRUE_LABEL if the result is one.
9117 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9118 meaning fall through in that case.
9120 do_jump always does any pending stack adjust except when it does not
9121 actually perform a jump. An example where there is no jump
9122 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9124 This function is responsible for optimizing cases such as
9125 &&, || and comparison operators in EXP. */
9128 do_jump (exp, if_false_label, if_true_label)
9130 rtx if_false_label, if_true_label;
9132 enum tree_code code = TREE_CODE (exp);
9133 /* Some cases need to create a label to jump to
9134 in order to properly fall through.
9135 These cases set DROP_THROUGH_LABEL nonzero. */
9136 rtx drop_through_label = 0;
9140 enum machine_mode mode;
9142 #ifdef MAX_INTEGER_COMPUTATION_MODE
9143 check_max_integer_computation_mode (exp);
9154 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9160 /* This is not true with #pragma weak */
9162 /* The address of something can never be zero. */
9164 emit_jump (if_true_label);
9169 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9170 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9171 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9172 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9175 /* If we are narrowing the operand, we have to do the compare in the
9177 if ((TYPE_PRECISION (TREE_TYPE (exp))
9178 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9180 case NON_LVALUE_EXPR:
9181 case REFERENCE_EXPR:
9186 /* These cannot change zero->non-zero or vice versa. */
9187 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9190 case WITH_RECORD_EXPR:
9191 /* Put the object on the placeholder list, recurse through our first
9192 operand, and pop the list. */
9193 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9195 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9196 placeholder_list = TREE_CHAIN (placeholder_list);
9200 /* This is never less insns than evaluating the PLUS_EXPR followed by
9201 a test and can be longer if the test is eliminated. */
9203 /* Reduce to minus. */
9204 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9205 TREE_OPERAND (exp, 0),
9206 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9207 TREE_OPERAND (exp, 1))));
9208 /* Process as MINUS. */
9212 /* Non-zero iff operands of minus differ. */
9213 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9214 TREE_OPERAND (exp, 0),
9215 TREE_OPERAND (exp, 1)),
9216 NE, NE, if_false_label, if_true_label);
9220 /* If we are AND'ing with a small constant, do this comparison in the
9221 smallest type that fits. If the machine doesn't have comparisons
9222 that small, it will be converted back to the wider comparison.
9223 This helps if we are testing the sign bit of a narrower object.
9224 combine can't do this for us because it can't know whether a
9225 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9227 if (! SLOW_BYTE_ACCESS
9228 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9229 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9230 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9231 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9232 && (type = type_for_mode (mode, 1)) != 0
9233 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9234 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9235 != CODE_FOR_nothing))
9237 do_jump (convert (type, exp), if_false_label, if_true_label);
9242 case TRUTH_NOT_EXPR:
9243 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9246 case TRUTH_ANDIF_EXPR:
9247 if (if_false_label == 0)
9248 if_false_label = drop_through_label = gen_label_rtx ();
9249 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9250 start_cleanup_deferral ();
9251 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9252 end_cleanup_deferral ();
9255 case TRUTH_ORIF_EXPR:
9256 if (if_true_label == 0)
9257 if_true_label = drop_through_label = gen_label_rtx ();
9258 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9259 start_cleanup_deferral ();
9260 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9261 end_cleanup_deferral ();
9266 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9267 preserve_temp_slots (NULL_RTX);
9271 do_pending_stack_adjust ();
9272 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9278 case ARRAY_RANGE_REF:
9280 HOST_WIDE_INT bitsize, bitpos;
9282 enum machine_mode mode;
9287 /* Get description of this reference. We don't actually care
9288 about the underlying object here. */
9289 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9290 &unsignedp, &volatilep);
9292 type = type_for_size (bitsize, unsignedp);
9293 if (! SLOW_BYTE_ACCESS
9294 && type != 0 && bitsize >= 0
9295 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9296 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9297 != CODE_FOR_nothing))
9299 do_jump (convert (type, exp), if_false_label, if_true_label);
9306 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9307 if (integer_onep (TREE_OPERAND (exp, 1))
9308 && integer_zerop (TREE_OPERAND (exp, 2)))
9309 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9311 else if (integer_zerop (TREE_OPERAND (exp, 1))
9312 && integer_onep (TREE_OPERAND (exp, 2)))
9313 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9317 rtx label1 = gen_label_rtx ();
9318 drop_through_label = gen_label_rtx ();
9320 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9322 start_cleanup_deferral ();
9323 /* Now the THEN-expression. */
9324 do_jump (TREE_OPERAND (exp, 1),
9325 if_false_label ? if_false_label : drop_through_label,
9326 if_true_label ? if_true_label : drop_through_label);
9327 /* In case the do_jump just above never jumps. */
9328 do_pending_stack_adjust ();
9329 emit_label (label1);
9331 /* Now the ELSE-expression. */
9332 do_jump (TREE_OPERAND (exp, 2),
9333 if_false_label ? if_false_label : drop_through_label,
9334 if_true_label ? if_true_label : drop_through_label);
9335 end_cleanup_deferral ();
9341 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9343 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9344 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9346 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9347 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9350 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9351 fold (build (EQ_EXPR, TREE_TYPE (exp),
9352 fold (build1 (REALPART_EXPR,
9353 TREE_TYPE (inner_type),
9355 fold (build1 (REALPART_EXPR,
9356 TREE_TYPE (inner_type),
9358 fold (build (EQ_EXPR, TREE_TYPE (exp),
9359 fold (build1 (IMAGPART_EXPR,
9360 TREE_TYPE (inner_type),
9362 fold (build1 (IMAGPART_EXPR,
9363 TREE_TYPE (inner_type),
9365 if_false_label, if_true_label);
9368 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9369 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9371 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9372 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9373 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9375 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9381 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9383 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9384 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9386 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9387 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9390 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9391 fold (build (NE_EXPR, TREE_TYPE (exp),
9392 fold (build1 (REALPART_EXPR,
9393 TREE_TYPE (inner_type),
9395 fold (build1 (REALPART_EXPR,
9396 TREE_TYPE (inner_type),
9398 fold (build (NE_EXPR, TREE_TYPE (exp),
9399 fold (build1 (IMAGPART_EXPR,
9400 TREE_TYPE (inner_type),
9402 fold (build1 (IMAGPART_EXPR,
9403 TREE_TYPE (inner_type),
9405 if_false_label, if_true_label);
9408 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9409 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9411 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9412 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9413 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9415 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9420 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9421 if (GET_MODE_CLASS (mode) == MODE_INT
9422 && ! can_compare_p (LT, mode, ccp_jump))
9423 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9425 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9429 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9430 if (GET_MODE_CLASS (mode) == MODE_INT
9431 && ! can_compare_p (LE, mode, ccp_jump))
9432 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9434 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9438 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9439 if (GET_MODE_CLASS (mode) == MODE_INT
9440 && ! can_compare_p (GT, mode, ccp_jump))
9441 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9443 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9447 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9448 if (GET_MODE_CLASS (mode) == MODE_INT
9449 && ! can_compare_p (GE, mode, ccp_jump))
9450 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9452 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9455 case UNORDERED_EXPR:
9458 enum rtx_code cmp, rcmp;
9461 if (code == UNORDERED_EXPR)
9462 cmp = UNORDERED, rcmp = ORDERED;
9464 cmp = ORDERED, rcmp = UNORDERED;
9465 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9468 if (! can_compare_p (cmp, mode, ccp_jump)
9469 && (can_compare_p (rcmp, mode, ccp_jump)
9470 /* If the target doesn't provide either UNORDERED or ORDERED
9471 comparisons, canonicalize on UNORDERED for the library. */
9472 || rcmp == UNORDERED))
9476 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9478 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9483 enum rtx_code rcode1;
9484 enum tree_code tcode2;
9508 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9509 if (can_compare_p (rcode1, mode, ccp_jump))
9510 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9514 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9515 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9518 /* If the target doesn't support combined unordered
9519 compares, decompose into UNORDERED + comparison. */
9520 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9521 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9522 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9523 do_jump (exp, if_false_label, if_true_label);
9529 __builtin_expect (<test>, 0) and
9530 __builtin_expect (<test>, 1)
9532 We need to do this here, so that <test> is not converted to a SCC
9533 operation on machines that use condition code registers and COMPARE
9534 like the PowerPC, and then the jump is done based on whether the SCC
9535 operation produced a 1 or 0. */
9537 /* Check for a built-in function. */
9538 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9540 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9541 tree arglist = TREE_OPERAND (exp, 1);
9543 if (TREE_CODE (fndecl) == FUNCTION_DECL
9544 && DECL_BUILT_IN (fndecl)
9545 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9546 && arglist != NULL_TREE
9547 && TREE_CHAIN (arglist) != NULL_TREE)
9549 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9552 if (seq != NULL_RTX)
9559 /* fall through and generate the normal code. */
9563 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9565 /* This is not needed any more and causes poor code since it causes
9566 comparisons and tests from non-SI objects to have different code
9568 /* Copy to register to avoid generating bad insns by cse
9569 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9570 if (!cse_not_expected && GET_CODE (temp) == MEM)
9571 temp = copy_to_reg (temp);
9573 do_pending_stack_adjust ();
9574 /* Do any postincrements in the expression that was tested. */
9577 if (GET_CODE (temp) == CONST_INT
9578 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9579 || GET_CODE (temp) == LABEL_REF)
9581 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9585 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9586 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9587 /* Note swapping the labels gives us not-equal. */
9588 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9589 else if (GET_MODE (temp) != VOIDmode)
9590 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9591 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9592 GET_MODE (temp), NULL_RTX,
9593 if_false_label, if_true_label);
9598 if (drop_through_label)
9600 /* If do_jump produces code that might be jumped around,
9601 do any stack adjusts from that code, before the place
9602 where control merges in. */
9603 do_pending_stack_adjust ();
9604 emit_label (drop_through_label);
9608 /* Given a comparison expression EXP for values too wide to be compared
9609 with one insn, test the comparison and jump to the appropriate label.
9610 The code of EXP is ignored; we always test GT if SWAP is 0,
9611 and LT if SWAP is 1. */
9614 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9617 rtx if_false_label, if_true_label;
9619 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9620 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9621 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9622 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9624 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9627 /* Compare OP0 with OP1, word at a time, in mode MODE.
9628 UNSIGNEDP says to do unsigned comparison.
9629 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9632 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9633 enum machine_mode mode;
9636 rtx if_false_label, if_true_label;
9638 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9639 rtx drop_through_label = 0;
9642 if (! if_true_label || ! if_false_label)
9643 drop_through_label = gen_label_rtx ();
9644 if (! if_true_label)
9645 if_true_label = drop_through_label;
9646 if (! if_false_label)
9647 if_false_label = drop_through_label;
9649 /* Compare a word at a time, high order first. */
9650 for (i = 0; i < nwords; i++)
9652 rtx op0_word, op1_word;
9654 if (WORDS_BIG_ENDIAN)
9656 op0_word = operand_subword_force (op0, i, mode);
9657 op1_word = operand_subword_force (op1, i, mode);
9661 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9662 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9665 /* All but high-order word must be compared as unsigned. */
9666 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9667 (unsignedp || i > 0), word_mode, NULL_RTX,
9668 NULL_RTX, if_true_label);
9670 /* Consider lower words only if these are equal. */
9671 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9672 NULL_RTX, NULL_RTX, if_false_label);
9676 emit_jump (if_false_label);
9677 if (drop_through_label)
9678 emit_label (drop_through_label);
9681 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9682 with one insn, test the comparison and jump to the appropriate label. */
9685 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9687 rtx if_false_label, if_true_label;
9689 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9690 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9691 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9692 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9694 rtx drop_through_label = 0;
9696 if (! if_false_label)
9697 drop_through_label = if_false_label = gen_label_rtx ();
9699 for (i = 0; i < nwords; i++)
9700 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9701 operand_subword_force (op1, i, mode),
9702 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9703 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9706 emit_jump (if_true_label);
9707 if (drop_through_label)
9708 emit_label (drop_through_label);
9711 /* Jump according to whether OP0 is 0.
9712 We assume that OP0 has an integer mode that is too wide
9713 for the available compare insns. */
9716 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9718 rtx if_false_label, if_true_label;
9720 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9723 rtx drop_through_label = 0;
9725 /* The fastest way of doing this comparison on almost any machine is to
9726 "or" all the words and compare the result. If all have to be loaded
9727 from memory and this is a very wide item, it's possible this may
9728 be slower, but that's highly unlikely. */
9730 part = gen_reg_rtx (word_mode);
9731 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9732 for (i = 1; i < nwords && part != 0; i++)
9733 part = expand_binop (word_mode, ior_optab, part,
9734 operand_subword_force (op0, i, GET_MODE (op0)),
9735 part, 1, OPTAB_WIDEN);
9739 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9740 NULL_RTX, if_false_label, if_true_label);
9745 /* If we couldn't do the "or" simply, do this with a series of compares. */
9746 if (! if_false_label)
9747 drop_through_label = if_false_label = gen_label_rtx ();
9749 for (i = 0; i < nwords; i++)
9750 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9751 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9752 if_false_label, NULL_RTX);
9755 emit_jump (if_true_label);
9757 if (drop_through_label)
9758 emit_label (drop_through_label);
9761 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9762 (including code to compute the values to be compared)
9763 and set (CC0) according to the result.
9764 The decision as to signed or unsigned comparison must be made by the caller.
9766 We force a stack adjustment unless there are currently
9767 things pushed on the stack that aren't yet used.
9769 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9773 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9777 enum machine_mode mode;
9782 /* If one operand is constant, make it the second one. Only do this
9783 if the other operand is not constant as well. */
9785 if (swap_commutative_operands_p (op0, op1))
9790 code = swap_condition (code);
9795 op0 = force_not_mem (op0);
9796 op1 = force_not_mem (op1);
9799 do_pending_stack_adjust ();
9801 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9802 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9806 /* There's no need to do this now that combine.c can eliminate lots of
9807 sign extensions. This can be less efficient in certain cases on other
9810 /* If this is a signed equality comparison, we can do it as an
9811 unsigned comparison since zero-extension is cheaper than sign
9812 extension and comparisons with zero are done as unsigned. This is
9813 the case even on machines that can do fast sign extension, since
9814 zero-extension is easier to combine with other operations than
9815 sign-extension is. If we are comparing against a constant, we must
9816 convert it to what it would look like unsigned. */
9817 if ((code == EQ || code == NE) && ! unsignedp
9818 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9820 if (GET_CODE (op1) == CONST_INT
9821 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9822 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9827 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9829 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9832 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9833 The decision as to signed or unsigned comparison must be made by the caller.
9835 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9839 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9840 if_false_label, if_true_label)
9844 enum machine_mode mode;
9846 rtx if_false_label, if_true_label;
9849 int dummy_true_label = 0;
9851 /* Reverse the comparison if that is safe and we want to jump if it is
9853 if (! if_true_label && ! FLOAT_MODE_P (mode))
9855 if_true_label = if_false_label;
9857 code = reverse_condition (code);
9860 /* If one operand is constant, make it the second one. Only do this
9861 if the other operand is not constant as well. */
9863 if (swap_commutative_operands_p (op0, op1))
9868 code = swap_condition (code);
9873 op0 = force_not_mem (op0);
9874 op1 = force_not_mem (op1);
9877 do_pending_stack_adjust ();
9879 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9880 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9882 if (tem == const_true_rtx)
9885 emit_jump (if_true_label);
9890 emit_jump (if_false_label);
9896 /* There's no need to do this now that combine.c can eliminate lots of
9897 sign extensions. This can be less efficient in certain cases on other
9900 /* If this is a signed equality comparison, we can do it as an
9901 unsigned comparison since zero-extension is cheaper than sign
9902 extension and comparisons with zero are done as unsigned. This is
9903 the case even on machines that can do fast sign extension, since
9904 zero-extension is easier to combine with other operations than
9905 sign-extension is. If we are comparing against a constant, we must
9906 convert it to what it would look like unsigned. */
9907 if ((code == EQ || code == NE) && ! unsignedp
9908 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9910 if (GET_CODE (op1) == CONST_INT
9911 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9912 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9917 if (! if_true_label)
9919 dummy_true_label = 1;
9920 if_true_label = gen_label_rtx ();
9923 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9927 emit_jump (if_false_label);
9928 if (dummy_true_label)
9929 emit_label (if_true_label);
9932 /* Generate code for a comparison expression EXP (including code to compute
9933 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9934 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9935 generated code will drop through.
9936 SIGNED_CODE should be the rtx operation for this comparison for
9937 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9939 We force a stack adjustment unless there are currently
9940 things pushed on the stack that aren't yet used. */
9943 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9946 enum rtx_code signed_code, unsigned_code;
9947 rtx if_false_label, if_true_label;
9951 enum machine_mode mode;
9955 /* Don't crash if the comparison was erroneous. */
9956 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9957 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9960 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9961 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9964 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9965 mode = TYPE_MODE (type);
9966 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9967 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9968 || (GET_MODE_BITSIZE (mode)
9969 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9972 /* op0 might have been replaced by promoted constant, in which
9973 case the type of second argument should be used. */
9974 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9975 mode = TYPE_MODE (type);
9977 unsignedp = TREE_UNSIGNED (type);
9978 code = unsignedp ? unsigned_code : signed_code;
9980 #ifdef HAVE_canonicalize_funcptr_for_compare
9981 /* If function pointers need to be "canonicalized" before they can
9982 be reliably compared, then canonicalize them. */
9983 if (HAVE_canonicalize_funcptr_for_compare
9984 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9985 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9988 rtx new_op0 = gen_reg_rtx (mode);
9990 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9994 if (HAVE_canonicalize_funcptr_for_compare
9995 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9996 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9999 rtx new_op1 = gen_reg_rtx (mode);
10001 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10006 /* Do any postincrements in the expression that was tested. */
10009 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10011 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10012 if_false_label, if_true_label);
10015 /* Generate code to calculate EXP using a store-flag instruction
10016 and return an rtx for the result. EXP is either a comparison
10017 or a TRUTH_NOT_EXPR whose operand is a comparison.
10019 If TARGET is nonzero, store the result there if convenient.
10021 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10024 Return zero if there is no suitable set-flag instruction
10025 available on this machine.
10027 Once expand_expr has been called on the arguments of the comparison,
10028 we are committed to doing the store flag, since it is not safe to
10029 re-evaluate the expression. We emit the store-flag insn by calling
10030 emit_store_flag, but only expand the arguments if we have a reason
10031 to believe that emit_store_flag will be successful. If we think that
10032 it will, but it isn't, we have to simulate the store-flag with a
10033 set/jump/set sequence. */
10036 do_store_flag (exp, target, mode, only_cheap)
10039 enum machine_mode mode;
10042 enum rtx_code code;
10043 tree arg0, arg1, type;
10045 enum machine_mode operand_mode;
10049 enum insn_code icode;
10050 rtx subtarget = target;
10053 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10054 result at the end. We can't simply invert the test since it would
10055 have already been inverted if it were valid. This case occurs for
10056 some floating-point comparisons. */
10058 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10059 invert = 1, exp = TREE_OPERAND (exp, 0);
10061 arg0 = TREE_OPERAND (exp, 0);
10062 arg1 = TREE_OPERAND (exp, 1);
10064 /* Don't crash if the comparison was erroneous. */
10065 if (arg0 == error_mark_node || arg1 == error_mark_node)
10068 type = TREE_TYPE (arg0);
10069 operand_mode = TYPE_MODE (type);
10070 unsignedp = TREE_UNSIGNED (type);
10072 /* We won't bother with BLKmode store-flag operations because it would mean
10073 passing a lot of information to emit_store_flag. */
10074 if (operand_mode == BLKmode)
10077 /* We won't bother with store-flag operations involving function pointers
10078 when function pointers must be canonicalized before comparisons. */
10079 #ifdef HAVE_canonicalize_funcptr_for_compare
10080 if (HAVE_canonicalize_funcptr_for_compare
10081 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10082 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10084 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10085 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10086 == FUNCTION_TYPE))))
10093 /* Get the rtx comparison code to use. We know that EXP is a comparison
10094 operation of some type. Some comparisons against 1 and -1 can be
10095 converted to comparisons with zero. Do so here so that the tests
10096 below will be aware that we have a comparison with zero. These
10097 tests will not catch constants in the first operand, but constants
10098 are rarely passed as the first operand. */
10100 switch (TREE_CODE (exp))
10109 if (integer_onep (arg1))
10110 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10112 code = unsignedp ? LTU : LT;
10115 if (! unsignedp && integer_all_onesp (arg1))
10116 arg1 = integer_zero_node, code = LT;
10118 code = unsignedp ? LEU : LE;
10121 if (! unsignedp && integer_all_onesp (arg1))
10122 arg1 = integer_zero_node, code = GE;
10124 code = unsignedp ? GTU : GT;
10127 if (integer_onep (arg1))
10128 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10130 code = unsignedp ? GEU : GE;
10133 case UNORDERED_EXPR:
10159 /* Put a constant second. */
10160 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10162 tem = arg0; arg0 = arg1; arg1 = tem;
10163 code = swap_condition (code);
10166 /* If this is an equality or inequality test of a single bit, we can
10167 do this by shifting the bit being tested to the low-order bit and
10168 masking the result with the constant 1. If the condition was EQ,
10169 we xor it with 1. This does not require an scc insn and is faster
10170 than an scc insn even if we have it. */
10172 if ((code == NE || code == EQ)
10173 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10174 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10176 tree inner = TREE_OPERAND (arg0, 0);
10177 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10180 /* If INNER is a right shift of a constant and it plus BITNUM does
10181 not overflow, adjust BITNUM and INNER. */
10183 if (TREE_CODE (inner) == RSHIFT_EXPR
10184 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10185 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10186 && bitnum < TYPE_PRECISION (type)
10187 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10188 bitnum - TYPE_PRECISION (type)))
10190 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10191 inner = TREE_OPERAND (inner, 0);
10194 /* If we are going to be able to omit the AND below, we must do our
10195 operations as unsigned. If we must use the AND, we have a choice.
10196 Normally unsigned is faster, but for some machines signed is. */
10197 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10198 #ifdef LOAD_EXTEND_OP
10199 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10205 if (! get_subtarget (subtarget)
10206 || GET_MODE (subtarget) != operand_mode
10207 || ! safe_from_p (subtarget, inner, 1))
10210 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10213 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10214 size_int (bitnum), subtarget, ops_unsignedp);
10216 if (GET_MODE (op0) != mode)
10217 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10219 if ((code == EQ && ! invert) || (code == NE && invert))
10220 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10221 ops_unsignedp, OPTAB_LIB_WIDEN);
10223 /* Put the AND last so it can combine with more things. */
10224 if (bitnum != TYPE_PRECISION (type) - 1)
10225 op0 = expand_and (op0, const1_rtx, subtarget);
10230 /* Now see if we are likely to be able to do this. Return if not. */
10231 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10234 icode = setcc_gen_code[(int) code];
10235 if (icode == CODE_FOR_nothing
10236 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10238 /* We can only do this if it is one of the special cases that
10239 can be handled without an scc insn. */
10240 if ((code == LT && integer_zerop (arg1))
10241 || (! only_cheap && code == GE && integer_zerop (arg1)))
10243 else if (BRANCH_COST >= 0
10244 && ! only_cheap && (code == NE || code == EQ)
10245 && TREE_CODE (type) != REAL_TYPE
10246 && ((abs_optab->handlers[(int) operand_mode].insn_code
10247 != CODE_FOR_nothing)
10248 || (ffs_optab->handlers[(int) operand_mode].insn_code
10249 != CODE_FOR_nothing)))
10255 if (! get_subtarget (target)
10256 || GET_MODE (subtarget) != operand_mode
10257 || ! safe_from_p (subtarget, arg1, 1))
10260 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10261 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10264 target = gen_reg_rtx (mode);
10266 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10267 because, if the emit_store_flag does anything it will succeed and
10268 OP0 and OP1 will not be used subsequently. */
10270 result = emit_store_flag (target, code,
10271 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10272 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10273 operand_mode, unsignedp, 1);
10278 result = expand_binop (mode, xor_optab, result, const1_rtx,
10279 result, 0, OPTAB_LIB_WIDEN);
10283 /* If this failed, we have to do this with set/compare/jump/set code. */
10284 if (GET_CODE (target) != REG
10285 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10286 target = gen_reg_rtx (GET_MODE (target));
10288 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10289 result = compare_from_rtx (op0, op1, code, unsignedp,
10290 operand_mode, NULL_RTX);
10291 if (GET_CODE (result) == CONST_INT)
10292 return (((result == const0_rtx && ! invert)
10293 || (result != const0_rtx && invert))
10294 ? const0_rtx : const1_rtx);
10296 /* The code of RESULT may not match CODE if compare_from_rtx
10297 decided to swap its operands and reverse the original code.
10299 We know that compare_from_rtx returns either a CONST_INT or
10300 a new comparison code, so it is safe to just extract the
10301 code from RESULT. */
10302 code = GET_CODE (result);
10304 label = gen_label_rtx ();
10305 if (bcc_gen_fctn[(int) code] == 0)
10308 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10309 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10310 emit_label (label);
10316 /* Stubs in case we haven't got a casesi insn. */
10317 #ifndef HAVE_casesi
10318 # define HAVE_casesi 0
10319 # define gen_casesi(a, b, c, d, e) (0)
10320 # define CODE_FOR_casesi CODE_FOR_nothing
10323 /* If the machine does not have a case insn that compares the bounds,
10324 this means extra overhead for dispatch tables, which raises the
10325 threshold for using them. */
10326 #ifndef CASE_VALUES_THRESHOLD
10327 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10328 #endif /* CASE_VALUES_THRESHOLD */
10331 case_values_threshold ()
10333 return CASE_VALUES_THRESHOLD;
10336 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10337 0 otherwise (i.e. if there is no casesi instruction). */
10339 try_casesi (index_type, index_expr, minval, range,
10340 table_label, default_label)
10341 tree index_type, index_expr, minval, range;
10342 rtx table_label ATTRIBUTE_UNUSED;
10345 enum machine_mode index_mode = SImode;
10346 int index_bits = GET_MODE_BITSIZE (index_mode);
10347 rtx op1, op2, index;
10348 enum machine_mode op_mode;
10353 /* Convert the index to SImode. */
10354 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10356 enum machine_mode omode = TYPE_MODE (index_type);
10357 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10359 /* We must handle the endpoints in the original mode. */
10360 index_expr = build (MINUS_EXPR, index_type,
10361 index_expr, minval);
10362 minval = integer_zero_node;
10363 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10364 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10365 omode, 1, default_label);
10366 /* Now we can safely truncate. */
10367 index = convert_to_mode (index_mode, index, 0);
10371 if (TYPE_MODE (index_type) != index_mode)
10373 index_expr = convert (type_for_size (index_bits, 0),
10375 index_type = TREE_TYPE (index_expr);
10378 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10381 index = protect_from_queue (index, 0);
10382 do_pending_stack_adjust ();
10384 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10385 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10387 index = copy_to_mode_reg (op_mode, index);
10389 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10391 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10392 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10393 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10394 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10396 op1 = copy_to_mode_reg (op_mode, op1);
10398 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10400 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10401 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10402 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10403 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10405 op2 = copy_to_mode_reg (op_mode, op2);
10407 emit_jump_insn (gen_casesi (index, op1, op2,
10408 table_label, default_label));
10412 /* Attempt to generate a tablejump instruction; same concept. */
10413 #ifndef HAVE_tablejump
10414 #define HAVE_tablejump 0
10415 #define gen_tablejump(x, y) (0)
10418 /* Subroutine of the next function.
10420 INDEX is the value being switched on, with the lowest value
10421 in the table already subtracted.
10422 MODE is its expected mode (needed if INDEX is constant).
10423 RANGE is the length of the jump table.
10424 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10426 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10427 index value is out of range. */
10430 do_tablejump (index, mode, range, table_label, default_label)
10431 rtx index, range, table_label, default_label;
10432 enum machine_mode mode;
10436 /* Do an unsigned comparison (in the proper mode) between the index
10437 expression and the value which represents the length of the range.
10438 Since we just finished subtracting the lower bound of the range
10439 from the index expression, this comparison allows us to simultaneously
10440 check that the original index expression value is both greater than
10441 or equal to the minimum value of the range and less than or equal to
10442 the maximum value of the range. */
10444 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10447 /* If index is in range, it must fit in Pmode.
10448 Convert to Pmode so we can index with it. */
10450 index = convert_to_mode (Pmode, index, 1);
10452 /* Don't let a MEM slip thru, because then INDEX that comes
10453 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10454 and break_out_memory_refs will go to work on it and mess it up. */
10455 #ifdef PIC_CASE_VECTOR_ADDRESS
10456 if (flag_pic && GET_CODE (index) != REG)
10457 index = copy_to_mode_reg (Pmode, index);
10460 /* If flag_force_addr were to affect this address
10461 it could interfere with the tricky assumptions made
10462 about addresses that contain label-refs,
10463 which may be valid only very near the tablejump itself. */
10464 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10465 GET_MODE_SIZE, because this indicates how large insns are. The other
10466 uses should all be Pmode, because they are addresses. This code
10467 could fail if addresses and insns are not the same size. */
10468 index = gen_rtx_PLUS (Pmode,
10469 gen_rtx_MULT (Pmode, index,
10470 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10471 gen_rtx_LABEL_REF (Pmode, table_label));
10472 #ifdef PIC_CASE_VECTOR_ADDRESS
10474 index = PIC_CASE_VECTOR_ADDRESS (index);
10477 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10478 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10479 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10480 RTX_UNCHANGING_P (vector) = 1;
10481 convert_move (temp, vector, 0);
10483 emit_jump_insn (gen_tablejump (temp, table_label));
10485 /* If we are generating PIC code or if the table is PC-relative, the
10486 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10487 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10492 try_tablejump (index_type, index_expr, minval, range,
10493 table_label, default_label)
10494 tree index_type, index_expr, minval, range;
10495 rtx table_label, default_label;
10499 if (! HAVE_tablejump)
10502 index_expr = fold (build (MINUS_EXPR, index_type,
10503 convert (index_type, index_expr),
10504 convert (index_type, minval)));
10505 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10507 index = protect_from_queue (index, 0);
10508 do_pending_stack_adjust ();
10510 do_tablejump (index, TYPE_MODE (index_type),
10511 convert_modes (TYPE_MODE (index_type),
10512 TYPE_MODE (TREE_TYPE (range)),
10513 expand_expr (range, NULL_RTX,
10515 TREE_UNSIGNED (TREE_TYPE (range))),
10516 table_label, default_label);