/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
extern struct obstack permanent_obstack;

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
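/* Illustrative note (added; not in the original sources): with the
   default MOVE_RATIO of 15 and 4-byte words, a 64-byte aligned copy
   needs 16 word moves, so MOVE_BY_PIECES_P is false and a movstr
   pattern or library call is preferred; an 8-byte copy needs only
   2 moves and is expanded inline by move_by_pieces.  */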
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }
/* This is run at the start of compiling a function.  */

  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;

  apply_args_value = 0;

  struct expr_status *p;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function ()

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (var, body)

  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
protect_from_queue (x, modify)

  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
    {
      rtx y = XEXP (x, 0);
      rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

      if (QUEUED_INSN (y))
        {
          rtx temp = gen_reg_rtx (GET_MODE (x));

          emit_insn_before (gen_move_insn (temp, new),
                            QUEUED_INSN (y));
          return temp;
        }

      /* Copy the address into a pseudo, so that the returned value
         remains correct across calls to emit_queue.  */
      return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
    }

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */
  if (code == MEM)
    {
      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))
        {
          x = copy_rtx (x);
          XEXP (x, 0) = tem;
        }
    }
  else if (code == PLUS || code == MULT)
    {
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
        {
          x = copy_rtx (x);
          XEXP (x, 0) = new0;
          XEXP (x, 1) = new1;
        }
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
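/* Illustrative sketch (added; OP0 and OP1 are placeholders): callers
   protect operands immediately before emitting the insn that uses
   them,

     op0 = protect_from_queue (op0, 0);
     op1 = protect_from_queue (op1, 0);
     emit_insn (gen_add2_insn (op0, op1));

   and never hold the protected value across an emit_queue call, per
   the warning above protect_from_queue.  */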
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  enum rtx_code code = GET_CODE (x);

      return queued_subexp_p (XEXP (x, 0));

      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));

/* Perform all the pending incrementations.  */

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
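/* Illustrative note (added): the queue ties the routines above
   together.  For a post-increment use of a variable, expansion does
   roughly

     q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
     ... build uses of protect_from_queue (q, 0) ...
     emit_queue ();

   so the increment insn is emitted only after all uses of the old
   value have been generated.  */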
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (to, from, unsignedp)

  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif
          libcall = extendsfdf2_libfunc;

          libcall = extendsfxf2_libfunc;

          libcall = extendsftf2_libfunc;

          libcall = truncdfsf2_libfunc;

          libcall = extenddfxf2_libfunc;

          libcall = extenddftf2_libfunc;

          libcall = truncxfsf2_libfunc;

          libcall = truncxfdf2_libfunc;

          libcall = trunctfsf2_libfunc;

          libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                    from));
      return;
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_zero_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
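/* Illustrative sketch (added; FROM is assumed to hold a QImode value):
   the usual way to use convert_move is to widen into a fresh pseudo,

     rtx to = gen_reg_rtx (SImode);
     convert_move (to, from, unsignedp);

   which emits a single extendqisi2/zero_extendqisi2 insn when the
   target provides one and otherwise falls back to the strategies
   above.  convert_to_mode below packages exactly this pattern.  */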
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;

  return convert_modes (mode, VOIDmode, x, unsignedp);

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
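  /* Worked example (added for illustration): with a 32-bit
     HOST_WIDE_INT, converting the QImode constant (const_int -1) to
     an unsigned 64-bit mode takes this path: VAL is masked down to
     0xff and the high-order word is forced to zero, so the result is
     the constant 255 rather than the sign-extended all-ones value
     gen_lowpart would produce.  */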
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
/* This macro determines the largest unit size that move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
move_by_pieces (to, from, len, align)
     unsigned HOST_WIDE_INT len;

  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.from_addr = from_addr;

  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }

  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }
  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;

  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
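/* Worked example (added): for L = 10 bytes, word-aligned, on a
   32-bit target, the loop counts 2 SImode moves (8 bytes) plus
   1 HImode move (2 bytes), i.e. 3 insns; MOVE_BY_PIECES_P compares
   that count against MOVE_RATIO to decide whether the copy stays
   inline.  */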
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;

  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

emit_block_move (x, y, size)

#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */
      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))

          enum insn_code code = movstr_optab[(int) mode];
          insn_operand_predicate_fn pred;

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                  || (*pred) (x, BLKmode))
              && ((pred = insn_data[(int) code].operand[1].predicate) == 0
                  || (*pred) (y, BLKmode))
              && ((pred = insn_data[(int) code].operand[3].predicate) == 0
                  || (*pred) (opalign, VOIDmode)))

              rtx last = get_last_insn ();

              op2 = convert_to_mode (mode, size, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);

              delete_insns_since (last);
    /* X, Y, or SIZE may have been passed through protect_from_queue.

       It is unsafe to save the value generated by protect_from_queue
       and reuse it later.  Consider what happens if emit_queue is
       called before the return value from protect_from_queue is used.

       Expansion of the CALL_EXPR below will call emit_queue before
       we are finished emitting RTL for argument setup.  So if we are
       not careful we could get the wrong value for an argument.

       To avoid this problem we go ahead and emit code to copy X, Y &
       SIZE into new pseudos.  We can then place those new pseudos
       into an RTL_EXPR and use them later, even after a call to
       emit_queue.

       Note this is not strictly needed for library calls since they
       do not call emit_queue before loading their arguments.  However,
       we may need to have library calls call emit_queue in the future
       since failing to do so could cause problems for targets which
       define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

    x = copy_to_mode_reg (Pmode, XEXP (x, 0));
    y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
    size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
    size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                            TREE_UNSIGNED (integer_type_node));
    size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif
#ifdef TARGET_MEM_FUNCTIONS
    /* It is incorrect to use the libcall calling conventions to call
       memcpy in this context.

       This could be a user call to memcpy and the user may wish to
       examine the return value from memcpy.

       For targets where libcalls and normal calls have different conventions
       for returning pointers, we could end up generating incorrect code.

       So instead of using a libcall sequence we build up a suitable
       CALL_EXPR and expand the call in the normal fashion.  */
    if (fn == NULL_TREE)
      {
        /* This was copied from except.c, I don't know if all this is
           necessary in this context or not.  */
        fn = get_identifier ("memcpy");
        fntype = build_pointer_type (void_type_node);
        fntype = build_function_type (fntype, NULL_TREE);
        fn = build_decl (FUNCTION_DECL, fn, fntype);
        ggc_add_tree_root (&fn, 1);
        DECL_EXTERNAL (fn) = 1;
        TREE_PUBLIC (fn) = 1;
        DECL_ARTIFICIAL (fn) = 1;
        TREE_NOTHROW (fn) = 1;
        make_decl_rtl (fn, NULL);
        assemble_external (fn);
      }

    /* We need to make an argument list for the function call.

       memcpy has three arguments, the first two are void * addresses and
       the last is a size_t byte count for the copy.  */
    arg_list
      = build_tree_list (NULL_TREE,
                         make_tree (build_pointer_type (void_type_node), x));
    TREE_CHAIN (arg_list)
      = build_tree_list (NULL_TREE,
                         make_tree (build_pointer_type (void_type_node), y));
    TREE_CHAIN (TREE_CHAIN (arg_list))
      = build_tree_list (NULL_TREE, make_tree (sizetype, size));
    TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
    /* Now we have to build up the CALL_EXPR itself.  */
    call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
    call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                       call_expr, arg_list, NULL_TREE);
    TREE_SIDE_EFFECTS (call_expr) = 1;

    retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
    emit_library_call (bcopy_libfunc, LCT_NORMAL,
                       VOIDmode, 3, y, Pmode, x, Pmode,
                       convert_to_mode (TYPE_MODE (integer_type_node), size,
                                        TREE_UNSIGNED (integer_type_node)),
                       TYPE_MODE (integer_type_node));
#endif

    /* If we are initializing a readonly value, show the above call
       clobbered it.  Otherwise, a load from it may erroneously be hoisted
       from a loop.  */
    if (RTX_UNCHANGING_P (x))
      emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
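/* Illustrative sketch (added; DST_ADDR, SRC_ADDR and NBYTES are
   placeholders): a typical caller wraps two addresses in BLKmode MEMs
   and lets emit_block_move choose between inline moves, a movstr
   pattern, and a memcpy/bcopy call:

     rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
     rtx src = gen_rtx_MEM (BLKmode, src_addr);
     emit_block_move (dst, src, GEN_INT (nbytes));  */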
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;

#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

move_block_from_reg (regno, x, nregs, size)

#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   will be needed.  */
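/* Illustrative note (added; not from the original sources): a typical
   PARALLEL for DST here has the shape

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   where each element pairs a destination register with the byte
   offset within SRC that it should be loaded from.  */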
emit_group_load (dst, orig_src, ssize)

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          if ((bytepos == 0
               && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
              || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
                  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
            {
              tmps[i] = XEXP (src, bytepos != 0);
              if (! CONSTANT_P (tmps[i])
                  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             0, 1, NULL_RTX, mode, mode, ssize);
            }
          else if (bytepos == 0)
            {
              rtx mem = assign_stack_temp (GET_MODE (src),
                                           GET_MODE_SIZE (GET_MODE (src)), 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
          else
            abort ();
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

emit_group_store (orig_dst, src, ssize)

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }
  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }
  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);

/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

copy_blkmode_from_reg (tgtblk, srcreg, type)

  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
        srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
        srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
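  /* Worked example (added): for a 3-byte structure and 4-byte words,
     bytes % UNITS_PER_WORD == 3, so the correction is 32 - 3 * 8 = 8;
     the copy below skips those 8 empty high-order bits of the first
     word on a big-endian machine.  */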
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     BLKmode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

use_reg (call_fusage, reg)
     rtx *call_fusage, reg;

  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

use_regs (call_fusage, regno, nregs)

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

use_group_regs (call_fusage, regs)

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
2309 can_store_by_pieces (len, constfun, constfundata, align)
2310 unsigned HOST_WIDE_INT len;
2311 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2315 unsigned HOST_WIDE_INT max_size, l;
2316 HOST_WIDE_INT offset = 0;
2317 enum machine_mode mode, tmode;
2318 enum insn_code icode;
2322 if (! MOVE_BY_PIECES_P (len, align))
2325 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2326 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2327 align = MOVE_MAX * BITS_PER_UNIT;
2329 /* We would first store what we can in the largest integer mode, then go to
2330 successively smaller modes. */
2333 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2338 max_size = MOVE_MAX_PIECES + 1;
2339 while (max_size > 1)
2341 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2342 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2343 if (GET_MODE_SIZE (tmode) < max_size)
2346 if (mode == VOIDmode)
2349 icode = mov_optab->handlers[(int) mode].insn_code;
2350 if (icode != CODE_FOR_nothing
2351 && align >= GET_MODE_ALIGNMENT (mode))
2353 unsigned int size = GET_MODE_SIZE (mode);
2360 cst = (*constfun) (constfundata, offset, mode);
2361 if (!LEGITIMATE_CONSTANT_P (cst))
2371 max_size = GET_MODE_SIZE (mode);
2374 /* The code above should have handled everything. */
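/* Worked example (illustration only): assuming MOVE_MAX_PIECES == 4 on
   a 32-bit target, a length of 7 decomposes into one SImode piece
   (4 bytes), one HImode piece (2 bytes) and one QImode piece (1 byte);
   the loop above merely verifies that each such piece has a usable mov
   pattern, sufficient alignment and a legitimate constant from
   CONSTFUN, without emitting anything.  */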
2382 /* Generate several move instructions to store LEN bytes generated by
2383 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2384 pointer which will be passed as argument in every CONSTFUN call.
2385 ALIGN is maximum alignment we can assume. */
2388 store_by_pieces (to, len, constfun, constfundata, align)
2390 unsigned HOST_WIDE_INT len;
2391 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2395 struct store_by_pieces data;
2397 if (! MOVE_BY_PIECES_P (len, align))
2399 to = protect_from_queue (to, 1);
2400 data.constfun = constfun;
2401 data.constfundata = constfundata;
2404 store_by_pieces_1 (&data, align);
2407 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2408 rtx with BLKmode). The caller must pass TO through protect_from_queue
2409 before calling. ALIGN is maximum alignment we can assume. */
2412 clear_by_pieces (to, len, align)
2414 unsigned HOST_WIDE_INT len;
2417 struct store_by_pieces data;
2419 data.constfun = clear_by_pieces_1;
2420 data.constfundata = NULL;
2423 store_by_pieces_1 (&data, align);
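#if 0
  /* Hypothetical usage sketch (not compiled; BLK is a placeholder for a
     word-aligned BLKmode MEM).  On a 32-bit target this typically
     expands into four SImode stores of const0_rtx.  */
  clear_by_pieces (blk, (unsigned HOST_WIDE_INT) 16, 32);
#endif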
2426 /* Callback routine for clear_by_pieces.
2427 Return const0_rtx unconditionally. */
2430 clear_by_pieces_1 (data, offset, mode)
2431 PTR data ATTRIBUTE_UNUSED;
2432 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2433 enum machine_mode mode ATTRIBUTE_UNUSED;
2438 /* Subroutine of clear_by_pieces and store_by_pieces.
2439 Generate several move instructions to store LEN bytes of block TO. (A MEM
2440 rtx with BLKmode). The caller must pass TO through protect_from_queue
2441 before calling. ALIGN is maximum alignment we can assume. */
2444 store_by_pieces_1 (data, align)
2445 struct store_by_pieces *data;
2448 rtx to_addr = XEXP (data->to, 0);
2449 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2450 enum machine_mode mode = VOIDmode, tmode;
2451 enum insn_code icode;
2454 data->to_addr = to_addr;
2456 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2457 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2459 data->explicit_inc_to = 0;
2461 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2463 data->offset = data->len;
2465 /* If storing requires more than two move insns,
2466 copy addresses to registers (to make displacements shorter)
2467 and use post-increment if available. */
2468 if (!data->autinc_to
2469 && move_by_pieces_ninsns (data->len, align) > 2)
2471 /* Determine the main mode we'll be using. */
2472 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2473 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2474 if (GET_MODE_SIZE (tmode) < max_size)
2477 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2479 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2480 data->autinc_to = 1;
2481 data->explicit_inc_to = -1;
2484 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2485 && ! data->autinc_to)
2487 data->to_addr = copy_addr_to_reg (to_addr);
2488 data->autinc_to = 1;
2489 data->explicit_inc_to = 1;
2492 if (!data->autinc_to && CONSTANT_P (to_addr))
2493 data->to_addr = copy_addr_to_reg (to_addr);
2496 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2497 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2498 align = MOVE_MAX * BITS_PER_UNIT;
2500 /* First store what we can in the largest integer mode, then go to
2501 successively smaller modes. */
2503 while (max_size > 1)
2505 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2506 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2507 if (GET_MODE_SIZE (tmode) < max_size)
2510 if (mode == VOIDmode)
2513 icode = mov_optab->handlers[(int) mode].insn_code;
2514 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2515 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2517 max_size = GET_MODE_SIZE (mode);
2520 /* The code above should have handled everything. */
2525 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2526 with move instructions for mode MODE. GENFUN is the gen_... function
2527 to make a move insn for that mode. DATA has all the other info. */
2530 store_by_pieces_2 (genfun, mode, data)
2531 rtx (*genfun) PARAMS ((rtx, ...));
2532 enum machine_mode mode;
2533 struct store_by_pieces *data;
2535 unsigned int size = GET_MODE_SIZE (mode);
2538 while (data->len >= size)
2541 data->offset -= size;
2543 if (data->autinc_to)
2544 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2547 to1 = adjust_address (data->to, mode, data->offset);
2549 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2550 emit_insn (gen_add2_insn (data->to_addr,
2551 GEN_INT (-(HOST_WIDE_INT) size)));
2553 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2554 emit_insn ((*genfun) (to1, cst));
2556 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2557 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2559 if (! data->reverse)
2560 data->offset += size;
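/* Worked trace (illustration only): for data->len == 6 with no
   auto-increment and data->reverse == 0, store_by_pieces_1 first calls
   this routine with SImode (one 4-byte store at offset 0, len drops to
   2), then with HImode (one 2-byte store at offset 4, len drops to 0);
   QImode is never needed.  */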
2566 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2567 its length in bytes. */
2570 clear_storage (object, size)
2574 #ifdef TARGET_MEM_FUNCTIONS
2576 tree call_expr, arg_list;
2579 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2580 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2582 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2583 just move a zero. Otherwise, do this a piece at a time. */
2584 if (GET_MODE (object) != BLKmode
2585 && GET_CODE (size) == CONST_INT
2586 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2587 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2590 object = protect_from_queue (object, 1);
2591 size = protect_from_queue (size, 0);
2593 if (GET_CODE (size) == CONST_INT
2594 && MOVE_BY_PIECES_P (INTVAL (size), align))
2595 clear_by_pieces (object, INTVAL (size), align);
2598 /* Try the most limited insn first, because there's no point
2599 including more than one in the machine description unless
2600 the more limited one has some advantage. */
2602 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2603 enum machine_mode mode;
2605 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2606 mode = GET_MODE_WIDER_MODE (mode))
2608 enum insn_code code = clrstr_optab[(int) mode];
2609 insn_operand_predicate_fn pred;
2611 if (code != CODE_FOR_nothing
2612 /* We don't need MODE to be narrower than
2613 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2614 the mode mask, as it is returned by the macro, it will
2615 definitely be less than the actual mode mask. */
2616 && ((GET_CODE (size) == CONST_INT
2617 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2618 <= (GET_MODE_MASK (mode) >> 1)))
2619 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2620 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2621 || (*pred) (object, BLKmode))
2622 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2623 || (*pred) (opalign, VOIDmode)))
2626 rtx last = get_last_insn ();
2629 op1 = convert_to_mode (mode, size, 1);
2630 pred = insn_data[(int) code].operand[1].predicate;
2631 if (pred != 0 && ! (*pred) (op1, mode))
2632 op1 = copy_to_mode_reg (mode, op1);
2634 pat = GEN_FCN ((int) code) (object, op1, opalign);
2641 delete_insns_since (last);
2645 /* OBJECT or SIZE may have been passed through protect_from_queue.
2647 It is unsafe to save the value generated by protect_from_queue
2648 and reuse it later. Consider what happens if emit_queue is
2649 called before the return value from protect_from_queue is used.
2651 Expansion of the CALL_EXPR below will call emit_queue before
2652 we are finished emitting RTL for argument setup. So if we are
2653 not careful we could get the wrong value for an argument.
2655 To avoid this problem we go ahead and emit code to copy OBJECT
2656 and SIZE into new pseudos. We can then place those new pseudos
2657 into an RTL_EXPR and use them later, even after a call to emit_queue.
2660 Note this is not strictly needed for library calls since they
2661 do not call emit_queue before loading their arguments. However,
2662 we may need to have library calls call emit_queue in the future
2663 since failing to do so could cause problems for targets which
2664 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2665 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2667 #ifdef TARGET_MEM_FUNCTIONS
2668 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2670 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2671 TREE_UNSIGNED (integer_type_node));
2672 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2675 #ifdef TARGET_MEM_FUNCTIONS
2676 /* It is incorrect to use the libcall calling conventions to call
2677 memset in this context.
2679 This could be a user call to memset and the user may wish to
2680 examine the return value from memset.
2682 For targets where libcalls and normal calls have different
2683 conventions for returning pointers, we could end up generating incorrect code.
2686 So instead of using a libcall sequence we build up a suitable
2687 CALL_EXPR and expand the call in the normal fashion. */
2688 if (fn == NULL_TREE)
2692 /* This was copied from except.c; I don't know whether all of this is
2693 necessary in this context or not. */
2694 fn = get_identifier ("memset");
2695 fntype = build_pointer_type (void_type_node);
2696 fntype = build_function_type (fntype, NULL_TREE);
2697 fn = build_decl (FUNCTION_DECL, fn, fntype);
2698 ggc_add_tree_root (&fn, 1);
2699 DECL_EXTERNAL (fn) = 1;
2700 TREE_PUBLIC (fn) = 1;
2701 DECL_ARTIFICIAL (fn) = 1;
2702 TREE_NOTHROW (fn) = 1;
2703 make_decl_rtl (fn, NULL);
2704 assemble_external (fn);
2707 /* We need to make an argument list for the function call.
2709 memset has three arguments, the first is a void * address, the
2710 second an integer with the initialization value, the last is a
2711 size_t byte count for the copy. */
2713 = build_tree_list (NULL_TREE,
2714 make_tree (build_pointer_type (void_type_node),
2716 TREE_CHAIN (arg_list)
2717 = build_tree_list (NULL_TREE,
2718 make_tree (integer_type_node, const0_rtx));
2719 TREE_CHAIN (TREE_CHAIN (arg_list))
2720 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2721 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2723 /* Now we have to build up the CALL_EXPR itself. */
2724 call_expr = build1 (ADDR_EXPR,
2725 build_pointer_type (TREE_TYPE (fn)), fn);
2726 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2727 call_expr, arg_list, NULL_TREE);
2728 TREE_SIDE_EFFECTS (call_expr) = 1;
2730 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2732 emit_library_call (bzero_libfunc, LCT_NORMAL,
2733 VOIDmode, 2, object, Pmode, size,
2734 TYPE_MODE (integer_type_node));
2737 /* If we are initializing a readonly value, show the above call
2738 clobbered it. Otherwise, a load from it may erroneously be
2739 hoisted from a loop. */
2740 if (RTX_UNCHANGING_P (object))
2741 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
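#if 0
  /* Hypothetical usage sketch (not compiled; OBJ is a placeholder for a
     BLKmode MEM).  clear_storage tries, in order: a plain zero move for
     non-BLKmode objects, clear_by_pieces for small constant sizes, a
     clrstrM pattern if the target provides one, and finally a call to
     memset (or bzero without TARGET_MEM_FUNCTIONS).  */
  clear_storage (obj, GEN_INT (32));
#endif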
2748 /* Generate code to copy Y into X.
2749 Both Y and X must have the same mode, except that
2750 Y can be a constant with VOIDmode.
2751 This mode cannot be BLKmode; use emit_block_move for that.
2753 Return the last instruction emitted. */
2756 emit_move_insn (x, y)
2759 enum machine_mode mode = GET_MODE (x);
2760 rtx y_cst = NULL_RTX;
2763 x = protect_from_queue (x, 1);
2764 y = protect_from_queue (y, 0);
2766 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2769 /* Never force constant_p_rtx to memory. */
2770 if (GET_CODE (y) == CONSTANT_P_RTX)
2772 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2775 y = force_const_mem (mode, y);
2778 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2780 if (GET_CODE (x) == MEM
2781 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2782 && ! push_operand (x, GET_MODE (x)))
2784 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2785 x = validize_mem (x);
2787 if (GET_CODE (y) == MEM
2788 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2790 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2791 y = validize_mem (y);
2793 if (mode == BLKmode)
2796 last_insn = emit_move_insn_1 (x, y);
2798 if (y_cst && GET_CODE (x) == REG)
2799 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
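#if 0
  /* Hypothetical usage sketch (not compiled): move a constant into a
     fresh SImode pseudo.  If the constant were not
     LEGITIMATE_CONSTANT_P it would be spilled to the constant pool
     above, and the REG_EQUAL note added here would preserve the
     original value for later passes.  */
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
#endif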
2804 /* Low level part of emit_move_insn.
2805 Called just like emit_move_insn, but assumes X and Y
2806 are basically valid. */
2809 emit_move_insn_1 (x, y)
2812 enum machine_mode mode = GET_MODE (x);
2813 enum machine_mode submode;
2814 enum mode_class class = GET_MODE_CLASS (mode);
2816 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2819 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2821 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2823 /* Expand complex moves by moving real part and imag part, if possible. */
2824 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2825 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2827 (class == MODE_COMPLEX_INT
2828 ? MODE_INT : MODE_FLOAT),
2830 && (mov_optab->handlers[(int) submode].insn_code
2831 != CODE_FOR_nothing))
2833 /* Don't split destination if it is a stack push. */
2834 int stack = push_operand (x, GET_MODE (x));
2836 #ifdef PUSH_ROUNDING
2837 /* In case we output to the stack, but the size is smaller than the
2838 machine can push exactly, we need to use move instructions. */
2840 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2841 != GET_MODE_SIZE (submode)))
2844 HOST_WIDE_INT offset1, offset2;
2846 /* Do not use anti_adjust_stack, since we don't want to update
2847 stack_pointer_delta. */
2848 temp = expand_binop (Pmode,
2849 #ifdef STACK_GROWS_DOWNWARD
2857 (GET_MODE_SIZE (GET_MODE (x)))),
2858 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2860 if (temp != stack_pointer_rtx)
2861 emit_move_insn (stack_pointer_rtx, temp);
2863 #ifdef STACK_GROWS_DOWNWARD
2865 offset2 = GET_MODE_SIZE (submode);
2867 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2868 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2869 + GET_MODE_SIZE (submode));
2872 emit_move_insn (change_address (x, submode,
2873 gen_rtx_PLUS (Pmode,
2875 GEN_INT (offset1))),
2876 gen_realpart (submode, y));
2877 emit_move_insn (change_address (x, submode,
2878 gen_rtx_PLUS (Pmode,
2880 GEN_INT (offset2))),
2881 gen_imagpart (submode, y));
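/* Worked example (illustration only): pushing a DCmode value whose
   DFmode halves cannot be pushed exactly (PUSH_ROUNDING pads them)
   takes this branch: the stack pointer is adjusted by hand and, with
   GET_MODE_SIZE (DFmode) == 8 on a downward-growing stack, the real
   part is stored at offset1 == 0 and the imaginary part at
   offset2 == 8, preserving the real-before-imaginary layout noted
   below.  */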
2885 /* If this is a stack push, push the highpart first, so it
2886 will be in the argument order.
2888 In that case, change_address is used only to convert
2889 the mode, not to change the address. */
2892 /* Note that the real part always precedes the imag part in memory
2893 regardless of machine's endianness. */
2894 #ifdef STACK_GROWS_DOWNWARD
2895 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2896 (gen_rtx_MEM (submode, XEXP (x, 0)),
2897 gen_imagpart (submode, y)));
2898 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2899 (gen_rtx_MEM (submode, XEXP (x, 0)),
2900 gen_realpart (submode, y)));
2902 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2903 (gen_rtx_MEM (submode, XEXP (x, 0)),
2904 gen_realpart (submode, y)));
2905 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2906 (gen_rtx_MEM (submode, XEXP (x, 0)),
2907 gen_imagpart (submode, y)));
2912 rtx realpart_x, realpart_y;
2913 rtx imagpart_x, imagpart_y;
2915 /* If this is a complex value with each part being smaller than a
2916 word, the usual calling sequence will likely pack the pieces into
2917 a single register. Unfortunately, SUBREG of hard registers only
2918 deals in terms of words, so we have a problem converting input
2919 arguments to the CONCAT of two registers that is used elsewhere
2920 for complex values. If this is before reload, we can copy it into
2921 memory and reload. FIXME, we should see about using extract and
2922 insert on integer registers, but complex short and complex char
2923 variables should be rarely used. */
2924 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2925 && (reload_in_progress | reload_completed) == 0)
2928 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2930 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2932 if (packed_dest_p || packed_src_p)
2934 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2935 ? MODE_FLOAT : MODE_INT);
2937 enum machine_mode reg_mode
2938 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2940 if (reg_mode != BLKmode)
2942 rtx mem = assign_stack_temp (reg_mode,
2943 GET_MODE_SIZE (mode), 0);
2944 rtx cmem = adjust_address (mem, mode, 0);
2947 = N_("function using short complex types cannot be inline");
2951 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2953 emit_move_insn_1 (cmem, y);
2954 return emit_move_insn_1 (sreg, mem);
2958 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2960 emit_move_insn_1 (mem, sreg);
2961 return emit_move_insn_1 (x, cmem);
2967 realpart_x = gen_realpart (submode, x);
2968 realpart_y = gen_realpart (submode, y);
2969 imagpart_x = gen_imagpart (submode, x);
2970 imagpart_y = gen_imagpart (submode, y);
2972 /* Show the output dies here. This is necessary for SUBREGs
2973 of pseudos since we cannot track their lifetimes correctly;
2974 hard regs shouldn't appear here except as return values.
2975 We never want to emit such a clobber after reload. */
2977 && ! (reload_in_progress || reload_completed)
2978 && (GET_CODE (realpart_x) == SUBREG
2979 || GET_CODE (imagpart_x) == SUBREG))
2980 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2982 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2983 (realpart_x, realpart_y));
2984 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2985 (imagpart_x, imagpart_y));
2988 return get_last_insn ();
2991 /* This will handle any multi-word mode that lacks a move_insn pattern.
2992 However, you will get better code if you define such patterns,
2993 even if they must turn into multiple assembler instructions. */
2994 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3001 #ifdef PUSH_ROUNDING
3003 /* If X is a push on the stack, do the push now and replace
3004 X with a reference to the stack pointer. */
3005 if (push_operand (x, GET_MODE (x)))
3010 /* Do not use anti_adjust_stack, since we don't want to update
3011 stack_pointer_delta. */
3012 temp = expand_binop (Pmode,
3013 #ifdef STACK_GROWS_DOWNWARD
3021 (GET_MODE_SIZE (GET_MODE (x)))),
3022 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3024 if (temp != stack_pointer_rtx)
3025 emit_move_insn (stack_pointer_rtx, temp);
3027 code = GET_CODE (XEXP (x, 0));
3029 /* Just hope that small offsets off SP are OK. */
3030 if (code == POST_INC)
3031 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3032 GEN_INT (-((HOST_WIDE_INT)
3033 GET_MODE_SIZE (GET_MODE (x)))));
3034 else if (code == POST_DEC)
3035 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3036 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3038 temp = stack_pointer_rtx;
3040 x = change_address (x, VOIDmode, temp);
3044 /* If we are in reload, see if either operand is a MEM whose address
3045 is scheduled for replacement. */
3046 if (reload_in_progress && GET_CODE (x) == MEM
3047 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3048 x = replace_equiv_address_nv (x, inner);
3049 if (reload_in_progress && GET_CODE (y) == MEM
3050 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3051 y = replace_equiv_address_nv (y, inner);
3057 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3060 rtx xpart = operand_subword (x, i, 1, mode);
3061 rtx ypart = operand_subword (y, i, 1, mode);
3063 /* If we can't get a part of Y, put Y into memory if it is a
3064 constant. Otherwise, force it into a register. If we still
3065 can't get a part of Y, abort. */
3066 if (ypart == 0 && CONSTANT_P (y))
3068 y = force_const_mem (mode, y);
3069 ypart = operand_subword (y, i, 1, mode);
3071 else if (ypart == 0)
3072 ypart = operand_subword_force (y, i, mode);
3074 if (xpart == 0 || ypart == 0)
3077 need_clobber |= (GET_CODE (xpart) == SUBREG);
3079 last_insn = emit_move_insn (xpart, ypart);
3082 seq = gen_sequence ();
3085 /* Show the output dies here. This is necessary for SUBREGs
3086 of pseudos since we cannot track their lifetimes correctly;
3087 hard regs shouldn't appear here except as return values.
3088 We never want to emit such a clobber after reload. */
3090 && ! (reload_in_progress || reload_completed)
3091 && need_clobber != 0)
3092 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3102 /* Pushing data onto the stack. */
3104 /* Push a block of length SIZE (perhaps variable)
3105 and return an rtx to address the beginning of the block.
3106 Note that it is not possible for the value returned to be a QUEUED.
3107 The value may be virtual_outgoing_args_rtx.
3109 EXTRA is the number of bytes of padding to push in addition to SIZE.
3110 BELOW nonzero means this padding comes at low addresses;
3111 otherwise, the padding comes at high addresses. */
3114 push_block (size, extra, below)
3120 size = convert_modes (Pmode, ptr_mode, size, 1);
3121 if (CONSTANT_P (size))
3122 anti_adjust_stack (plus_constant (size, extra));
3123 else if (GET_CODE (size) == REG && extra == 0)
3124 anti_adjust_stack (size);
3127 temp = copy_to_mode_reg (Pmode, size);
3129 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3130 temp, 0, OPTAB_LIB_WIDEN);
3131 anti_adjust_stack (temp);
3134 #ifndef STACK_GROWS_DOWNWARD
3140 temp = virtual_outgoing_args_rtx;
3141 if (extra != 0 && below)
3142 temp = plus_constant (temp, extra);
3146 if (GET_CODE (size) == CONST_INT)
3147 temp = plus_constant (virtual_outgoing_args_rtx,
3148 -INTVAL (size) - (below ? 0 : extra));
3149 else if (extra != 0 && !below)
3150 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3151 negate_rtx (Pmode, plus_constant (size, extra)));
3153 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3154 negate_rtx (Pmode, size));
3157 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
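/* Worked example (illustration only): with SIZE == GEN_INT (16) and
   EXTRA == 0, push_block anti-adjusts the stack by 16 bytes and, on a
   STACK_GROWS_DOWNWARD target, returns an address 16 bytes below
   virtual_outgoing_args_rtx, i.e. the low end of the new block.  */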
3160 #ifdef PUSH_ROUNDING
3162 /* Emit single push insn. */
3165 emit_single_push_insn (mode, x, type)
3167 enum machine_mode mode;
3171 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3173 enum insn_code icode;
3174 insn_operand_predicate_fn pred;
3176 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3177 /* If there is a push pattern, use it. Otherwise try the old way of
3178 throwing a MEM representing the push operation at the move expander. */
3179 icode = push_optab->handlers[(int) mode].insn_code;
3180 if (icode != CODE_FOR_nothing)
3182 if (((pred = insn_data[(int) icode].operand[0].predicate)
3183 && !((*pred) (x, mode))))
3184 x = force_reg (mode, x);
3185 emit_insn (GEN_FCN (icode) (x));
3188 if (GET_MODE_SIZE (mode) == rounded_size)
3189 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3192 #ifdef STACK_GROWS_DOWNWARD
3193 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3194 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3196 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3197 GEN_INT (rounded_size));
3199 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3202 dest = gen_rtx_MEM (mode, dest_addr);
3206 set_mem_attributes (dest, type, 1);
3208 if (flag_optimize_sibling_calls)
3209 /* Function incoming arguments may overlap with sibling call
3210 outgoing arguments and we cannot allow reordering of reads
3211 from function arguments with stores to outgoing arguments
3212 of sibling calls. */
3213 set_mem_alias_set (dest, 0);
3215 emit_move_insn (dest, x);
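/* Illustrative result (not original code): on a STACK_GROWS_DOWNWARD
   target with STACK_PUSH_CODE == PRE_DEC and no pushM pattern, pushing
   an SImode X whose size equals its PUSH_ROUNDING emits

     (set (mem:SI (pre_dec (reg sp))) X)

   whereas a mode that needs rounding goes through the PRE_MODIFY
   address built above instead.  */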
3219 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3221 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3223 SIZE is an rtx for the size of data to be copied (in bytes),
3224 needed only if X is BLKmode.
3226 ALIGN (in bits) is maximum alignment we can assume.
3228 If PARTIAL and REG are both nonzero, then copy that many of the first
3229 words of X into registers starting with REG, and push the rest of X.
3230 The amount of space pushed is decreased by PARTIAL words,
3231 rounded *down* to a multiple of PARM_BOUNDARY.
3232 REG must be a hard register in this case.
3233 If REG is zero but PARTIAL is not, take all other actions for an
3234 argument partially in registers, but do not actually load any
3237 EXTRA is the amount in bytes of extra space to leave next to this arg.
3238 This is ignored if an argument block has already been allocated.
3240 On a machine that lacks real push insns, ARGS_ADDR is the address of
3241 the bottom of the argument block for this call. We use indexing off there
3242 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3243 argument block has not been preallocated.
3245 ARGS_SO_FAR is the size of args previously pushed for this call.
3247 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3248 for arguments passed in registers. If nonzero, it will be the number
3249 of bytes required. */
3252 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3253 args_addr, args_so_far, reg_parm_stack_space,
3256 enum machine_mode mode;
3265 int reg_parm_stack_space;
3269 enum direction stack_direction
3270 #ifdef STACK_GROWS_DOWNWARD
3276 /* Decide where to pad the argument: `downward' for below,
3277 `upward' for above, or `none' for don't pad it.
3278 Default is below for small data on big-endian machines; else above. */
3279 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3281 /* Invert direction if stack is post-decrement.
3283 if (STACK_PUSH_CODE == POST_DEC)
3284 if (where_pad != none)
3285 where_pad = (where_pad == downward ? upward : downward);
3287 xinner = x = protect_from_queue (x, 0);
3289 if (mode == BLKmode)
3291 /* Copy a block into the stack, entirely or partially. */
3294 int used = partial * UNITS_PER_WORD;
3295 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3303 /* USED is now the # of bytes we need not copy to the stack
3304 because registers will take care of them. */
3307 xinner = adjust_address (xinner, BLKmode, used);
3309 /* If the partial register-part of the arg counts in its stack size,
3310 skip the part of stack space corresponding to the registers.
3311 Otherwise, start copying to the beginning of the stack space,
3312 by setting SKIP to 0. */
3313 skip = (reg_parm_stack_space == 0) ? 0 : used;
3315 #ifdef PUSH_ROUNDING
3316 /* Do it with several push insns if that doesn't take lots of insns
3317 and if there is no difficulty with push insns that skip bytes
3318 on the stack for alignment purposes. */
3321 && GET_CODE (size) == CONST_INT
3323 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3324 /* Here we avoid the case of a structure whose weak alignment
3325 forces many pushes of a small amount of data,
3326 and such small pushes do rounding that causes trouble. */
3327 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3328 || align >= BIGGEST_ALIGNMENT
3329 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3330 == (align / BITS_PER_UNIT)))
3331 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3333 /* Push padding now if padding above and stack grows down,
3334 or if padding below and stack grows up.
3335 But if space already allocated, this has already been done. */
3336 if (extra && args_addr == 0
3337 && where_pad != none && where_pad != stack_direction)
3338 anti_adjust_stack (GEN_INT (extra));
3340 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3343 #endif /* PUSH_ROUNDING */
3347 /* Otherwise make space on the stack and copy the data
3348 to the address of that space. */
3350 /* Deduct words put into registers from the size we must copy. */
3353 if (GET_CODE (size) == CONST_INT)
3354 size = GEN_INT (INTVAL (size) - used);
3356 size = expand_binop (GET_MODE (size), sub_optab, size,
3357 GEN_INT (used), NULL_RTX, 0,
3361 /* Get the address of the stack space.
3362 In this case, we do not deal with EXTRA separately.
3363 A single stack adjust will do. */
3366 temp = push_block (size, extra, where_pad == downward);
3369 else if (GET_CODE (args_so_far) == CONST_INT)
3370 temp = memory_address (BLKmode,
3371 plus_constant (args_addr,
3372 skip + INTVAL (args_so_far)));
3374 temp = memory_address (BLKmode,
3375 plus_constant (gen_rtx_PLUS (Pmode,
3379 target = gen_rtx_MEM (BLKmode, temp);
3383 set_mem_attributes (target, type, 1);
3384 /* Function incoming arguments may overlap with sibling call
3385 outgoing arguments and we cannot allow reordering of reads
3386 from function arguments with stores to outgoing arguments
3387 of sibling calls. */
3388 set_mem_alias_set (target, 0);
3391 set_mem_align (target, align);
3393 /* TEMP is the address of the block. Copy the data there. */
3394 if (GET_CODE (size) == CONST_INT
3395 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3397 move_by_pieces (target, xinner, INTVAL (size), align);
3402 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3403 enum machine_mode mode;
3405 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3407 mode = GET_MODE_WIDER_MODE (mode))
3409 enum insn_code code = movstr_optab[(int) mode];
3410 insn_operand_predicate_fn pred;
3412 if (code != CODE_FOR_nothing
3413 && ((GET_CODE (size) == CONST_INT
3414 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3415 <= (GET_MODE_MASK (mode) >> 1)))
3416 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3417 && (!(pred = insn_data[(int) code].operand[0].predicate)
3418 || ((*pred) (target, BLKmode)))
3419 && (!(pred = insn_data[(int) code].operand[1].predicate)
3420 || ((*pred) (xinner, BLKmode)))
3421 && (!(pred = insn_data[(int) code].operand[3].predicate)
3422 || ((*pred) (opalign, VOIDmode))))
3424 rtx op2 = convert_to_mode (mode, size, 1);
3425 rtx last = get_last_insn ();
3428 pred = insn_data[(int) code].operand[2].predicate;
3429 if (pred != 0 && ! (*pred) (op2, mode))
3430 op2 = copy_to_mode_reg (mode, op2);
3432 pat = GEN_FCN ((int) code) (target, xinner,
3440 delete_insns_since (last);
3445 if (!ACCUMULATE_OUTGOING_ARGS)
3447 /* If the source is referenced relative to the stack pointer,
3448 copy it to another register to stabilize it. We do not need
3449 to do this if we know that we won't be changing sp. */
3451 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3452 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3453 temp = copy_to_reg (temp);
3456 /* Make inhibit_defer_pop nonzero around the library call
3457 to force it to pop the bcopy-arguments right away. */
3459 #ifdef TARGET_MEM_FUNCTIONS
3460 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3461 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3462 convert_to_mode (TYPE_MODE (sizetype),
3463 size, TREE_UNSIGNED (sizetype)),
3464 TYPE_MODE (sizetype));
3466 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3467 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3468 convert_to_mode (TYPE_MODE (integer_type_node),
3470 TREE_UNSIGNED (integer_type_node)),
3471 TYPE_MODE (integer_type_node));
3476 else if (partial > 0)
3478 /* Scalar partly in registers. */
3480 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3483 /* # words of start of argument
3484 that we must make space for but need not store. */
3485 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3486 int args_offset = INTVAL (args_so_far);
3489 /* Push padding now if padding above and stack grows down,
3490 or if padding below and stack grows up.
3491 But if space already allocated, this has already been done. */
3492 if (extra && args_addr == 0
3493 && where_pad != none && where_pad != stack_direction)
3494 anti_adjust_stack (GEN_INT (extra));
3496 /* If we make space by pushing it, we might as well push
3497 the real data. Otherwise, we can leave OFFSET nonzero
3498 and leave the space uninitialized. */
3502 /* Now NOT_STACK gets the number of words that we don't need to
3503 allocate on the stack. */
3504 not_stack = partial - offset;
3506 /* If the partial register-part of the arg counts in its stack size,
3507 skip the part of stack space corresponding to the registers.
3508 Otherwise, start copying to the beginning of the stack space,
3509 by setting SKIP to 0. */
3510 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3512 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3513 x = validize_mem (force_const_mem (mode, x));
3515 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3516 SUBREGs of such registers are not allowed. */
3517 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3518 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3519 x = copy_to_reg (x);
3521 /* Loop over all the words allocated on the stack for this arg. */
3522 /* We can do it by words, because any scalar bigger than a word
3523 has a size a multiple of a word. */
3524 #ifndef PUSH_ARGS_REVERSED
3525 for (i = not_stack; i < size; i++)
3527 for (i = size - 1; i >= not_stack; i--)
3529 if (i >= not_stack + offset)
3530 emit_push_insn (operand_subword_force (x, i, mode),
3531 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3533 GEN_INT (args_offset + ((i - not_stack + skip)
3535 reg_parm_stack_space, alignment_pad);
3540 rtx target = NULL_RTX;
3543 /* Push padding now if padding above and stack grows down,
3544 or if padding below and stack grows up.
3545 But if space already allocated, this has already been done. */
3546 if (extra && args_addr == 0
3547 && where_pad != none && where_pad != stack_direction)
3548 anti_adjust_stack (GEN_INT (extra));
3550 #ifdef PUSH_ROUNDING
3551 if (args_addr == 0 && PUSH_ARGS)
3552 emit_single_push_insn (mode, x, type);
3556 if (GET_CODE (args_so_far) == CONST_INT)
3558 = memory_address (mode,
3559 plus_constant (args_addr,
3560 INTVAL (args_so_far)));
3562 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3565 dest = gen_rtx_MEM (mode, addr);
3568 set_mem_attributes (dest, type, 1);
3569 /* Function incoming arguments may overlap with sibling call
3570 outgoing arguments and we cannot allow reordering of reads
3571 from function arguments with stores to outgoing arguments
3572 of sibling calls. */
3573 set_mem_alias_set (dest, 0);
3576 emit_move_insn (dest, x);
3582 /* If part should go in registers, copy that part
3583 into the appropriate registers. Do this now, at the end,
3584 since mem-to-mem copies above may do function calls. */
3585 if (partial > 0 && reg != 0)
3587 /* Handle calls that pass values in multiple non-contiguous locations.
3588 The Irix 6 ABI has examples of this. */
3589 if (GET_CODE (reg) == PARALLEL)
3590 emit_group_load (reg, x, -1); /* ??? size? */
3592 move_block_to_reg (REGNO (reg), x, partial, mode);
3595 if (extra && args_addr == 0 && where_pad == stack_direction)
3596 anti_adjust_stack (GEN_INT (extra));
3598 if (alignment_pad && args_addr == 0)
3599 anti_adjust_stack (alignment_pad);
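#if 0
  /* Hypothetical usage sketch (not compiled; X and TYPE are
     placeholders): push a 12-byte BLKmode argument entirely on the
     stack, 8-byte aligned, with no partial registers and no
     preallocated argument block.  SIZE is in bytes, ALIGN in bits.  */
  emit_push_insn (x, BLKmode, type, GEN_INT (12), 64, 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx, 0, NULL_RTX);
#endif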
3602 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3610 /* Only registers can be subtargets. */
3611 || GET_CODE (x) != REG
3612 /* If the register is readonly, it can't be set more than once. */
3613 || RTX_UNCHANGING_P (x)
3614 /* Don't use hard regs to avoid extending their life. */
3615 || REGNO (x) < FIRST_PSEUDO_REGISTER
3616 /* Avoid subtargets inside loops,
3617 since they hide some invariant expressions. */
3618 || preserve_subexpressions_p ())
3622 /* Expand an assignment that stores the value of FROM into TO.
3623 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3624 (This may contain a QUEUED rtx;
3625 if the value is constant, this rtx is a constant.)
3626 Otherwise, the returned value is NULL_RTX.
3628 SUGGEST_REG is no longer actually used.
3629 It used to mean, copy the value through a register
3630 and return that register, if that is possible.
3631 We now use WANT_VALUE to decide whether to do this. */
3634 expand_assignment (to, from, want_value, suggest_reg)
3637 int suggest_reg ATTRIBUTE_UNUSED;
3642 /* Don't crash if the lhs of the assignment was erroneous. */
3644 if (TREE_CODE (to) == ERROR_MARK)
3646 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3647 return want_value ? result : NULL_RTX;
3650 /* Assignment of a structure component needs special treatment
3651 if the structure component's rtx is not simply a MEM.
3652 Assignment of an array element at a constant index, and assignment of
3653 an array element in an unaligned packed structure field, has the same problem. */
3656 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3657 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3659 enum machine_mode mode1;
3660 HOST_WIDE_INT bitsize, bitpos;
3668 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3669 &unsignedp, &volatilep);
3671 /* If we are going to use store_bit_field and extract_bit_field,
3672 make sure to_rtx will be safe for multiple use. */
3674 if (mode1 == VOIDmode && want_value)
3675 tem = stabilize_reference (tem);
3677 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3681 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3683 if (GET_CODE (to_rtx) != MEM)
3686 if (GET_MODE (offset_rtx) != ptr_mode)
3687 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3689 #ifdef POINTERS_EXTEND_UNSIGNED
3690 if (GET_MODE (offset_rtx) != Pmode)
3691 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3694 /* A constant address in TO_RTX can have VOIDmode, we must not try
3695 to call force_reg for that case. Avoid that case. */
3696 if (GET_CODE (to_rtx) == MEM
3697 && GET_MODE (to_rtx) == BLKmode
3698 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3700 && (bitpos % bitsize) == 0
3701 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3702 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3704 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3708 to_rtx = offset_address (to_rtx, offset_rtx,
3709 highest_pow2_factor (offset));
3712 if (GET_CODE (to_rtx) == MEM)
3714 tree old_expr = MEM_EXPR (to_rtx);
3716 /* If the field is at offset zero, we could have been given the
3717 DECL_RTX of the parent struct. Don't munge it. */
3718 to_rtx = shallow_copy_rtx (to_rtx);
3720 set_mem_attributes (to_rtx, to, 0);
3722 /* If we changed MEM_EXPR, that means we're now referencing
3723 the COMPONENT_REF, which means that MEM_OFFSET must be
3724 relative to that field. But we've not yet reflected BITPOS
3725 in TO_RTX. This will be done in store_field. Adjust for
3726 that by biasing MEM_OFFSET by -bitpos. */
3727 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3728 && (bitpos / BITS_PER_UNIT) != 0)
3729 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3730 - (bitpos / BITS_PER_UNIT)));
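/* Worked example (illustration only): for a field at byte offset 4 in
   its structure (bitpos == 32), the set_mem_attributes call above
   leaves MEM_OFFSET relative to the field's containing struct, so it
   is biased by -4 here; store_field later re-applies the 32-bit
   BITPOS and the two adjustments cancel.  */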
3733 /* Deal with volatile and readonly fields. The former is only done
3734 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3735 if (volatilep && GET_CODE (to_rtx) == MEM)
3737 if (to_rtx == orig_to_rtx)
3738 to_rtx = copy_rtx (to_rtx);
3739 MEM_VOLATILE_P (to_rtx) = 1;
3742 if (TREE_CODE (to) == COMPONENT_REF
3743 && TREE_READONLY (TREE_OPERAND (to, 1)))
3745 if (to_rtx == orig_to_rtx)
3746 to_rtx = copy_rtx (to_rtx);
3747 RTX_UNCHANGING_P (to_rtx) = 1;
3750 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3752 if (to_rtx == orig_to_rtx)
3753 to_rtx = copy_rtx (to_rtx);
3754 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3757 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3759 /* Spurious cast for HPUX compiler. */
3760 ? ((enum machine_mode)
3761 TYPE_MODE (TREE_TYPE (to)))
3763 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3765 preserve_temp_slots (result);
3769 /* If the value is meaningful, convert RESULT to the proper mode.
3770 Otherwise, return nothing. */
3771 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3772 TYPE_MODE (TREE_TYPE (from)),
3774 TREE_UNSIGNED (TREE_TYPE (to)))
3778 /* If the rhs is a function call and its value is not an aggregate,
3779 call the function before we start to compute the lhs.
3780 This is needed for correct code for cases such as
3781 val = setjmp (buf) on machines where reference to val
3782 requires loading up part of an address in a separate insn.
3784 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3785 since it might be a promoted variable where the zero- or sign- extension
3786 needs to be done. Handling this in the normal way is safe because no
3787 computation is done before the call. */
3788 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3789 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3790 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3791 && GET_CODE (DECL_RTL (to)) == REG))
3796 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3798 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3800 /* Handle calls that return values in multiple non-contiguous locations.
3801 The Irix 6 ABI has examples of this. */
3802 if (GET_CODE (to_rtx) == PARALLEL)
3803 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3804 else if (GET_MODE (to_rtx) == BLKmode)
3805 emit_block_move (to_rtx, value, expr_size (from));
3808 #ifdef POINTERS_EXTEND_UNSIGNED
3809 if (POINTER_TYPE_P (TREE_TYPE (to))
3810 && GET_MODE (to_rtx) != GET_MODE (value))
3811 value = convert_memory_address (GET_MODE (to_rtx), value);
3813 emit_move_insn (to_rtx, value);
3815 preserve_temp_slots (to_rtx);
3818 return want_value ? to_rtx : NULL_RTX;
3821 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3822 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3825 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3827 /* Don't move directly into a return register. */
3828 if (TREE_CODE (to) == RESULT_DECL
3829 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3834 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3836 if (GET_CODE (to_rtx) == PARALLEL)
3837 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3839 emit_move_insn (to_rtx, temp);
3841 preserve_temp_slots (to_rtx);
3844 return want_value ? to_rtx : NULL_RTX;
3847 /* In case we are returning the contents of an object which overlaps
3848 the place the value is being stored, use a safe function when copying
3849 a value through a pointer into a structure value return block. */
3850 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3851 && current_function_returns_struct
3852 && !current_function_returns_pcc_struct)
3857 size = expr_size (from);
3858 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3860 #ifdef TARGET_MEM_FUNCTIONS
3861 emit_library_call (memmove_libfunc, LCT_NORMAL,
3862 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3863 XEXP (from_rtx, 0), Pmode,
3864 convert_to_mode (TYPE_MODE (sizetype),
3865 size, TREE_UNSIGNED (sizetype)),
3866 TYPE_MODE (sizetype));
3868 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3869 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3870 XEXP (to_rtx, 0), Pmode,
3871 convert_to_mode (TYPE_MODE (integer_type_node),
3872 size, TREE_UNSIGNED (integer_type_node)),
3873 TYPE_MODE (integer_type_node));
3876 preserve_temp_slots (to_rtx);
3879 return want_value ? to_rtx : NULL_RTX;
3882 /* Compute FROM and store the value in the rtx we got. */
3885 result = store_expr (from, to_rtx, want_value);
3886 preserve_temp_slots (result);
3889 return want_value ? result : NULL_RTX;
3892 /* Generate code for computing expression EXP,
3893 and storing the value into TARGET.
3894 TARGET may contain a QUEUED rtx.
3896 If WANT_VALUE is nonzero, return a copy of the value
3897 not in TARGET, so that we can be sure to use the proper
3898 value in a containing expression even if TARGET has something
3899 else stored in it. If possible, we copy the value through a pseudo
3900 and return that pseudo. Or, if the value is constant, we try to
3901 return the constant. In some cases, we return a pseudo
3902 copied *from* TARGET.
3904 If the mode is BLKmode then we may return TARGET itself.
3905 It turns out that in BLKmode it doesn't cause a problem,
3906 because C has no operators that could combine two different
3907 assignments into the same BLKmode object with different values
3908 with no sequence point. Will other languages need this to be more careful?
3911 If WANT_VALUE is 0, we return NULL, to make sure
3912 to catch quickly any cases where the caller uses the value
3913 and fails to set WANT_VALUE. */
3916 store_expr (exp, target, want_value)
3922 int dont_return_target = 0;
3923 int dont_store_target = 0;
3925 if (TREE_CODE (exp) == COMPOUND_EXPR)
3927 /* Perform first part of compound expression, then assign from second part. */
3929 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3931 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3933 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3935 /* For conditional expression, get safe form of the target. Then
3936 test the condition, doing the appropriate assignment on either
3937 side. This avoids the creation of unnecessary temporaries.
3938 For non-BLKmode, it is more efficient not to do this. */
3940 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3943 target = protect_from_queue (target, 1);
3945 do_pending_stack_adjust ();
3947 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3948 start_cleanup_deferral ();
3949 store_expr (TREE_OPERAND (exp, 1), target, 0);
3950 end_cleanup_deferral ();
3952 emit_jump_insn (gen_jump (lab2));
3955 start_cleanup_deferral ();
3956 store_expr (TREE_OPERAND (exp, 2), target, 0);
3957 end_cleanup_deferral ();
3962 return want_value ? target : NULL_RTX;
3964 else if (queued_subexp_p (target))
3965 /* If target contains a postincrement, let's not risk
3966 using it as the place to generate the rhs. */
3968 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3970 /* Expand EXP into a new pseudo. */
3971 temp = gen_reg_rtx (GET_MODE (target));
3972 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3975 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3977 /* If target is volatile, ANSI requires accessing the value
3978 *from* the target, if it is accessed. So make that happen.
3979 In no case return the target itself. */
3980 if (! MEM_VOLATILE_P (target) && want_value)
3981 dont_return_target = 1;
3983 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3984 && GET_MODE (target) != BLKmode)
3985 /* If target is in memory and caller wants value in a register instead,
3986 arrange that. Pass TARGET as target for expand_expr so that,
3987 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3988 We know expand_expr will not use the target in that case.
3989 Don't do this if TARGET is volatile because we are supposed
3990 to write it and then read it. */
3992 temp = expand_expr (exp, target, GET_MODE (target), 0);
3993 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3995 /* If TEMP is already in the desired TARGET, only copy it from
3996 memory and don't store it there again. */
3998 || (rtx_equal_p (temp, target)
3999 && ! side_effects_p (temp) && ! side_effects_p (target)))
4000 dont_store_target = 1;
4001 temp = copy_to_reg (temp);
4003 dont_return_target = 1;
4005 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4006 /* If this is a scalar in a register that is stored in a wider mode
4007 than the declared mode, compute the result into its declared mode
4008 and then convert to the wider mode. Our value is the computed expression. */
4011 rtx inner_target = 0;
4013 /* If we don't want a value, we can do the conversion inside EXP,
4014 which will often result in some optimizations. Do the conversion
4015 in two steps: first change the signedness, if needed, then
4016 the extend. But don't do this if the type of EXP is a subtype
4017 of something else since then the conversion might involve
4018 more than just converting modes. */
4019 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4020 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4022 if (TREE_UNSIGNED (TREE_TYPE (exp))
4023 != SUBREG_PROMOTED_UNSIGNED_P (target))
4026 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4030 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4031 SUBREG_PROMOTED_UNSIGNED_P (target)),
4034 inner_target = SUBREG_REG (target);
4037 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4039 /* If TEMP is a volatile MEM and we want a result value, make
4040 the access now so it gets done only once. Likewise if
4041 it contains TARGET. */
4042 if (GET_CODE (temp) == MEM && want_value
4043 && (MEM_VOLATILE_P (temp)
4044 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4045 temp = copy_to_reg (temp);
4047 /* If TEMP is a VOIDmode constant, use convert_modes to make
4048 sure that we properly convert it. */
4049 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4051 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4052 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4053 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4054 GET_MODE (target), temp,
4055 SUBREG_PROMOTED_UNSIGNED_P (target));
4058 convert_move (SUBREG_REG (target), temp,
4059 SUBREG_PROMOTED_UNSIGNED_P (target));
4061 /* If we promoted a constant, change the mode back down to match
4062 target. Otherwise, the caller might get confused by a result whose
4063 mode is larger than expected. */
4065 if (want_value && GET_MODE (temp) != GET_MODE (target))
4067 if (GET_MODE (temp) != VOIDmode)
4069 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4070 SUBREG_PROMOTED_VAR_P (temp) = 1;
4071 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4072 SUBREG_PROMOTED_UNSIGNED_P (target));
4075 temp = convert_modes (GET_MODE (target),
4076 GET_MODE (SUBREG_REG (target)),
4077 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4080 return want_value ? temp : NULL_RTX;
4084 temp = expand_expr (exp, target, GET_MODE (target), 0);
4085 /* Return TARGET if it's a specified hardware register.
4086 If TARGET is a volatile mem ref, either return TARGET
4087 or return a reg copied *from* TARGET; ANSI requires this.
4089 Otherwise, if TEMP is not TARGET, return TEMP
4090 if it is constant (for efficiency),
4091 or if we really want the correct value. */
4092 if (!(target && GET_CODE (target) == REG
4093 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4094 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4095 && ! rtx_equal_p (temp, target)
4096 && (CONSTANT_P (temp) || want_value))
4097 dont_return_target = 1;
4100 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4101 the same as that of TARGET, adjust the constant. This is needed, for
4102 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4104 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4105 && TREE_CODE (exp) != ERROR_MARK
4106 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4107 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4108 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4110 /* If value was not generated in the target, store it there.
4111 Convert the value to TARGET's type first if necessary.
4112 If TEMP and TARGET compare equal according to rtx_equal_p, but
4113 one or both of them are volatile memory refs, we have to distinguish
4115 - expand_expr has used TARGET. In this case, we must not generate
4116 another copy. This can be detected by TARGET being equal according to ==.
4118 - expand_expr has not used TARGET - that means that the source just
4119 happens to have the same RTX form. Since temp will have been created
4120 by expand_expr, it will compare unequal according to == .
4121 We must generate a copy in this case, to reach the correct number
4122 of volatile memory references. */
4124 if ((! rtx_equal_p (temp, target)
4125 || (temp != target && (side_effects_p (temp)
4126 || side_effects_p (target))))
4127 && TREE_CODE (exp) != ERROR_MARK
4128 && ! dont_store_target)
4130 target = protect_from_queue (target, 1);
4131 if (GET_MODE (temp) != GET_MODE (target)
4132 && GET_MODE (temp) != VOIDmode)
4134 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4135 if (dont_return_target)
4137 /* In this case, we will return TEMP,
4138 so make sure it has the proper mode.
4139 But don't forget to store the value into TARGET. */
4140 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4141 emit_move_insn (target, temp);
4144 convert_move (target, temp, unsignedp);
4147 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4149 /* Handle copying a string constant into an array. The string
4150 constant may be shorter than the array. So copy just the string's
4151 actual length, and clear the rest. First get the size of the data
4152 type of the string, which is actually the size of the target. */
4153 rtx size = expr_size (exp);
4155 if (GET_CODE (size) == CONST_INT
4156 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4157 emit_block_move (target, temp, size);
4160 /* Compute the size of the data to copy from the string. */
4162 = size_binop (MIN_EXPR,
4163 make_tree (sizetype, size),
4164 size_int (TREE_STRING_LENGTH (exp)));
4165 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4169 /* Copy that much. */
4170 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4171 emit_block_move (target, temp, copy_size_rtx);
4173 /* Figure out how much is left in TARGET that we have to clear.
4174 Do all calculations in ptr_mode. */
4175 if (GET_CODE (copy_size_rtx) == CONST_INT)
4177 size = plus_constant (size, -INTVAL (copy_size_rtx));
4178 target = adjust_address (target, BLKmode,
4179 INTVAL (copy_size_rtx));
4183 size = expand_binop (ptr_mode, sub_optab, size,
4184 copy_size_rtx, NULL_RTX, 0,
4187 #ifdef POINTERS_EXTEND_UNSIGNED
4188 if (GET_MODE (copy_size_rtx) != Pmode)
4189 copy_size_rtx = convert_memory_address (Pmode,
4193 target = offset_address (target, copy_size_rtx,
4194 highest_pow2_factor (copy_size));
4195 label = gen_label_rtx ();
4196 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4197 GET_MODE (size), 0, label);
4200 if (size != const0_rtx)
4201 clear_storage (target, size);
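/* Worked example (illustration only): copying the string constant "hi"
   (TREE_STRING_LENGTH == 3, counting the terminating nul) into a
   char[8] target copies 3 bytes, after which the code above computes
   8 - 3 = 5 bytes left in TARGET and clears them via clear_storage.  */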
4207 /* Handle calls that return values in multiple non-contiguous locations.
4208 The Irix 6 ABI has examples of this. */
4209 else if (GET_CODE (target) == PARALLEL)
4210 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4211 else if (GET_MODE (temp) == BLKmode)
4212 emit_block_move (target, temp, expr_size (exp));
4214 emit_move_insn (target, temp);
4217 /* If we don't want a value, return NULL_RTX. */
4221 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4222 ??? The latter test doesn't seem to make sense. */
4223 else if (dont_return_target && GET_CODE (temp) != MEM)
4226 /* Return TARGET itself if it is a hard register. */
4227 else if (want_value && GET_MODE (target) != BLKmode
4228 && ! (GET_CODE (target) == REG
4229 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4230 return copy_to_reg (target);
4236 /* Return 1 if EXP just contains zeros. */
4244 switch (TREE_CODE (exp))
4248 case NON_LVALUE_EXPR:
4249 case VIEW_CONVERT_EXPR:
4250 return is_zeros_p (TREE_OPERAND (exp, 0));
case INTEGER_CST:
return integer_zerop (exp);
case COMPLEX_CST:
return (is_zeros_p (TREE_REALPART (exp))
&& is_zeros_p (TREE_IMAGPART (exp)));
case REAL_CST:
return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
case VECTOR_CST:
for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
elt = TREE_CHAIN (elt))
if (!is_zeros_p (TREE_VALUE (elt)))
return 0;
return 1;
case CONSTRUCTOR:
if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
if (! is_zeros_p (TREE_VALUE (elt)))
return 0;
return 1;
4284 /* Return 1 if EXP contains mostly (3/4) zeros. */
static int
mostly_zeros_p (exp)
tree exp;
4290 if (TREE_CODE (exp) == CONSTRUCTOR)
4292 int elts = 0, zeros = 0;
4293 tree elt = CONSTRUCTOR_ELTS (exp);
4294 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4296 /* If there are no ranges of true bits, it is all zero. */
4297 return elt == NULL_TREE;
4299 for (; elt; elt = TREE_CHAIN (elt))
4301 /* We do not handle the case where the index is a RANGE_EXPR,
4302 so the statistic will be somewhat inaccurate.
4303 We do make a more accurate count in store_constructor itself,
4304 so since this function is only used for nested array elements,
4305 this should be close enough. */
if (mostly_zeros_p (TREE_VALUE (elt)))
zeros++;
elts++;
4311 return 4 * zeros >= 3 * elts;
4314 return is_zeros_p (exp);
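/* For instance, the inner constructor of the hypothetical initializer

     int v[4] = { 0, 0, 0, 5 };

   counts elts == 4 and zeros == 3, and 4 * 3 >= 3 * 4 holds, so
   mostly_zeros_p returns 1: callers will prefer to clear the whole
   object first and then store only the single nonzero element.  */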
4317 /* Helper function for store_constructor.
4318 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4319 TYPE is the type of the CONSTRUCTOR, not the element type.
4320 CLEARED is as for store_constructor.
4321 ALIAS_SET is the alias set to use for any stores.
4323 This provides a recursive shortcut back to store_constructor when it isn't
4324 necessary to go through store_field. This is so that we can pass through
4325 the cleared field to let store_constructor know that we may not have to
4326 clear a substructure if the outer structure has already been cleared. */
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
alias_set)
rtx target;
unsigned HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
enum machine_mode mode;
tree exp, type;
int cleared;
int alias_set;
4339 if (TREE_CODE (exp) == CONSTRUCTOR
4340 && bitpos % BITS_PER_UNIT == 0
4341 /* If we have a non-zero bitpos for a register target, then we just
4342 let store_field do the bitfield handling. This is unlikely to
generate unnecessary clear instructions anyway. */
4344 && (bitpos == 0 || GET_CODE (target) == MEM))
4346 if (GET_CODE (target) == MEM)
target
= adjust_address (target,
GET_MODE (target) == BLKmode
|| 0 != (bitpos
% GET_MODE_ALIGNMENT (GET_MODE (target)))
? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4355 /* Update the alias set, if required. */
4356 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4357 && MEM_ALIAS_SET (target) != 0)
4359 target = copy_rtx (target);
4360 set_mem_alias_set (target, alias_set);
4363 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
else
store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
alias_set);
4370 /* Store the value of constructor EXP into the rtx TARGET.
4371 TARGET is either a REG or a MEM; we know it cannot conflict, since
4372 safe_from_p has been called.
4373 CLEARED is true if TARGET is known to have been zero'd.
4374 SIZE is the number of bytes of TARGET we are allowed to modify: this
4375 may not be the same as the size of EXP if we are assigning to a field
4376 which has been packed to exclude padding bits. */
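/* As a sketch (hypothetical layout): if EXP initializes a field whose
   declared type is padded out to 8 bytes but which has been packed
   into 6 bytes of its enclosing record, the caller passes SIZE == 6,
   and every clear_storage call below is bounded by that value rather
   than by the size of EXP's type, so the neighboring field is never
   touched.  */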
store_constructor (exp, target, cleared, size)
tree exp;
rtx target;
int cleared;
HOST_WIDE_INT size;
4385 tree type = TREE_TYPE (exp);
4386 #ifdef WORD_REGISTER_OPERATIONS
4387 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4390 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4391 || TREE_CODE (type) == QUAL_UNION_TYPE)
4395 /* We either clear the aggregate or indicate the value is dead. */
4396 if ((TREE_CODE (type) == UNION_TYPE
4397 || TREE_CODE (type) == QUAL_UNION_TYPE)
&& ! cleared
&& ! CONSTRUCTOR_ELTS (exp))
{
/* If the constructor is empty, clear the union. */
clear_storage (target, expr_size (exp));
cleared = 1;
}
4406 /* If we are building a static constructor into a register,
4407 set the initial value as zero so we can fold the value into
4408 a constant. But if more than one register is involved,
4409 this probably loses. */
4410 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4411 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
cleared = 1;
4417 /* If the constructor has fewer fields than the structure
4418 or if we are initializing the structure to mostly zeros,
4419 clear the whole structure first. Don't do this if TARGET is a
4420 register whose mode size isn't equal to SIZE since clear_storage
4421 can't handle this case. */
4422 else if (! cleared && size > 0
4423 && ((list_length (CONSTRUCTOR_ELTS (exp))
4424 != fields_length (type))
4425 || mostly_zeros_p (exp))
4426 && (GET_CODE (target) != REG
|| ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
== size)))
{
clear_storage (target, GEN_INT (size));
cleared = 1;
}
if (! cleared)
emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4437 /* Store each element of the constructor into
4438 the corresponding field of TARGET. */
4440 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4442 tree field = TREE_PURPOSE (elt);
4443 tree value = TREE_VALUE (elt);
4444 enum machine_mode mode;
4445 HOST_WIDE_INT bitsize;
4446 HOST_WIDE_INT bitpos = 0;
int unsignedp;
tree offset;
rtx to_rtx = target;
4451 /* Just ignore missing fields.
4452 We cleared the whole structure, above,
4453 if any fields are missing. */
if (field == 0)
continue;
if (cleared && is_zeros_p (value))
continue;
4460 if (host_integerp (DECL_SIZE (field), 1))
bitsize = tree_low_cst (DECL_SIZE (field), 1);
else
bitsize = -1;
4465 unsignedp = TREE_UNSIGNED (field);
4466 mode = DECL_MODE (field);
if (DECL_BIT_FIELD (field))
mode = VOIDmode;
4470 offset = DECL_FIELD_OFFSET (field);
4471 if (host_integerp (offset, 0)
4472 && host_integerp (bit_position (field), 0))
{
bitpos = int_bit_position (field);
offset = 0;
}
else
bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4484 if (contains_placeholder_p (offset))
4485 offset = build (WITH_RECORD_EXPR, sizetype,
4486 offset, make_tree (TREE_TYPE (exp), target));
4488 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
if (GET_CODE (to_rtx) != MEM)
abort ();
4492 if (GET_MODE (offset_rtx) != ptr_mode)
4493 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4495 #ifdef POINTERS_EXTEND_UNSIGNED
4496 if (GET_MODE (offset_rtx) != Pmode)
4497 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4500 to_rtx = offset_address (to_rtx, offset_rtx,
4501 highest_pow2_factor (offset));
4504 if (TREE_READONLY (field))
4506 if (GET_CODE (to_rtx) == MEM)
4507 to_rtx = copy_rtx (to_rtx);
4509 RTX_UNCHANGING_P (to_rtx) = 1;
4512 #ifdef WORD_REGISTER_OPERATIONS
4513 /* If this initializes a field that is smaller than a word, at the
4514 start of a word, try to widen it to a full word.
4515 This special case allows us to output C++ member function
4516 initializations in a form that the optimizers can understand. */
4517 if (GET_CODE (target) == REG
4518 && bitsize < BITS_PER_WORD
4519 && bitpos % BITS_PER_WORD == 0
4520 && GET_MODE_CLASS (mode) == MODE_INT
4521 && TREE_CODE (value) == INTEGER_CST
4523 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4525 tree type = TREE_TYPE (value);
4527 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4529 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4530 value = convert (type, value);
4533 if (BYTES_BIG_ENDIAN)
value
= fold (build (LSHIFT_EXPR, type, value,
4536 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4537 bitsize = BITS_PER_WORD;
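/* Sketch of the effect, assuming 32-bit words and a register-allocated
   aggregate (hypothetical source):

     struct s { short a, b; } v = { 3, 0 };

   The 16-bit store of 3 at bit position 0 is widened into a full-word
   store of 3 (or of 3 << 16 on a big-endian target), which the
   word-mode optimizers can then track like any ordinary scalar
   assignment.  */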
4542 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4543 && DECL_NONADDRESSABLE_P (field))
4545 to_rtx = copy_rtx (to_rtx);
4546 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4549 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4550 value, type, cleared,
4551 get_alias_set (TREE_TYPE (field)));
4554 else if (TREE_CODE (type) == ARRAY_TYPE
4555 || TREE_CODE (type) == VECTOR_TYPE)
4560 tree domain = TYPE_DOMAIN (type);
4561 tree elttype = TREE_TYPE (type);
4563 HOST_WIDE_INT minelt = 0;
4564 HOST_WIDE_INT maxelt = 0;
/* Vectors are like arrays, but the domain is stored via an array
type indirectly. */
4568 if (TREE_CODE (type) == VECTOR_TYPE)
4570 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
the same field as TYPE_DOMAIN, we are not guaranteed that
it always will. */
4573 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4574 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4577 const_bounds_p = (TYPE_MIN_VALUE (domain)
4578 && TYPE_MAX_VALUE (domain)
4579 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4580 && host_integerp (TYPE_MAX_VALUE (domain), 0));
/* If we have constant bounds for the range of the type, get them. */
if (const_bounds_p)
{
minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
}
4589 /* If the constructor has fewer elements than the array,
4590 clear the whole array first. Similarly if this is
a static constructor of a non-BLKmode object. */
if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
need_to_clear = 1;
else
{
4596 HOST_WIDE_INT count = 0, zero_count = 0;
4597 need_to_clear = ! const_bounds_p;
4599 /* This loop is a more accurate version of the loop in
4600 mostly_zeros_p (it handles RANGE_EXPR in an index).
4601 It is also needed to check for missing elements. */
4602 for (elt = CONSTRUCTOR_ELTS (exp);
4603 elt != NULL_TREE && ! need_to_clear;
4604 elt = TREE_CHAIN (elt))
4606 tree index = TREE_PURPOSE (elt);
4607 HOST_WIDE_INT this_node_count;
4609 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4611 tree lo_index = TREE_OPERAND (index, 0);
4612 tree hi_index = TREE_OPERAND (index, 1);
4614 if (! host_integerp (lo_index, 1)
|| ! host_integerp (hi_index, 1))
{
need_to_clear = 1;
break;
}
4621 this_node_count = (tree_low_cst (hi_index, 1)
4622 - tree_low_cst (lo_index, 1) + 1);
4625 this_node_count = 1;
4627 count += this_node_count;
4628 if (mostly_zeros_p (TREE_VALUE (elt)))
4629 zero_count += this_node_count;
4632 /* Clear the entire array first if there are any missing elements,
4633 or if the incidence of zero elements is >= 75%. */
if (! need_to_clear
&& (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
need_to_clear = 1;
}
4639 if (need_to_clear && size > 0)
{
if (GET_CODE (target) == REG)
emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
else
clear_storage (target, GEN_INT (size));
cleared = 1;
}
4650 else if (REG_P (target))
4651 /* Inform later passes that the old value is dead. */
4652 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4654 /* Store each element of the constructor into
4655 the corresponding element of TARGET, determined
4656 by counting the elements. */
4657 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4659 elt = TREE_CHAIN (elt), i++)
4661 enum machine_mode mode;
4662 HOST_WIDE_INT bitsize;
4663 HOST_WIDE_INT bitpos;
4665 tree value = TREE_VALUE (elt);
4666 tree index = TREE_PURPOSE (elt);
4667 rtx xtarget = target;
if (cleared && is_zeros_p (value))
continue;
4672 unsignedp = TREE_UNSIGNED (elttype);
4673 mode = TYPE_MODE (elttype);
4674 if (mode == BLKmode)
4675 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
? tree_low_cst (TYPE_SIZE (elttype), 1)
: -1);
else
bitsize = GET_MODE_BITSIZE (mode);
4681 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4683 tree lo_index = TREE_OPERAND (index, 0);
4684 tree hi_index = TREE_OPERAND (index, 1);
4685 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4686 struct nesting *loop;
4687 HOST_WIDE_INT lo, hi, count;
4690 /* If the range is constant and "small", unroll the loop. */
if (const_bounds_p
&& host_integerp (lo_index, 0)
4693 && host_integerp (hi_index, 0)
4694 && (lo = tree_low_cst (lo_index, 0),
4695 hi = tree_low_cst (hi_index, 0),
4696 count = hi - lo + 1,
4697 (GET_CODE (target) != MEM
4699 || (host_integerp (TYPE_SIZE (elttype), 1)
4700 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4703 lo -= minelt; hi -= minelt;
4704 for (; lo <= hi; lo++)
4706 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4708 if (GET_CODE (target) == MEM
4709 && !MEM_KEEP_ALIAS_SET_P (target)
4710 && TREE_CODE (type) == ARRAY_TYPE
4711 && TYPE_NONALIASED_COMPONENT (type))
4713 target = copy_rtx (target);
4714 MEM_KEEP_ALIAS_SET_P (target) = 1;
4717 store_constructor_field
4718 (target, bitsize, bitpos, mode, value, type, cleared,
4719 get_alias_set (elttype));
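/* For instance, the GNU C range initializer (hypothetical source)

     int a[8] = { [2 ... 5] = 7 };

   reaches this point with lo_index 2 and hi_index 5; for a MEM target
   the four element stores are emitted directly, bitpos advancing by
   the element size on each unrolled iteration, instead of building the
   runtime loop below.  */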
4724 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4725 loop_top = gen_label_rtx ();
4726 loop_end = gen_label_rtx ();
4728 unsignedp = TREE_UNSIGNED (domain);
4730 index = build_decl (VAR_DECL, NULL_TREE, domain);
index_r
= gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
&unsignedp, 0));
4735 SET_DECL_RTL (index, index_r);
4736 if (TREE_CODE (value) == SAVE_EXPR
4737 && SAVE_EXPR_RTL (value) == 0)
/* Make sure value gets expanded once before the
loop. */
4741 expand_expr (value, const0_rtx, VOIDmode, 0);
4744 store_expr (lo_index, index_r, 0);
4745 loop = expand_start_loop (0);
4747 /* Assign value to element index. */
position
= convert (ssizetype,
4750 fold (build (MINUS_EXPR, TREE_TYPE (index),
4751 index, TYPE_MIN_VALUE (domain))));
4752 position = size_binop (MULT_EXPR, position,
convert (ssizetype,
TYPE_SIZE_UNIT (elttype)));
4756 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4757 xtarget = offset_address (target, pos_rtx,
4758 highest_pow2_factor (position));
4759 xtarget = adjust_address (xtarget, mode, 0);
4760 if (TREE_CODE (value) == CONSTRUCTOR)
4761 store_constructor (value, xtarget, cleared,
4762 bitsize / BITS_PER_UNIT);
4764 store_expr (value, xtarget, 0);
4766 expand_exit_loop_if_false (loop,
build (LT_EXPR, integer_type_node,
index, hi_index));
4770 expand_increment (build (PREINCREMENT_EXPR,
TREE_TYPE (index),
index, integer_one_node), 0, 0);
expand_end_loop ();
emit_label (loop_end);
4777 else if ((index != 0 && ! host_integerp (index, 0))
4778 || ! host_integerp (TYPE_SIZE (elttype), 1))
if (index == 0)
index = ssize_int (i);
if (minelt)
index = convert (ssizetype,
4787 fold (build (MINUS_EXPR, index,
4788 TYPE_MIN_VALUE (domain))));
4790 position = size_binop (MULT_EXPR, index,
convert (ssizetype,
TYPE_SIZE_UNIT (elttype)));
4793 xtarget = offset_address (target,
4794 expand_expr (position, 0, VOIDmode, 0),
4795 highest_pow2_factor (position));
4796 xtarget = adjust_address (xtarget, mode, 0);
4797 store_expr (value, xtarget, 0);
if (index != 0)
bitpos = ((tree_low_cst (index, 0) - minelt)
* tree_low_cst (TYPE_SIZE (elttype), 1));
else
bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4807 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4808 && TREE_CODE (type) == ARRAY_TYPE
4809 && TYPE_NONALIASED_COMPONENT (type))
4811 target = copy_rtx (target);
4812 MEM_KEEP_ALIAS_SET_P (target) = 1;
4815 store_constructor_field (target, bitsize, bitpos, mode, value,
4816 type, cleared, get_alias_set (elttype));
4822 /* Set constructor assignments. */
4823 else if (TREE_CODE (type) == SET_TYPE)
4825 tree elt = CONSTRUCTOR_ELTS (exp);
4826 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4827 tree domain = TYPE_DOMAIN (type);
4828 tree domain_min, domain_max, bitlength;
4830 /* The default implementation strategy is to extract the constant
4831 parts of the constructor, use that to initialize the target,
4832 and then "or" in whatever non-constant ranges we need in addition.
4834 If a large set is all zero or all ones, it is
4835 probably better to set it using memset (if available) or bzero.
4836 Also, if a large set has just a single range, it may also be
better to first clear the whole set (using bzero/memset), and
then set the bits we want. */
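/* A minimal sketch of that strategy, assuming a hypothetical
   Pascal-style set of 0..63 on a 32-bit host: the constant members are
   folded into two host words below and stored with two plain word
   moves; a remaining non-constant range [lo, hi] is then committed
   either with memset (when lo and hi fall on byte boundaries) or with
   the __setbits library call further down.  */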
4840 /* Check for all zeros. */
4841 if (elt == NULL_TREE && size > 0)
if (! cleared)
clear_storage (target, GEN_INT (size));
return;
4848 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4849 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4850 bitlength = size_binop (PLUS_EXPR,
4851 size_diffop (domain_max, domain_min),
4854 nbits = tree_low_cst (bitlength, 1);
4856 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4857 are "complicated" (more than one range), initialize (the
4858 constant parts) by copying from a constant. */
4859 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4860 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4862 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4863 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4864 char *bit_buffer = (char *) alloca (nbits);
4865 HOST_WIDE_INT word = 0;
4866 unsigned int bit_pos = 0;
4867 unsigned int ibit = 0;
4868 unsigned int offset = 0; /* In bytes from beginning of set. */
4870 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4873 if (bit_buffer[ibit])
4875 if (BYTES_BIG_ENDIAN)
4876 word |= (1 << (set_word_size - 1 - bit_pos));
4878 word |= 1 << bit_pos;
4882 if (bit_pos >= set_word_size || ibit == nbits)
4884 if (word != 0 || ! cleared)
4886 rtx datum = GEN_INT (word);
4889 /* The assumption here is that it is safe to use
4890 XEXP if the set is multi-word, but not if
4891 it's single-word. */
4892 if (GET_CODE (target) == MEM)
4893 to_rtx = adjust_address (target, mode, offset);
else if (offset == 0)
to_rtx = target;
else
abort ();
emit_move_insn (to_rtx, datum);
4905 offset += set_word_size / BITS_PER_UNIT;
4910 /* Don't bother clearing storage if the set is all ones. */
4911 if (TREE_CHAIN (elt) != NULL_TREE
|| (TREE_PURPOSE (elt) == NULL_TREE
? nbits != 1
: (! host_integerp (TREE_VALUE (elt), 0)
4915 || ! host_integerp (TREE_PURPOSE (elt), 0)
4916 || (tree_low_cst (TREE_VALUE (elt), 0)
4917 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4918 != (HOST_WIDE_INT) nbits))))
4919 clear_storage (target, expr_size (exp));
4921 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4923 /* Start of range of element or NULL. */
4924 tree startbit = TREE_PURPOSE (elt);
4925 /* End of range of element, or element value. */
4926 tree endbit = TREE_VALUE (elt);
4927 #ifdef TARGET_MEM_FUNCTIONS
4928 HOST_WIDE_INT startb, endb;
4930 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4932 bitlength_rtx = expand_expr (bitlength,
4933 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4935 /* Handle non-range tuple element like [ expr ]. */
4936 if (startbit == NULL_TREE)
startbit = save_expr (endbit);
endbit = startbit;
4942 startbit = convert (sizetype, startbit);
4943 endbit = convert (sizetype, endbit);
4944 if (! integer_zerop (domain_min))
4946 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4947 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4949 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4950 EXPAND_CONST_ADDRESS);
4951 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4952 EXPAND_CONST_ADDRESS);
4958 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4961 emit_move_insn (targetx, target);
4964 else if (GET_CODE (target) == MEM)
4969 #ifdef TARGET_MEM_FUNCTIONS
4970 /* Optimization: If startbit and endbit are
4971 constants divisible by BITS_PER_UNIT,
4972 call memset instead. */
4973 if (TREE_CODE (startbit) == INTEGER_CST
4974 && TREE_CODE (endbit) == INTEGER_CST
4975 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4976 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4978 emit_library_call (memset_libfunc, LCT_NORMAL,
4980 plus_constant (XEXP (targetx, 0),
4981 startb / BITS_PER_UNIT),
4983 constm1_rtx, TYPE_MODE (integer_type_node),
4984 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4985 TYPE_MODE (sizetype));
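/* E.g. a constant range covering bits 8..23 gives startb == 8 and
   endb == 24, both multiples of BITS_PER_UNIT, so it is committed with
   a single memset of (24 - 8) / 8 == 2 bytes of all-ones starting one
   byte into the set, rather than with the generic __setbits call
   below.  */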
4989 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4990 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4991 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4992 startbit_rtx, TYPE_MODE (sizetype),
4993 endbit_rtx, TYPE_MODE (sizetype));
4996 emit_move_insn (target, targetx);
5004 /* Store the value of EXP (an expression tree)
5005 into a subfield of TARGET which has mode MODE and occupies
5006 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5007 If MODE is VOIDmode, it means that we are storing into a bit-field.
5009 If VALUE_MODE is VOIDmode, return nothing in particular.
5010 UNSIGNEDP is not used in this case.
5012 Otherwise, return an rtx for the value stored. This rtx
5013 has mode VALUE_MODE if that is convenient to do.
5014 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5016 TYPE is the type of the underlying object,
5018 ALIAS_SET is the alias set for the destination. This value will
5019 (in general) be different from that for TARGET, since TARGET is a
5020 reference to the containing structure. */
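/* For example, a store through the hypothetical bit-field reference

     struct s { unsigned int f : 5; } *p;
     p->f = v;

   arrives here with BITSIZE 5, BITPOS 0 and MODE VOIDmode; if the
   caller also passes VALUE_MODE SImode, it gets back an rtx holding
   the 5-bit value just stored, recovered below either by masking
   (unsigned) or by a left/right shift pair (signed).  */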
5023 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5026 HOST_WIDE_INT bitsize;
5027 HOST_WIDE_INT bitpos;
5028 enum machine_mode mode;
5030 enum machine_mode value_mode;
5035 HOST_WIDE_INT width_mask = 0;
5037 if (TREE_CODE (exp) == ERROR_MARK)
/* If we have nothing to store, do nothing unless the expression has
side-effects. */
if (bitsize == 0)
return expand_expr (exp, const0_rtx, VOIDmode, 0);
else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5045 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5047 /* If we are storing into an unaligned field of an aligned union that is
5048 in a register, we may have the mode of TARGET being an integer mode but
5049 MODE == BLKmode. In that case, get an aligned object whose size and
5050 alignment are the same as TARGET and store TARGET into it (we can avoid
5051 the store if the field being stored is the entire width of TARGET). Then
5052 call ourselves recursively to store the field into a BLKmode version of
5053 that object. Finally, load from the object into TARGET. This is not
5054 very efficient in general, but should only be slightly more expensive
5055 than the otherwise-required unaligned accesses. Perhaps this can be
5056 cleaned up later. */
5059 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5063 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5065 rtx blk_object = adjust_address (object, BLKmode, 0);
5067 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5068 emit_move_insn (object, target);
5070 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5073 emit_move_insn (target, object);
/* We want to return the BLKmode version of the data. */
return blk_object;
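/* Sketch of that situation (hypothetical source): a union such as

     union u { int word; char tail[3]; } v;

   may live in an SImode register while a store to v.tail has
   MODE == BLKmode; the code above spills the register into a stack
   temporary, stores the field through the BLKmode view, and reloads
   the register afterwards.  */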
5079 if (GET_CODE (target) == CONCAT)
5081 /* We're storing into a struct containing a single __complex. */
5085 return store_expr (exp, target, 0);
5088 /* If the structure is in a register or if the component
5089 is a bit field, we cannot use addressing to access it.
5090 Use bit-field techniques or SUBREG to store in it. */
5092 if (mode == VOIDmode
5093 || (mode != BLKmode && ! direct_store[(int) mode]
5094 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5095 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5096 || GET_CODE (target) == REG
5097 || GET_CODE (target) == SUBREG
5098 /* If the field isn't aligned enough to store as an ordinary memref,
5099 store it as a bit field. */
5100 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5101 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5102 || bitpos % GET_MODE_ALIGNMENT (mode)))
5103 /* If the RHS and field are a constant size and the size of the
RHS isn't the same size as the bitfield, we must use bitfield
operations. */
|| (bitsize >= 0
&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5108 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5110 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5112 /* If BITSIZE is narrower than the size of the type of EXP
5113 we will be narrowing TEMP. Normally, what's wanted are the
5114 low-order bits. However, if EXP's type is a record and this is
a big-endian machine, we want the upper BITSIZE bits. */
5116 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5117 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5118 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5119 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
size_int (GET_MODE_BITSIZE (GET_MODE (temp))
- bitsize),
NULL_RTX, 1);
/* Unless MODE is VOIDmode or BLKmode, convert TEMP to
MODE. */
if (mode != VOIDmode && mode != BLKmode
5127 && mode != TYPE_MODE (TREE_TYPE (exp)))
5128 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5130 /* If the modes of TARGET and TEMP are both BLKmode, both
5131 must be in memory and BITPOS must be aligned on a byte
5132 boundary. If so, we simply do a block copy. */
5133 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5135 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
|| bitpos % BITS_PER_UNIT != 0)
abort ();
5139 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5140 emit_block_move (target, temp,
5141 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5144 return value_mode == VOIDmode ? const0_rtx : target;
5147 /* Store the value in the bitfield. */
5148 store_bit_field (target, bitsize, bitpos, mode, temp,
5149 int_size_in_bytes (type));
5151 if (value_mode != VOIDmode)
5153 /* The caller wants an rtx for the value.
5154 If possible, avoid refetching from the bitfield itself. */
if (width_mask != 0
&& ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
tree count;
enum machine_mode tmode;
5161 tmode = GET_MODE (temp);
if (tmode == VOIDmode)
tmode = value_mode;
if (unsignedp)
return expand_and (tmode, temp,
gen_int_mode (width_mask, tmode),
NULL_RTX);
5170 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5171 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5172 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
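/* I.e., for an unsigned field the low BITSIZE bits of TEMP are kept
   by the masking above; for a signed field the shift pair sign-extends
   them: e.g. a 5-bit field holding -1 in a 32-bit TMODE is shifted
   left by 27 and arithmetically right by 27, yielding all ones.  */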
5175 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5176 NULL_RTX, value_mode, VOIDmode,
5177 int_size_in_bytes (type));
5183 rtx addr = XEXP (target, 0);
5184 rtx to_rtx = target;
5186 /* If a value is wanted, it must be the lhs;
5187 so make the address stable for multiple use. */
5189 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5190 && ! CONSTANT_ADDRESS_P (addr)
5191 /* A frame-pointer reference is already stable. */
5192 && ! (GET_CODE (addr) == PLUS
5193 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5194 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5195 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5196 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5198 /* Now build a reference to just the desired component. */
5200 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5202 if (to_rtx == target)
5203 to_rtx = copy_rtx (to_rtx);
5205 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5206 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5207 set_mem_alias_set (to_rtx, alias_set);
5209 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5213 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5214 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5215 codes and find the ultimate containing object, which we return.
5217 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5218 bit position, and *PUNSIGNEDP to the signedness of the field.
5219 If the position of the field is variable, we store a tree
5220 giving the variable offset (in units) in *POFFSET.
5221 This offset is in addition to the bit position.
5222 If the position is not variable, we store 0 in *POFFSET.
5224 If any of the extraction expressions is volatile,
5225 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5227 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5228 is a mode that can be used to access the field. In that case, *PBITSIZE
5231 If the field describes a variable-sized object, *PMODE is set to
5232 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5233 this case, but the address of the object can be found. */
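/* As an illustration (hypothetical declarations), for

     struct s { int pad; short a[10]; } x;
     ... x.a[i] ...

   the walk below returns X as the containing object, with *PBITSIZE
   16, *PBITPOS 32 (the constant offset of A, assuming 32-bit int), and
   *POFFSET the tree `(sizetype) i * 2' for the variable part of the
   position.  */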
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
punsignedp, pvolatilep)
tree exp;
HOST_WIDE_INT *pbitsize;
HOST_WIDE_INT *pbitpos;
tree *poffset;
enum machine_mode *pmode;
int *punsignedp;
int *pvolatilep;
tree size_tree = 0;
tree tem;
enum machine_mode mode = VOIDmode;
5248 tree offset = size_zero_node;
5249 tree bit_offset = bitsize_zero_node;
5250 tree placeholder_ptr = 0;
5253 /* First get the mode, signedness, and size. We do this from just the
5254 outermost expression. */
5255 if (TREE_CODE (exp) == COMPONENT_REF)
5257 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5258 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5259 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5261 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5263 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5265 size_tree = TREE_OPERAND (exp, 1);
5266 *punsignedp = TREE_UNSIGNED (exp);
5270 mode = TYPE_MODE (TREE_TYPE (exp));
5271 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5273 if (mode == BLKmode)
5274 size_tree = TYPE_SIZE (TREE_TYPE (exp));
else
*pbitsize = GET_MODE_BITSIZE (mode);
if (size_tree != 0)
{
if (! host_integerp (size_tree, 1))
mode = BLKmode, *pbitsize = -1;
else
*pbitsize = tree_low_cst (size_tree, 1);
}
5287 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5288 and find the ultimate containing object. */
while (1)
{
if (TREE_CODE (exp) == BIT_FIELD_REF)
5292 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5293 else if (TREE_CODE (exp) == COMPONENT_REF)
5295 tree field = TREE_OPERAND (exp, 1);
5296 tree this_offset = DECL_FIELD_OFFSET (field);
5298 /* If this field hasn't been filled in yet, don't go
5299 past it. This should only happen when folding expressions
5300 made during type construction. */
5301 if (this_offset == 0)
5303 else if (! TREE_CONSTANT (this_offset)
5304 && contains_placeholder_p (this_offset))
5305 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5307 offset = size_binop (PLUS_EXPR, offset, this_offset);
5308 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5309 DECL_FIELD_BIT_OFFSET (field));
5311 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5314 else if (TREE_CODE (exp) == ARRAY_REF
5315 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5317 tree index = TREE_OPERAND (exp, 1);
5318 tree array = TREE_OPERAND (exp, 0);
5319 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5320 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5321 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5323 /* We assume all arrays have sizes that are a multiple of a byte.
5324 First subtract the lower bound, if any, in the type of the
5325 index, then convert to sizetype and multiply by the size of the
5327 if (low_bound != 0 && ! integer_zerop (low_bound))
5328 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5331 /* If the index has a self-referential type, pass it to a
WITH_RECORD_EXPR; if the component size is self-referential, pass
our component to one. */
5334 if (! TREE_CONSTANT (index)
5335 && contains_placeholder_p (index))
5336 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5337 if (! TREE_CONSTANT (unit_size)
5338 && contains_placeholder_p (unit_size))
5339 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5341 offset = size_binop (PLUS_EXPR, offset,
5342 size_binop (MULT_EXPR,
convert (sizetype, index),
unit_size));
5347 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5349 tree new = find_placeholder (exp, &placeholder_ptr);
5351 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5352 We might have been called from tree optimization where we
haven't set up an object yet. */
if (new == 0)
return exp;
else
exp = new;
continue;
}
5361 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5362 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5363 && ! ((TREE_CODE (exp) == NOP_EXPR
5364 || TREE_CODE (exp) == CONVERT_EXPR)
5365 && (TYPE_MODE (TREE_TYPE (exp))
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
break;
5369 /* If any reference in the chain is volatile, the effect is volatile. */
if (TREE_THIS_VOLATILE (exp))
*pvolatilep = 1;
5373 exp = TREE_OPERAND (exp, 0);
5376 /* If OFFSET is constant, see if we can return the whole thing as a
5377 constant bit position. Otherwise, split it up. */
5378 if (host_integerp (offset, 0)
&& 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
bitsize_unit_node))
5381 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5382 && host_integerp (tem, 0))
*pbitpos = tree_low_cst (tem, 0), *poffset = 0;
else
*pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
return exp;
5391 /* Return 1 if T is an expression that get_inner_reference handles. */
int
handled_component_p (t)
tree t;
5397 switch (TREE_CODE (t))
case COMPONENT_REF:
case BIT_FIELD_REF:
case ARRAY_REF:
case ARRAY_RANGE_REF:
5403 case NON_LVALUE_EXPR:
case VIEW_CONVERT_EXPR:
return 1;
case NOP_EXPR:
case CONVERT_EXPR:
5409 return (TYPE_MODE (TREE_TYPE (t))
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
default:
return 0;
}
5417 /* Given an rtx VALUE that may contain additions and multiplications, return
5418 an equivalent value that just refers to a register, memory, or constant.
5419 This is done by generating instructions to perform the arithmetic and
5420 returning a pseudo-register containing the value.
5422 The returned value may be a REG, SUBREG, MEM or constant. */
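/* For instance, applied to (plus:SI (mult:SI (reg:SI 100)
   (const_int 4)) (reg:SI 101)), the code below forces the MULT into a
   fresh pseudo via expand_mult and then emits the addition with
   expand_binop, returning the pseudo that holds the sum.  */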
force_operand (value, target)
rtx value, target;
{
optab binoptab = 0;
/* Use a temporary to force order of execution of calls to
`force_operand'. */
rtx tmp;
rtx op2;
/* Use subtarget as the target for operand 0 of a binary operation. */
rtx subtarget = get_subtarget (target);
5436 /* Check for a PIC address load. */
5437 if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5438 && XEXP (value, 0) == pic_offset_table_rtx
5439 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5440 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5441 || GET_CODE (XEXP (value, 1)) == CONST))
if (!subtarget)
subtarget = gen_reg_rtx (GET_MODE (value));
emit_move_insn (subtarget, value);
return subtarget;
}
5449 if (GET_CODE (value) == PLUS)
5450 binoptab = add_optab;
5451 else if (GET_CODE (value) == MINUS)
5452 binoptab = sub_optab;
5453 else if (GET_CODE (value) == MULT)
5455 op2 = XEXP (value, 1);
5456 if (!CONSTANT_P (op2)
&& !(GET_CODE (op2) == REG && op2 != subtarget))
subtarget = 0;
5459 tmp = force_operand (XEXP (value, 0), subtarget);
5460 return expand_mult (GET_MODE (value), tmp,
5461 force_operand (op2, NULL_RTX),
5467 op2 = XEXP (value, 1);
5468 if (!CONSTANT_P (op2)
&& !(GET_CODE (op2) == REG && op2 != subtarget))
subtarget = 0;
5471 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5473 binoptab = add_optab;
5474 op2 = negate_rtx (GET_MODE (value), op2);
5477 /* Check for an addition with OP2 a constant integer and our first
5478 operand a PLUS of a virtual register and something else. In that
5479 case, we want to emit the sum of the virtual register and the
5480 constant first and then add the other value. This allows virtual
5481 register instantiation to simply modify the constant rather than
5482 creating another one around this addition. */
5483 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5484 && GET_CODE (XEXP (value, 0)) == PLUS
5485 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5486 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5487 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5489 rtx temp = expand_binop (GET_MODE (value), binoptab,
5490 XEXP (XEXP (value, 0), 0), op2,
5491 subtarget, 0, OPTAB_LIB_WIDEN);
5492 return expand_binop (GET_MODE (value), binoptab, temp,
5493 force_operand (XEXP (XEXP (value, 0), 1), 0),
5494 target, 0, OPTAB_LIB_WIDEN);
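/* E.g. for (plus (plus (reg virtual-stack-vars) (reg 102))
   (const_int 8)) this emits virtual-stack-vars + 8 first, so that
   virtual register instantiation can later fold that sum into a single
   frame-pointer offset, and only then adds (reg 102).  */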
5497 tmp = force_operand (XEXP (value, 0), subtarget);
5498 return expand_binop (GET_MODE (value), binoptab, tmp,
5499 force_operand (op2, NULL_RTX),
5500 target, 0, OPTAB_LIB_WIDEN);
5501 /* We give UNSIGNEDP = 0 to expand_binop
5502 because the only operations we are expanding here are signed ones. */
5505 #ifdef INSN_SCHEDULING
5506 /* On machines that have insn scheduling, we want all memory reference to be
5507 explicit, so we need to deal with such paradoxical SUBREGs. */
5508 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5509 && (GET_MODE_SIZE (GET_MODE (value))
5510 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
value
= simplify_gen_subreg (GET_MODE (value),
5513 force_reg (GET_MODE (SUBREG_REG (value)),
force_operand (SUBREG_REG (value),
NULL_RTX)),
5516 GET_MODE (SUBREG_REG (value)),
SUBREG_BYTE (value));
#endif
return value;
5523 /* Subroutine of expand_expr: return nonzero iff there is no way that
5524 EXP can reference X, which is being modified. TOP_P is nonzero if this
5525 call is going to be used to determine whether we need a temporary
5526 for EXP, as opposed to a recursive call to this function.
5528 It is always safe for this routine to return zero since it merely
5529 searches for optimization opportunities. */
safe_from_p (x, exp, top_p)
rtx x;
tree exp;
int top_p;
{
rtx exp_rtl = 0;
int i, nops;
static tree save_expr_list;
if (x == 0
/* If EXP has varying size, we MUST use a target since we currently
5543 have no way of allocating temporaries of variable size
5544 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5545 So we assume here that something at a higher level has prevented a
5546 clash. This is somewhat bogus, but the best we can do. Only
5547 do this when X is BLKmode and when we are at the top level. */
5548 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5549 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5550 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5551 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5552 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5554 && GET_MODE (x) == BLKmode)
5555 /* If X is in the outgoing argument area, it is always safe. */
5556 || (GET_CODE (x) == MEM
5557 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5558 || (GET_CODE (XEXP (x, 0)) == PLUS
&& XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
return 1;
5562 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5563 find the underlying pseudo. */
5564 if (GET_CODE (x) == SUBREG)
5567 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5571 /* A SAVE_EXPR might appear many times in the expression passed to the
5572 top-level safe_from_p call, and if it has a complex subexpression,
5573 examining it multiple times could result in a combinatorial explosion.
5574 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5575 with optimization took about 28 minutes to compile -- even though it was
5576 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5577 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5578 we have processed. Note that the only test of top_p was above. */
5587 rtn = safe_from_p (x, exp, 0);
5589 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5590 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5595 /* Now look at our tree code and possibly recurse. */
5596 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5599 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5606 if (TREE_CODE (exp) == TREE_LIST)
5607 return ((TREE_VALUE (exp) == 0
5608 || safe_from_p (x, TREE_VALUE (exp), 0))
5609 && (TREE_CHAIN (exp) == 0
5610 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5611 else if (TREE_CODE (exp) == ERROR_MARK)
5612 return 1; /* An already-visited SAVE_EXPR? */
5617 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5621 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5622 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5626 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5627 the expression. If it is set, we conflict iff we are that rtx or
5628 both are in memory. Otherwise, we check all operands of the
5629 expression recursively. */
5631 switch (TREE_CODE (exp))
case ADDR_EXPR:
/* If the operand is static or we are static, we can't conflict.
5635 Likewise if we don't conflict with the operand at all. */
5636 if (staticp (TREE_OPERAND (exp, 0))
5637 || TREE_STATIC (exp)
|| safe_from_p (x, TREE_OPERAND (exp, 0), 0))
return 1;
5641 /* Otherwise, the only way this can conflict is if we are taking
the address of a DECL and that address is part of X, which is
very rare. */
5644 exp = TREE_OPERAND (exp, 0);
5647 if (!DECL_RTL_SET_P (exp)
5648 || GET_CODE (DECL_RTL (exp)) != MEM)
5651 exp_rtl = XEXP (DECL_RTL (exp), 0);
case INDIRECT_REF:
if (GET_CODE (x) == MEM
5657 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
get_alias_set (exp)))
return 0;
break;
case CALL_EXPR:
/* Assume that the call will clobber all hard registers and
all of memory. */
5665 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
|| GET_CODE (x) == MEM)
return 0;
break;
case RTL_EXPR:
/* If a sequence exists, we would have to scan every instruction
in the sequence to see if it was safe. This is probably not
worthwhile. */
if (RTL_EXPR_SEQUENCE (exp))
return 0;
5677 exp_rtl = RTL_EXPR_RTL (exp);
5680 case WITH_CLEANUP_EXPR:
5681 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5684 case CLEANUP_POINT_EXPR:
5685 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5688 exp_rtl = SAVE_EXPR_RTL (exp);
5692 /* If we've already scanned this, don't do it again. Otherwise,
show we've scanned it and record for clearing the flag if we're
going upward. */
if (TREE_PRIVATE (exp))
return 0;
5698 TREE_PRIVATE (exp) = 1;
5699 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5701 TREE_PRIVATE (exp) = 0;
5705 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5709 /* The only operand we look at is operand 1. The rest aren't
5710 part of the expression. */
5711 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5713 case METHOD_CALL_EXPR:
/* This takes an rtx argument, but shouldn't appear here. */
abort ();
5721 /* If we have an rtx, we do not need to scan our operands. */
5725 nops = first_rtl_op (TREE_CODE (exp));
5726 for (i = 0; i < nops; i++)
5727 if (TREE_OPERAND (exp, i) != 0
5728 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5731 /* If this is a language-specific tree code, it may require
5732 special handling. */
5733 if ((unsigned int) TREE_CODE (exp)
5734 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
&& !(*lang_hooks.safe_from_p) (x, exp))
return 0;
/* If we have an rtl, find any enclosed object. Then see if we conflict
with it. */
if (exp_rtl)
{
5743 if (GET_CODE (exp_rtl) == SUBREG)
5745 exp_rtl = SUBREG_REG (exp_rtl);
5746 if (GET_CODE (exp_rtl) == REG
&& REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
return 0;
5751 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5752 are memory and they conflict. */
5753 return ! (rtx_equal_p (x, exp_rtl)
5754 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5755 && true_dependence (exp_rtl, VOIDmode, x,
5756 rtx_addr_varies_p)));
5759 /* If we reach here, it is safe. */
5763 /* Subroutine of expand_expr: return rtx if EXP is a
5764 variable or parameter; else return 0. */
5771 switch (TREE_CODE (exp))
5775 return DECL_RTL (exp);
5781 #ifdef MAX_INTEGER_COMPUTATION_MODE
5784 check_max_integer_computation_mode (exp)
5787 enum tree_code code;
5788 enum machine_mode mode;
5790 /* Strip any NOPs that don't change the mode. */
STRIP_NOPS (exp);
code = TREE_CODE (exp);
5794 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5795 if (code == NOP_EXPR
&& TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
return;
5799 /* First check the type of the overall operation. We need only look at
5800 unary, binary and relational operations. */
5801 if (TREE_CODE_CLASS (code) == '1'
5802 || TREE_CODE_CLASS (code) == '2'
5803 || TREE_CODE_CLASS (code) == '<')
5805 mode = TYPE_MODE (TREE_TYPE (exp));
5806 if (GET_MODE_CLASS (mode) == MODE_INT
5807 && mode > MAX_INTEGER_COMPUTATION_MODE)
5808 internal_error ("unsupported wide integer operation");
5811 /* Check operand of a unary op. */
5812 if (TREE_CODE_CLASS (code) == '1')
5814 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5815 if (GET_MODE_CLASS (mode) == MODE_INT
5816 && mode > MAX_INTEGER_COMPUTATION_MODE)
5817 internal_error ("unsupported wide integer operation");
5820 /* Check operands of a binary/comparison op. */
5821 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5823 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5824 if (GET_MODE_CLASS (mode) == MODE_INT
5825 && mode > MAX_INTEGER_COMPUTATION_MODE)
5826 internal_error ("unsupported wide integer operation");
5828 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5829 if (GET_MODE_CLASS (mode) == MODE_INT
5830 && mode > MAX_INTEGER_COMPUTATION_MODE)
5831 internal_error ("unsupported wide integer operation");
5836 /* Return the highest power of two that EXP is known to be a multiple of.
5837 This is used in updating alignment of MEMs in array references. */
5839 static HOST_WIDE_INT
5840 highest_pow2_factor (exp)
5843 HOST_WIDE_INT c0, c1;
5845 switch (TREE_CODE (exp))
case INTEGER_CST:
/* We can find the lowest bit that's a one. If the low
5849 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5850 We need to handle this case since we can find it in a COND_EXPR,
a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
erroneous program, so return BIGGEST_ALIGNMENT to avoid any
later ICE. */
5854 if (TREE_CONSTANT_OVERFLOW (exp))
5855 return BIGGEST_ALIGNMENT;
5858 /* Note: tree_low_cst is intentionally not used here,
5859 we don't care about the upper bits. */
5860 c0 = TREE_INT_CST_LOW (exp);
c0 &= -c0;
return c0 ? c0 : BIGGEST_ALIGNMENT;
5866 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5867 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5868 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5869 return MIN (c0, c1);
case MULT_EXPR:
c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
return c0 * c1;
5876 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5878 if (integer_pow2p (TREE_OPERAND (exp, 1))
5879 && host_integerp (TREE_OPERAND (exp, 1), 1))
5881 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5882 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5883 return MAX (1, c0 / c1);
5887 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5888 case SAVE_EXPR: case WITH_RECORD_EXPR:
5889 return highest_pow2_factor (TREE_OPERAND (exp, 0));
case COMPOUND_EXPR:
return highest_pow2_factor (TREE_OPERAND (exp, 1));
case COND_EXPR:
c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5896 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5897 return MIN (c0, c1);
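/* Thus for the hypothetical offset tree `i * 12 + 16', the MULT_EXPR
   case yields 1 * 4 == 4 (I is only known to be a multiple of 1, and
   the low set bit of 12 is 4), and the PLUS_EXPR case then returns
   MIN (4, 16) == 4: a MEM addressed by that expression can be marked
   at best 4-byte aligned.  */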
5906 /* Return an object on the placeholder list that matches EXP, a
5907 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5908 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5909 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5910 is a location which initially points to a starting location in the
5911 placeholder list (zero means start of the list) and where a pointer into
5912 the placeholder list at which the object is found is placed. */
5915 find_placeholder (exp, plist)
5919 tree type = TREE_TYPE (exp);
5920 tree placeholder_expr;
5922 for (placeholder_expr
5923 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5924 placeholder_expr != 0;
5925 placeholder_expr = TREE_CHAIN (placeholder_expr))
5927 tree need_type = TYPE_MAIN_VARIANT (type);
5930 /* Find the outermost reference that is of the type we want. If none,
5931 see if any object has a type that is a pointer to the type we
5933 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5934 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5935 || TREE_CODE (elt) == COND_EXPR)
5936 ? TREE_OPERAND (elt, 1)
5937 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5938 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5939 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5940 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5941 ? TREE_OPERAND (elt, 0) : 0))
5942 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5945 *plist = placeholder_expr;
5949 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5951 = ((TREE_CODE (elt) == COMPOUND_EXPR
5952 || TREE_CODE (elt) == COND_EXPR)
5953 ? TREE_OPERAND (elt, 1)
5954 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5955 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5956 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5957 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5958 ? TREE_OPERAND (elt, 0) : 0))
5959 if (POINTER_TYPE_P (TREE_TYPE (elt))
5960 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5964 *plist = placeholder_expr;
5965 return build1 (INDIRECT_REF, need_type, elt);
5972 /* expand_expr: generate code for computing expression EXP.
5973 An rtx for the computed value is returned. The value is never null.
5974 In the case of a void EXP, const0_rtx is returned.
5976 The value may be stored in TARGET if TARGET is nonzero.
5977 TARGET is just a suggestion; callers must assume that
5978 the rtx returned may not be the same as TARGET.
5980 If TARGET is CONST0_RTX, it means that the value will be ignored.
5982 If TMODE is not VOIDmode, it suggests generating the
5983 result in mode TMODE. But this is done only when convenient.
Otherwise, TMODE is ignored and the value is generated in its natural
mode. TMODE is just a suggestion; callers must assume that
5986 the rtx returned may not have mode TMODE.
5988 Note that TARGET may have neither TMODE nor MODE. In that case, it
5989 probably will not be used.
5991 If MODIFIER is EXPAND_SUM then when EXP is an addition
5992 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5993 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5994 products as above, or REG or MEM, or constant.
5995 Ordinarily in such cases we would output mul or add instructions
5996 and then return a pseudo reg containing the sum.
5998 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5999 it also marks a label as absolutely required (it can't be dead).
6000 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6001 This is used for outputting expressions used in initializers.
6003 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6004 with a constant address even if that address is not normally legitimate.
6005 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
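/* For instance, expanding the hypothetical address arithmetic
   `a + i * 4' with MODIFIER == EXPAND_SUM may legitimately return

     (plus:SI (reg/v:SI 58) (mult:SI (reg/v:SI 59) (const_int 4)))

   without emitting any insns, leaving it to the caller (typically
   memory_address) to fold the whole expression into an addressing
   mode.  */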
expand_expr (exp, target, tmode, modifier)
tree exp;
rtx target;
6011 enum machine_mode tmode;
6012 enum expand_modifier modifier;
6015 tree type = TREE_TYPE (exp);
6016 int unsignedp = TREE_UNSIGNED (type);
6017 enum machine_mode mode;
6018 enum tree_code code = TREE_CODE (exp);
rtx subtarget, original_target;
rtx op0, op1, temp;
int ignore;
tree context;
6024 /* Handle ERROR_MARK before anybody tries to access its type. */
6025 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6027 op0 = CONST0_RTX (tmode);
6033 mode = TYPE_MODE (type);
6034 /* Use subtarget as the target for operand 0 of a binary operation. */
6035 subtarget = get_subtarget (target);
6036 original_target = target;
6037 ignore = (target == const0_rtx
6038 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6039 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6040 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6041 && TREE_CODE (type) == VOID_TYPE));
6043 /* If we are going to ignore this result, we need only do something
6044 if there is a side-effect somewhere in the expression. If there
6045 is, short-circuit the most common cases here. Note that we must
6046 not call expand_expr with anything but const0_rtx in case this
6047 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6051 if (! TREE_SIDE_EFFECTS (exp))
6054 /* Ensure we reference a volatile object even if value is ignored, but
6055 don't do this if all we are doing is taking its address. */
6056 if (TREE_THIS_VOLATILE (exp)
6057 && TREE_CODE (exp) != FUNCTION_DECL
6058 && mode != VOIDmode && mode != BLKmode
6059 && modifier != EXPAND_CONST_ADDRESS)
6061 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6062 if (GET_CODE (temp) == MEM)
6063 temp = copy_to_reg (temp);
6067 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6068 || code == INDIRECT_REF || code == BUFFER_REF)
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
modifier);
6072 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6073 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6075 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6076 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6079 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6080 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
/* If the second operand has no side effects, just evaluate
the first. */
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
modifier);
6085 else if (code == BIT_FIELD_REF)
6087 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6088 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6089 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6096 #ifdef MAX_INTEGER_COMPUTATION_MODE
6097 /* Only check stuff here if the mode we want is different from the mode
of the expression; if it's the same, check_max_integer_computation_mode
6099 will handle it. Do we really need to check this stuff at all? */
6102 && GET_MODE (target) != mode
6103 && TREE_CODE (exp) != INTEGER_CST
6104 && TREE_CODE (exp) != PARM_DECL
6105 && TREE_CODE (exp) != ARRAY_REF
6106 && TREE_CODE (exp) != ARRAY_RANGE_REF
6107 && TREE_CODE (exp) != COMPONENT_REF
6108 && TREE_CODE (exp) != BIT_FIELD_REF
6109 && TREE_CODE (exp) != INDIRECT_REF
6110 && TREE_CODE (exp) != CALL_EXPR
6111 && TREE_CODE (exp) != VAR_DECL
6112 && TREE_CODE (exp) != RTL_EXPR)
6114 enum machine_mode mode = GET_MODE (target);
6116 if (GET_MODE_CLASS (mode) == MODE_INT
6117 && mode > MAX_INTEGER_COMPUTATION_MODE)
6118 internal_error ("unsupported wide integer operation");
6122 && TREE_CODE (exp) != INTEGER_CST
6123 && TREE_CODE (exp) != PARM_DECL
6124 && TREE_CODE (exp) != ARRAY_REF
6125 && TREE_CODE (exp) != ARRAY_RANGE_REF
6126 && TREE_CODE (exp) != COMPONENT_REF
6127 && TREE_CODE (exp) != BIT_FIELD_REF
6128 && TREE_CODE (exp) != INDIRECT_REF
6129 && TREE_CODE (exp) != VAR_DECL
6130 && TREE_CODE (exp) != CALL_EXPR
6131 && TREE_CODE (exp) != RTL_EXPR
6132 && GET_MODE_CLASS (tmode) == MODE_INT
6133 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6134 internal_error ("unsupported wide integer operation");
6136 check_max_integer_computation_mode (exp);
/* If we will do cse, generate all results into pseudo registers
since 1) that allows cse to find more things
and 2) otherwise cse could produce an insn the machine
cannot support. An exception is a CONSTRUCTOR into a multi-word
MEM: that's much more likely to be most efficient into the MEM. */
6145 if (! cse_not_expected && mode != BLKmode && target
6146 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
&& ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
target = 0;
6154 tree function = decl_function_context (exp);
6155 /* Handle using a label in a containing function. */
6156 if (function != current_function_decl
6157 && function != inline_function_decl && function != 0)
6159 struct function *p = find_function_data (function);
6160 p->expr->x_forced_labels
6161 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6162 p->expr->x_forced_labels);
6166 if (modifier == EXPAND_INITIALIZER)
6167 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6172 temp = gen_rtx_MEM (FUNCTION_MODE,
6173 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6174 if (function != current_function_decl
6175 && function != inline_function_decl && function != 0)
6176 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6181 if (DECL_RTL (exp) == 0)
6183 error_with_decl (exp, "prior parameter's size depends on `%s'");
6184 return CONST0_RTX (mode);
6187 /* ... fall through ... */
6190 /* If a static var's type was incomplete when the decl was written,
6191 but the type is complete now, lay out the decl now. */
6192 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6193 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6195 rtx value = DECL_RTL_IF_SET (exp);
6197 layout_decl (exp, 0);
6199 /* If the RTL was already set, update its mode and memory
6203 PUT_MODE (value, DECL_MODE (exp));
6204 SET_DECL_RTL (exp, 0);
6205 set_mem_attributes (value, exp, 1);
6206 SET_DECL_RTL (exp, value);
6210 /* ... fall through ... */
6214 if (DECL_RTL (exp) == 0)
6217 /* Ensure variable marked as used even if it doesn't go through
a parser. If it hasn't been used yet, write out an external
definition. */
6220 if (! TREE_USED (exp))
6222 assemble_external (exp);
6223 TREE_USED (exp) = 1;
6226 /* Show we haven't gotten RTL for this yet. */
6229 /* Handle variables inherited from containing functions. */
6230 context = decl_function_context (exp);
6232 /* We treat inline_function_decl as an alias for the current function
6233 because that is the inline function whose vars, types, etc.
6234 are being merged into the current function.
6235 See expand_inline_function. */
6237 if (context != 0 && context != current_function_decl
6238 && context != inline_function_decl
6239 /* If var is static, we don't need a static chain to access it. */
6240 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6241 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6245 /* Mark as non-local and addressable. */
6246 DECL_NONLOCAL (exp) = 1;
6247 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6249 mark_addressable (exp);
6250 if (GET_CODE (DECL_RTL (exp)) != MEM)
6252 addr = XEXP (DECL_RTL (exp), 0);
6253 if (GET_CODE (addr) == MEM)
addr
= replace_equiv_address (addr,
6256 fix_lexical_addr (XEXP (addr, 0), exp));
else
addr = fix_lexical_addr (addr, exp);
6260 temp = replace_equiv_address (DECL_RTL (exp), addr);
6263 /* This is the case of an array whose size is to be determined
6264 from its initializer, while the initializer is still being parsed.
6267 else if (GET_CODE (DECL_RTL (exp)) == MEM
6268 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6269 temp = validize_mem (DECL_RTL (exp));
6271 /* If DECL_RTL is memory, we are in the normal case and either
6272 the address is not valid or it is not a register and -fforce-addr
6273 is specified, get the address into a register. */
6275 else if (GET_CODE (DECL_RTL (exp)) == MEM
6276 && modifier != EXPAND_CONST_ADDRESS
6277 && modifier != EXPAND_SUM
6278 && modifier != EXPAND_INITIALIZER
6279 && (! memory_address_p (DECL_MODE (exp),
6280 XEXP (DECL_RTL (exp), 0))
6282 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6283 temp = replace_equiv_address (DECL_RTL (exp),
6284 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6286 /* If we got something, return it. But first, set the alignment
6287 if the address is a register. */
6290 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6291 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6296 /* If the mode of DECL_RTL does not match that of the decl, it
6297 must be a promoted value. We return a SUBREG of the wanted mode,
6298 but mark it so that we know that it was already extended. */
6300 if (GET_CODE (DECL_RTL (exp)) == REG
6301 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6303 /* Get the signedness used for this variable. Ensure we get the
6304 same mode we got when the variable was declared. */
6305 if (GET_MODE (DECL_RTL (exp))
6306 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6307 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6310 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6311 SUBREG_PROMOTED_VAR_P (temp) = 1;
6312 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6316 return DECL_RTL (exp);
6319 return immed_double_const (TREE_INT_CST_LOW (exp),
6320 TREE_INT_CST_HIGH (exp), mode);
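      /* Editorial note: TREE_INT_CST_LOW and TREE_INT_CST_HIGH are the two
	 HOST_WIDE_INT halves of the constant; with a 32-bit HOST_WIDE_INT,
	 the 64-bit value 0x100000003 arrives as low == 3 and high == 1,
	 and immed_double_const reassembles it in MODE.  */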
6323 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6326 /* If optimized, generate immediate CONST_DOUBLE
6327 which will be turned into memory by reload if necessary.
6329 We used to force a register so that loop.c could see it. But
6330 this does not allow gen_* patterns to perform optimizations with
6331 the constants. It also produces two insns in cases like "x = 1.0;".
6332 On most machines, floating-point constants are not permitted in
6333 many insns, so we'd end up copying it to a register in any case.
6335 Now, we do the copying in expand_binop, if appropriate. */
6336 return immed_real_const (exp);
6340 if (! TREE_CST_RTL (exp))
6341 output_constant_def (exp, 1);
6343 /* TREE_CST_RTL probably contains a constant address.
6344 On RISC machines where a constant address isn't valid,
6345 make some insns to get that address into a register. */
6346 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6347 && modifier != EXPAND_CONST_ADDRESS
6348 && modifier != EXPAND_INITIALIZER
6349 && modifier != EXPAND_SUM
6350 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6352 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6353 return replace_equiv_address (TREE_CST_RTL (exp),
6354 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6355 return TREE_CST_RTL (exp);
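      /* Editorial note: on targets where a raw SYMBOL_REF is not a
	 legitimate memory address (typical of RISC machines), the path
	 above rewrites the MEM so that insns load its address into a
	 register first; for EXPAND_INITIALIZER and the like the MEM is
	 returned untouched, since no insns may be emitted.  */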
6357 case EXPR_WITH_FILE_LOCATION:
6360 const char *saved_input_filename = input_filename;
6361 int saved_lineno = lineno;
6362 input_filename = EXPR_WFL_FILENAME (exp);
6363 lineno = EXPR_WFL_LINENO (exp);
6364 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6365 emit_line_note (input_filename, lineno);
6366 /* Possibly avoid switching back and forth here. */
6367 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6368 input_filename = saved_input_filename;
6369 lineno = saved_lineno;
6374 context = decl_function_context (exp);
6376 /* If this SAVE_EXPR was at global context, assume we are an
6377 initialization function and move it into our context. */
6379 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6381 /* We treat inline_function_decl as an alias for the current function
6382 because that is the inline function whose vars, types, etc.
6383 are being merged into the current function.
6384 See expand_inline_function. */
6385 if (context == current_function_decl || context == inline_function_decl)
6388 /* If this is non-local, handle it. */
6391 /* The following call just exists to abort if the context is
6392 not of a containing function. */
6393 find_function_data (context);
6395 temp = SAVE_EXPR_RTL (exp);
6396 if (temp && GET_CODE (temp) == REG)
6398 put_var_into_stack (exp);
6399 temp = SAVE_EXPR_RTL (exp);
6401 if (temp == 0 || GET_CODE (temp) != MEM)
6404 replace_equiv_address (temp,
6405 fix_lexical_addr (XEXP (temp, 0), exp));
6407 if (SAVE_EXPR_RTL (exp) == 0)
6409 if (mode == VOIDmode)
6412 temp = assign_temp (build_qualified_type (type,
6414 | TYPE_QUAL_CONST)),
6417 SAVE_EXPR_RTL (exp) = temp;
6418 if (!optimize && GET_CODE (temp) == REG)
6419 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6422 /* If the mode of TEMP does not match that of the expression, it
6423 must be a promoted value. We pass store_expr a SUBREG of the
6424 wanted mode but mark it so that we know that it was already
6425 extended.  Note that `unsignedp' was modified above in this case.  */
6428 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6430 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6431 SUBREG_PROMOTED_VAR_P (temp) = 1;
6432 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6435 if (temp == const0_rtx)
6436 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6438 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6440 TREE_USED (exp) = 1;
6443 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6444 must be a promoted value. We return a SUBREG of the wanted mode,
6445 but mark it so that we know that it was already extended. */
6447 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6448 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6450 /* Compute the signedness and make the proper SUBREG. */
6451 promote_mode (type, mode, &unsignedp, 0);
6452 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6453 SUBREG_PROMOTED_VAR_P (temp) = 1;
6454 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6458 return SAVE_EXPR_RTL (exp);
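      /* Editorial sketch of the protocol above: the first expansion of a
	 SAVE_EXPR computes its operand and records the result in
	 SAVE_EXPR_RTL; every later expansion of the same node returns
	 that RTL, so a SAVE_EXPR wrapped around, say, "b * c" and used
	 twice multiplies only once.  */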
6463 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6464 TREE_OPERAND (exp, 0)
6465 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6469 case PLACEHOLDER_EXPR:
6471 tree old_list = placeholder_list;
6472 tree placeholder_expr = 0;
6474 exp = find_placeholder (exp, &placeholder_expr);
6478 placeholder_list = TREE_CHAIN (placeholder_expr);
6479 temp = expand_expr (exp, original_target, tmode, modifier);
6480 placeholder_list = old_list;
6484 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6487 case WITH_RECORD_EXPR:
6488 /* Put the object on the placeholder list, expand our first operand,
6489 and pop the list. */
6490 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6492 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6494 placeholder_list = TREE_CHAIN (placeholder_list);
6498 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6499 expand_goto (TREE_OPERAND (exp, 0));
6501 expand_computed_goto (TREE_OPERAND (exp, 0));
6505 expand_exit_loop_if_false (NULL,
6506 invert_truthvalue (TREE_OPERAND (exp, 0)));
6509 case LABELED_BLOCK_EXPR:
6510 if (LABELED_BLOCK_BODY (exp))
6511 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6512 /* Should perhaps use expand_label, but this is simpler and safer. */
6513 do_pending_stack_adjust ();
6514 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6517 case EXIT_BLOCK_EXPR:
6518 if (EXIT_BLOCK_RETURN (exp))
6519 sorry ("returned value in block_exit_expr");
6520 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6525 expand_start_loop (1);
6526 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6534 tree vars = TREE_OPERAND (exp, 0);
6535 int vars_need_expansion = 0;
6537 /* Need to open a binding contour here because
6538 if there are any cleanups they must be contained here. */
6539 expand_start_bindings (2);
6541 /* Mark the corresponding BLOCK for output in its proper place. */
6542 if (TREE_OPERAND (exp, 2) != 0
6543 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6544 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6546 /* If VARS have not yet been expanded, expand them now. */
6549 if (!DECL_RTL_SET_P (vars))
6551 vars_need_expansion = 1;
6554 expand_decl_init (vars);
6555 vars = TREE_CHAIN (vars);
6558 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6560 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6566 if (RTL_EXPR_SEQUENCE (exp))
6568 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6570 emit_insns (RTL_EXPR_SEQUENCE (exp));
6571 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6573 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6574 free_temps_for_rtl_expr (exp);
6575 return RTL_EXPR_RTL (exp);
6578 /* If we don't need the result, just ensure we evaluate any subexpressions.  */
6584 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6585 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6590 /* All elts simple constants => refer to a constant in memory. But
6591 if this is a non-BLKmode mode, let it store a field at a time
6592 since that should make a CONST_INT or CONST_DOUBLE when we
6593 fold. Likewise, if we have a target we can use, it is best to
6594 store directly into the target unless the type is large enough
6595 that memcpy will be used. If we are making an initializer and
6596 all operands are constant, put it in memory as well. */
6597 else if ((TREE_STATIC (exp)
6598 && ((mode == BLKmode
6599 && ! (target != 0 && safe_from_p (target, exp, 1)))
6600 || TREE_ADDRESSABLE (exp)
6601 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6602 && (! MOVE_BY_PIECES_P
6603 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6605 && ! mostly_zeros_p (exp))))
6606 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6608 rtx constructor = output_constant_def (exp, 1);
6610 if (modifier != EXPAND_CONST_ADDRESS
6611 && modifier != EXPAND_INITIALIZER
6612 && modifier != EXPAND_SUM)
6613 constructor = validize_mem (constructor);
6619 /* Handle calls that pass values in multiple non-contiguous
6620 locations. The Irix 6 ABI has examples of this. */
6621 if (target == 0 || ! safe_from_p (target, exp, 1)
6622 || GET_CODE (target) == PARALLEL)
6624 = assign_temp (build_qualified_type (type,
6626 | (TREE_READONLY (exp)
6627 * TYPE_QUAL_CONST))),
6628 0, TREE_ADDRESSABLE (exp), 1);
6630 store_constructor (exp, target, 0,
6631 int_size_in_bytes (TREE_TYPE (exp)));
6637 tree exp1 = TREE_OPERAND (exp, 0);
6639 tree string = string_constant (exp1, &index);
6641 /* Try to optimize reads from const strings. */
6643 && TREE_CODE (string) == STRING_CST
6644 && TREE_CODE (index) == INTEGER_CST
6645 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6646 && GET_MODE_CLASS (mode) == MODE_INT
6647 && GET_MODE_SIZE (mode) == 1
6648 && modifier != EXPAND_WRITE)
6649 return gen_int_mode (TREE_STRING_POINTER (string)
6650 [TREE_INT_CST_LOW (index)], mode);
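	/* Editorial example: a read such as "abc"[1], i.e. *("abc" + 1),
	   satisfies all the tests above and never touches memory; it is
	   folded to the immediate QImode value of 'b'.  */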
6652 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6653 op0 = memory_address (mode, op0);
6654 temp = gen_rtx_MEM (mode, op0);
6655 set_mem_attributes (temp, exp, 0);
6657 /* If we are writing to this object and its type is a record with
6658 readonly fields, we must mark it as readonly so it will
6659 conflict with readonly references to those fields. */
6660 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6661 RTX_UNCHANGING_P (temp) = 1;
6667 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6671 tree array = TREE_OPERAND (exp, 0);
6672 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6673 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6674 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6677 /* Optimize the special case of a zero lower bound.
6679 We convert the low_bound to sizetype to avoid some problems
6680 with constant folding. (E.g. suppose the lower bound is 1,
6681 and its mode is QI. Without the conversion, (ARRAY
6682 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6683 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6685 if (! integer_zerop (low_bound))
6686 index = size_diffop (index, convert (sizetype, low_bound));
6688 /* Fold an expression like: "foo"[2].
6689 This is not done in fold so it won't happen inside &.
6690 Don't fold if this is for wide characters since it's too
6691 difficult to do correctly and this is a very rare case. */
6693 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6694 && TREE_CODE (array) == STRING_CST
6695 && TREE_CODE (index) == INTEGER_CST
6696 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6697 && GET_MODE_CLASS (mode) == MODE_INT
6698 && GET_MODE_SIZE (mode) == 1)
6699 return gen_int_mode (TREE_STRING_POINTER (array)
6700 [TREE_INT_CST_LOW (index)], mode);
6702 /* If this is a constant index into a constant array,
6703 just get the value from the array. Handle both the cases when
6704 we have an explicit constructor and when our operand is a variable
6705 that was declared const. */
6707 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6708 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6709 && TREE_CODE (index) == INTEGER_CST
6710 && 0 > compare_tree_int (index,
6711 list_length (CONSTRUCTOR_ELTS
6712 (TREE_OPERAND (exp, 0)))))
6716 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6717 i = TREE_INT_CST_LOW (index);
6718 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6722 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6726 else if (optimize >= 1
6727 && modifier != EXPAND_CONST_ADDRESS
6728 && modifier != EXPAND_INITIALIZER
6729 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6730 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6731 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6733 if (TREE_CODE (index) == INTEGER_CST)
6735 tree init = DECL_INITIAL (array);
6737 if (TREE_CODE (init) == CONSTRUCTOR)
6741 for (elem = CONSTRUCTOR_ELTS (init);
6743 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6744 elem = TREE_CHAIN (elem))
6747 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6748 return expand_expr (fold (TREE_VALUE (elem)), target,
6751 else if (TREE_CODE (init) == STRING_CST
6752 && 0 > compare_tree_int (index,
6753 TREE_STRING_LENGTH (init)))
6755 tree type = TREE_TYPE (TREE_TYPE (init));
6756 enum machine_mode mode = TYPE_MODE (type);
6758 if (GET_MODE_CLASS (mode) == MODE_INT
6759 && GET_MODE_SIZE (mode) == 1)
6760 return gen_int_mode (TREE_STRING_POINTER (init)
6761 [TREE_INT_CST_LOW (index)], mode);
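	      /* Editorial example: with -O, a constant index into a
		 const-qualified table like
		    static const char hex[] = "0123456789abcdef";
		 folds here to the selected character, with no load
		 emitted.  (The name "hex" is purely illustrative.)  */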
6770 case ARRAY_RANGE_REF:
6771 /* If the operand is a CONSTRUCTOR, we can just extract the
6772 appropriate field if it is present. Don't do this if we have
6773 already written the data since we want to refer to that copy
6774 and varasm.c assumes that's what we'll do. */
6775 if (code == COMPONENT_REF
6776 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6777 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6781 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6782 elt = TREE_CHAIN (elt))
6783 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6784 /* We can normally use the value of the field in the
6785 CONSTRUCTOR. However, if this is a bitfield in
6786 an integral mode that we can fit in a HOST_WIDE_INT,
6787 we must mask only the number of bits in the bitfield,
6788 since this is done implicitly by the constructor. If
6789 the bitfield does not meet either of those conditions,
6790 we can't do this optimization. */
6791 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6792 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6794 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6795 <= HOST_BITS_PER_WIDE_INT))))
6797 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6798 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6800 HOST_WIDE_INT bitsize
6801 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6802 enum machine_mode imode
6803 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6805 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6807 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6808 op0 = expand_and (imode, op0, op1, target);
6813 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6816 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6818 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6828 enum machine_mode mode1;
6829 HOST_WIDE_INT bitsize, bitpos;
6832 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6833 &mode1, &unsignedp, &volatilep);
6836 /* If we got back the original object, something is wrong. Perhaps
6837 we are evaluating an expression too early. In any event, don't
6838 infinitely recurse. */
6842 /* If TEM's type is a union of variable size, pass TARGET to the inner
6843 computation, since it will need a temporary and TARGET is known
6844 to suffice.  This occurs in unchecked conversion in Ada. */
6848 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6849 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6851 ? target : NULL_RTX),
6853 (modifier == EXPAND_INITIALIZER
6854 || modifier == EXPAND_CONST_ADDRESS)
6855 ? modifier : EXPAND_NORMAL);
6857 /* If this is a constant, put it into a register if it is a
6858 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6859 if (CONSTANT_P (op0))
6861 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6862 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6864 op0 = force_reg (mode, op0);
6866 op0 = validize_mem (force_const_mem (mode, op0));
6871 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
6873 /* If this object is in a register, put it into memory.
6874 This case can't occur in C, but can in Ada if we have
6875 unchecked conversion of an expression from a scalar type to
6876 an array or record type. */
6877 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6878 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6880 /* If the operand is a SAVE_EXPR, we can deal with this by
6881 forcing the SAVE_EXPR into memory. */
6882 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6884 put_var_into_stack (TREE_OPERAND (exp, 0));
6885 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6890 = build_qualified_type (TREE_TYPE (tem),
6891 (TYPE_QUALS (TREE_TYPE (tem))
6892 | TYPE_QUAL_CONST));
6893 rtx memloc = assign_temp (nt, 1, 1, 1);
6895 emit_move_insn (memloc, op0);
6900 if (GET_CODE (op0) != MEM)
6903 if (GET_MODE (offset_rtx) != ptr_mode)
6904 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6906 #ifdef POINTERS_EXTEND_UNSIGNED
6907 if (GET_MODE (offset_rtx) != Pmode)
6908 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6911 /* A constant address in OP0 can have VOIDmode; we must not try
6912 to call force_reg in that case, so avoid it. */
6913 if (GET_CODE (op0) == MEM
6914 && GET_MODE (op0) == BLKmode
6915 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6917 && (bitpos % bitsize) == 0
6918 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6919 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6921 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6925 op0 = offset_address (op0, offset_rtx,
6926 highest_pow2_factor (offset));
6929 /* Don't forget about volatility even if this is a bitfield. */
6930 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6932 if (op0 == orig_op0)
6933 op0 = copy_rtx (op0);
6935 MEM_VOLATILE_P (op0) = 1;
6938 /* In cases where an aligned union has an unaligned object
6939 as a field, we might be extracting a BLKmode value from
6940 an integer-mode (e.g., SImode) object. Handle this case
6941 by doing the extract into an object as wide as the field
6942 (which we know to be the width of a basic mode), then
6943 storing into memory, and changing the mode to BLKmode. */
6944 if (mode1 == VOIDmode
6945 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6946 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6947 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6948 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6949 && modifier != EXPAND_CONST_ADDRESS
6950 && modifier != EXPAND_INITIALIZER)
6951 /* If the field isn't aligned enough to fetch as a memref,
6952 fetch it as a bit field. */
6953 || (mode1 != BLKmode
6954 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6955 && ((TYPE_ALIGN (TREE_TYPE (tem))
6956 < GET_MODE_ALIGNMENT (mode))
6957 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6958 /* If the type and the field are a constant size and the
6959 size of the type isn't the same size as the bitfield,
6960 we must use bitfield operations. */
6962 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6964 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6967 enum machine_mode ext_mode = mode;
6969 if (ext_mode == BLKmode
6970 && ! (target != 0 && GET_CODE (op0) == MEM
6971 && GET_CODE (target) == MEM
6972 && bitpos % BITS_PER_UNIT == 0))
6973 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6975 if (ext_mode == BLKmode)
6977 /* In this case, BITPOS must start at a byte boundary and
6978 TARGET, if specified, must be a MEM. */
6979 if (GET_CODE (op0) != MEM
6980 || (target != 0 && GET_CODE (target) != MEM)
6981 || bitpos % BITS_PER_UNIT != 0)
6984 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6986 target = assign_temp (type, 0, 1, 1);
6988 emit_block_move (target, op0,
6989 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6995 op0 = validize_mem (op0);
6997 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6998 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7000 op0 = extract_bit_field (op0, bitsize, bitpos,
7001 unsignedp, target, ext_mode, ext_mode,
7002 int_size_in_bytes (TREE_TYPE (tem)));
7004 /* If the result is a record type and BITSIZE is narrower than
7005 the mode of OP0, an integral mode, and this is a big endian
7006 machine, we must put the field into the high-order bits. */
7007 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7008 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7009 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7010 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7011 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7015 if (mode == BLKmode)
7017 rtx new = assign_temp (build_qualified_type
7018 (type_for_mode (ext_mode, 0),
7019 TYPE_QUAL_CONST), 0, 1, 1);
7021 emit_move_insn (new, op0);
7022 op0 = copy_rtx (new);
7023 PUT_MODE (op0, BLKmode);
7024 set_mem_attributes (op0, exp, 1);
7030 /* If the result is BLKmode, use that to access the object now as well.  */
7032 if (mode == BLKmode)
7035 /* Get a reference to just this component. */
7036 if (modifier == EXPAND_CONST_ADDRESS
7037 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7038 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7040 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7042 if (op0 == orig_op0)
7043 op0 = copy_rtx (op0);
7045 set_mem_attributes (op0, exp, 0);
7046 if (GET_CODE (XEXP (op0, 0)) == REG)
7047 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7049 MEM_VOLATILE_P (op0) |= volatilep;
7050 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7051 || modifier == EXPAND_CONST_ADDRESS
7052 || modifier == EXPAND_INITIALIZER)
7054 else if (target == 0)
7055 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7057 convert_move (target, op0, unsignedp);
7063 rtx insn, before = get_last_insn (), vtbl_ref;
7065 /* Evaluate the interior expression. */
7066 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7069 /* Get or create an instruction off which to hang a note. */
7070 if (REG_P (subtarget))
7073 insn = get_last_insn ();
7076 if (! INSN_P (insn))
7077 insn = prev_nonnote_insn (insn);
7081 target = gen_reg_rtx (GET_MODE (subtarget));
7082 insn = emit_move_insn (target, subtarget);
7085 /* Collect the data for the note. */
7086 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7087 vtbl_ref = plus_constant (vtbl_ref,
7088 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7089 /* Discard the initial CONST that was added. */
7090 vtbl_ref = XEXP (vtbl_ref, 0);
7093 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7098 /* Intended for a reference to a buffer of a file-object in Pascal.
7099 But it's not certain that a special tree code will really be
7100 necessary for these. INDIRECT_REF might work for them. */
7106 /* Pascal set IN expression.
7109 rlo = set_low - (set_low % bits_per_word);
7110 the_word = set [ (index - rlo)/bits_per_word ];
7111 bit_index = index % bits_per_word;
7112 bitmask = 1 << bit_index;
7113 return !!(the_word & bitmask); */
7115 tree set = TREE_OPERAND (exp, 0);
7116 tree index = TREE_OPERAND (exp, 1);
7117 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7118 tree set_type = TREE_TYPE (set);
7119 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7120 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7121 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7122 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7123 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7124 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7125 rtx setaddr = XEXP (setval, 0);
7126 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7128 rtx diff, quo, rem, addr, bit, result;
7130 /* If domain is empty, answer is no. Likewise if index is constant
7131 and out of bounds. */
7132 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7133 && TREE_CODE (set_low_bound) == INTEGER_CST
7134 && tree_int_cst_lt (set_high_bound, set_low_bound))
7135 || (TREE_CODE (index) == INTEGER_CST
7136 && TREE_CODE (set_low_bound) == INTEGER_CST
7137 && tree_int_cst_lt (index, set_low_bound))
7138 || (TREE_CODE (set_high_bound) == INTEGER_CST
7139 && TREE_CODE (index) == INTEGER_CST
7140 && tree_int_cst_lt (set_high_bound, index))))
7144 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7146 /* If we get here, we have to generate the code for both cases
7147 (in range and out of range). */
7149 op0 = gen_label_rtx ();
7150 op1 = gen_label_rtx ();
7152 if (! (GET_CODE (index_val) == CONST_INT
7153 && GET_CODE (lo_r) == CONST_INT))
7154 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7155 GET_MODE (index_val), iunsignedp, op1);
7157 if (! (GET_CODE (index_val) == CONST_INT
7158 && GET_CODE (hi_r) == CONST_INT))
7159 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7160 GET_MODE (index_val), iunsignedp, op1);
7162 /* Calculate the element number of bit zero in the first word of the set.  */
7164 if (GET_CODE (lo_r) == CONST_INT)
7165 rlow = GEN_INT (INTVAL (lo_r)
7166 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7168 rlow = expand_binop (index_mode, and_optab, lo_r,
7169 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7170 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7172 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7173 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7175 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7176 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7177 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7178 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7180 addr = memory_address (byte_mode,
7181 expand_binop (index_mode, add_optab, diff,
7182 setaddr, NULL_RTX, iunsignedp,
7185 /* Extract the bit we want to examine. */
7186 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7187 gen_rtx_MEM (byte_mode, addr),
7188 make_tree (TREE_TYPE (index), rem),
7190 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7191 GET_MODE (target) == byte_mode ? target : 0,
7192 1, OPTAB_LIB_WIDEN);
7194 if (result != target)
7195 convert_move (target, result, 1);
7197 /* Output the code to handle the out-of-range case. */
7200 emit_move_insn (target, const0_rtx);
7205 case WITH_CLEANUP_EXPR:
7206 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7208 WITH_CLEANUP_EXPR_RTL (exp)
7209 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7210 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7212 /* That's it for this cleanup. */
7213 TREE_OPERAND (exp, 1) = 0;
7215 return WITH_CLEANUP_EXPR_RTL (exp);
7217 case CLEANUP_POINT_EXPR:
7219 /* Start a new binding layer that will keep track of all cleanup
7220 actions to be performed. */
7221 expand_start_bindings (2);
7223 target_temp_slot_level = temp_slot_level;
7225 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7226 /* If we're going to use this value, load it up now. */
7228 op0 = force_not_mem (op0);
7229 preserve_temp_slots (op0);
7230 expand_end_bindings (NULL_TREE, 0, 0);
7235 /* Check for a built-in function. */
7236 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7237 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7239 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7241 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7242 == BUILT_IN_FRONTEND)
7243 return (*lang_hooks.expand_expr)
7244 (exp, original_target, tmode, modifier);
7246 return expand_builtin (exp, target, subtarget, tmode, ignore);
7249 return expand_call (exp, target, ignore);
7251 case NON_LVALUE_EXPR:
7254 case REFERENCE_EXPR:
7255 if (TREE_OPERAND (exp, 0) == error_mark_node)
7258 if (TREE_CODE (type) == UNION_TYPE)
7260 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7262 /* If both input and output are BLKmode, this conversion isn't doing
7263 anything except possibly changing memory attributes. */
7264 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7266 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7269 result = copy_rtx (result);
7270 set_mem_attributes (result, exp, 0);
7275 target = assign_temp (type, 0, 1, 1);
7277 if (GET_CODE (target) == MEM)
7278 /* Store data into beginning of memory target. */
7279 store_expr (TREE_OPERAND (exp, 0),
7280 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7282 else if (GET_CODE (target) == REG)
7283 /* Store this field into a union of the proper type. */
7284 store_field (target,
7285 MIN ((int_size_in_bytes (TREE_TYPE
7286 (TREE_OPERAND (exp, 0)))
7288 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7289 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7290 VOIDmode, 0, type, 0);
7294 /* Return the entire union. */
7298 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7300 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7303 /* If the signedness of the conversion differs and OP0 is
7304 a promoted SUBREG, clear that indication since we now
7305 have to do the proper extension. */
7306 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7307 && GET_CODE (op0) == SUBREG)
7308 SUBREG_PROMOTED_VAR_P (op0) = 0;
7313 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7314 if (GET_MODE (op0) == mode)
7317 /* If OP0 is a constant, just convert it into the proper mode. */
7318 if (CONSTANT_P (op0))
7320 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7321 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7323 if (modifier == EXPAND_INITIALIZER)
7324 return simplify_gen_subreg (mode, op0, inner_mode,
7325 subreg_lowpart_offset (mode,
7328 return convert_modes (mode, inner_mode, op0,
7329 TREE_UNSIGNED (inner_type));
7332 if (modifier == EXPAND_INITIALIZER)
7333 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7337 convert_to_mode (mode, op0,
7338 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7340 convert_move (target, op0,
7341 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7344 case VIEW_CONVERT_EXPR:
7345 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7347 /* If the input and output modes are both the same, we are done.
7348 Otherwise, if neither mode is BLKmode and both are within a word, we
7349 can use gen_lowpart. If neither is true, make sure the operand is
7350 in memory and convert the MEM to the new mode. */
7351 if (TYPE_MODE (type) == GET_MODE (op0))
7353 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7354 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7355 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7356 op0 = gen_lowpart (TYPE_MODE (type), op0);
7357 else if (GET_CODE (op0) != MEM)
7359 /* If the operand is not a MEM, force it into memory. Since we
7360 are going to be changing the mode of the MEM, don't call
7361 force_const_mem for constants because we don't allow pool
7362 constants to change mode. */
7363 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7365 if (TREE_ADDRESSABLE (exp))
7368 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7370 = assign_stack_temp_for_type
7371 (TYPE_MODE (inner_type),
7372 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7374 emit_move_insn (target, op0);
7378 /* At this point, OP0 is in the correct mode. If the output type is such
7379 that the operand is known to be aligned, indicate that it is.
7380 Otherwise, we need only be concerned about alignment for non-BLKmode results.  */
7382 if (GET_CODE (op0) == MEM)
7384 op0 = copy_rtx (op0);
7386 if (TYPE_ALIGN_OK (type))
7387 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7388 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7389 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7391 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7392 HOST_WIDE_INT temp_size
7393 = MAX (int_size_in_bytes (inner_type),
7394 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7395 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7396 temp_size, 0, type);
7397 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7399 if (TREE_ADDRESSABLE (exp))
7402 if (GET_MODE (op0) == BLKmode)
7403 emit_block_move (new_with_op0_mode, op0,
7404 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7406 emit_move_insn (new_with_op0_mode, op0);
7411 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7417 /* We come here from MINUS_EXPR when the second operand is a constant.  */
7420 this_optab = ! unsignedp && flag_trapv
7421 && (GET_MODE_CLASS (mode) == MODE_INT)
7422 ? addv_optab : add_optab;
7424 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7425 something else, make sure we add the register to the constant and
7426 then to the other thing. This case can occur during strength
7427 reduction and doing it this way will produce better code if the
7428 frame pointer or argument pointer is eliminated.
7430 fold-const.c will ensure that the constant is always in the inner
7431 PLUS_EXPR, so the only case we need to do anything about is if
7432 sp, ap, or fp is our second argument, in which case we must swap
7433 the innermost first argument and our second argument. */
7435 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7436 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7437 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7438 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7439 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7440 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7442 tree t = TREE_OPERAND (exp, 1);
7444 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7445 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7448 /* If the result is to be ptr_mode and we are adding an integer to
7449 something, we might be forming a constant. So try to use
7450 plus_constant. If it produces a sum and we can't accept it,
7451 use force_operand. This allows P = &ARR[const] to generate
7452 efficient code on machines where a SYMBOL_REF is not a valid address.
7455 If this is an EXPAND_SUM call, always return the sum. */
7456 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7457 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7459 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7460 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7461 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7465 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7467 /* Use immed_double_const to ensure that the constant is
7468 truncated according to the mode of OP1, then sign extended
7469 to a HOST_WIDE_INT. Using the constant directly can result
7470 in non-canonical RTL in a 64x32 cross compile. */
7472 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7474 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7475 op1 = plus_constant (op1, INTVAL (constant_part));
7476 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7477 op1 = force_operand (op1, target);
7481 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7482 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7483 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7487 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7489 if (! CONSTANT_P (op0))
7491 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7492 VOIDmode, modifier);
7493 /* Don't go to both_summands if modifier
7494 says it's not right to return a PLUS. */
7495 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7499 /* Use immed_double_const to ensure that the constant is
7500 truncated according to the mode of OP1, then sign extended
7501 to a HOST_WIDE_INT. Using the constant directly can result
7502 in non-canonical RTL in a 64x32 cross compile. */
7504 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7506 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7507 op0 = plus_constant (op0, INTVAL (constant_part));
7508 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7509 op0 = force_operand (op0, target);
7514 /* No sense saving up arithmetic to be done
7515 if it's all in the wrong mode to form part of an address.
7516 And force_operand won't know whether to sign-extend or zero-extend.  */
7518 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7519 || mode != ptr_mode)
7522 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7525 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7526 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7529 /* Make sure any term that's a sum with a constant comes last. */
7530 if (GET_CODE (op0) == PLUS
7531 && CONSTANT_P (XEXP (op0, 1)))
7537 /* If adding to a sum including a constant,
7538 associate it to put the constant outside. */
7539 if (GET_CODE (op1) == PLUS
7540 && CONSTANT_P (XEXP (op1, 1)))
7542 rtx constant_term = const0_rtx;
7544 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7547 /* Ensure that MULT comes first if there is one. */
7548 else if (GET_CODE (op0) == MULT)
7549 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7551 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7553 /* Let's also eliminate constants from op0 if possible. */
7554 op0 = eliminate_constant_term (op0, &constant_term);
7556 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7557 their sum should be a constant. Form it into OP1, since the
7558 result we want will then be OP0 + OP1. */
7560 temp = simplify_binary_operation (PLUS, mode, constant_term,
7565 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7568 /* Put a constant term last and put a multiplication first. */
7569 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7570 temp = op1, op1 = op0, op0 = temp;
7572 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7573 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
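      /* Editorial sketch: after the canonicalization above, an address
	 like "&arr[i + 4]" with 4-byte elements comes out roughly as
	    (plus (plus (mult (reg i) (const_int 4)) (reg arr))
		  (const_int 16))
	 -- multiplication first, constant term last -- the shape that
	 indexed-addressing patterns expect.  */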
7576 /* For initializers, we are allowed to return a MINUS of two
7577 symbolic constants.  Here we handle all cases when both operands are constant.  */
7579 /* Handle difference of two symbolic constants,
7580 for the sake of an initializer. */
7581 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7582 && really_constant_p (TREE_OPERAND (exp, 0))
7583 && really_constant_p (TREE_OPERAND (exp, 1)))
7585 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7587 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7590 /* If the last operand is a CONST_INT, use plus_constant of
7591 the negated constant. Else make the MINUS. */
7592 if (GET_CODE (op1) == CONST_INT)
7593 return plus_constant (op0, - INTVAL (op1));
7595 return gen_rtx_MINUS (mode, op0, op1);
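	  /* Editorial example: this is what lets a static initializer such
	     as "static long d = &&l2 - &&l1;" (a difference of two label
	     addresses, i.e. two symbolic constants) be emitted as a
	     relocatable MINUS with no run-time code.  */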
7597 /* Convert A - const to A + (-const). */
7598 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7600 tree negated = fold (build1 (NEGATE_EXPR, type,
7601 TREE_OPERAND (exp, 1)));
7603 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7604 /* If we can't negate the constant in TYPE, leave it alone and
7605 expand_binop will negate it for us. We used to try to do it
7606 here in the signed version of TYPE, but that doesn't work
7607 on POINTER_TYPEs. */;
7610 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7614 this_optab = ! unsignedp && flag_trapv
7615 && (GET_MODE_CLASS(mode) == MODE_INT)
7616 ? subv_optab : sub_optab;
7620 /* If first operand is constant, swap them.
7621 Thus the following special case checks need only
7622 check the second operand. */
7623 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7625 tree t1 = TREE_OPERAND (exp, 0);
7626 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7627 TREE_OPERAND (exp, 1) = t1;
7630 /* Attempt to return something suitable for generating an
7631 indexed address, for machines that support that. */
7633 if (modifier == EXPAND_SUM && mode == ptr_mode
7634 && host_integerp (TREE_OPERAND (exp, 1), 0))
7636 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7639 /* If we knew for certain that this is arithmetic for an array
7640 reference, and we knew the bounds of the array, then we could
7641 apply the distributive law across (PLUS X C) for constant C.
7642 Without such knowledge, we risk overflowing the computation
7643 when both X and C are large, but X+C isn't. */
7644 /* ??? Could perhaps special-case EXP being unsigned and C being
7645 positive. In that case we are certain that X+C is no smaller
7646 than X and so the transformed expression will overflow iff the
7647 original would have. */
7649 if (GET_CODE (op0) != REG)
7650 op0 = force_operand (op0, NULL_RTX);
7651 if (GET_CODE (op0) != REG)
7652 op0 = copy_to_mode_reg (mode, op0);
7655 gen_rtx_MULT (mode, op0,
7656 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7659 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7662 /* Check for multiplying things that have been extended
7663 from a narrower type. If this machine supports multiplying
7664 in that narrower type with a result in the desired type,
7665 do it that way, and avoid the explicit type-conversion. */
7666 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7667 && TREE_CODE (type) == INTEGER_TYPE
7668 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7669 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7670 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7671 && int_fits_type_p (TREE_OPERAND (exp, 1),
7672 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7673 /* Don't use a widening multiply if a shift will do. */
7674 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7675 > HOST_BITS_PER_WIDE_INT)
7676 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7678 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7679 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7681 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7682 /* If both operands are extended, they must either both
7683 be zero-extended or both be sign-extended. */
7684 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7686 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7688 enum machine_mode innermode
7689 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7690 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7691 ? smul_widen_optab : umul_widen_optab);
7692 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7693 ? umul_widen_optab : smul_widen_optab);
7694 if (mode == GET_MODE_WIDER_MODE (innermode))
7696 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7698 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7699 NULL_RTX, VOIDmode, 0);
7700 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7701 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7704 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7705 NULL_RTX, VOIDmode, 0);
7708 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7709 && innermode == word_mode)
7712 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7713 NULL_RTX, VOIDmode, 0);
7714 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7715 op1 = convert_modes (innermode, mode,
7716 expand_expr (TREE_OPERAND (exp, 1),
7717 NULL_RTX, VOIDmode, 0),
7720 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7721 NULL_RTX, VOIDmode, 0);
7722 temp = expand_binop (mode, other_optab, op0, op1, target,
7723 unsignedp, OPTAB_LIB_WIDEN);
7724 htem = expand_mult_highpart_adjust (innermode,
7725 gen_highpart (innermode, temp),
7727 gen_highpart (innermode, temp),
7729 emit_move_insn (gen_highpart (innermode, temp), htem);
7734 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7735 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7736 return expand_mult (mode, op0, op1, target, unsignedp);
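      /* Editorial example of the widening-multiply path above: for
	 "long long p = (long long) a * b;" with int operands, the
	 NOP_EXPRs are peeled off and one widening pattern (mulsidi3 or
	 umulsidi3, if the target provides it) computes the double-width
	 product directly instead of extending both operands first.  */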
7738 case TRUNC_DIV_EXPR:
7739 case FLOOR_DIV_EXPR:
7741 case ROUND_DIV_EXPR:
7742 case EXACT_DIV_EXPR:
7743 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7745 /* Possible optimization: compute the dividend with EXPAND_SUM;
7746 then, if the divisor is constant, we can optimize the case
7747 where some terms of the dividend have coefficients divisible by it. */
7748 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7749 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7750 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7753 /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal, saving
7754 an expensive divide.  If not, combine will rebuild the original computation.  */
7756 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7757 && TREE_CODE (type) == REAL_TYPE
7758 && !real_onep (TREE_OPERAND (exp, 0)))
7759 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7760 build (RDIV_EXPR, type,
7761 build_real (type, dconst1),
7762 TREE_OPERAND (exp, 1))),
7763 target, tmode, unsignedp);
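      /* Editorial example: under flag_unsafe_math_optimizations, "x / y"
	 computed repeatedly with the same divisor becomes "x * (1.0/y)",
	 so CSE can share a single reciprocal among the uses.  */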
7764 this_optab = sdiv_optab;
7767 case TRUNC_MOD_EXPR:
7768 case FLOOR_MOD_EXPR:
7770 case ROUND_MOD_EXPR:
7771 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7773 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7774 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7775 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7777 case FIX_ROUND_EXPR:
7778 case FIX_FLOOR_EXPR:
7780 abort (); /* Not used for C. */
7782 case FIX_TRUNC_EXPR:
7783 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7785 target = gen_reg_rtx (mode);
7786 expand_fix (target, op0, unsignedp);
7790 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7792 target = gen_reg_rtx (mode);
7793 /* expand_float can't figure out what to do if FROM has VOIDmode.
7794 So give it the correct mode. With -O, cse will optimize this. */
7795 if (GET_MODE (op0) == VOIDmode)
7796 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7798 expand_float (target, op0,
7799 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7803 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7804 temp = expand_unop (mode,
7805 ! unsignedp && flag_trapv
7806 && (GET_MODE_CLASS(mode) == MODE_INT)
7807 ? negv_optab : neg_optab, op0, target, 0);
7813 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7815 /* Handle complex values specially. */
7816 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7817 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7818 return expand_complex_abs (mode, op0, target, unsignedp);
7820 /* Unsigned abs is simply the operand. Testing here means we don't
7821 risk generating incorrect code below. */
7822 if (TREE_UNSIGNED (type))
7825 return expand_abs (mode, op0, target, unsignedp,
7826 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7830 target = original_target;
7831 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7832 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7833 || GET_MODE (target) != mode
7834 || (GET_CODE (target) == REG
7835 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7836 target = gen_reg_rtx (mode);
7837 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7838 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7840 /* First try to do it with a special MIN or MAX instruction.
7841 If that does not win, use a conditional jump to select the proper value.  */
7843 this_optab = (TREE_UNSIGNED (type)
7844 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7845 : (code == MIN_EXPR ? smin_optab : smax_optab));
7847 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7852 /* At this point, a MEM target is no longer useful; we will get better code without it.  */
7855 if (GET_CODE (target) == MEM)
7856 target = gen_reg_rtx (mode);
7859 emit_move_insn (target, op0);
7861 op0 = gen_label_rtx ();
7863 /* If this mode is an integer too wide to compare properly,
7864 compare word by word. Rely on cse to optimize constant cases. */
7865 if (GET_MODE_CLASS (mode) == MODE_INT
7866 && ! can_compare_p (GE, mode, ccp_jump))
7868 if (code == MAX_EXPR)
7869 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7870 target, op1, NULL_RTX, op0);
7872 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7873 op1, target, NULL_RTX, op0);
7877 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7878 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7879 unsignedp, mode, NULL_RTX, NULL_RTX,
7882 emit_move_insn (target, op1);
7887 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7888 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7894 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7895 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7900 /* ??? Can optimize bitwise operations with one arg constant.
7901 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7902 and (a bitwise1 b) bitwise2 b (etc)
7903 but that is probably not worthwhile. */
7905 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7906 boolean values when we want in all cases to compute both of them. In
7907 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7908 as actual zero-or-1 values and then bitwise anding. In cases where
7909 there cannot be any side effects, better code would be made by
7910 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7911 how to recognize those cases. */
7913 case TRUTH_AND_EXPR:
7915 this_optab = and_optab;
7920 this_optab = ior_optab;
7923 case TRUTH_XOR_EXPR:
7925 this_optab = xor_optab;
7932 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7934 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7935 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7938 /* Could determine the answer when only additive constants differ. Also,
7939 the addition of one can be handled by changing the condition. */
7946 case UNORDERED_EXPR:
7953 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7957 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7958 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7960 && GET_CODE (original_target) == REG
7961 && (GET_MODE (original_target)
7962 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7964 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7967 /* If temp is constant, we can just compute the result. */
7968 if (GET_CODE (temp) == CONST_INT)
7970 if (INTVAL (temp) != 0)
7971 emit_move_insn (target, const1_rtx);
7973 emit_move_insn (target, const0_rtx);
7978 if (temp != original_target)
7980 enum machine_mode mode1 = GET_MODE (temp);
7981 if (mode1 == VOIDmode)
7982 mode1 = tmode != VOIDmode ? tmode : mode;
7984 temp = copy_to_mode_reg (mode1, temp);
7987 op1 = gen_label_rtx ();
7988 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7989 GET_MODE (temp), unsignedp, op1);
7990 emit_move_insn (temp, const1_rtx);
7995 /* If no set-flag instruction, must generate a conditional
7996 store into a temporary variable. Drop through
7997 and handle this like && and ||. */
7999 case TRUTH_ANDIF_EXPR:
8000 case TRUTH_ORIF_EXPR:
8002 && (target == 0 || ! safe_from_p (target, exp, 1)
8003 /* Make sure we don't have a hard reg (such as function's return
8004 value) live across basic blocks, if not optimizing. */
8005 || (!optimize && GET_CODE (target) == REG
8006 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8007 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8010 emit_clr_insn (target);
8012 op1 = gen_label_rtx ();
8013 jumpifnot (exp, op1);
8016 emit_0_to_1_insn (target);
8019 return ignore ? const0_rtx : target;
8021 case TRUTH_NOT_EXPR:
8022 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8023 /* The parser is careful to generate TRUTH_NOT_EXPR
8024 only with operands that are always zero or one. */
8025 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8026 target, 1, OPTAB_LIB_WIDEN);
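      /* Editorial note: because the operand is known to be 0 or 1, "!x"
	 is computed above as "x ^ 1" -- no comparison or branch is
	 needed.  */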
8032 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8034 return expand_expr (TREE_OPERAND (exp, 1),
8035 (ignore ? const0_rtx : target),
8039 /* If we would have a "singleton" (see below) were it not for a
8040 conversion in each arm, bring that conversion back out. */
8041 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8042 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8043 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8044 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8046 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8047 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8049 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8050 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8051 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8052 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8053 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8054 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8055 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8056 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8057 return expand_expr (build1 (NOP_EXPR, type,
8058 build (COND_EXPR, TREE_TYPE (iftrue),
8059 TREE_OPERAND (exp, 0),
8061 target, tmode, modifier);
8065 /* Note that COND_EXPRs whose type is a structure or union
8066 are required to be constructed to contain assignments of
8067 a temporary variable, so that we can evaluate them here
8068 for side effect only. If type is void, we must do likewise. */
8070 /* If an arm of the branch requires a cleanup,
8071 only that cleanup is performed. */
8074 tree binary_op = 0, unary_op = 0;
8076 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8077 convert it to our mode, if necessary. */
8078 if (integer_onep (TREE_OPERAND (exp, 1))
8079 && integer_zerop (TREE_OPERAND (exp, 2))
8080 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8084 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8089 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8090 if (GET_MODE (op0) == mode)
8094 target = gen_reg_rtx (mode);
8095 convert_move (target, op0, unsignedp);
8099 /* Check for X ? A + B : A. If we have this, we can copy A to the
8100 output and conditionally add B. Similarly for unary operations.
8101 Don't do this if X has side-effects because those side effects
8102 might affect A or B and the "?" operation is a sequence point in
8103 ANSI. (operand_equal_p tests for side effects.) */
8105 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8106 && operand_equal_p (TREE_OPERAND (exp, 2),
8107 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8108 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8109 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8110 && operand_equal_p (TREE_OPERAND (exp, 1),
8111 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8112 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8113 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8114 && operand_equal_p (TREE_OPERAND (exp, 2),
8115 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8116 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8117 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8118 && operand_equal_p (TREE_OPERAND (exp, 1),
8119 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8120 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8122 /* If we are not to produce a result, we have no target. Otherwise,
8123 if a target was specified use it; it will not be used as an
8124 intermediate target unless it is safe.  If no target, use a temporary.  */
8129 else if (original_target
8130 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8131 || (singleton && GET_CODE (original_target) == REG
8132 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8133 && original_target == var_rtx (singleton)))
8134 && GET_MODE (original_target) == mode
8135 #ifdef HAVE_conditional_move
8136 && (! can_conditionally_move_p (mode)
8137 || GET_CODE (original_target) == REG
8138 || TREE_ADDRESSABLE (type))
8140 && (GET_CODE (original_target) != MEM
8141 || TREE_ADDRESSABLE (type)))
8142 temp = original_target;
8143 else if (TREE_ADDRESSABLE (type))
8146 temp = assign_temp (type, 0, 0, 1);
8148 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8149 do the test of X as a store-flag operation, do this as
8150 A + ((X != 0) << log C). Similarly for other simple binary
8151 operators. Only do this for C == 1 if BRANCH_COST is low. */
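/* Added illustration (commentary only, not original code): with C == 4,
   x ? a + 4 : a  can be emitted as  a + ((x != 0) << 2)  since
   4 == 1 << 2, so a store-flag and a shift replace the branch. */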
8152 if (temp && singleton && binary_op
8153 && (TREE_CODE (binary_op) == PLUS_EXPR
8154 || TREE_CODE (binary_op) == MINUS_EXPR
8155 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8156 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8157 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8158 : integer_onep (TREE_OPERAND (binary_op, 1)))
8159 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8162 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8163 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8164 ? addv_optab : add_optab)
8165 : TREE_CODE (binary_op) == MINUS_EXPR
8166 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8167 ? subv_optab : sub_optab)
8168 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8171 /* If we had X ? A : A + 1, do this as A + (X == 0).
8173 We have to invert the truth value here and then put it
8174 back later if do_store_flag fails. We cannot simply copy
8175 TREE_OPERAND (exp, 0) to another variable and modify that
8176 because invert_truthvalue can modify the tree pointed to
8177 by its argument. */
8178 if (singleton == TREE_OPERAND (exp, 1))
8179 TREE_OPERAND (exp, 0)
8180 = invert_truthvalue (TREE_OPERAND (exp, 0));
8182 result = do_store_flag (TREE_OPERAND (exp, 0),
8183 (safe_from_p (temp, singleton, 1)
8185 mode, BRANCH_COST <= 1);
8187 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8188 result = expand_shift (LSHIFT_EXPR, mode, result,
8189 build_int_2 (tree_log2
8193 (safe_from_p (temp, singleton, 1)
8194 ? temp : NULL_RTX), 0);
8198 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8199 return expand_binop (mode, boptab, op1, result, temp,
8200 unsignedp, OPTAB_LIB_WIDEN);
8202 else if (singleton == TREE_OPERAND (exp, 1))
8203 TREE_OPERAND (exp, 0)
8204 = invert_truthvalue (TREE_OPERAND (exp, 0));
8207 do_pending_stack_adjust ();
8209 op0 = gen_label_rtx ();
8211 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8215 /* If the target conflicts with the other operand of the
8216 binary op, we can't use it. Also, we can't use the target
8217 if it is a hard register, because evaluating the condition
8218 might clobber it. */
8220 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8221 || (GET_CODE (temp) == REG
8222 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8223 temp = gen_reg_rtx (mode);
8224 store_expr (singleton, temp, 0);
8227 expand_expr (singleton,
8228 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8229 if (singleton == TREE_OPERAND (exp, 1))
8230 jumpif (TREE_OPERAND (exp, 0), op0);
8232 jumpifnot (TREE_OPERAND (exp, 0), op0);
8234 start_cleanup_deferral ();
8235 if (binary_op && temp == 0)
8236 /* Just touch the other operand. */
8237 expand_expr (TREE_OPERAND (binary_op, 1),
8238 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8240 store_expr (build (TREE_CODE (binary_op), type,
8241 make_tree (type, temp),
8242 TREE_OPERAND (binary_op, 1)),
8245 store_expr (build1 (TREE_CODE (unary_op), type,
8246 make_tree (type, temp)),
8250 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8251 comparison operator. If we have one of these cases, set the
8252 output to A, branch on A (cse will merge these two references),
8253 then set the output to FOO. */
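/* Added illustration (commentary only, not original code): for
   a != 0 ? a : b  this path stores A into the output, branches on the
   same A (cse merges the two references), and stores B only on the
   fall-through path. */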
8255 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8256 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8257 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8258 TREE_OPERAND (exp, 1), 0)
8259 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8260 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8261 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8263 if (GET_CODE (temp) == REG
8264 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8265 temp = gen_reg_rtx (mode);
8266 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8267 jumpif (TREE_OPERAND (exp, 0), op0);
8269 start_cleanup_deferral ();
8270 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8274 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8275 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8276 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8277 TREE_OPERAND (exp, 2), 0)
8278 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8279 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8280 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8282 if (GET_CODE (temp) == REG
8283 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8284 temp = gen_reg_rtx (mode);
8285 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8286 jumpifnot (TREE_OPERAND (exp, 0), op0);
8288 start_cleanup_deferral ();
8289 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8294 op1 = gen_label_rtx ();
8295 jumpifnot (TREE_OPERAND (exp, 0), op0);
8297 start_cleanup_deferral ();
8299 /* One branch of the cond can be void, if it never returns. For
8300 example, A ? throw : E. */
8302 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8303 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8305 expand_expr (TREE_OPERAND (exp, 1),
8306 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8307 end_cleanup_deferral ();
8309 emit_jump_insn (gen_jump (op1));
8312 start_cleanup_deferral ();
8314 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8315 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8317 expand_expr (TREE_OPERAND (exp, 2),
8318 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8321 end_cleanup_deferral ();
8332 /* Something needs to be initialized, but we didn't know
8333 where that thing was when building the tree. For example,
8334 it could be the return value of a function, or a parameter
8335 to a function which is laid out on the stack, or a temporary
8336 variable which must be passed by reference.
8338 We guarantee that the expression will either be constructed
8339 or copied into our original target. */
8341 tree slot = TREE_OPERAND (exp, 0);
8342 tree cleanups = NULL_TREE;
8345 if (TREE_CODE (slot) != VAR_DECL)
8349 target = original_target;
8351 /* Set this here so that if we get a target that refers to a
8352 register variable that's already been used, put_reg_into_stack
8353 knows that it should fix up those uses. */
8354 TREE_USED (slot) = 1;
8358 if (DECL_RTL_SET_P (slot))
8360 target = DECL_RTL (slot);
8361 /* We have already expanded the slot, so don't do
8362 anything else now. */
8363 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8368 target = assign_temp (type, 2, 0, 1);
8369 /* All temp slots at this level must not conflict. */
8370 preserve_temp_slots (target);
8371 SET_DECL_RTL (slot, target);
8372 if (TREE_ADDRESSABLE (slot))
8373 put_var_into_stack (slot);
8375 /* Since SLOT is not known to the called function
8376 to belong to its stack frame, we must build an explicit
8377 cleanup. This case occurs when we must build up a reference
8378 to pass the reference as an argument. In this case,
8379 it is very likely that such a reference need not be
8380 built here. */
8382 if (TREE_OPERAND (exp, 2) == 0)
8383 TREE_OPERAND (exp, 2)
8384 = (*lang_hooks.maybe_build_cleanup) (slot);
8385 cleanups = TREE_OPERAND (exp, 2);
8390 /* This case does occur, when expanding a parameter which
8391 needs to be constructed on the stack. The target
8392 is the actual stack address that we want to initialize.
8393 The function we call will perform the cleanup in this case. */
8395 /* If we have already assigned it space, use that space,
8396 not the target that we were passed, as our target
8397 parameter is only a hint. */
8398 if (DECL_RTL_SET_P (slot))
8400 target = DECL_RTL (slot);
8401 /* We have already expanded the slot, so don't do
8402 anything else now. */
8403 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8408 SET_DECL_RTL (slot, target);
8409 /* If we must have an addressable slot, then make sure that
8410 the RTL that we just stored in slot is OK. */
8411 if (TREE_ADDRESSABLE (slot))
8412 put_var_into_stack (slot);
8416 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8417 /* Mark it as expanded. */
8418 TREE_OPERAND (exp, 1) = NULL_TREE;
8420 store_expr (exp1, target, 0);
8422 expand_decl_cleanup (NULL_TREE, cleanups);
8429 tree lhs = TREE_OPERAND (exp, 0);
8430 tree rhs = TREE_OPERAND (exp, 1);
8432 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8438 /* If lhs is complex, expand calls in rhs before computing it.
8439 That's so we don't compute a pointer and save it over a
8440 call. If lhs is simple, compute it first so we can give it
8441 as a target if the rhs is just a call. This avoids an
8442 extra temp and copy and that prevents a partial-subsumption
8443 which makes bad code. Actually we could treat
8444 component_ref's of vars like vars. */
8446 tree lhs = TREE_OPERAND (exp, 0);
8447 tree rhs = TREE_OPERAND (exp, 1);
8451 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8452 of size 1. In this case, (unless we need the result of the
8453 assignment) we can do this more efficiently with a
8454 test followed by an assignment, if necessary.
8456 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8457 things change so we do, this code should be enhanced to
8458 support it. */
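/* Added illustration (commentary only, not original code): for
   struct s { unsigned a : 1, b : 1; } x;  x.a |= x.b;
   the code below tests x.b, jumps around the store when it is 0, and
   otherwise assigns the constant 1 to x.a, instead of reading, or-ing
   and rewriting the field. */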
8460 && TREE_CODE (lhs) == COMPONENT_REF
8461 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8462 || TREE_CODE (rhs) == BIT_AND_EXPR)
8463 && TREE_OPERAND (rhs, 0) == lhs
8464 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8465 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8466 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8468 rtx label = gen_label_rtx ();
8470 do_jump (TREE_OPERAND (rhs, 1),
8471 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8472 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8473 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8474 (TREE_CODE (rhs) == BIT_IOR_EXPR
8476 : integer_zero_node)),
8478 do_pending_stack_adjust ();
8483 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8489 if (!TREE_OPERAND (exp, 0))
8490 expand_null_return ();
8492 expand_return (TREE_OPERAND (exp, 0));
8495 case PREINCREMENT_EXPR:
8496 case PREDECREMENT_EXPR:
8497 return expand_increment (exp, 0, ignore);
8499 case POSTINCREMENT_EXPR:
8500 case POSTDECREMENT_EXPR:
8501 /* Faster to treat as pre-increment if result is not used. */
8502 return expand_increment (exp, ! ignore, ignore);
8505 /* Are we taking the address of a nested function? */
8506 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8507 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8508 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8509 && ! TREE_STATIC (exp))
8511 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8512 op0 = force_operand (op0, target);
8514 /* If we are taking the address of something erroneous, just
8515 use zero. */
8516 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8518 /* If we are taking the address of a constant and are at the
8519 top level, we have to use output_constant_def since we can't
8520 call force_const_mem at top level. */
8522 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8523 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8525 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8528 /* We make sure to pass const0_rtx down if we came in with
8529 ignore set, to avoid doing the cleanups twice for something. */
8530 op0 = expand_expr (TREE_OPERAND (exp, 0),
8531 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8532 (modifier == EXPAND_INITIALIZER
8533 ? modifier : EXPAND_CONST_ADDRESS));
8535 /* If we are going to ignore the result, OP0 will have been set
8536 to const0_rtx, so just return it. Don't get confused and
8537 think we are taking the address of the constant. */
8541 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8542 clever and return a REG when given a MEM. */
8543 op0 = protect_from_queue (op0, 1);
8545 /* We would like the object in memory. If it is a constant, we can
8546 have it be statically allocated into memory. For a non-constant,
8547 we need to allocate some memory and store the value into it. */
8549 if (CONSTANT_P (op0))
8550 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8552 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8553 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8554 || GET_CODE (op0) == PARALLEL)
8556 /* If the operand is a SAVE_EXPR, we can deal with this by
8557 forcing the SAVE_EXPR into memory. */
8558 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8560 put_var_into_stack (TREE_OPERAND (exp, 0));
8561 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8565 /* If this object is in a register, it can't be BLKmode. */
8566 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8567 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8569 if (GET_CODE (op0) == PARALLEL)
8570 /* Handle calls that pass values in multiple
8571 non-contiguous locations. The Irix 6 ABI has examples
8572 of this. */
8573 emit_group_store (memloc, op0,
8574 int_size_in_bytes (inner_type));
8576 emit_move_insn (memloc, op0);
8582 if (GET_CODE (op0) != MEM)
8585 mark_temp_addr_taken (op0);
8586 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8588 op0 = XEXP (op0, 0);
8589 #ifdef POINTERS_EXTEND_UNSIGNED
8590 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8591 && mode == ptr_mode)
8592 op0 = convert_memory_address (ptr_mode, op0);
8597 /* If OP0 is not aligned at least as much as the type requires, we
8598 need to make a temporary, copy OP0 to it, and take the address of
8599 the temporary. We want to use the alignment of the type, not of
8600 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8601 the test for BLKmode means that can't happen. The test for
8602 BLKmode is because we never make mis-aligned MEMs with
8603 non-BLKmode objects.
8605 We don't need to do this at all if the machine doesn't have
8606 strict alignment. */
8607 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8608 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8610 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8612 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8614 = assign_stack_temp_for_type
8615 (TYPE_MODE (inner_type),
8616 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8617 : int_size_in_bytes (inner_type),
8618 1, build_qualified_type (inner_type,
8619 (TYPE_QUALS (inner_type)
8620 | TYPE_QUAL_CONST)));
8622 if (TYPE_ALIGN_OK (inner_type))
8625 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8629 op0 = force_operand (XEXP (op0, 0), target);
8633 && GET_CODE (op0) != REG
8634 && modifier != EXPAND_CONST_ADDRESS
8635 && modifier != EXPAND_INITIALIZER
8636 && modifier != EXPAND_SUM)
8637 op0 = force_reg (Pmode, op0);
8639 if (GET_CODE (op0) == REG
8640 && ! REG_USERVAR_P (op0))
8641 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8643 #ifdef POINTERS_EXTEND_UNSIGNED
8644 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8645 && mode == ptr_mode)
8646 op0 = convert_memory_address (ptr_mode, op0);
8651 case ENTRY_VALUE_EXPR:
8654 /* COMPLEX type for Extended Pascal & Fortran */
8657 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8660 /* Get the rtx code of the operands. */
8661 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8662 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8665 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8669 /* Move the real (op0) and imaginary (op1) parts to their location. */
8670 emit_move_insn (gen_realpart (mode, target), op0);
8671 emit_move_insn (gen_imagpart (mode, target), op1);
8673 insns = get_insns ();
8676 /* Complex construction should appear as a single unit. */
8677 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8678 each with a separate pseudo as destination.
8679 It's not correct for flow to treat them as a unit. */
8680 if (GET_CODE (target) != CONCAT)
8681 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8689 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8690 return gen_realpart (mode, op0);
8693 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8694 return gen_imagpart (mode, op0);
8698 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8702 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8705 target = gen_reg_rtx (mode);
8709 /* Store the realpart and the negated imagpart to target. */
8710 emit_move_insn (gen_realpart (partmode, target),
8711 gen_realpart (partmode, op0));
8713 imag_t = gen_imagpart (partmode, target);
8714 temp = expand_unop (partmode,
8715 ! unsignedp && flag_trapv
8716 && (GET_MODE_CLASS(partmode) == MODE_INT)
8717 ? negv_optab : neg_optab,
8718 gen_imagpart (partmode, op0), imag_t, 0);
8720 emit_move_insn (imag_t, temp);
8722 insns = get_insns ();
8725 /* Conjugate should appear as a single unit.
8726 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8727 each with a separate pseudo as destination.
8728 It's not correct for flow to treat them as a unit. */
8729 if (GET_CODE (target) != CONCAT)
8730 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8737 case TRY_CATCH_EXPR:
8739 tree handler = TREE_OPERAND (exp, 1);
8741 expand_eh_region_start ();
8743 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8745 expand_eh_region_end_cleanup (handler);
8750 case TRY_FINALLY_EXPR:
8752 tree try_block = TREE_OPERAND (exp, 0);
8753 tree finally_block = TREE_OPERAND (exp, 1);
8754 rtx finally_label = gen_label_rtx ();
8755 rtx done_label = gen_label_rtx ();
8756 rtx return_link = gen_reg_rtx (Pmode);
8757 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8758 (tree) finally_label, (tree) return_link);
8759 TREE_SIDE_EFFECTS (cleanup) = 1;
8761 /* Start a new binding layer that will keep track of all cleanup
8762 actions to be performed. */
8763 expand_start_bindings (2);
8765 target_temp_slot_level = temp_slot_level;
8767 expand_decl_cleanup (NULL_TREE, cleanup);
8768 op0 = expand_expr (try_block, target, tmode, modifier);
8770 preserve_temp_slots (op0);
8771 expand_end_bindings (NULL_TREE, 0, 0);
8772 emit_jump (done_label);
8773 emit_label (finally_label);
8774 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8775 emit_indirect_jump (return_link);
8776 emit_label (done_label);
8780 case GOTO_SUBROUTINE_EXPR:
8782 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8783 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8784 rtx return_address = gen_label_rtx ();
8785 emit_move_insn (return_link,
8786 gen_rtx_LABEL_REF (Pmode, return_address));
8788 emit_label (return_address);
8793 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8796 return get_exception_pointer (cfun);
8799 /* Function descriptors are not valid except as
8800 initialization constants, and should not be expanded. */
8804 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
8807 /* Here to do an ordinary binary operator, generating an instruction
8808 from the optab already placed in `this_optab'. */
8810 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8812 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8813 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8815 temp = expand_binop (mode, this_optab, op0, op1, target,
8816 unsignedp, OPTAB_LIB_WIDEN);
8822 /* Return the tree node if ARG corresponds to a string constant or zero
8823 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8824 in bytes within the string that ARG is accessing. The type of the
8825 offset will be `sizetype'. */
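/* Added illustration (commentary only, not original code): for an
   argument such as  "hello" + 3  this returns the STRING_CST for
   "hello" and sets *PTR_OFFSET to 3; for a bare ADDR_EXPR of a
   STRING_CST the offset is size_zero_node. */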
8828 string_constant (arg, ptr_offset)
8834 if (TREE_CODE (arg) == ADDR_EXPR
8835 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8837 *ptr_offset = size_zero_node;
8838 return TREE_OPERAND (arg, 0);
8840 else if (TREE_CODE (arg) == PLUS_EXPR)
8842 tree arg0 = TREE_OPERAND (arg, 0);
8843 tree arg1 = TREE_OPERAND (arg, 1);
8848 if (TREE_CODE (arg0) == ADDR_EXPR
8849 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8851 *ptr_offset = convert (sizetype, arg1);
8852 return TREE_OPERAND (arg0, 0);
8854 else if (TREE_CODE (arg1) == ADDR_EXPR
8855 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8857 *ptr_offset = convert (sizetype, arg0);
8858 return TREE_OPERAND (arg1, 0);
8865 /* Expand code for a post- or pre- increment or decrement
8866 and return the RTX for the result.
8867 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8870 expand_increment (exp, post, ignore)
8876 tree incremented = TREE_OPERAND (exp, 0);
8877 optab this_optab = add_optab;
8879 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8880 int op0_is_copy = 0;
8881 int single_insn = 0;
8882 /* 1 means we can't store into OP0 directly,
8883 because it is a subreg narrower than a word,
8884 and we don't dare clobber the rest of the word. */
8887 /* Stabilize any component ref that might need to be
8888 evaluated more than once below. */
8890 || TREE_CODE (incremented) == BIT_FIELD_REF
8891 || (TREE_CODE (incremented) == COMPONENT_REF
8892 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8893 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8894 incremented = stabilize_reference (incremented);
8895 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8896 ones into save exprs so that they don't accidentally get evaluated
8897 more than once by the code below. */
8898 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8899 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8900 incremented = save_expr (incremented);
8902 /* Compute the operands as RTX.
8903 Note whether OP0 is the actual lvalue or a copy of it:
8904 I believe it is a copy iff it is a register or subreg
8905 and insns were generated in computing it. */
8907 temp = get_last_insn ();
8908 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8910 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8911 in place but instead must do sign- or zero-extension during assignment,
8912 so we copy it into a new register and let the code below use it as
8913 a copy.
8915 Note that we can safely modify this SUBREG since it is known not to be
8916 shared (it was made by the expand_expr call above). */
8918 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8921 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8925 else if (GET_CODE (op0) == SUBREG
8926 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8928 /* We cannot increment this SUBREG in place. If we are
8929 post-incrementing, get a copy of the old value. Otherwise,
8930 just mark that we cannot increment in place. */
8932 op0 = copy_to_reg (op0);
8937 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8938 && temp != get_last_insn ());
8939 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8941 /* Decide whether incrementing or decrementing. */
8942 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8943 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8944 this_optab = sub_optab;
8946 /* Convert decrement by a constant into a negative increment. */
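/* Added illustration (commentary only, not original code): for  x--
   the constant 1 is negated to -1 here, so a decrement by a constant
   takes the same add path below as an increment. */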
8947 if (this_optab == sub_optab
8948 && GET_CODE (op1) == CONST_INT)
8950 op1 = GEN_INT (-INTVAL (op1));
8951 this_optab = add_optab;
8954 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8955 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8957 /* For a preincrement, see if we can do this with a single instruction. */
8960 icode = (int) this_optab->handlers[(int) mode].insn_code;
8961 if (icode != (int) CODE_FOR_nothing
8962 /* Make sure that OP0 is valid for operands 0 and 1
8963 of the insn we want to queue. */
8964 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8965 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8966 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8970 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8971 then we cannot just increment OP0. We must therefore contrive to
8972 increment the original value. Then, for postincrement, we can return
8973 OP0 since it is a copy of the old value. For preincrement, expand here
8974 unless we can do it with a single insn.
8976 Likewise if storing directly into OP0 would clobber high bits
8977 we need to preserve (bad_subreg). */
8978 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8980 /* This is the easiest way to increment the value wherever it is.
8981 Problems with multiple evaluation of INCREMENTED are prevented
8982 because either (1) it is a component_ref or preincrement,
8983 in which case it was stabilized above, or (2) it is an array_ref
8984 with constant index in an array in a register, which is
8985 safe to reevaluate. */
8986 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8987 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8988 ? MINUS_EXPR : PLUS_EXPR),
8991 TREE_OPERAND (exp, 1));
8993 while (TREE_CODE (incremented) == NOP_EXPR
8994 || TREE_CODE (incremented) == CONVERT_EXPR)
8996 newexp = convert (TREE_TYPE (incremented), newexp);
8997 incremented = TREE_OPERAND (incremented, 0);
9000 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9001 return post ? op0 : temp;
9006 /* We have a true reference to the value in OP0.
9007 If there is an insn to add or subtract in this mode, queue it.
9008 Queueing the increment insn avoids the register shuffling
9009 that often results if we must increment now and first save
9010 the old value for subsequent use. */
9012 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9013 op0 = stabilize (op0);
9016 icode = (int) this_optab->handlers[(int) mode].insn_code;
9017 if (icode != (int) CODE_FOR_nothing
9018 /* Make sure that OP0 is valid for operands 0 and 1
9019 of the insn we want to queue. */
9020 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9021 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9023 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9024 op1 = force_reg (mode, op1);
9026 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9028 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9030 rtx addr = (general_operand (XEXP (op0, 0), mode)
9031 ? force_reg (Pmode, XEXP (op0, 0))
9032 : copy_to_reg (XEXP (op0, 0)));
9035 op0 = replace_equiv_address (op0, addr);
9036 temp = force_reg (GET_MODE (op0), op0);
9037 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9038 op1 = force_reg (mode, op1);
9040 /* The increment queue is LIFO, thus we have to `queue'
9041 the instructions in reverse order. */
9042 enqueue_insn (op0, gen_move_insn (op0, temp));
9043 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9048 /* Preincrement, or we can't increment with one simple insn. */
9050 /* Save a copy of the value before inc or dec, to return it later. */
9051 temp = value = copy_to_reg (op0);
9053 /* Arrange to return the incremented value. */
9054 /* Copy the rtx because expand_binop will protect from the queue,
9055 and the results of that would be invalid for us to return
9056 if our caller does emit_queue before using our result. */
9057 temp = copy_rtx (value = op0);
9059 /* Increment however we can. */
9060 op1 = expand_binop (mode, this_optab, value, op1, op0,
9061 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9063 /* Make sure the value is stored into OP0. */
9065 emit_move_insn (op0, op1);
9070 /* At the start of a function, record that we have no previously-pushed
9071 arguments waiting to be popped. */
9074 init_pending_stack_adjust ()
9076 pending_stack_adjust = 0;
9079 /* When exiting from function, if safe, clear out any pending stack adjust
9080 so the adjustment won't get done.
9082 Note, if the current function calls alloca, then it must have a
9083 frame pointer regardless of the value of flag_omit_frame_pointer. */
9086 clear_pending_stack_adjust ()
9088 #ifdef EXIT_IGNORE_STACK
9090 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9091 && EXIT_IGNORE_STACK
9092 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9093 && ! flag_inline_functions)
9095 stack_pointer_delta -= pending_stack_adjust,
9096 pending_stack_adjust = 0;
9101 /* Pop any previously-pushed arguments that have not been popped yet. */
9104 do_pending_stack_adjust ()
9106 if (inhibit_defer_pop == 0)
9108 if (pending_stack_adjust != 0)
9109 adjust_stack (GEN_INT (pending_stack_adjust));
9110 pending_stack_adjust = 0;
9114 /* Expand conditional expressions. */
9116 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9117 LABEL is an rtx of code CODE_LABEL, in this function and all the
9121 jumpifnot (exp, label)
9125 do_jump (exp, label, NULL_RTX);
9128 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9135 do_jump (exp, NULL_RTX, label);
9138 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9139 the result is zero, or IF_TRUE_LABEL if the result is one.
9140 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9141 meaning fall through in that case.
9143 do_jump always does any pending stack adjust except when it does not
9144 actually perform a jump. An example where there is no jump
9145 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9147 This function is responsible for optimizing cases such as
9148 &&, || and comparison operators in EXP. */
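/* Added illustration (commentary only, not original code): for
   if (a && b) ...  the TRUTH_ANDIF_EXPR case emits
   do_jump (a, false_label, 0) followed by
   do_jump (b, false_label, true_label), so A short-circuits without a
   0/1 value ever being materialized. */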
9151 do_jump (exp, if_false_label, if_true_label)
9153 rtx if_false_label, if_true_label;
9155 enum tree_code code = TREE_CODE (exp);
9156 /* Some cases need to create a label to jump to
9157 in order to properly fall through.
9158 These cases set DROP_THROUGH_LABEL nonzero. */
9159 rtx drop_through_label = 0;
9163 enum machine_mode mode;
9165 #ifdef MAX_INTEGER_COMPUTATION_MODE
9166 check_max_integer_computation_mode (exp);
9177 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9183 /* This is not true with #pragma weak */
9185 /* The address of something can never be zero. */
9187 emit_jump (if_true_label);
9192 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9193 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9194 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9195 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9198 /* If we are narrowing the operand, we have to do the compare in the
9199 narrower mode. */
9200 if ((TYPE_PRECISION (TREE_TYPE (exp))
9201 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9203 case NON_LVALUE_EXPR:
9204 case REFERENCE_EXPR:
9209 /* These cannot change zero->non-zero or vice versa. */
9210 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9213 case WITH_RECORD_EXPR:
9214 /* Put the object on the placeholder list, recurse through our first
9215 operand, and pop the list. */
9216 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9218 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9219 placeholder_list = TREE_CHAIN (placeholder_list);
9223 /* This never takes fewer insns than evaluating the PLUS_EXPR followed
9224 by a test, and can take more if the test is eliminated. */
9226 /* Reduce to minus. */
9227 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9228 TREE_OPERAND (exp, 0),
9229 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9230 TREE_OPERAND (exp, 1))));
9231 /* Process as MINUS. */
9235 /* Non-zero iff operands of minus differ. */
9236 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9237 TREE_OPERAND (exp, 0),
9238 TREE_OPERAND (exp, 1)),
9239 NE, NE, if_false_label, if_true_label);
9243 /* If we are AND'ing with a small constant, do this comparison in the
9244 smallest type that fits. If the machine doesn't have comparisons
9245 that small, it will be converted back to the wider comparison.
9246 This helps if we are testing the sign bit of a narrower object.
9247 combine can't do this for us because it can't know whether a
9248 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
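/* Added illustration (commentary only, not original code): for
   if (x & 0x80)  with a 32-bit int X, tree_floor_log2 yields 7, so the
   test is narrowed to a QImode comparison; this is what makes a
   sign-bit test of the low byte cheap. */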
9250 if (! SLOW_BYTE_ACCESS
9251 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9252 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9253 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9254 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9255 && (type = type_for_mode (mode, 1)) != 0
9256 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9257 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9258 != CODE_FOR_nothing))
9260 do_jump (convert (type, exp), if_false_label, if_true_label);
9265 case TRUTH_NOT_EXPR:
9266 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9269 case TRUTH_ANDIF_EXPR:
9270 if (if_false_label == 0)
9271 if_false_label = drop_through_label = gen_label_rtx ();
9272 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9273 start_cleanup_deferral ();
9274 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9275 end_cleanup_deferral ();
9278 case TRUTH_ORIF_EXPR:
9279 if (if_true_label == 0)
9280 if_true_label = drop_through_label = gen_label_rtx ();
9281 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9282 start_cleanup_deferral ();
9283 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9284 end_cleanup_deferral ();
9289 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9290 preserve_temp_slots (NULL_RTX);
9294 do_pending_stack_adjust ();
9295 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9301 case ARRAY_RANGE_REF:
9303 HOST_WIDE_INT bitsize, bitpos;
9305 enum machine_mode mode;
9310 /* Get description of this reference. We don't actually care
9311 about the underlying object here. */
9312 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9313 &unsignedp, &volatilep);
9315 type = type_for_size (bitsize, unsignedp);
9316 if (! SLOW_BYTE_ACCESS
9317 && type != 0 && bitsize >= 0
9318 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9319 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9320 != CODE_FOR_nothing))
9322 do_jump (convert (type, exp), if_false_label, if_true_label);
9329 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9330 if (integer_onep (TREE_OPERAND (exp, 1))
9331 && integer_zerop (TREE_OPERAND (exp, 2)))
9332 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9334 else if (integer_zerop (TREE_OPERAND (exp, 1))
9335 && integer_onep (TREE_OPERAND (exp, 2)))
9336 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9340 rtx label1 = gen_label_rtx ();
9341 drop_through_label = gen_label_rtx ();
9343 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9345 start_cleanup_deferral ();
9346 /* Now the THEN-expression. */
9347 do_jump (TREE_OPERAND (exp, 1),
9348 if_false_label ? if_false_label : drop_through_label,
9349 if_true_label ? if_true_label : drop_through_label);
9350 /* In case the do_jump just above never jumps. */
9351 do_pending_stack_adjust ();
9352 emit_label (label1);
9354 /* Now the ELSE-expression. */
9355 do_jump (TREE_OPERAND (exp, 2),
9356 if_false_label ? if_false_label : drop_through_label,
9357 if_true_label ? if_true_label : drop_through_label);
9358 end_cleanup_deferral ();
9364 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9366 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9367 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9369 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9370 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9373 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9374 fold (build (EQ_EXPR, TREE_TYPE (exp),
9375 fold (build1 (REALPART_EXPR,
9376 TREE_TYPE (inner_type),
9378 fold (build1 (REALPART_EXPR,
9379 TREE_TYPE (inner_type),
9381 fold (build (EQ_EXPR, TREE_TYPE (exp),
9382 fold (build1 (IMAGPART_EXPR,
9383 TREE_TYPE (inner_type),
9385 fold (build1 (IMAGPART_EXPR,
9386 TREE_TYPE (inner_type),
9388 if_false_label, if_true_label);
9391 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9392 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9394 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9395 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9396 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9398 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9404 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9406 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9407 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9409 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9410 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9413 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9414 fold (build (NE_EXPR, TREE_TYPE (exp),
9415 fold (build1 (REALPART_EXPR,
9416 TREE_TYPE (inner_type),
9418 fold (build1 (REALPART_EXPR,
9419 TREE_TYPE (inner_type),
9421 fold (build (NE_EXPR, TREE_TYPE (exp),
9422 fold (build1 (IMAGPART_EXPR,
9423 TREE_TYPE (inner_type),
9425 fold (build1 (IMAGPART_EXPR,
9426 TREE_TYPE (inner_type),
9428 if_false_label, if_true_label);
9431 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9432 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9434 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9435 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9436 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9438 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9443 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9444 if (GET_MODE_CLASS (mode) == MODE_INT
9445 && ! can_compare_p (LT, mode, ccp_jump))
9446 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9448 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9452 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9453 if (GET_MODE_CLASS (mode) == MODE_INT
9454 && ! can_compare_p (LE, mode, ccp_jump))
9455 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9457 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9461 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9462 if (GET_MODE_CLASS (mode) == MODE_INT
9463 && ! can_compare_p (GT, mode, ccp_jump))
9464 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9466 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9470 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9471 if (GET_MODE_CLASS (mode) == MODE_INT
9472 && ! can_compare_p (GE, mode, ccp_jump))
9473 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9475 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9478 case UNORDERED_EXPR:
9481 enum rtx_code cmp, rcmp;
9484 if (code == UNORDERED_EXPR)
9485 cmp = UNORDERED, rcmp = ORDERED;
9487 cmp = ORDERED, rcmp = UNORDERED;
9488 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9491 if (! can_compare_p (cmp, mode, ccp_jump)
9492 && (can_compare_p (rcmp, mode, ccp_jump)
9493 /* If the target doesn't provide either UNORDERED or ORDERED
9494 comparisons, canonicalize on UNORDERED for the library. */
9495 || rcmp == UNORDERED))
9499 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9501 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9506 enum rtx_code rcode1;
9507 enum tree_code tcode2;
9531 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9532 if (can_compare_p (rcode1, mode, ccp_jump))
9533 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9537 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9538 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9541 /* If the target doesn't support combined unordered
9542 compares, decompose into UNORDERED + comparison. */
9543 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9544 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9545 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9546 do_jump (exp, if_false_label, if_true_label);
9552 __builtin_expect (<test>, 0) and
9553 __builtin_expect (<test>, 1)
9555 We need to do this here, so that <test> is not converted to a SCC
9556 operation on machines that use condition code registers and COMPARE
9557 like the PowerPC, and then the jump is done based on whether the SCC
9558 operation produced a 1 or 0. */
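/* Added illustration (commentary only, not original code): for
   if (__builtin_expect (x > 0, 1)) ...  the jump is emitted directly
   on the x > 0 comparison; otherwise the call would first be expanded
   to a 0/1 value and then compared against zero. */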
9560 /* Check for a built-in function. */
9561 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9563 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9564 tree arglist = TREE_OPERAND (exp, 1);
9566 if (TREE_CODE (fndecl) == FUNCTION_DECL
9567 && DECL_BUILT_IN (fndecl)
9568 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9569 && arglist != NULL_TREE
9570 && TREE_CHAIN (arglist) != NULL_TREE)
9572 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9575 if (seq != NULL_RTX)
9582 /* fall through and generate the normal code. */
9586 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9588 /* This is not needed any more and causes poor code since it causes
9589 comparisons and tests from non-SI objects to have different code
9591 /* Copy to register to avoid generating bad insns by cse
9592 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9593 if (!cse_not_expected && GET_CODE (temp) == MEM)
9594 temp = copy_to_reg (temp);
9596 do_pending_stack_adjust ();
9597 /* Do any postincrements in the expression that was tested. */
9600 if (GET_CODE (temp) == CONST_INT
9601 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9602 || GET_CODE (temp) == LABEL_REF)
9604 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9608 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9609 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9610 /* Note swapping the labels gives us not-equal. */
9611 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9612 else if (GET_MODE (temp) != VOIDmode)
9613 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9614 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9615 GET_MODE (temp), NULL_RTX,
9616 if_false_label, if_true_label);
9621 if (drop_through_label)
9623 /* If do_jump produces code that might be jumped around,
9624 do any stack adjusts from that code, before the place
9625 where control merges in. */
9626 do_pending_stack_adjust ();
9627 emit_label (drop_through_label);
9631 /* Given a comparison expression EXP for values too wide to be compared
9632 with one insn, test the comparison and jump to the appropriate label.
9633 The code of EXP is ignored; we always test GT if SWAP is 0,
9634 and LT if SWAP is 1. */
9637 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9640 rtx if_false_label, if_true_label;
9642 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9643 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9644 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9645 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9647 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9650 /* Compare OP0 with OP1, word at a time, in mode MODE.
9651 UNSIGNEDP says to do unsigned comparison.
9652 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
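/* Added illustration (commentary only, not original code): for a
   DImode comparison on a 32-bit target, the loop below compares the
   most significant words first, moves on to the low words only when
   the high words are equal, and compares every word except the most
   significant one unsigned. */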
9655 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9656 enum machine_mode mode;
9659 rtx if_false_label, if_true_label;
9661 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9662 rtx drop_through_label = 0;
9665 if (! if_true_label || ! if_false_label)
9666 drop_through_label = gen_label_rtx ();
9667 if (! if_true_label)
9668 if_true_label = drop_through_label;
9669 if (! if_false_label)
9670 if_false_label = drop_through_label;
9672 /* Compare a word at a time, high order first. */
9673 for (i = 0; i < nwords; i++)
9675 rtx op0_word, op1_word;
9677 if (WORDS_BIG_ENDIAN)
9679 op0_word = operand_subword_force (op0, i, mode);
9680 op1_word = operand_subword_force (op1, i, mode);
9684 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9685 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9688 /* All but the high-order word must be compared as unsigned. */
9689 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9690 (unsignedp || i > 0), word_mode, NULL_RTX,
9691 NULL_RTX, if_true_label);
9693 /* Consider lower words only if these are equal. */
9694 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9695 NULL_RTX, NULL_RTX, if_false_label);
9699 emit_jump (if_false_label);
9700 if (drop_through_label)
9701 emit_label (drop_through_label);
9704 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9705 with one insn, test the comparison and jump to the appropriate label. */
9708 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9710 rtx if_false_label, if_true_label;
9712 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9713 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9714 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9715 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9717 rtx drop_through_label = 0;
9719 if (! if_false_label)
9720 drop_through_label = if_false_label = gen_label_rtx ();
9722 for (i = 0; i < nwords; i++)
9723 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9724 operand_subword_force (op1, i, mode),
9725 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9726 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9729 emit_jump (if_true_label);
9730 if (drop_through_label)
9731 emit_label (drop_through_label);
9734 /* Jump according to whether OP0 is 0.
9735 We assume that OP0 has an integer mode that is too wide
9736 for the available compare insns. */
9739 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9741 rtx if_false_label, if_true_label;
9743 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9746 rtx drop_through_label = 0;
9748 /* The fastest way of doing this comparison on almost any machine is to
9749 "or" all the words and compare the result. If all have to be loaded
9750 from memory and this is a very wide item, it's possible this may
9751 be slower, but that's highly unlikely. */
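/* Added illustration (commentary only, not original code): for a
   DImode X on a 32-bit target this computes  low | high  into a single
   word_mode register and tests that one result against zero, instead
   of testing each word separately. */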
9753 part = gen_reg_rtx (word_mode);
9754 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9755 for (i = 1; i < nwords && part != 0; i++)
9756 part = expand_binop (word_mode, ior_optab, part,
9757 operand_subword_force (op0, i, GET_MODE (op0)),
9758 part, 1, OPTAB_WIDEN);
9762 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9763 NULL_RTX, if_false_label, if_true_label);
9768 /* If we couldn't do the "or" simply, do this with a series of compares. */
9769 if (! if_false_label)
9770 drop_through_label = if_false_label = gen_label_rtx ();
9772 for (i = 0; i < nwords; i++)
9773 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9774 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9775 if_false_label, NULL_RTX);
9778 emit_jump (if_true_label);
9780 if (drop_through_label)
9781 emit_label (drop_through_label);
9784 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9785 (including code to compute the values to be compared)
9786 and set (CC0) according to the result.
9787 The decision as to signed or unsigned comparison must be made by the caller.
9789 We force a stack adjustment unless there are currently
9790 things pushed on the stack that aren't yet used.
9792 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9796 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9800 enum machine_mode mode;
9805 /* If one operand is constant, make it the second one. Only do this
9806 if the other operand is not constant as well. */
9808 if (swap_commutative_operands_p (op0, op1))
9813 code = swap_condition (code);
9818 op0 = force_not_mem (op0);
9819 op1 = force_not_mem (op1);
9822 do_pending_stack_adjust ();
9824 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9825 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9826 return tem;
9829 /* There's no need to do this now that combine.c can eliminate lots of
9830 sign extensions. This can be less efficient in certain cases on other
9831 machines. */
9833 /* If this is a signed equality comparison, we can do it as an
9834 unsigned comparison since zero-extension is cheaper than sign
9835 extension and comparisons with zero are done as unsigned. This is
9836 the case even on machines that can do fast sign extension, since
9837 zero-extension is easier to combine with other operations than
9838 sign-extension is. If we are comparing against a constant, we must
9839 convert it to what it would look like unsigned. */
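/* Added illustration (commentary only, not original code): in QImode
   the signed test  x == -1  would become the unsigned test  x == 0xff;
   the constant is masked to the operand's mode just below. */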
9840 if ((code == EQ || code == NE) && ! unsignedp
9841 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9843 if (GET_CODE (op1) == CONST_INT
9844 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9845 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9850 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9852 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9855 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9856 The decision as to signed or unsigned comparison must be made by the caller.
9858 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9862 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9863 if_false_label, if_true_label)
9867 enum machine_mode mode;
9869 rtx if_false_label, if_true_label;
9872 int dummy_true_label = 0;
9874 /* Reverse the comparison if that is safe and we want to jump if it is
9875 false. */
9876 if (! if_true_label && ! FLOAT_MODE_P (mode))
9878 if_true_label = if_false_label;
9880 code = reverse_condition (code);
9883 /* If one operand is constant, make it the second one. Only do this
9884 if the other operand is not constant as well. */
9886 if (swap_commutative_operands_p (op0, op1))
9891 code = swap_condition (code);
9896 op0 = force_not_mem (op0);
9897 op1 = force_not_mem (op1);
9900 do_pending_stack_adjust ();
9902 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9903 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9905 if (tem == const_true_rtx)
9908 emit_jump (if_true_label);
9913 emit_jump (if_false_label);
9919 /* There's no need to do this now that combine.c can eliminate lots of
9920 sign extensions. This can be less efficient in certain cases on other
9923 /* If this is a signed equality comparison, we can do it as an
9924 unsigned comparison since zero-extension is cheaper than sign
9925 extension and comparisons with zero are done as unsigned. This is
9926 the case even on machines that can do fast sign extension, since
9927 zero-extension is easier to combine with other operations than
9928 sign-extension is. If we are comparing against a constant, we must
9929 convert it to what it would look like unsigned. */
9930 if ((code == EQ || code == NE) && ! unsignedp
9931 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9933 if (GET_CODE (op1) == CONST_INT
9934 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9935 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9940 if (! if_true_label)
9942 dummy_true_label = 1;
9943 if_true_label = gen_label_rtx ();
9946 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9950 emit_jump (if_false_label);
9951 if (dummy_true_label)
9952 emit_label (if_true_label);
9955 /* Generate code for a comparison expression EXP (including code to compute
9956 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9957 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9958 generated code will drop through.
9959 SIGNED_CODE should be the rtx operation for this comparison for
9960 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9962 We force a stack adjustment unless there are currently
9963 things pushed on the stack that aren't yet used. */
9966 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9969 enum rtx_code signed_code, unsigned_code;
9970 rtx if_false_label, if_true_label;
9974 enum machine_mode mode;
9978 /* Don't crash if the comparison was erroneous. */
9979 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9980 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9983 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9984 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9987 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9988 mode = TYPE_MODE (type);
9989 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9990 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9991 || (GET_MODE_BITSIZE (mode)
9992 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9995 /* op0 might have been replaced by a promoted constant, in which
9996 case the type of the second argument should be used. */
9997 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9998 mode = TYPE_MODE (type);
10000 unsignedp = TREE_UNSIGNED (type);
10001 code = unsignedp ? unsigned_code : signed_code;
10003 #ifdef HAVE_canonicalize_funcptr_for_compare
10004 /* If function pointers need to be "canonicalized" before they can
10005 be reliably compared, then canonicalize them. */
10006 if (HAVE_canonicalize_funcptr_for_compare
10007 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10008 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10011 rtx new_op0 = gen_reg_rtx (mode);
10013 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10017 if (HAVE_canonicalize_funcptr_for_compare
10018 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10019 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10022 rtx new_op1 = gen_reg_rtx (mode);
10024 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10029 /* Do any postincrements in the expression that was tested. */
10032 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10034 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10035 if_false_label, if_true_label);
10038 /* Generate code to calculate EXP using a store-flag instruction
10039 and return an rtx for the result. EXP is either a comparison
10040 or a TRUTH_NOT_EXPR whose operand is a comparison.
10042 If TARGET is nonzero, store the result there if convenient.
10044 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10045 cheap.
10047 Return zero if there is no suitable set-flag instruction
10048 available on this machine.
10050 Once expand_expr has been called on the arguments of the comparison,
10051 we are committed to doing the store flag, since it is not safe to
10052 re-evaluate the expression. We emit the store-flag insn by calling
10053 emit_store_flag, but only expand the arguments if we have a reason
10054 to believe that emit_store_flag will be successful. If we think that
10055 it will, but it isn't, we have to simulate the store-flag with a
10056 set/jump/set sequence. */
10059 do_store_flag (exp, target, mode, only_cheap)
10062 enum machine_mode mode;
10065 enum rtx_code code;
10066 tree arg0, arg1, type;
10068 enum machine_mode operand_mode;
10072 enum insn_code icode;
10073 rtx subtarget = target;
10076 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10077 result at the end. We can't simply invert the test since it would
10078 have already been inverted if it were valid. This case occurs for
10079 some floating-point comparisons. */
10081 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10082 invert = 1, exp = TREE_OPERAND (exp, 0);
10084 arg0 = TREE_OPERAND (exp, 0);
10085 arg1 = TREE_OPERAND (exp, 1);
10087 /* Don't crash if the comparison was erroneous. */
10088 if (arg0 == error_mark_node || arg1 == error_mark_node)
10091 type = TREE_TYPE (arg0);
10092 operand_mode = TYPE_MODE (type);
10093 unsignedp = TREE_UNSIGNED (type);
10095 /* We won't bother with BLKmode store-flag operations because it would mean
10096 passing a lot of information to emit_store_flag. */
10097 if (operand_mode == BLKmode)
10100 /* We won't bother with store-flag operations involving function pointers
10101 when function pointers must be canonicalized before comparisons. */
10102 #ifdef HAVE_canonicalize_funcptr_for_compare
10103 if (HAVE_canonicalize_funcptr_for_compare
10104 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10105 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10107 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10108 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10109 == FUNCTION_TYPE))))
10116 /* Get the rtx comparison code to use. We know that EXP is a comparison
10117 operation of some type. Some comparisons against 1 and -1 can be
10118 converted to comparisons with zero. Do so here so that the tests
10119 below will be aware that we have a comparison with zero. These
10120 tests will not catch constants in the first operand, but constants
10121 are rarely passed as the first operand. */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
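  /* For example, "(x & 8) != 0" can be computed as "(x >> 3) & 1", and
     "(x & 8) == 0" as "((x >> 3) ^ 1) & 1".  (Illustrative C fragments
     matching the transformation performed below.)  */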
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;
      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
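      /* E.g. testing bit 2 of "x >> 3" is the same as testing bit 5 of
	 "x", so for "((x >> 3) & 4) != 0" BITNUM becomes 5 and INNER
	 becomes "x".  (Illustrative example.)  */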
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}
      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
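      /* E.g. for a 32-bit int, "(x & 0x80000000) != 0" is just
	 "(unsigned) x >> 31": the logical shift zero-fills, so the
	 masking AND can be omitted.  (Illustrative sign-bit example.)  */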
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );
      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);
  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
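  /* The fallback emitted below has roughly this shape (non-inverted
     case; the two constants swap when INVERT is set):

	target = 1;
	if (op0 <code> op1) goto label;
	target = 0;
     label:

     (Pseudo-code sketch only.)  */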
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));
  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);
  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);
  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
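/* (Usage note: the switch-statement expander in stmt.c compares the
   number of case values against this threshold when deciding between a
   dispatch table and a tree of compare-and-branch insns.)  */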
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (type_for_size (index_bits, 0),
				index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
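/* In C-like pseudo-code, the sequence emitted below is roughly

	if ((unsigned) index > range) goto default_label;
	goto *table_label[index];

   with the table indexing built explicitly out of Pmode arithmetic.
   (Sketch only.)  */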
static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
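  /* E.g. for case values 3..7, INDEX holds i - 3 and RANGE is 4; the
     single unsigned test "(unsigned) (i - 3) > 4" rejects both i < 3
     (the subtraction wraps to a large value) and i > 7.  (Illustrative
     example.)  */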
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}