1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
42 #include "typeclass.h"
45 #include "langhooks.h"
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
84 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
85 static tree placeholder_list = 0;
87 /* This structure is used by move_by_pieces to describe the move to
98 int explicit_inc_from;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
104 /* This structure is used by store_by_pieces to describe the clear to
107 struct store_by_pieces
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
120 extern struct obstack permanent_obstack;
122 static rtx enqueue_insn PARAMS ((rtx, rtx));
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124 PARAMS ((unsigned HOST_WIDE_INT,
126 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *));
128 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
130 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
132 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
134 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
136 struct store_by_pieces *));
137 static rtx get_subtarget PARAMS ((rtx));
138 static int is_zeros_p PARAMS ((tree));
139 static int mostly_zeros_p PARAMS ((tree));
140 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int));
143 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
145 HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree,
148 static rtx var_rtx PARAMS ((tree));
149 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150 static rtx expand_increment PARAMS ((tree, int, int));
151 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
152 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
153 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
155 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
157 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
159 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* If a memory-to-memory move would take MOVE_RATIO or more simple
169 move-instruction sequences, we will do a movstr or libcall instead. */
172 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
175 /* If we are optimizing for space (-Os), cut down the default move ratio. */
176 #define MOVE_RATIO (optimize_size ? 3 : 15)
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
187 /* This array records the insn_code of insns to perform block moves. */
188 enum insn_code movstr_optab[NUM_MACHINE_MODES];
190 /* This array records the insn_code of insns to perform block clears. */
191 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
193 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
195 #ifndef SLOW_UNALIGNED_ACCESS
196 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
199 /* This is run once per compilation to set up which modes can be used
200 directly in memory and to initialize the block move optab. */
206 enum machine_mode mode;
212 /* Try indexing by frame ptr and try by stack ptr.
213 It is known that on the Convex the stack ptr isn't a valid index.
214 With luck, one or the other is valid on any machine. */
215 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
216 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
218 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
219 pat = PATTERN (insn);
221 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
222 mode = (enum machine_mode) ((int) mode + 1))
227 direct_load[(int) mode] = direct_store[(int) mode] = 0;
228 PUT_MODE (mem, mode);
229 PUT_MODE (mem1, mode);
231 /* See if there is some register that can be used in this mode and
232 directly loaded or stored from memory. */
234 if (mode != VOIDmode && mode != BLKmode)
235 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
236 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
239 if (! HARD_REGNO_MODE_OK (regno, mode))
242 reg = gen_rtx_REG (mode, regno);
245 SET_DEST (pat) = reg;
246 if (recog (pat, insn, &num_clobbers) >= 0)
247 direct_load[(int) mode] = 1;
249 SET_SRC (pat) = mem1;
250 SET_DEST (pat) = reg;
251 if (recog (pat, insn, &num_clobbers) >= 0)
252 direct_load[(int) mode] = 1;
255 SET_DEST (pat) = mem;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_store[(int) mode] = 1;
260 SET_DEST (pat) = mem1;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_store[(int) mode] = 1;
269 /* This is run at the start of compiling a function. */
274 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
277 pending_stack_adjust = 0;
278 stack_pointer_delta = 0;
279 inhibit_defer_pop = 0;
281 apply_args_value = 0;
287 struct expr_status *p;
292 ggc_mark_rtx (p->x_saveregs_value);
293 ggc_mark_rtx (p->x_apply_args_value);
294 ggc_mark_rtx (p->x_forced_labels);
305 /* Small sanity check that the queue is empty at the end of a function. */
308 finish_expr_for_function ()
314 /* Manage the queue of increment instructions to be output
315 for POSTINCREMENT_EXPR expressions, etc. */
317 /* Queue up to increment (or change) VAR later. BODY says how:
318 BODY should be the same thing you would pass to emit_insn
319 to increment right away. It will go to emit_insn later on.
321 The value is a QUEUED expression to be used in place of VAR
322 where you want to guarantee the pre-incrementation value of VAR. */
325 enqueue_insn (var, body)
328 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
329 body, pending_chain);
330 return pending_chain;
333 /* Use protect_from_queue to convert a QUEUED expression
334 into something that you can put immediately into an instruction.
335 If the queued incrementation has not happened yet,
336 protect_from_queue returns the variable itself.
337 If the incrementation has happened, protect_from_queue returns a temp
338 that contains a copy of the old value of the variable.
340 Any time an rtx which might possibly be a QUEUED is to be put
341 into an instruction, it must be passed through protect_from_queue first.
342 QUEUED expressions are not meaningful in instructions.
344 Do not pass a value through protect_from_queue and then hold
345 on to it for a while before putting it in an instruction!
346 If the queue is flushed in between, incorrect code will result. */
349 protect_from_queue (x, modify)
353 RTX_CODE code = GET_CODE (x);
355 #if 0 /* A QUEUED can hang around after the queue is forced out. */
356 /* Shortcut for most common case. */
357 if (pending_chain == 0)
363 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
364 use of autoincrement. Make a copy of the contents of the memory
365 location rather than a copy of the address, but not if the value is
366 of mode BLKmode. Don't modify X in place since it might be
368 if (code == MEM && GET_MODE (x) != BLKmode
369 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
372 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
376 rtx temp = gen_reg_rtx (GET_MODE (x));
378 emit_insn_before (gen_move_insn (temp, new),
383 /* Copy the address into a pseudo, so that the returned value
384 remains correct across calls to emit_queue. */
385 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
388 /* Otherwise, recursively protect the subexpressions of all
389 the kinds of rtx's that can contain a QUEUED. */
392 rtx tem = protect_from_queue (XEXP (x, 0), 0);
393 if (tem != XEXP (x, 0))
399 else if (code == PLUS || code == MULT)
401 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
402 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
403 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
412 /* If the increment has not happened, use the variable itself. Copy it
413 into a new pseudo so that the value remains correct across calls to
415 if (QUEUED_INSN (x) == 0)
416 return copy_to_reg (QUEUED_VAR (x));
417 /* If the increment has happened and a pre-increment copy exists,
419 if (QUEUED_COPY (x) != 0)
420 return QUEUED_COPY (x);
421 /* The increment has happened but we haven't set up a pre-increment copy.
422 Set one up now, and use it. */
423 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
424 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
426 return QUEUED_COPY (x);
429 /* Return nonzero if X contains a QUEUED expression:
430 if it contains anything that will be altered by a queued increment.
431 We handle only combinations of MEM, PLUS, MINUS and MULT operators
432 since memory addresses generally contain only those. */
438 enum rtx_code code = GET_CODE (x);
444 return queued_subexp_p (XEXP (x, 0));
448 return (queued_subexp_p (XEXP (x, 0))
449 || queued_subexp_p (XEXP (x, 1)));
455 /* Perform all the pending incrementations. */
461 while ((p = pending_chain))
463 rtx body = QUEUED_BODY (p);
465 if (GET_CODE (body) == SEQUENCE)
467 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
468 emit_insn (QUEUED_BODY (p));
471 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
472 pending_chain = QUEUED_NEXT (p);
476 /* Copy data from FROM to TO, where the machine modes are not the same.
477 Both modes may be integer, or both may be floating.
478 UNSIGNEDP should be nonzero if FROM is an unsigned type.
479 This causes zero-extension instead of sign-extension. */
482 convert_move (to, from, unsignedp)
486 enum machine_mode to_mode = GET_MODE (to);
487 enum machine_mode from_mode = GET_MODE (from);
488 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
489 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
493 /* rtx code for making an equivalent value. */
494 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
496 to = protect_from_queue (to, 1);
497 from = protect_from_queue (from, 0);
499 if (to_real != from_real)
502 /* If FROM is a SUBREG that indicates that we have already done at least
503 the required extension, strip it. We don't handle such SUBREGs as
506 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
507 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
508 >= GET_MODE_SIZE (to_mode))
509 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
510 from = gen_lowpart (to_mode, from), from_mode = to_mode;
512 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
515 if (to_mode == from_mode
516 || (from_mode == VOIDmode && CONSTANT_P (from)))
518 emit_move_insn (to, from);
522 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
524 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
527 if (VECTOR_MODE_P (to_mode))
528 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
530 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
532 emit_move_insn (to, from);
536 if (to_real != from_real)
543 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
545 /* Try converting directly if the insn is supported. */
546 if ((code = can_extend_p (to_mode, from_mode, 0))
549 emit_unop_insn (code, to, from, UNKNOWN);
554 #ifdef HAVE_trunchfqf2
555 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
557 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
561 #ifdef HAVE_trunctqfqf2
562 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
564 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
568 #ifdef HAVE_truncsfqf2
569 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
571 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
575 #ifdef HAVE_truncdfqf2
576 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
578 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
582 #ifdef HAVE_truncxfqf2
583 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
589 #ifdef HAVE_trunctfqf2
590 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
597 #ifdef HAVE_trunctqfhf2
598 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
600 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
604 #ifdef HAVE_truncsfhf2
605 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
607 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
611 #ifdef HAVE_truncdfhf2
612 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
614 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
618 #ifdef HAVE_truncxfhf2
619 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
621 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
625 #ifdef HAVE_trunctfhf2
626 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncsftqf2
634 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
636 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
640 #ifdef HAVE_truncdftqf2
641 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
643 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxftqf2
648 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
650 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctftqf2
655 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
657 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
662 #ifdef HAVE_truncdfsf2
663 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
665 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
669 #ifdef HAVE_truncxfsf2
670 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
672 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
676 #ifdef HAVE_trunctfsf2
677 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
679 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
683 #ifdef HAVE_truncxfdf2
684 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
686 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
690 #ifdef HAVE_trunctfdf2
691 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
693 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
705 libcall = extendsfdf2_libfunc;
709 libcall = extendsfxf2_libfunc;
713 libcall = extendsftf2_libfunc;
725 libcall = truncdfsf2_libfunc;
729 libcall = extenddfxf2_libfunc;
733 libcall = extenddftf2_libfunc;
745 libcall = truncxfsf2_libfunc;
749 libcall = truncxfdf2_libfunc;
761 libcall = trunctfsf2_libfunc;
765 libcall = trunctfdf2_libfunc;
777 if (libcall == (rtx) 0)
778 /* This conversion is not implemented yet. */
782 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
784 insns = get_insns ();
786 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
791 /* Now both modes are integers. */
793 /* Handle expanding beyond a word. */
794 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
795 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
802 enum machine_mode lowpart_mode;
803 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
805 /* Try converting directly if the insn is supported. */
806 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
809 /* If FROM is a SUBREG, put it into a register. Do this
810 so that we always generate the same set of insns for
811 better cse'ing; if an intermediate assignment occurred,
812 we won't be doing the operation directly on the SUBREG. */
813 if (optimize > 0 && GET_CODE (from) == SUBREG)
814 from = force_reg (from_mode, from);
815 emit_unop_insn (code, to, from, equiv_code);
818 /* Next, try converting via full word. */
819 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
820 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
821 != CODE_FOR_nothing))
823 if (GET_CODE (to) == REG)
824 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
825 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
826 emit_unop_insn (code, to,
827 gen_lowpart (word_mode, to), equiv_code);
831 /* No special multiword conversion insn; do it by hand. */
834 /* Since we will turn this into a no conflict block, we must ensure
835 that the source does not overlap the target. */
837 if (reg_overlap_mentioned_p (to, from))
838 from = force_reg (from_mode, from);
840 /* Get a copy of FROM widened to a word, if necessary. */
841 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
842 lowpart_mode = word_mode;
844 lowpart_mode = from_mode;
846 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
848 lowpart = gen_lowpart (lowpart_mode, to);
849 emit_move_insn (lowpart, lowfrom);
851 /* Compute the value to put in each remaining word. */
853 fill_value = const0_rtx;
858 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
859 && STORE_FLAG_VALUE == -1)
861 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
863 fill_value = gen_reg_rtx (word_mode);
864 emit_insn (gen_slt (fill_value));
870 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
871 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
873 fill_value = convert_to_mode (word_mode, fill_value, 1);
877 /* Fill the remaining words. */
878 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
880 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
881 rtx subword = operand_subword (to, index, 1, to_mode);
886 if (fill_value != subword)
887 emit_move_insn (subword, fill_value);
890 insns = get_insns ();
893 emit_no_conflict_block (insns, to, from, NULL_RTX,
894 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
898 /* Truncating multi-word to a word or less. */
899 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
900 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
902 if (!((GET_CODE (from) == MEM
903 && ! MEM_VOLATILE_P (from)
904 && direct_load[(int) to_mode]
905 && ! mode_dependent_address_p (XEXP (from, 0)))
906 || GET_CODE (from) == REG
907 || GET_CODE (from) == SUBREG))
908 from = force_reg (from_mode, from);
909 convert_move (to, gen_lowpart (word_mode, from), 0);
913 /* Handle pointer conversion. */ /* SPEE 900220. */
914 if (to_mode == PQImode)
916 if (from_mode != QImode)
917 from = convert_to_mode (QImode, from, unsignedp);
919 #ifdef HAVE_truncqipqi2
920 if (HAVE_truncqipqi2)
922 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
925 #endif /* HAVE_truncqipqi2 */
929 if (from_mode == PQImode)
931 if (to_mode != QImode)
933 from = convert_to_mode (QImode, from, unsignedp);
938 #ifdef HAVE_extendpqiqi2
939 if (HAVE_extendpqiqi2)
941 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
944 #endif /* HAVE_extendpqiqi2 */
949 if (to_mode == PSImode)
951 if (from_mode != SImode)
952 from = convert_to_mode (SImode, from, unsignedp);
954 #ifdef HAVE_truncsipsi2
955 if (HAVE_truncsipsi2)
957 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
960 #endif /* HAVE_truncsipsi2 */
964 if (from_mode == PSImode)
966 if (to_mode != SImode)
968 from = convert_to_mode (SImode, from, unsignedp);
973 #ifdef HAVE_extendpsisi2
974 if (! unsignedp && HAVE_extendpsisi2)
976 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
979 #endif /* HAVE_extendpsisi2 */
980 #ifdef HAVE_zero_extendpsisi2
981 if (unsignedp && HAVE_zero_extendpsisi2)
983 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
986 #endif /* HAVE_zero_extendpsisi2 */
991 if (to_mode == PDImode)
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1002 #endif /* HAVE_truncdipdi2 */
1006 if (from_mode == PDImode)
1008 if (to_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1021 #endif /* HAVE_extendpdidi2 */
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1056 from = force_not_mem (from);
1058 emit_unop_insn (code, to, from, equiv_code);
1063 enum machine_mode intermediate;
1067 /* Search for a mode to convert via. */
1068 for (intermediate = from_mode; intermediate != VOIDmode;
1069 intermediate = GET_MODE_WIDER_MODE (intermediate))
1070 if (((can_extend_p (to_mode, intermediate, unsignedp)
1071 != CODE_FOR_nothing)
1072 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1073 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1074 GET_MODE_BITSIZE (intermediate))))
1075 && (can_extend_p (intermediate, from_mode, unsignedp)
1076 != CODE_FOR_nothing))
1078 convert_move (to, convert_to_mode (intermediate, from,
1079 unsignedp), unsignedp);
1083 /* No suitable intermediate mode.
1084 Generate what we need with shifts. */
1085 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1086 - GET_MODE_BITSIZE (from_mode), 0);
1087 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1088 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1090 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1093 emit_move_insn (to, tmp);
1098 /* Support special truncate insns for certain modes. */
1100 if (from_mode == DImode && to_mode == SImode)
1102 #ifdef HAVE_truncdisi2
1103 if (HAVE_truncdisi2)
1105 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1113 if (from_mode == DImode && to_mode == HImode)
1115 #ifdef HAVE_truncdihi2
1116 if (HAVE_truncdihi2)
1118 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 if (from_mode == DImode && to_mode == QImode)
1128 #ifdef HAVE_truncdiqi2
1129 if (HAVE_truncdiqi2)
1131 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 if (from_mode == SImode && to_mode == HImode)
1141 #ifdef HAVE_truncsihi2
1142 if (HAVE_truncsihi2)
1144 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 if (from_mode == SImode && to_mode == QImode)
1154 #ifdef HAVE_truncsiqi2
1155 if (HAVE_truncsiqi2)
1157 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 if (from_mode == HImode && to_mode == QImode)
1167 #ifdef HAVE_trunchiqi2
1168 if (HAVE_trunchiqi2)
1170 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 if (from_mode == TImode && to_mode == DImode)
1180 #ifdef HAVE_trunctidi2
1181 if (HAVE_trunctidi2)
1183 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 if (from_mode == TImode && to_mode == SImode)
1193 #ifdef HAVE_trunctisi2
1194 if (HAVE_trunctisi2)
1196 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 if (from_mode == TImode && to_mode == HImode)
1206 #ifdef HAVE_trunctihi2
1207 if (HAVE_trunctihi2)
1209 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 if (from_mode == TImode && to_mode == QImode)
1219 #ifdef HAVE_trunctiqi2
1220 if (HAVE_trunctiqi2)
1222 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1230 /* Handle truncation of volatile memrefs, and so on;
1231 the things that couldn't be truncated directly,
1232 and for which there was no special instruction. */
1233 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1235 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1236 emit_move_insn (to, temp);
1240 /* Mode combination is not recognized. */
1244 /* Return an rtx for a value that would result
1245 from converting X to mode MODE.
1246 Both X and MODE may be floating, or both integer.
1247 UNSIGNEDP is nonzero if X is an unsigned value.
1248 This can be done by referring to a part of X in place
1249 or by copying to a new temporary with conversion.
1251 This function *must not* call protect_from_queue
1252 except when putting X into an insn (in which case convert_move does it). */
1255 convert_to_mode (mode, x, unsignedp)
1256 enum machine_mode mode;
1260 return convert_modes (mode, VOIDmode, x, unsignedp);
1263 /* Return an rtx for a value that would result
1264 from converting X from mode OLDMODE to mode MODE.
1265 Both modes may be floating, or both integer.
1266 UNSIGNEDP is nonzero if X is an unsigned value.
1268 This can be done by referring to a part of X in place
1269 or by copying to a new temporary with conversion.
1271 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1273 This function *must not* call protect_from_queue
1274 except when putting X into an insn (in which case convert_move does it). */
1277 convert_modes (mode, oldmode, x, unsignedp)
1278 enum machine_mode mode, oldmode;
1284 /* If FROM is a SUBREG that indicates that we have already done at least
1285 the required extension, strip it. */
1287 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1288 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1289 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1290 x = gen_lowpart (mode, x);
1292 if (GET_MODE (x) != VOIDmode)
1293 oldmode = GET_MODE (x);
1295 if (mode == oldmode)
1298 /* There is one case that we must handle specially: If we are converting
1299 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1300 we are to interpret the constant as unsigned, gen_lowpart will do
1301 the wrong if the constant appears negative. What we want to do is
1302 make the high-order word of the constant zero, not all ones. */
1304 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1305 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1306 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1308 HOST_WIDE_INT val = INTVAL (x);
1310 if (oldmode != VOIDmode
1311 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1313 int width = GET_MODE_BITSIZE (oldmode);
1315 /* We need to zero extend VAL. */
1316 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1319 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1322 /* We can do this with a gen_lowpart if both desired and current modes
1323 are integer, and this is either a constant integer, a register, or a
1324 non-volatile MEM. Except for the constant case where MODE is no
1325 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1327 if ((GET_CODE (x) == CONST_INT
1328 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1329 || (GET_MODE_CLASS (mode) == MODE_INT
1330 && GET_MODE_CLASS (oldmode) == MODE_INT
1331 && (GET_CODE (x) == CONST_DOUBLE
1332 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1333 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1334 && direct_load[(int) mode])
1335 || (GET_CODE (x) == REG
1336 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1337 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1339 /* ?? If we don't know OLDMODE, we have to assume here that
1340 X does not need sign- or zero-extension. This may not be
1341 the case, but it's the best we can do. */
1342 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1343 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1345 HOST_WIDE_INT val = INTVAL (x);
1346 int width = GET_MODE_BITSIZE (oldmode);
1348 /* We must sign or zero-extend in this case. Start by
1349 zero-extending, then sign extend if we need to. */
1350 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1352 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1353 val |= (HOST_WIDE_INT) (-1) << width;
1355 return GEN_INT (trunc_int_for_mode (val, mode));
1358 return gen_lowpart (mode, x);
1361 temp = gen_reg_rtx (mode);
1362 convert_move (temp, x, unsignedp);
1366 /* This macro is used to determine what the largest unit size that
1367 move_by_pieces can use is. */
1369 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1370 move efficiently, as opposed to MOVE_MAX which is the maximum
1371 number of bytes we can move with a single instruction. */
1373 #ifndef MOVE_MAX_PIECES
1374 #define MOVE_MAX_PIECES MOVE_MAX
1377 /* Generate several move instructions to copy LEN bytes from block FROM to
1378 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1379 and TO through protect_from_queue before calling.
1381 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1382 used to push FROM to the stack.
1384 ALIGN is maximum alignment we can assume. */
1387 move_by_pieces (to, from, len, align)
1389 unsigned HOST_WIDE_INT len;
1392 struct move_by_pieces data;
1393 rtx to_addr, from_addr = XEXP (from, 0);
1394 unsigned int max_size = MOVE_MAX_PIECES + 1;
1395 enum machine_mode mode = VOIDmode, tmode;
1396 enum insn_code icode;
1399 data.from_addr = from_addr;
1402 to_addr = XEXP (to, 0);
1405 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1406 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1408 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1415 #ifdef STACK_GROWS_DOWNWARD
1421 data.to_addr = to_addr;
1424 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1425 || GET_CODE (from_addr) == POST_INC
1426 || GET_CODE (from_addr) == POST_DEC);
1428 data.explicit_inc_from = 0;
1429 data.explicit_inc_to = 0;
1430 if (data.reverse) data.offset = len;
1433 /* If copying requires more than two move insns,
1434 copy addresses to registers (to make displacements shorter)
1435 and use post-increment if available. */
1436 if (!(data.autinc_from && data.autinc_to)
1437 && move_by_pieces_ninsns (len, align) > 2)
1439 /* Find the mode of the largest move... */
1440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1442 if (GET_MODE_SIZE (tmode) < max_size)
1445 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1447 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = -1;
1451 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1453 data.from_addr = copy_addr_to_reg (from_addr);
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = 1;
1457 if (!data.autinc_from && CONSTANT_P (from_addr))
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1461 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1463 data.explicit_inc_to = -1;
1465 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1467 data.to_addr = copy_addr_to_reg (to_addr);
1469 data.explicit_inc_to = 1;
1471 if (!data.autinc_to && CONSTANT_P (to_addr))
1472 data.to_addr = copy_addr_to_reg (to_addr);
1475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1477 align = MOVE_MAX * BITS_PER_UNIT;
1479 /* First move what we can in the largest integer mode, then go to
1480 successively smaller modes. */
1482 while (max_size > 1)
1484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1486 if (GET_MODE_SIZE (tmode) < max_size)
1489 if (mode == VOIDmode)
1492 icode = mov_optab->handlers[(int) mode].insn_code;
1493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1494 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1496 max_size = GET_MODE_SIZE (mode);
1499 /* The code above should have handled everything. */
1504 /* Return number of insns required to move L bytes by pieces.
1505 ALIGN (in bits) is maximum alignment we can assume. */
1507 static unsigned HOST_WIDE_INT
1508 move_by_pieces_ninsns (l, align)
1509 unsigned HOST_WIDE_INT l;
1512 unsigned HOST_WIDE_INT n_insns = 0;
1513 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1515 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1516 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1517 align = MOVE_MAX * BITS_PER_UNIT;
1519 while (max_size > 1)
1521 enum machine_mode mode = VOIDmode, tmode;
1522 enum insn_code icode;
1524 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1525 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1526 if (GET_MODE_SIZE (tmode) < max_size)
1529 if (mode == VOIDmode)
1532 icode = mov_optab->handlers[(int) mode].insn_code;
1533 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1534 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1536 max_size = GET_MODE_SIZE (mode);
1544 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1545 with move instructions for mode MODE. GENFUN is the gen_... function
1546 to make a move insn for that mode. DATA has all the other info. */
1549 move_by_pieces_1 (genfun, mode, data)
1550 rtx (*genfun) PARAMS ((rtx, ...));
1551 enum machine_mode mode;
1552 struct move_by_pieces *data;
1554 unsigned int size = GET_MODE_SIZE (mode);
1555 rtx to1 = NULL_RTX, from1;
1557 while (data->len >= size)
1560 data->offset -= size;
1564 if (data->autinc_to)
1565 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1568 to1 = adjust_address (data->to, mode, data->offset);
1571 if (data->autinc_from)
1572 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1575 from1 = adjust_address (data->from, mode, data->offset);
1577 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1578 emit_insn (gen_add2_insn (data->to_addr,
1579 GEN_INT (-(HOST_WIDE_INT)size)));
1580 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1581 emit_insn (gen_add2_insn (data->from_addr,
1582 GEN_INT (-(HOST_WIDE_INT)size)));
1585 emit_insn ((*genfun) (to1, from1));
1588 #ifdef PUSH_ROUNDING
1589 emit_single_push_insn (mode, from1, NULL);
1595 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1596 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1597 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1598 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1600 if (! data->reverse)
1601 data->offset += size;
1607 /* Emit code to move a block Y to a block X.
1608 This may be done with string-move instructions,
1609 with multiple scalar move instructions, or with a library call.
1611 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1613 SIZE is an rtx that says how long they are.
1614 ALIGN is the maximum alignment we can assume they have.
1616 Return the address of the new block, if memcpy is called and returns it,
1620 emit_block_move (x, y, size)
1625 #ifdef TARGET_MEM_FUNCTIONS
1627 tree call_expr, arg_list;
1629 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1631 if (GET_MODE (x) != BLKmode)
1634 if (GET_MODE (y) != BLKmode)
1637 x = protect_from_queue (x, 1);
1638 y = protect_from_queue (y, 0);
1639 size = protect_from_queue (size, 0);
1641 if (GET_CODE (x) != MEM)
1643 if (GET_CODE (y) != MEM)
1648 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1649 move_by_pieces (x, y, INTVAL (size), align);
1652 /* Try the most limited insn first, because there's no point
1653 including more than one in the machine description unless
1654 the more limited one has some advantage. */
1656 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1657 enum machine_mode mode;
1659 /* Since this is a move insn, we don't care about volatility. */
1662 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1663 mode = GET_MODE_WIDER_MODE (mode))
1665 enum insn_code code = movstr_optab[(int) mode];
1666 insn_operand_predicate_fn pred;
1668 if (code != CODE_FOR_nothing
1669 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1670 here because if SIZE is less than the mode mask, as it is
1671 returned by the macro, it will definitely be less than the
1672 actual mode mask. */
1673 && ((GET_CODE (size) == CONST_INT
1674 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1675 <= (GET_MODE_MASK (mode) >> 1)))
1676 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1677 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1678 || (*pred) (x, BLKmode))
1679 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1680 || (*pred) (y, BLKmode))
1681 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1682 || (*pred) (opalign, VOIDmode)))
1685 rtx last = get_last_insn ();
1688 op2 = convert_to_mode (mode, size, 1);
1689 pred = insn_data[(int) code].operand[2].predicate;
1690 if (pred != 0 && ! (*pred) (op2, mode))
1691 op2 = copy_to_mode_reg (mode, op2);
1693 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1701 delete_insns_since (last);
1707 /* X, Y, or SIZE may have been passed through protect_from_queue.
1709 It is unsafe to save the value generated by protect_from_queue
1710 and reuse it later. Consider what happens if emit_queue is
1711 called before the return value from protect_from_queue is used.
1713 Expansion of the CALL_EXPR below will call emit_queue before
1714 we are finished emitting RTL for argument setup. So if we are
1715 not careful we could get the wrong value for an argument.
1717 To avoid this problem we go ahead and emit code to copy X, Y &
1718 SIZE into new pseudos. We can then place those new pseudos
1719 into an RTL_EXPR and use them later, even after a call to
1722 Note this is not strictly needed for library calls since they
1723 do not call emit_queue before loading their arguments. However,
1724 we may need to have library calls call emit_queue in the future
1725 since failing to do so could cause problems for targets which
1726 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1727 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1728 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1730 #ifdef TARGET_MEM_FUNCTIONS
1731 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1733 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1734 TREE_UNSIGNED (integer_type_node));
1735 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1738 #ifdef TARGET_MEM_FUNCTIONS
1739 /* It is incorrect to use the libcall calling conventions to call
1740 memcpy in this context.
1742 This could be a user call to memcpy and the user may wish to
1743 examine the return value from memcpy.
1745 For targets where libcalls and normal calls have different conventions
1746 for returning pointers, we could end up generating incorrect code.
1748 So instead of using a libcall sequence we build up a suitable
1749 CALL_EXPR and expand the call in the normal fashion. */
1750 if (fn == NULL_TREE)
1754 /* This was copied from except.c, I don't know if all this is
1755 necessary in this context or not. */
1756 fn = get_identifier ("memcpy");
1757 fntype = build_pointer_type (void_type_node);
1758 fntype = build_function_type (fntype, NULL_TREE);
1759 fn = build_decl (FUNCTION_DECL, fn, fntype);
1760 ggc_add_tree_root (&fn, 1);
1761 DECL_EXTERNAL (fn) = 1;
1762 TREE_PUBLIC (fn) = 1;
1763 DECL_ARTIFICIAL (fn) = 1;
1764 TREE_NOTHROW (fn) = 1;
1765 make_decl_rtl (fn, NULL);
1766 assemble_external (fn);
1769 /* We need to make an argument list for the function call.
1771 memcpy has three arguments, the first two are void * addresses and
1772 the last is a size_t byte count for the copy. */
1774 = build_tree_list (NULL_TREE,
1775 make_tree (build_pointer_type (void_type_node), x));
1776 TREE_CHAIN (arg_list)
1777 = build_tree_list (NULL_TREE,
1778 make_tree (build_pointer_type (void_type_node), y));
1779 TREE_CHAIN (TREE_CHAIN (arg_list))
1780 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1781 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1783 /* Now we have to build up the CALL_EXPR itself. */
1784 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1785 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1786 call_expr, arg_list, NULL_TREE);
1787 TREE_SIDE_EFFECTS (call_expr) = 1;
1789 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1791 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1792 VOIDmode, 3, y, Pmode, x, Pmode,
1793 convert_to_mode (TYPE_MODE (integer_type_node), size,
1794 TREE_UNSIGNED (integer_type_node)),
1795 TYPE_MODE (integer_type_node));
1798 /* If we are initializing a readonly value, show the above call
1799 clobbered it. Otherwise, a load from it may erroneously be hoisted
1801 if (RTX_UNCHANGING_P (x))
1802 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1808 /* Copy all or part of a value X into registers starting at REGNO.
1809 The number of registers to be filled is NREGS. */
1812 move_block_to_reg (regno, x, nregs, mode)
1816 enum machine_mode mode;
1819 #ifdef HAVE_load_multiple
1827 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1828 x = validize_mem (force_const_mem (mode, x));
1830 /* See if the machine can do this with a load multiple insn. */
1831 #ifdef HAVE_load_multiple
1832 if (HAVE_load_multiple)
1834 last = get_last_insn ();
1835 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1843 delete_insns_since (last);
1847 for (i = 0; i < nregs; i++)
1848 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1849 operand_subword_force (x, i, mode));
1852 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1853 The number of registers to be filled is NREGS. SIZE indicates the number
1854 of bytes in the object X. */
1857 move_block_from_reg (regno, x, nregs, size)
1864 #ifdef HAVE_store_multiple
1868 enum machine_mode mode;
1873 /* If SIZE is that of a mode no bigger than a word, just use that
1874 mode's store operation. */
1875 if (size <= UNITS_PER_WORD
1876 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1877 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1879 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1883 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1884 to the left before storing to memory. Note that the previous test
1885 doesn't handle all cases (e.g. SIZE == 3). */
1886 if (size < UNITS_PER_WORD
1888 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1890 rtx tem = operand_subword (x, 0, 1, BLKmode);
1896 shift = expand_shift (LSHIFT_EXPR, word_mode,
1897 gen_rtx_REG (word_mode, regno),
1898 build_int_2 ((UNITS_PER_WORD - size)
1899 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1900 emit_move_insn (tem, shift);
1904 /* See if the machine can do this with a store multiple insn. */
1905 #ifdef HAVE_store_multiple
1906 if (HAVE_store_multiple)
1908 last = get_last_insn ();
1909 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1917 delete_insns_since (last);
1921 for (i = 0; i < nregs; i++)
1923 rtx tem = operand_subword (x, i, 1, BLKmode);
1928 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1932 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1933 registers represented by a PARALLEL. SSIZE represents the total size of
1934 block SRC in bytes, or -1 if not known. */
1935 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1936 the balance will be in what would be the low-order memory addresses, i.e.
1937 left justified for big endian, right justified for little endian. This
1938 happens to be true for the targets currently using this support. If this
1939 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1943 emit_group_load (dst, orig_src, ssize)
1950 if (GET_CODE (dst) != PARALLEL)
1953 /* Check for a NULL entry, used to indicate that the parameter goes
1954 both on the stack and in registers. */
1955 if (XEXP (XVECEXP (dst, 0, 0), 0))
1960 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1962 /* Process the pieces. */
1963 for (i = start; i < XVECLEN (dst, 0); i++)
1965 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1966 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1967 unsigned int bytelen = GET_MODE_SIZE (mode);
1970 /* Handle trailing fragments that run over the size of the struct. */
1971 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1973 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1974 bytelen = ssize - bytepos;
1979 /* If we won't be loading directly from memory, protect the real source
1980 from strange tricks we might play; but make sure that the source can
1981 be loaded directly into the destination. */
1983 if (GET_CODE (orig_src) != MEM
1984 && (!CONSTANT_P (orig_src)
1985 || (GET_MODE (orig_src) != mode
1986 && GET_MODE (orig_src) != VOIDmode)))
1988 if (GET_MODE (orig_src) == VOIDmode)
1989 src = gen_reg_rtx (mode);
1991 src = gen_reg_rtx (GET_MODE (orig_src));
1993 emit_move_insn (src, orig_src);
1996 /* Optimize the access just a bit. */
1997 if (GET_CODE (src) == MEM
1998 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
1999 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2000 && bytelen == GET_MODE_SIZE (mode))
2002 tmps[i] = gen_reg_rtx (mode);
2003 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2005 else if (GET_CODE (src) == CONCAT)
2008 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2009 || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2010 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
2012 tmps[i] = XEXP (src, bytepos != 0);
2013 if (! CONSTANT_P (tmps[i])
2014 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2015 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2016 0, 1, NULL_RTX, mode, mode, ssize);
2018 else if (bytepos == 0)
2020 rtx mem = assign_stack_temp (GET_MODE (src),
2021 GET_MODE_SIZE (GET_MODE (src)), 0);
2022 emit_move_insn (mem, src);
2023 tmps[i] = adjust_address (mem, mode, 0);
2028 else if (CONSTANT_P (src)
2029 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2032 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2033 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2036 if (BYTES_BIG_ENDIAN && shift)
2037 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2038 tmps[i], 0, OPTAB_WIDEN);
2043 /* Copy the extracted pieces into the proper (probable) hard regs. */
2044 for (i = start; i < XVECLEN (dst, 0); i++)
2045 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2048 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2049 registers represented by a PARALLEL. SSIZE represents the total size of
2050 block DST, or -1 if not known. */
2053 emit_group_store (orig_dst, src, ssize)
2060 if (GET_CODE (src) != PARALLEL)
2063 /* Check for a NULL entry, used to indicate that the parameter goes
2064 both on the stack and in registers. */
2065 if (XEXP (XVECEXP (src, 0, 0), 0))
2070 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2072 /* Copy the (probable) hard regs into pseudos. */
2073 for (i = start; i < XVECLEN (src, 0); i++)
2075 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2076 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2077 emit_move_insn (tmps[i], reg);
2081 /* If we won't be storing directly into memory, protect the real destination
2082 from strange tricks we might play. */
2084 if (GET_CODE (dst) == PARALLEL)
2088 /* We can get a PARALLEL dst if there is a conditional expression in
2089 a return statement. In that case, the dst and src are the same,
2090 so no action is necessary. */
2091 if (rtx_equal_p (dst, src))
2094 /* It is unclear if we can ever reach here, but we may as well handle
2095 it. Allocate a temporary, and split this into a store/load to/from
2098 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2099 emit_group_store (temp, src, ssize);
2100 emit_group_load (dst, temp, ssize);
2103 else if (GET_CODE (dst) != MEM)
2105 dst = gen_reg_rtx (GET_MODE (orig_dst));
2106 /* Make life a bit easier for combine. */
2107 emit_move_insn (dst, const0_rtx);
2110 /* Process the pieces. */
2111 for (i = start; i < XVECLEN (src, 0); i++)
2113 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2114 enum machine_mode mode = GET_MODE (tmps[i]);
2115 unsigned int bytelen = GET_MODE_SIZE (mode);
2117 /* Handle trailing fragments that run over the size of the struct. */
2118 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2120 if (BYTES_BIG_ENDIAN)
2122 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2123 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2124 tmps[i], 0, OPTAB_WIDEN);
2126 bytelen = ssize - bytepos;
2129 /* Optimize the access just a bit. */
2130 if (GET_CODE (dst) == MEM
2131 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2132 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2133 && bytelen == GET_MODE_SIZE (mode))
2134 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2136 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2137 mode, tmps[i], ssize);
2142 /* Copy from the pseudo into the (probable) hard reg. */
2143 if (GET_CODE (dst) == REG)
2144 emit_move_insn (orig_dst, dst);
2147 /* Generate code to copy a BLKmode object of TYPE out of a
2148 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2149 is null, a stack temporary is created. TGTBLK is returned.
2151 The primary purpose of this routine is to handle functions
2152 that return BLKmode structures in registers. Some machines
2153 (the PA for example) want to return all small structures
2154 in registers regardless of the structure's alignment. */
2157 copy_blkmode_from_reg (tgtblk, srcreg, type)
2162 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2163 rtx src = NULL, dst = NULL;
2164 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2165 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2169 tgtblk = assign_temp (build_qualified_type (type,
2171 | TYPE_QUAL_CONST)),
2173 preserve_temp_slots (tgtblk);
2176 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2177 into a new pseudo which is a full word.
2179 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2180 the wrong part of the register gets copied so we fake a type conversion
2182 if (GET_MODE (srcreg) != BLKmode
2183 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2185 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2186 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2188 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2191 /* Structures whose size is not a multiple of a word are aligned
2192 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2193 machine, this means we must skip the empty high order bytes when
2194 calculating the bit offset. */
2195 if (BYTES_BIG_ENDIAN
2196 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2197 && bytes % UNITS_PER_WORD)
2198 big_endian_correction
2199 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2201 /* Copy the structure BITSIZE bites at a time.
2203 We could probably emit more efficient code for machines which do not use
2204 strict alignment, but it doesn't seem worth the effort at the current
2206 for (bitpos = 0, xbitpos = big_endian_correction;
2207 bitpos < bytes * BITS_PER_UNIT;
2208 bitpos += bitsize, xbitpos += bitsize)
2210 /* We need a new source operand each time xbitpos is on a
2211 word boundary and when xbitpos == big_endian_correction
2212 (the first time through). */
2213 if (xbitpos % BITS_PER_WORD == 0
2214 || xbitpos == big_endian_correction)
2215 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2218 /* We need a new destination operand each time bitpos is on
2220 if (bitpos % BITS_PER_WORD == 0)
2221 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2223 /* Use xbitpos for the source extraction (right justified) and
2224 xbitpos for the destination store (left justified). */
2225 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2226 extract_bit_field (src, bitsize,
2227 xbitpos % BITS_PER_WORD, 1,
2228 NULL_RTX, word_mode, word_mode,
2236 /* Add a USE expression for REG to the (possibly empty) list pointed
2237 to by CALL_FUSAGE. REG must denote a hard register. */
2240 use_reg (call_fusage, reg)
2241 rtx *call_fusage, reg;
2243 if (GET_CODE (reg) != REG
2244 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2248 = gen_rtx_EXPR_LIST (VOIDmode,
2249 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2252 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2253 starting at REGNO. All of these registers must be hard registers. */
2256 use_regs (call_fusage, regno, nregs)
2263 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2266 for (i = 0; i < nregs; i++)
2267 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2270 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2271 PARALLEL REGS. This is for calls that pass values in multiple
2272 non-contiguous locations. The Irix 6 ABI has examples of this. */
2275 use_group_regs (call_fusage, regs)
2281 for (i = 0; i < XVECLEN (regs, 0); i++)
2283 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2285 /* A NULL entry means the parameter goes both on the stack and in
2286 registers. This can also be a MEM for targets that pass values
2287 partially on the stack and partially in registers. */
2288 if (reg != 0 && GET_CODE (reg) == REG)
2289 use_reg (call_fusage, reg);
2295 can_store_by_pieces (len, constfun, constfundata, align)
2296 unsigned HOST_WIDE_INT len;
2297 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2301 unsigned HOST_WIDE_INT max_size, l;
2302 HOST_WIDE_INT offset = 0;
2303 enum machine_mode mode, tmode;
2304 enum insn_code icode;
2308 if (! MOVE_BY_PIECES_P (len, align))
2311 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2312 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2313 align = MOVE_MAX * BITS_PER_UNIT;
2315 /* We would first store what we can in the largest integer mode, then go to
2316 successively smaller modes. */
2319 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2324 max_size = MOVE_MAX_PIECES + 1;
2325 while (max_size > 1)
2327 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2328 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2329 if (GET_MODE_SIZE (tmode) < max_size)
2332 if (mode == VOIDmode)
2335 icode = mov_optab->handlers[(int) mode].insn_code;
2336 if (icode != CODE_FOR_nothing
2337 && align >= GET_MODE_ALIGNMENT (mode))
2339 unsigned int size = GET_MODE_SIZE (mode);
2346 cst = (*constfun) (constfundata, offset, mode);
2347 if (!LEGITIMATE_CONSTANT_P (cst))
2357 max_size = GET_MODE_SIZE (mode);
2360 /* The code above should have handled everything. */
2368 /* Generate several move instructions to store LEN bytes generated by
2369 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2370 pointer which will be passed as argument in every CONSTFUN call.
2371 ALIGN is maximum alignment we can assume. */
2374 store_by_pieces (to, len, constfun, constfundata, align)
2376 unsigned HOST_WIDE_INT len;
2377 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2381 struct store_by_pieces data;
2383 if (! MOVE_BY_PIECES_P (len, align))
2385 to = protect_from_queue (to, 1);
2386 data.constfun = constfun;
2387 data.constfundata = constfundata;
2390 store_by_pieces_1 (&data, align);
2393 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2394 rtx with BLKmode). The caller must pass TO through protect_from_queue
2395 before calling. ALIGN is maximum alignment we can assume. */
2398 clear_by_pieces (to, len, align)
2400 unsigned HOST_WIDE_INT len;
2403 struct store_by_pieces data;
2405 data.constfun = clear_by_pieces_1;
2406 data.constfundata = NULL;
2409 store_by_pieces_1 (&data, align);
2412 /* Callback routine for clear_by_pieces.
2413 Return const0_rtx unconditionally. */
2416 clear_by_pieces_1 (data, offset, mode)
2417 PTR data ATTRIBUTE_UNUSED;
2418 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2419 enum machine_mode mode ATTRIBUTE_UNUSED;
2424 /* Subroutine of clear_by_pieces and store_by_pieces.
2425 Generate several move instructions to store LEN bytes of block TO. (A MEM
2426 rtx with BLKmode). The caller must pass TO through protect_from_queue
2427 before calling. ALIGN is maximum alignment we can assume. */
2430 store_by_pieces_1 (data, align)
2431 struct store_by_pieces *data;
2434 rtx to_addr = XEXP (data->to, 0);
2435 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2436 enum machine_mode mode = VOIDmode, tmode;
2437 enum insn_code icode;
2440 data->to_addr = to_addr;
2442 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2443 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2445 data->explicit_inc_to = 0;
2447 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2449 data->offset = data->len;
2451 /* If storing requires more than two move insns,
2452 copy addresses to registers (to make displacements shorter)
2453 and use post-increment if available. */
2454 if (!data->autinc_to
2455 && move_by_pieces_ninsns (data->len, align) > 2)
2457 /* Determine the main mode we'll be using. */
2458 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2459 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2460 if (GET_MODE_SIZE (tmode) < max_size)
2463 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2465 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2466 data->autinc_to = 1;
2467 data->explicit_inc_to = -1;
2470 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2471 && ! data->autinc_to)
2473 data->to_addr = copy_addr_to_reg (to_addr);
2474 data->autinc_to = 1;
2475 data->explicit_inc_to = 1;
2478 if ( !data->autinc_to && CONSTANT_P (to_addr))
2479 data->to_addr = copy_addr_to_reg (to_addr);
2482 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2483 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2484 align = MOVE_MAX * BITS_PER_UNIT;
2486 /* First store what we can in the largest integer mode, then go to
2487 successively smaller modes. */
2489 while (max_size > 1)
2491 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2492 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2493 if (GET_MODE_SIZE (tmode) < max_size)
2496 if (mode == VOIDmode)
2499 icode = mov_optab->handlers[(int) mode].insn_code;
2500 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2501 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2503 max_size = GET_MODE_SIZE (mode);
2506 /* The code above should have handled everything. */
2511 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2512 with move instructions for mode MODE. GENFUN is the gen_... function
2513 to make a move insn for that mode. DATA has all the other info. */
2516 store_by_pieces_2 (genfun, mode, data)
2517 rtx (*genfun) PARAMS ((rtx, ...));
2518 enum machine_mode mode;
2519 struct store_by_pieces *data;
2521 unsigned int size = GET_MODE_SIZE (mode);
2524 while (data->len >= size)
2527 data->offset -= size;
2529 if (data->autinc_to)
2530 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2533 to1 = adjust_address (data->to, mode, data->offset);
2535 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2536 emit_insn (gen_add2_insn (data->to_addr,
2537 GEN_INT (-(HOST_WIDE_INT) size)));
2539 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2540 emit_insn ((*genfun) (to1, cst));
2542 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2543 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2545 if (! data->reverse)
2546 data->offset += size;
2552 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2553 its length in bytes. */
2556 clear_storage (object, size)
2560 #ifdef TARGET_MEM_FUNCTIONS
2562 tree call_expr, arg_list;
2565 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2566 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2568 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2569 just move a zero. Otherwise, do this a piece at a time. */
2570 if (GET_MODE (object) != BLKmode
2571 && GET_CODE (size) == CONST_INT
2572 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2573 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2576 object = protect_from_queue (object, 1);
2577 size = protect_from_queue (size, 0);
2579 if (GET_CODE (size) == CONST_INT
2580 && MOVE_BY_PIECES_P (INTVAL (size), align))
2581 clear_by_pieces (object, INTVAL (size), align);
2584 /* Try the most limited insn first, because there's no point
2585 including more than one in the machine description unless
2586 the more limited one has some advantage. */
2588 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2589 enum machine_mode mode;
2591 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2592 mode = GET_MODE_WIDER_MODE (mode))
2594 enum insn_code code = clrstr_optab[(int) mode];
2595 insn_operand_predicate_fn pred;
2597 if (code != CODE_FOR_nothing
2598 /* We don't need MODE to be narrower than
2599 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2600 the mode mask, as it is returned by the macro, it will
2601 definitely be less than the actual mode mask. */
2602 && ((GET_CODE (size) == CONST_INT
2603 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2604 <= (GET_MODE_MASK (mode) >> 1)))
2605 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2606 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2607 || (*pred) (object, BLKmode))
2608 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2609 || (*pred) (opalign, VOIDmode)))
2612 rtx last = get_last_insn ();
2615 op1 = convert_to_mode (mode, size, 1);
2616 pred = insn_data[(int) code].operand[1].predicate;
2617 if (pred != 0 && ! (*pred) (op1, mode))
2618 op1 = copy_to_mode_reg (mode, op1);
2620 pat = GEN_FCN ((int) code) (object, op1, opalign);
2627 delete_insns_since (last);
2631 /* OBJECT or SIZE may have been passed through protect_from_queue.
2633 It is unsafe to save the value generated by protect_from_queue
2634 and reuse it later. Consider what happens if emit_queue is
2635 called before the return value from protect_from_queue is used.
2637 Expansion of the CALL_EXPR below will call emit_queue before
2638 we are finished emitting RTL for argument setup. So if we are
2639 not careful we could get the wrong value for an argument.
2641 To avoid this problem we go ahead and emit code to copy OBJECT
2642 and SIZE into new pseudos. We can then place those new pseudos
2643 into an RTL_EXPR and use them later, even after a call to
2646 Note this is not strictly needed for library calls since they
2647 do not call emit_queue before loading their arguments. However,
2648 we may need to have library calls call emit_queue in the future
2649 since failing to do so could cause problems for targets which
2650 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2651 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2653 #ifdef TARGET_MEM_FUNCTIONS
2654 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2656 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2657 TREE_UNSIGNED (integer_type_node));
2658 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2661 #ifdef TARGET_MEM_FUNCTIONS
2662 /* It is incorrect to use the libcall calling conventions to call
2663 memset in this context.
2665 This could be a user call to memset and the user may wish to
2666 examine the return value from memset.
2668 For targets where libcalls and normal calls have different
2669 conventions for returning pointers, we could end up generating
2672 So instead of using a libcall sequence we build up a suitable
2673 CALL_EXPR and expand the call in the normal fashion. */
2674 if (fn == NULL_TREE)
2678 /* This was copied from except.c, I don't know if all this is
2679 necessary in this context or not. */
2680 fn = get_identifier ("memset");
2681 fntype = build_pointer_type (void_type_node);
2682 fntype = build_function_type (fntype, NULL_TREE);
2683 fn = build_decl (FUNCTION_DECL, fn, fntype);
2684 ggc_add_tree_root (&fn, 1);
2685 DECL_EXTERNAL (fn) = 1;
2686 TREE_PUBLIC (fn) = 1;
2687 DECL_ARTIFICIAL (fn) = 1;
2688 TREE_NOTHROW (fn) = 1;
2689 make_decl_rtl (fn, NULL);
2690 assemble_external (fn);
2693 /* We need to make an argument list for the function call.
2695 memset has three arguments, the first is a void * addresses, the
2696 second an integer with the initialization value, the last is a
2697 size_t byte count for the copy. */
2699 = build_tree_list (NULL_TREE,
2700 make_tree (build_pointer_type (void_type_node),
2702 TREE_CHAIN (arg_list)
2703 = build_tree_list (NULL_TREE,
2704 make_tree (integer_type_node, const0_rtx));
2705 TREE_CHAIN (TREE_CHAIN (arg_list))
2706 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2707 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2709 /* Now we have to build up the CALL_EXPR itself. */
2710 call_expr = build1 (ADDR_EXPR,
2711 build_pointer_type (TREE_TYPE (fn)), fn);
2712 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2713 call_expr, arg_list, NULL_TREE);
2714 TREE_SIDE_EFFECTS (call_expr) = 1;
2716 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2718 emit_library_call (bzero_libfunc, LCT_NORMAL,
2719 VOIDmode, 2, object, Pmode, size,
2720 TYPE_MODE (integer_type_node));
2723 /* If we are initializing a readonly value, show the above call
2724 clobbered it. Otherwise, a load from it may erroneously be
2725 hoisted from a loop. */
2726 if (RTX_UNCHANGING_P (object))
2727 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2734 /* Generate code to copy Y into X.
2735 Both Y and X must have the same mode, except that
2736 Y can be a constant with VOIDmode.
2737 This mode cannot be BLKmode; use emit_block_move for that.
2739 Return the last instruction emitted. */
2742 emit_move_insn (x, y)
2745 enum machine_mode mode = GET_MODE (x);
2746 rtx y_cst = NULL_RTX;
2749 x = protect_from_queue (x, 1);
2750 y = protect_from_queue (y, 0);
2752 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2755 /* Never force constant_p_rtx to memory. */
2756 if (GET_CODE (y) == CONSTANT_P_RTX)
2758 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2761 y = force_const_mem (mode, y);
2764 /* If X or Y are memory references, verify that their addresses are valid
2766 if (GET_CODE (x) == MEM
2767 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2768 && ! push_operand (x, GET_MODE (x)))
2770 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2771 x = validize_mem (x);
2773 if (GET_CODE (y) == MEM
2774 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2776 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2777 y = validize_mem (y);
2779 if (mode == BLKmode)
2782 last_insn = emit_move_insn_1 (x, y);
2784 if (y_cst && GET_CODE (x) == REG)
2785 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2790 /* Low level part of emit_move_insn.
2791 Called just like emit_move_insn, but assumes X and Y
2792 are basically valid. */
2795 emit_move_insn_1 (x, y)
2798 enum machine_mode mode = GET_MODE (x);
2799 enum machine_mode submode;
2800 enum mode_class class = GET_MODE_CLASS (mode);
2802 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2805 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2807 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2809 /* Expand complex moves by moving real part and imag part, if possible. */
2810 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2811 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2813 (class == MODE_COMPLEX_INT
2814 ? MODE_INT : MODE_FLOAT),
2816 && (mov_optab->handlers[(int) submode].insn_code
2817 != CODE_FOR_nothing))
2819 /* Don't split destination if it is a stack push. */
2820 int stack = push_operand (x, GET_MODE (x));
2822 #ifdef PUSH_ROUNDING
2823 /* In case we output to the stack, but the size is smaller machine can
2824 push exactly, we need to use move instructions. */
2826 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2827 != GET_MODE_SIZE (submode)))
2830 HOST_WIDE_INT offset1, offset2;
2832 /* Do not use anti_adjust_stack, since we don't want to update
2833 stack_pointer_delta. */
2834 temp = expand_binop (Pmode,
2835 #ifdef STACK_GROWS_DOWNWARD
2843 (GET_MODE_SIZE (GET_MODE (x)))),
2844 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2846 if (temp != stack_pointer_rtx)
2847 emit_move_insn (stack_pointer_rtx, temp);
2849 #ifdef STACK_GROWS_DOWNWARD
2851 offset2 = GET_MODE_SIZE (submode);
2853 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2854 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2855 + GET_MODE_SIZE (submode));
2858 emit_move_insn (change_address (x, submode,
2859 gen_rtx_PLUS (Pmode,
2861 GEN_INT (offset1))),
2862 gen_realpart (submode, y));
2863 emit_move_insn (change_address (x, submode,
2864 gen_rtx_PLUS (Pmode,
2866 GEN_INT (offset2))),
2867 gen_imagpart (submode, y));
2871 /* If this is a stack, push the highpart first, so it
2872 will be in the argument order.
2874 In that case, change_address is used only to convert
2875 the mode, not to change the address. */
2878 /* Note that the real part always precedes the imag part in memory
2879 regardless of machine's endianness. */
2880 #ifdef STACK_GROWS_DOWNWARD
2881 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2882 (gen_rtx_MEM (submode, XEXP (x, 0)),
2883 gen_imagpart (submode, y)));
2884 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2885 (gen_rtx_MEM (submode, XEXP (x, 0)),
2886 gen_realpart (submode, y)));
2888 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2889 (gen_rtx_MEM (submode, XEXP (x, 0)),
2890 gen_realpart (submode, y)));
2891 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2892 (gen_rtx_MEM (submode, XEXP (x, 0)),
2893 gen_imagpart (submode, y)));
2898 rtx realpart_x, realpart_y;
2899 rtx imagpart_x, imagpart_y;
2901 /* If this is a complex value with each part being smaller than a
2902 word, the usual calling sequence will likely pack the pieces into
2903 a single register. Unfortunately, SUBREG of hard registers only
2904 deals in terms of words, so we have a problem converting input
2905 arguments to the CONCAT of two registers that is used elsewhere
2906 for complex values. If this is before reload, we can copy it into
2907 memory and reload. FIXME, we should see about using extract and
2908 insert on integer registers, but complex short and complex char
2909 variables should be rarely used. */
2910 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2911 && (reload_in_progress | reload_completed) == 0)
2914 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2916 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2918 if (packed_dest_p || packed_src_p)
2920 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2921 ? MODE_FLOAT : MODE_INT);
2923 enum machine_mode reg_mode
2924 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2926 if (reg_mode != BLKmode)
2928 rtx mem = assign_stack_temp (reg_mode,
2929 GET_MODE_SIZE (mode), 0);
2930 rtx cmem = adjust_address (mem, mode, 0);
2933 = N_("function using short complex types cannot be inline");
2937 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2939 emit_move_insn_1 (cmem, y);
2940 return emit_move_insn_1 (sreg, mem);
2944 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2946 emit_move_insn_1 (mem, sreg);
2947 return emit_move_insn_1 (x, cmem);
2953 realpart_x = gen_realpart (submode, x);
2954 realpart_y = gen_realpart (submode, y);
2955 imagpart_x = gen_imagpart (submode, x);
2956 imagpart_y = gen_imagpart (submode, y);
2958 /* Show the output dies here. This is necessary for SUBREGs
2959 of pseudos since we cannot track their lifetimes correctly;
2960 hard regs shouldn't appear here except as return values.
2961 We never want to emit such a clobber after reload. */
2963 && ! (reload_in_progress || reload_completed)
2964 && (GET_CODE (realpart_x) == SUBREG
2965 || GET_CODE (imagpart_x) == SUBREG))
2966 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2968 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2969 (realpart_x, realpart_y));
2970 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2971 (imagpart_x, imagpart_y));
2974 return get_last_insn ();
2977 /* This will handle any multi-word mode that lacks a move_insn pattern.
2978 However, you will get better code if you define such patterns,
2979 even if they must turn into multiple assembler instructions. */
2980 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2987 #ifdef PUSH_ROUNDING
2989 /* If X is a push on the stack, do the push now and replace
2990 X with a reference to the stack pointer. */
2991 if (push_operand (x, GET_MODE (x)))
2996 /* Do not use anti_adjust_stack, since we don't want to update
2997 stack_pointer_delta. */
2998 temp = expand_binop (Pmode,
2999 #ifdef STACK_GROWS_DOWNWARD
3007 (GET_MODE_SIZE (GET_MODE (x)))),
3008 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3010 if (temp != stack_pointer_rtx)
3011 emit_move_insn (stack_pointer_rtx, temp);
3013 code = GET_CODE (XEXP (x, 0));
3015 /* Just hope that small offsets off SP are OK. */
3016 if (code == POST_INC)
3017 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3018 GEN_INT (-((HOST_WIDE_INT)
3019 GET_MODE_SIZE (GET_MODE (x)))));
3020 else if (code == POST_DEC)
3021 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3022 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3024 temp = stack_pointer_rtx;
3026 x = change_address (x, VOIDmode, temp);
3030 /* If we are in reload, see if either operand is a MEM whose address
3031 is scheduled for replacement. */
3032 if (reload_in_progress && GET_CODE (x) == MEM
3033 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3034 x = replace_equiv_address_nv (x, inner);
3035 if (reload_in_progress && GET_CODE (y) == MEM
3036 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3037 y = replace_equiv_address_nv (y, inner);
3043 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3046 rtx xpart = operand_subword (x, i, 1, mode);
3047 rtx ypart = operand_subword (y, i, 1, mode);
3049 /* If we can't get a part of Y, put Y into memory if it is a
3050 constant. Otherwise, force it into a register. If we still
3051 can't get a part of Y, abort. */
3052 if (ypart == 0 && CONSTANT_P (y))
3054 y = force_const_mem (mode, y);
3055 ypart = operand_subword (y, i, 1, mode);
3057 else if (ypart == 0)
3058 ypart = operand_subword_force (y, i, mode);
3060 if (xpart == 0 || ypart == 0)
3063 need_clobber |= (GET_CODE (xpart) == SUBREG);
3065 last_insn = emit_move_insn (xpart, ypart);
3068 seq = gen_sequence ();
3071 /* Show the output dies here. This is necessary for SUBREGs
3072 of pseudos since we cannot track their lifetimes correctly;
3073 hard regs shouldn't appear here except as return values.
3074 We never want to emit such a clobber after reload. */
3076 && ! (reload_in_progress || reload_completed)
3077 && need_clobber != 0)
3078 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3088 /* Pushing data onto the stack. */
3090 /* Push a block of length SIZE (perhaps variable)
3091 and return an rtx to address the beginning of the block.
3092 Note that it is not possible for the value returned to be a QUEUED.
3093 The value may be virtual_outgoing_args_rtx.
3095 EXTRA is the number of bytes of padding to push in addition to SIZE.
3096 BELOW nonzero means this padding comes at low addresses;
3097 otherwise, the padding comes at high addresses. */
3100 push_block (size, extra, below)
3106 size = convert_modes (Pmode, ptr_mode, size, 1);
3107 if (CONSTANT_P (size))
3108 anti_adjust_stack (plus_constant (size, extra));
3109 else if (GET_CODE (size) == REG && extra == 0)
3110 anti_adjust_stack (size);
3113 temp = copy_to_mode_reg (Pmode, size);
3115 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3116 temp, 0, OPTAB_LIB_WIDEN);
3117 anti_adjust_stack (temp);
3120 #ifndef STACK_GROWS_DOWNWARD
3126 temp = virtual_outgoing_args_rtx;
3127 if (extra != 0 && below)
3128 temp = plus_constant (temp, extra);
3132 if (GET_CODE (size) == CONST_INT)
3133 temp = plus_constant (virtual_outgoing_args_rtx,
3134 -INTVAL (size) - (below ? 0 : extra));
3135 else if (extra != 0 && !below)
3136 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3137 negate_rtx (Pmode, plus_constant (size, extra)));
3139 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3140 negate_rtx (Pmode, size));
3143 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3146 #ifdef PUSH_ROUNDING
3148 /* Emit single push insn. */
3151 emit_single_push_insn (mode, x, type)
3153 enum machine_mode mode;
3157 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3159 enum insn_code icode;
3160 insn_operand_predicate_fn pred;
3162 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3163 /* If there is push pattern, use it. Otherwise try old way of throwing
3164 MEM representing push operation to move expander. */
3165 icode = push_optab->handlers[(int) mode].insn_code;
3166 if (icode != CODE_FOR_nothing)
3168 if (((pred = insn_data[(int) icode].operand[0].predicate)
3169 && !((*pred) (x, mode))))
3170 x = force_reg (mode, x);
3171 emit_insn (GEN_FCN (icode) (x));
3174 if (GET_MODE_SIZE (mode) == rounded_size)
3175 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3178 #ifdef STACK_GROWS_DOWNWARD
3179 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3180 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3182 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3183 GEN_INT (rounded_size));
3185 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3188 dest = gen_rtx_MEM (mode, dest_addr);
3192 set_mem_attributes (dest, type, 1);
3194 if (flag_optimize_sibling_calls)
3195 /* Function incoming arguments may overlap with sibling call
3196 outgoing arguments and we cannot allow reordering of reads
3197 from function arguments with stores to outgoing arguments
3198 of sibling calls. */
3199 set_mem_alias_set (dest, 0);
3201 emit_move_insn (dest, x);
3205 /* Generate code to push X onto the stack, assuming it has mode MODE and
3207 MODE is redundant except when X is a CONST_INT (since they don't
3209 SIZE is an rtx for the size of data to be copied (in bytes),
3210 needed only if X is BLKmode.
3212 ALIGN (in bits) is maximum alignment we can assume.
3214 If PARTIAL and REG are both nonzero, then copy that many of the first
3215 words of X into registers starting with REG, and push the rest of X.
3216 The amount of space pushed is decreased by PARTIAL words,
3217 rounded *down* to a multiple of PARM_BOUNDARY.
3218 REG must be a hard register in this case.
3219 If REG is zero but PARTIAL is not, take any all others actions for an
3220 argument partially in registers, but do not actually load any
3223 EXTRA is the amount in bytes of extra space to leave next to this arg.
3224 This is ignored if an argument block has already been allocated.
3226 On a machine that lacks real push insns, ARGS_ADDR is the address of
3227 the bottom of the argument block for this call. We use indexing off there
3228 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3229 argument block has not been preallocated.
3231 ARGS_SO_FAR is the size of args previously pushed for this call.
3233 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3234 for arguments passed in registers. If nonzero, it will be the number
3235 of bytes required. */
3238 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3239 args_addr, args_so_far, reg_parm_stack_space,
3242 enum machine_mode mode;
3251 int reg_parm_stack_space;
3255 enum direction stack_direction
3256 #ifdef STACK_GROWS_DOWNWARD
3262 /* Decide where to pad the argument: `downward' for below,
3263 `upward' for above, or `none' for don't pad it.
3264 Default is below for small data on big-endian machines; else above. */
3265 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3267 /* Invert direction if stack is post-decrement.
3269 if (STACK_PUSH_CODE == POST_DEC)
3270 if (where_pad != none)
3271 where_pad = (where_pad == downward ? upward : downward);
3273 xinner = x = protect_from_queue (x, 0);
3275 if (mode == BLKmode)
3277 /* Copy a block into the stack, entirely or partially. */
3280 int used = partial * UNITS_PER_WORD;
3281 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3289 /* USED is now the # of bytes we need not copy to the stack
3290 because registers will take care of them. */
3293 xinner = adjust_address (xinner, BLKmode, used);
3295 /* If the partial register-part of the arg counts in its stack size,
3296 skip the part of stack space corresponding to the registers.
3297 Otherwise, start copying to the beginning of the stack space,
3298 by setting SKIP to 0. */
3299 skip = (reg_parm_stack_space == 0) ? 0 : used;
3301 #ifdef PUSH_ROUNDING
3302 /* Do it with several push insns if that doesn't take lots of insns
3303 and if there is no difficulty with push insns that skip bytes
3304 on the stack for alignment purposes. */
3307 && GET_CODE (size) == CONST_INT
3309 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3310 /* Here we avoid the case of a structure whose weak alignment
3311 forces many pushes of a small amount of data,
3312 and such small pushes do rounding that causes trouble. */
3313 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3314 || align >= BIGGEST_ALIGNMENT
3315 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3316 == (align / BITS_PER_UNIT)))
3317 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3319 /* Push padding now if padding above and stack grows down,
3320 or if padding below and stack grows up.
3321 But if space already allocated, this has already been done. */
3322 if (extra && args_addr == 0
3323 && where_pad != none && where_pad != stack_direction)
3324 anti_adjust_stack (GEN_INT (extra));
3326 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3329 #endif /* PUSH_ROUNDING */
3333 /* Otherwise make space on the stack and copy the data
3334 to the address of that space. */
3336 /* Deduct words put into registers from the size we must copy. */
3339 if (GET_CODE (size) == CONST_INT)
3340 size = GEN_INT (INTVAL (size) - used);
3342 size = expand_binop (GET_MODE (size), sub_optab, size,
3343 GEN_INT (used), NULL_RTX, 0,
3347 /* Get the address of the stack space.
3348 In this case, we do not deal with EXTRA separately.
3349 A single stack adjust will do. */
3352 temp = push_block (size, extra, where_pad == downward);
3355 else if (GET_CODE (args_so_far) == CONST_INT)
3356 temp = memory_address (BLKmode,
3357 plus_constant (args_addr,
3358 skip + INTVAL (args_so_far)));
3360 temp = memory_address (BLKmode,
3361 plus_constant (gen_rtx_PLUS (Pmode,
3365 target = gen_rtx_MEM (BLKmode, temp);
3369 set_mem_attributes (target, type, 1);
3370 /* Function incoming arguments may overlap with sibling call
3371 outgoing arguments and we cannot allow reordering of reads
3372 from function arguments with stores to outgoing arguments
3373 of sibling calls. */
3374 set_mem_alias_set (target, 0);
3377 set_mem_align (target, align);
3379 /* TEMP is the address of the block. Copy the data there. */
3380 if (GET_CODE (size) == CONST_INT
3381 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3383 move_by_pieces (target, xinner, INTVAL (size), align);
3388 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3389 enum machine_mode mode;
3391 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3393 mode = GET_MODE_WIDER_MODE (mode))
3395 enum insn_code code = movstr_optab[(int) mode];
3396 insn_operand_predicate_fn pred;
3398 if (code != CODE_FOR_nothing
3399 && ((GET_CODE (size) == CONST_INT
3400 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3401 <= (GET_MODE_MASK (mode) >> 1)))
3402 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3403 && (!(pred = insn_data[(int) code].operand[0].predicate)
3404 || ((*pred) (target, BLKmode)))
3405 && (!(pred = insn_data[(int) code].operand[1].predicate)
3406 || ((*pred) (xinner, BLKmode)))
3407 && (!(pred = insn_data[(int) code].operand[3].predicate)
3408 || ((*pred) (opalign, VOIDmode))))
3410 rtx op2 = convert_to_mode (mode, size, 1);
3411 rtx last = get_last_insn ();
3414 pred = insn_data[(int) code].operand[2].predicate;
3415 if (pred != 0 && ! (*pred) (op2, mode))
3416 op2 = copy_to_mode_reg (mode, op2);
3418 pat = GEN_FCN ((int) code) (target, xinner,
3426 delete_insns_since (last);
3431 if (!ACCUMULATE_OUTGOING_ARGS)
3433 /* If the source is referenced relative to the stack pointer,
3434 copy it to another register to stabilize it. We do not need
3435 to do this if we know that we won't be changing sp. */
3437 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3438 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3439 temp = copy_to_reg (temp);
3442 /* Make inhibit_defer_pop nonzero around the library call
3443 to force it to pop the bcopy-arguments right away. */
3445 #ifdef TARGET_MEM_FUNCTIONS
3446 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3447 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3448 convert_to_mode (TYPE_MODE (sizetype),
3449 size, TREE_UNSIGNED (sizetype)),
3450 TYPE_MODE (sizetype));
3452 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3453 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3454 convert_to_mode (TYPE_MODE (integer_type_node),
3456 TREE_UNSIGNED (integer_type_node)),
3457 TYPE_MODE (integer_type_node));
3462 else if (partial > 0)
3464 /* Scalar partly in registers. */
3466 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3469 /* # words of start of argument
3470 that we must make space for but need not store. */
3471 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3472 int args_offset = INTVAL (args_so_far);
3475 /* Push padding now if padding above and stack grows down,
3476 or if padding below and stack grows up.
3477 But if space already allocated, this has already been done. */
3478 if (extra && args_addr == 0
3479 && where_pad != none && where_pad != stack_direction)
3480 anti_adjust_stack (GEN_INT (extra));
3482 /* If we make space by pushing it, we might as well push
3483 the real data. Otherwise, we can leave OFFSET nonzero
3484 and leave the space uninitialized. */
3488 /* Now NOT_STACK gets the number of words that we don't need to
3489 allocate on the stack. */
3490 not_stack = partial - offset;
3492 /* If the partial register-part of the arg counts in its stack size,
3493 skip the part of stack space corresponding to the registers.
3494 Otherwise, start copying to the beginning of the stack space,
3495 by setting SKIP to 0. */
3496 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3498 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3499 x = validize_mem (force_const_mem (mode, x));
3501 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3502 SUBREGs of such registers are not allowed. */
3503 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3504 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3505 x = copy_to_reg (x);
3507 /* Loop over all the words allocated on the stack for this arg. */
3508 /* We can do it by words, because any scalar bigger than a word
3509 has a size a multiple of a word. */
3510 #ifndef PUSH_ARGS_REVERSED
3511 for (i = not_stack; i < size; i++)
3513 for (i = size - 1; i >= not_stack; i--)
3515 if (i >= not_stack + offset)
3516 emit_push_insn (operand_subword_force (x, i, mode),
3517 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3519 GEN_INT (args_offset + ((i - not_stack + skip)
3521 reg_parm_stack_space, alignment_pad);
3526 rtx target = NULL_RTX;
3529 /* Push padding now if padding above and stack grows down,
3530 or if padding below and stack grows up.
3531 But if space already allocated, this has already been done. */
3532 if (extra && args_addr == 0
3533 && where_pad != none && where_pad != stack_direction)
3534 anti_adjust_stack (GEN_INT (extra));
3536 #ifdef PUSH_ROUNDING
3537 if (args_addr == 0 && PUSH_ARGS)
3538 emit_single_push_insn (mode, x, type);
3542 if (GET_CODE (args_so_far) == CONST_INT)
3544 = memory_address (mode,
3545 plus_constant (args_addr,
3546 INTVAL (args_so_far)));
3548 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3551 dest = gen_rtx_MEM (mode, addr);
3554 set_mem_attributes (dest, type, 1);
3555 /* Function incoming arguments may overlap with sibling call
3556 outgoing arguments and we cannot allow reordering of reads
3557 from function arguments with stores to outgoing arguments
3558 of sibling calls. */
3559 set_mem_alias_set (dest, 0);
3562 emit_move_insn (dest, x);
3568 /* If part should go in registers, copy that part
3569 into the appropriate registers. Do this now, at the end,
3570 since mem-to-mem copies above may do function calls. */
3571 if (partial > 0 && reg != 0)
3573 /* Handle calls that pass values in multiple non-contiguous locations.
3574 The Irix 6 ABI has examples of this. */
3575 if (GET_CODE (reg) == PARALLEL)
3576 emit_group_load (reg, x, -1); /* ??? size? */
3578 move_block_to_reg (REGNO (reg), x, partial, mode);
3581 if (extra && args_addr == 0 && where_pad == stack_direction)
3582 anti_adjust_stack (GEN_INT (extra));
3584 if (alignment_pad && args_addr == 0)
3585 anti_adjust_stack (alignment_pad);
3588 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3596 /* Only registers can be subtargets. */
3597 || GET_CODE (x) != REG
3598 /* If the register is readonly, it can't be set more than once. */
3599 || RTX_UNCHANGING_P (x)
3600 /* Don't use hard regs to avoid extending their life. */
3601 || REGNO (x) < FIRST_PSEUDO_REGISTER
3602 /* Avoid subtargets inside loops,
3603 since they hide some invariant expressions. */
3604 || preserve_subexpressions_p ())
3608 /* Expand an assignment that stores the value of FROM into TO.
3609 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3610 (This may contain a QUEUED rtx;
3611 if the value is constant, this rtx is a constant.)
3612 Otherwise, the returned value is NULL_RTX.
3614 SUGGEST_REG is no longer actually used.
3615 It used to mean, copy the value through a register
3616 and return that register, if that is possible.
3617 We now use WANT_VALUE to decide whether to do this. */
3620 expand_assignment (to, from, want_value, suggest_reg)
3623 int suggest_reg ATTRIBUTE_UNUSED;
3628 /* Don't crash if the lhs of the assignment was erroneous. */
3630 if (TREE_CODE (to) == ERROR_MARK)
3632 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3633 return want_value ? result : NULL_RTX;
3636 /* Assignment of a structure component needs special treatment
3637 if the structure component's rtx is not simply a MEM.
3638 Assignment of an array element at a constant index, and assignment of
3639 an array element in an unaligned packed structure field, has the same
3642 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3643 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3645 enum machine_mode mode1;
3646 HOST_WIDE_INT bitsize, bitpos;
3654 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3655 &unsignedp, &volatilep);
3657 /* If we are going to use store_bit_field and extract_bit_field,
3658 make sure to_rtx will be safe for multiple use. */
3660 if (mode1 == VOIDmode && want_value)
3661 tem = stabilize_reference (tem);
3663 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3667 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3669 if (GET_CODE (to_rtx) != MEM)
3672 if (GET_MODE (offset_rtx) != ptr_mode)
3673 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3675 #ifdef POINTERS_EXTEND_UNSIGNED
3676 if (GET_MODE (offset_rtx) != Pmode)
3677 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3680 /* A constant address in TO_RTX can have VOIDmode, we must not try
3681 to call force_reg for that case. Avoid that case. */
3682 if (GET_CODE (to_rtx) == MEM
3683 && GET_MODE (to_rtx) == BLKmode
3684 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3686 && (bitpos % bitsize) == 0
3687 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3688 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3690 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3694 to_rtx = offset_address (to_rtx, offset_rtx,
3695 highest_pow2_factor (offset));
3698 if (GET_CODE (to_rtx) == MEM)
3700 tree old_expr = MEM_EXPR (to_rtx);
3702 /* If the field is at offset zero, we could have been given the
3703 DECL_RTX of the parent struct. Don't munge it. */
3704 to_rtx = shallow_copy_rtx (to_rtx);
3706 set_mem_attributes (to_rtx, to, 0);
3708 /* If we changed MEM_EXPR, that means we're now referencing
3709 the COMPONENT_REF, which means that MEM_OFFSET must be
3710 relative to that field. But we've not yet reflected BITPOS
3711 in TO_RTX. This will be done in store_field. Adjust for
3712 that by biasing MEM_OFFSET by -bitpos. */
3713 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3714 && (bitpos / BITS_PER_UNIT) != 0)
3715 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3716 - (bitpos / BITS_PER_UNIT)));
3719 /* Deal with volatile and readonly fields. The former is only done
3720 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3721 if (volatilep && GET_CODE (to_rtx) == MEM)
3723 if (to_rtx == orig_to_rtx)
3724 to_rtx = copy_rtx (to_rtx);
3725 MEM_VOLATILE_P (to_rtx) = 1;
3728 if (TREE_CODE (to) == COMPONENT_REF
3729 && TREE_READONLY (TREE_OPERAND (to, 1)))
3731 if (to_rtx == orig_to_rtx)
3732 to_rtx = copy_rtx (to_rtx);
3733 RTX_UNCHANGING_P (to_rtx) = 1;
3736 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3738 if (to_rtx == orig_to_rtx)
3739 to_rtx = copy_rtx (to_rtx);
3740 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3743 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3745 /* Spurious cast for HPUX compiler. */
3746 ? ((enum machine_mode)
3747 TYPE_MODE (TREE_TYPE (to)))
3749 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3751 preserve_temp_slots (result);
3755 /* If the value is meaningful, convert RESULT to the proper mode.
3756 Otherwise, return nothing. */
3757 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3758 TYPE_MODE (TREE_TYPE (from)),
3760 TREE_UNSIGNED (TREE_TYPE (to)))
3764 /* If the rhs is a function call and its value is not an aggregate,
3765 call the function before we start to compute the lhs.
3766 This is needed for correct code for cases such as
3767 val = setjmp (buf) on machines where reference to val
3768 requires loading up part of an address in a separate insn.
3770 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3771 since it might be a promoted variable where the zero- or sign- extension
3772 needs to be done. Handling this in the normal way is safe because no
3773 computation is done before the call. */
3774 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3775 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3776 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3777 && GET_CODE (DECL_RTL (to)) == REG))
3782 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3784 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3786 /* Handle calls that return values in multiple non-contiguous locations.
3787 The Irix 6 ABI has examples of this. */
3788 if (GET_CODE (to_rtx) == PARALLEL)
3789 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3790 else if (GET_MODE (to_rtx) == BLKmode)
3791 emit_block_move (to_rtx, value, expr_size (from));
3794 #ifdef POINTERS_EXTEND_UNSIGNED
3795 if (POINTER_TYPE_P (TREE_TYPE (to))
3796 && GET_MODE (to_rtx) != GET_MODE (value))
3797 value = convert_memory_address (GET_MODE (to_rtx), value);
3799 emit_move_insn (to_rtx, value);
3801 preserve_temp_slots (to_rtx);
3804 return want_value ? to_rtx : NULL_RTX;
3807 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3808 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3811 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3813 /* Don't move directly into a return register. */
3814 if (TREE_CODE (to) == RESULT_DECL
3815 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3820 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3822 if (GET_CODE (to_rtx) == PARALLEL)
3823 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3825 emit_move_insn (to_rtx, temp);
3827 preserve_temp_slots (to_rtx);
3830 return want_value ? to_rtx : NULL_RTX;
3833 /* In case we are returning the contents of an object which overlaps
3834 the place the value is being stored, use a safe function when copying
3835 a value through a pointer into a structure value return block. */
3836 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3837 && current_function_returns_struct
3838 && !current_function_returns_pcc_struct)
3843 size = expr_size (from);
3844 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3846 #ifdef TARGET_MEM_FUNCTIONS
3847 emit_library_call (memmove_libfunc, LCT_NORMAL,
3848 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3849 XEXP (from_rtx, 0), Pmode,
3850 convert_to_mode (TYPE_MODE (sizetype),
3851 size, TREE_UNSIGNED (sizetype)),
3852 TYPE_MODE (sizetype));
3854 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3855 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3856 XEXP (to_rtx, 0), Pmode,
3857 convert_to_mode (TYPE_MODE (integer_type_node),
3858 size, TREE_UNSIGNED (integer_type_node)),
3859 TYPE_MODE (integer_type_node));
3862 preserve_temp_slots (to_rtx);
3865 return want_value ? to_rtx : NULL_RTX;
3868 /* Compute FROM and store the value in the rtx we got. */
3871 result = store_expr (from, to_rtx, want_value);
3872 preserve_temp_slots (result);
3875 return want_value ? result : NULL_RTX;
3878 /* Generate code for computing expression EXP,
3879 and storing the value into TARGET.
3880 TARGET may contain a QUEUED rtx.
3882 If WANT_VALUE is nonzero, return a copy of the value
3883 not in TARGET, so that we can be sure to use the proper
3884 value in a containing expression even if TARGET has something
3885 else stored in it. If possible, we copy the value through a pseudo
3886 and return that pseudo. Or, if the value is constant, we try to
3887 return the constant. In some cases, we return a pseudo
3888 copied *from* TARGET.
3890 If the mode is BLKmode then we may return TARGET itself.
3891 It turns out that in BLKmode it doesn't cause a problem.
3892 because C has no operators that could combine two different
3893 assignments into the same BLKmode object with different values
3894 with no sequence point. Will other languages need this to
3897 If WANT_VALUE is 0, we return NULL, to make sure
3898 to catch quickly any cases where the caller uses the value
3899 and fails to set WANT_VALUE. */
3902 store_expr (exp, target, want_value)
3908 int dont_return_target = 0;
3909 int dont_store_target = 0;
3911 if (TREE_CODE (exp) == COMPOUND_EXPR)
3913 /* Perform first part of compound expression, then assign from second
3915 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3917 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3919 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3921 /* For conditional expression, get safe form of the target. Then
3922 test the condition, doing the appropriate assignment on either
3923 side. This avoids the creation of unnecessary temporaries.
3924 For non-BLKmode, it is more efficient not to do this. */
3926 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3929 target = protect_from_queue (target, 1);
3931 do_pending_stack_adjust ();
3933 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3934 start_cleanup_deferral ();
3935 store_expr (TREE_OPERAND (exp, 1), target, 0);
3936 end_cleanup_deferral ();
3938 emit_jump_insn (gen_jump (lab2));
3941 start_cleanup_deferral ();
3942 store_expr (TREE_OPERAND (exp, 2), target, 0);
3943 end_cleanup_deferral ();
3948 return want_value ? target : NULL_RTX;
3950 else if (queued_subexp_p (target))
3951 /* If target contains a postincrement, let's not risk
3952 using it as the place to generate the rhs. */
3954 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3956 /* Expand EXP into a new pseudo. */
3957 temp = gen_reg_rtx (GET_MODE (target));
3958 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3961 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3963 /* If target is volatile, ANSI requires accessing the value
3964 *from* the target, if it is accessed. So make that happen.
3965 In no case return the target itself. */
3966 if (! MEM_VOLATILE_P (target) && want_value)
3967 dont_return_target = 1;
3969 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3970 && GET_MODE (target) != BLKmode)
3971 /* If target is in memory and caller wants value in a register instead,
3972 arrange that. Pass TARGET as target for expand_expr so that,
3973 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3974 We know expand_expr will not use the target in that case.
3975 Don't do this if TARGET is volatile because we are supposed
3976 to write it and then read it. */
3978 temp = expand_expr (exp, target, GET_MODE (target), 0);
3979 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3981 /* If TEMP is already in the desired TARGET, only copy it from
3982 memory and don't store it there again. */
3984 || (rtx_equal_p (temp, target)
3985 && ! side_effects_p (temp) && ! side_effects_p (target)))
3986 dont_store_target = 1;
3987 temp = copy_to_reg (temp);
3989 dont_return_target = 1;
3991 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3992 /* If this is an scalar in a register that is stored in a wider mode
3993 than the declared mode, compute the result into its declared mode
3994 and then convert to the wider mode. Our value is the computed
3997 rtx inner_target = 0;
3999 /* If we don't want a value, we can do the conversion inside EXP,
4000 which will often result in some optimizations. Do the conversion
4001 in two steps: first change the signedness, if needed, then
4002 the extend. But don't do this if the type of EXP is a subtype
4003 of something else since then the conversion might involve
4004 more than just converting modes. */
4005 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4006 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4008 if (TREE_UNSIGNED (TREE_TYPE (exp))
4009 != SUBREG_PROMOTED_UNSIGNED_P (target))
4012 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4016 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4017 SUBREG_PROMOTED_UNSIGNED_P (target)),
4020 inner_target = SUBREG_REG (target);
4023 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4025 /* If TEMP is a volatile MEM and we want a result value, make
4026 the access now so it gets done only once. Likewise if
4027 it contains TARGET. */
4028 if (GET_CODE (temp) == MEM && want_value
4029 && (MEM_VOLATILE_P (temp)
4030 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4031 temp = copy_to_reg (temp);
4033 /* If TEMP is a VOIDmode constant, use convert_modes to make
4034 sure that we properly convert it. */
4035 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4037 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4038 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4039 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4040 GET_MODE (target), temp,
4041 SUBREG_PROMOTED_UNSIGNED_P (target));
4044 convert_move (SUBREG_REG (target), temp,
4045 SUBREG_PROMOTED_UNSIGNED_P (target));
4047 /* If we promoted a constant, change the mode back down to match
4048 target. Otherwise, the caller might get confused by a result whose
4049 mode is larger than expected. */
4051 if (want_value && GET_MODE (temp) != GET_MODE (target))
4053 if (GET_MODE (temp) != VOIDmode)
4055 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4056 SUBREG_PROMOTED_VAR_P (temp) = 1;
4057 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4058 SUBREG_PROMOTED_UNSIGNED_P (target));
4061 temp = convert_modes (GET_MODE (target),
4062 GET_MODE (SUBREG_REG (target)),
4063 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4066 return want_value ? temp : NULL_RTX;
4070 temp = expand_expr (exp, target, GET_MODE (target), 0);
4071 /* Return TARGET if it's a specified hardware register.
4072 If TARGET is a volatile mem ref, either return TARGET
4073 or return a reg copied *from* TARGET; ANSI requires this.
4075 Otherwise, if TEMP is not TARGET, return TEMP
4076 if it is constant (for efficiency),
4077 or if we really want the correct value. */
4078 if (!(target && GET_CODE (target) == REG
4079 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4080 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4081 && ! rtx_equal_p (temp, target)
4082 && (CONSTANT_P (temp) || want_value))
4083 dont_return_target = 1;
4086 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4087 the same as that of TARGET, adjust the constant. This is needed, for
4088 example, in case it is a CONST_DOUBLE and we want only a word-sized
4090 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4091 && TREE_CODE (exp) != ERROR_MARK
4092 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4093 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4094 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4096 /* If value was not generated in the target, store it there.
4097 Convert the value to TARGET's type first if necessary.
4098 If TEMP and TARGET compare equal according to rtx_equal_p, but
4099 one or both of them are volatile memory refs, we have to distinguish
4101 - expand_expr has used TARGET. In this case, we must not generate
4102 another copy. This can be detected by TARGET being equal according
4104 - expand_expr has not used TARGET - that means that the source just
4105 happens to have the same RTX form. Since temp will have been created
4106 by expand_expr, it will compare unequal according to == .
4107 We must generate a copy in this case, to reach the correct number
4108 of volatile memory references. */
4110 if ((! rtx_equal_p (temp, target)
4111 || (temp != target && (side_effects_p (temp)
4112 || side_effects_p (target))))
4113 && TREE_CODE (exp) != ERROR_MARK
4114 && ! dont_store_target)
4116 target = protect_from_queue (target, 1);
4117 if (GET_MODE (temp) != GET_MODE (target)
4118 && GET_MODE (temp) != VOIDmode)
4120 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4121 if (dont_return_target)
4123 /* In this case, we will return TEMP,
4124 so make sure it has the proper mode.
4125 But don't forget to store the value into TARGET. */
4126 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4127 emit_move_insn (target, temp);
4130 convert_move (target, temp, unsignedp);
4133 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4135 /* Handle copying a string constant into an array. The string
4136 constant may be shorter than the array. So copy just the string's
4137 actual length, and clear the rest. First get the size of the data
4138 type of the string, which is actually the size of the target. */
4139 rtx size = expr_size (exp);
4141 if (GET_CODE (size) == CONST_INT
4142 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4143 emit_block_move (target, temp, size);
4146 /* Compute the size of the data to copy from the string. */
4148 = size_binop (MIN_EXPR,
4149 make_tree (sizetype, size),
4150 size_int (TREE_STRING_LENGTH (exp)));
4151 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4155 /* Copy that much. */
4156 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4157 emit_block_move (target, temp, copy_size_rtx);
4159 /* Figure out how much is left in TARGET that we have to clear.
4160 Do all calculations in ptr_mode. */
4161 if (GET_CODE (copy_size_rtx) == CONST_INT)
4163 size = plus_constant (size, -INTVAL (copy_size_rtx));
4164 target = adjust_address (target, BLKmode,
4165 INTVAL (copy_size_rtx));
4169 size = expand_binop (ptr_mode, sub_optab, size,
4170 copy_size_rtx, NULL_RTX, 0,
4173 #ifdef POINTERS_EXTEND_UNSIGNED
4174 if (GET_MODE (copy_size_rtx) != Pmode)
4175 copy_size_rtx = convert_memory_address (Pmode,
4179 target = offset_address (target, copy_size_rtx,
4180 highest_pow2_factor (copy_size));
4181 label = gen_label_rtx ();
4182 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4183 GET_MODE (size), 0, label);
4186 if (size != const0_rtx)
4187 clear_storage (target, size);
4193 /* Handle calls that return values in multiple non-contiguous locations.
4194 The Irix 6 ABI has examples of this. */
4195 else if (GET_CODE (target) == PARALLEL)
4196 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4197 else if (GET_MODE (temp) == BLKmode)
4198 emit_block_move (target, temp, expr_size (exp));
4200 emit_move_insn (target, temp);
4203 /* If we don't want a value, return NULL_RTX. */
4207 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4208 ??? The latter test doesn't seem to make sense. */
4209 else if (dont_return_target && GET_CODE (temp) != MEM)
4212 /* Return TARGET itself if it is a hard register. */
4213 else if (want_value && GET_MODE (target) != BLKmode
4214 && ! (GET_CODE (target) == REG
4215 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4216 return copy_to_reg (target);
4222 /* Return 1 if EXP just contains zeros. */
4230 switch (TREE_CODE (exp))
4234 case NON_LVALUE_EXPR:
4235 case VIEW_CONVERT_EXPR:
4236 return is_zeros_p (TREE_OPERAND (exp, 0));
4239 return integer_zerop (exp);
4243 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4246 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4249 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4250 elt = TREE_CHAIN (elt))
4251 if (!is_zeros_p (TREE_VALUE (elt)))
4257 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4258 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4259 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4260 if (! is_zeros_p (TREE_VALUE (elt)))
4270 /* Return 1 if EXP contains mostly (3/4) zeros. */
4273 mostly_zeros_p (exp)
4276 if (TREE_CODE (exp) == CONSTRUCTOR)
4278 int elts = 0, zeros = 0;
4279 tree elt = CONSTRUCTOR_ELTS (exp);
4280 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4282 /* If there are no ranges of true bits, it is all zero. */
4283 return elt == NULL_TREE;
4285 for (; elt; elt = TREE_CHAIN (elt))
4287 /* We do not handle the case where the index is a RANGE_EXPR,
4288 so the statistic will be somewhat inaccurate.
4289 We do make a more accurate count in store_constructor itself,
4290 so since this function is only used for nested array elements,
4291 this should be close enough. */
4292 if (mostly_zeros_p (TREE_VALUE (elt)))
4297 return 4 * zeros >= 3 * elts;
4300 return is_zeros_p (exp);
4303 /* Helper function for store_constructor.
4304 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4305 TYPE is the type of the CONSTRUCTOR, not the element type.
4306 CLEARED is as for store_constructor.
4307 ALIAS_SET is the alias set to use for any stores.
4309 This provides a recursive shortcut back to store_constructor when it isn't
4310 necessary to go through store_field. This is so that we can pass through
4311 the cleared field to let store_constructor know that we may not have to
4312 clear a substructure if the outer structure has already been cleared. */
4315 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4318 unsigned HOST_WIDE_INT bitsize;
4319 HOST_WIDE_INT bitpos;
4320 enum machine_mode mode;
4325 if (TREE_CODE (exp) == CONSTRUCTOR
4326 && bitpos % BITS_PER_UNIT == 0
4327 /* If we have a non-zero bitpos for a register target, then we just
4328 let store_field do the bitfield handling. This is unlikely to
4329 generate unnecessary clear instructions anyways. */
4330 && (bitpos == 0 || GET_CODE (target) == MEM))
4332 if (GET_CODE (target) == MEM)
4334 = adjust_address (target,
4335 GET_MODE (target) == BLKmode
4337 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4338 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4341 /* Update the alias set, if required. */
4342 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4343 && MEM_ALIAS_SET (target) != 0)
4345 target = copy_rtx (target);
4346 set_mem_alias_set (target, alias_set);
4349 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4352 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4356 /* Store the value of constructor EXP into the rtx TARGET.
4357 TARGET is either a REG or a MEM; we know it cannot conflict, since
4358 safe_from_p has been called.
4359 CLEARED is true if TARGET is known to have been zero'd.
4360 SIZE is the number of bytes of TARGET we are allowed to modify: this
4361 may not be the same as the size of EXP if we are assigning to a field
4362 which has been packed to exclude padding bits. */
4365 store_constructor (exp, target, cleared, size)
4371 tree type = TREE_TYPE (exp);
4372 #ifdef WORD_REGISTER_OPERATIONS
4373 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4376 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4377 || TREE_CODE (type) == QUAL_UNION_TYPE)
4381 /* We either clear the aggregate or indicate the value is dead. */
4382 if ((TREE_CODE (type) == UNION_TYPE
4383 || TREE_CODE (type) == QUAL_UNION_TYPE)
4385 && ! CONSTRUCTOR_ELTS (exp))
4386 /* If the constructor is empty, clear the union. */
4388 clear_storage (target, expr_size (exp));
4392 /* If we are building a static constructor into a register,
4393 set the initial value as zero so we can fold the value into
4394 a constant. But if more than one register is involved,
4395 this probably loses. */
4396 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4397 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4399 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4403 /* If the constructor has fewer fields than the structure
4404 or if we are initializing the structure to mostly zeros,
4405 clear the whole structure first. Don't do this if TARGET is a
4406 register whose mode size isn't equal to SIZE since clear_storage
4407 can't handle this case. */
4408 else if (! cleared && size > 0
4409 && ((list_length (CONSTRUCTOR_ELTS (exp))
4410 != fields_length (type))
4411 || mostly_zeros_p (exp))
4412 && (GET_CODE (target) != REG
4413 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4416 clear_storage (target, GEN_INT (size));
4421 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4423 /* Store each element of the constructor into
4424 the corresponding field of TARGET. */
4426 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4428 tree field = TREE_PURPOSE (elt);
4429 tree value = TREE_VALUE (elt);
4430 enum machine_mode mode;
4431 HOST_WIDE_INT bitsize;
4432 HOST_WIDE_INT bitpos = 0;
4435 rtx to_rtx = target;
4437 /* Just ignore missing fields.
4438 We cleared the whole structure, above,
4439 if any fields are missing. */
4443 if (cleared && is_zeros_p (value))
4446 if (host_integerp (DECL_SIZE (field), 1))
4447 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4451 unsignedp = TREE_UNSIGNED (field);
4452 mode = DECL_MODE (field);
4453 if (DECL_BIT_FIELD (field))
4456 offset = DECL_FIELD_OFFSET (field);
4457 if (host_integerp (offset, 0)
4458 && host_integerp (bit_position (field), 0))
4460 bitpos = int_bit_position (field);
4464 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4470 if (contains_placeholder_p (offset))
4471 offset = build (WITH_RECORD_EXPR, sizetype,
4472 offset, make_tree (TREE_TYPE (exp), target));
4474 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4475 if (GET_CODE (to_rtx) != MEM)
4478 if (GET_MODE (offset_rtx) != ptr_mode)
4479 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4481 #ifdef POINTERS_EXTEND_UNSIGNED
4482 if (GET_MODE (offset_rtx) != Pmode)
4483 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4486 to_rtx = offset_address (to_rtx, offset_rtx,
4487 highest_pow2_factor (offset));
4490 if (TREE_READONLY (field))
4492 if (GET_CODE (to_rtx) == MEM)
4493 to_rtx = copy_rtx (to_rtx);
4495 RTX_UNCHANGING_P (to_rtx) = 1;
4498 #ifdef WORD_REGISTER_OPERATIONS
4499 /* If this initializes a field that is smaller than a word, at the
4500 start of a word, try to widen it to a full word.
4501 This special case allows us to output C++ member function
4502 initializations in a form that the optimizers can understand. */
4503 if (GET_CODE (target) == REG
4504 && bitsize < BITS_PER_WORD
4505 && bitpos % BITS_PER_WORD == 0
4506 && GET_MODE_CLASS (mode) == MODE_INT
4507 && TREE_CODE (value) == INTEGER_CST
4509 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4511 tree type = TREE_TYPE (value);
4513 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4515 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4516 value = convert (type, value);
4519 if (BYTES_BIG_ENDIAN)
4521 = fold (build (LSHIFT_EXPR, type, value,
4522 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4523 bitsize = BITS_PER_WORD;
4528 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4529 && DECL_NONADDRESSABLE_P (field))
4531 to_rtx = copy_rtx (to_rtx);
4532 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4535 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4536 value, type, cleared,
4537 get_alias_set (TREE_TYPE (field)));
4540 else if (TREE_CODE (type) == ARRAY_TYPE
4541 || TREE_CODE (type) == VECTOR_TYPE)
4546 tree domain = TYPE_DOMAIN (type);
4547 tree elttype = TREE_TYPE (type);
4549 HOST_WIDE_INT minelt = 0;
4550 HOST_WIDE_INT maxelt = 0;
4552 /* Vectors are like arrays, but the domain is stored via an array
4554 if (TREE_CODE (type) == VECTOR_TYPE)
4556 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4557 the same field as TYPE_DOMAIN, we are not guaranteed that
4559 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4560 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4563 const_bounds_p = (TYPE_MIN_VALUE (domain)
4564 && TYPE_MAX_VALUE (domain)
4565 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4566 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4568 /* If we have constant bounds for the range of the type, get them. */
4571 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4572 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4575 /* If the constructor has fewer elements than the array,
4576 clear the whole array first. Similarly if this is
4577 static constructor of a non-BLKmode object. */
4578 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4582 HOST_WIDE_INT count = 0, zero_count = 0;
4583 need_to_clear = ! const_bounds_p;
4585 /* This loop is a more accurate version of the loop in
4586 mostly_zeros_p (it handles RANGE_EXPR in an index).
4587 It is also needed to check for missing elements. */
4588 for (elt = CONSTRUCTOR_ELTS (exp);
4589 elt != NULL_TREE && ! need_to_clear;
4590 elt = TREE_CHAIN (elt))
4592 tree index = TREE_PURPOSE (elt);
4593 HOST_WIDE_INT this_node_count;
4595 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4597 tree lo_index = TREE_OPERAND (index, 0);
4598 tree hi_index = TREE_OPERAND (index, 1);
4600 if (! host_integerp (lo_index, 1)
4601 || ! host_integerp (hi_index, 1))
4607 this_node_count = (tree_low_cst (hi_index, 1)
4608 - tree_low_cst (lo_index, 1) + 1);
4611 this_node_count = 1;
4613 count += this_node_count;
4614 if (mostly_zeros_p (TREE_VALUE (elt)))
4615 zero_count += this_node_count;
4618 /* Clear the entire array first if there are any missing elements,
4619 or if the incidence of zero elements is >= 75%. */
4621 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4625 if (need_to_clear && size > 0)
4630 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4632 clear_storage (target, GEN_INT (size));
4636 else if (REG_P (target))
4637 /* Inform later passes that the old value is dead. */
4638 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4640 /* Store each element of the constructor into
4641 the corresponding element of TARGET, determined
4642 by counting the elements. */
4643 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4645 elt = TREE_CHAIN (elt), i++)
4647 enum machine_mode mode;
4648 HOST_WIDE_INT bitsize;
4649 HOST_WIDE_INT bitpos;
4651 tree value = TREE_VALUE (elt);
4652 tree index = TREE_PURPOSE (elt);
4653 rtx xtarget = target;
4655 if (cleared && is_zeros_p (value))
4658 unsignedp = TREE_UNSIGNED (elttype);
4659 mode = TYPE_MODE (elttype);
4660 if (mode == BLKmode)
4661 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4662 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4665 bitsize = GET_MODE_BITSIZE (mode);
4667 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4669 tree lo_index = TREE_OPERAND (index, 0);
4670 tree hi_index = TREE_OPERAND (index, 1);
4671 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4672 struct nesting *loop;
4673 HOST_WIDE_INT lo, hi, count;
4676 /* If the range is constant and "small", unroll the loop. */
4678 && host_integerp (lo_index, 0)
4679 && host_integerp (hi_index, 0)
4680 && (lo = tree_low_cst (lo_index, 0),
4681 hi = tree_low_cst (hi_index, 0),
4682 count = hi - lo + 1,
4683 (GET_CODE (target) != MEM
4685 || (host_integerp (TYPE_SIZE (elttype), 1)
4686 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4689 lo -= minelt; hi -= minelt;
4690 for (; lo <= hi; lo++)
4692 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4694 if (GET_CODE (target) == MEM
4695 && !MEM_KEEP_ALIAS_SET_P (target)
4696 && TREE_CODE (type) == ARRAY_TYPE
4697 && TYPE_NONALIASED_COMPONENT (type))
4699 target = copy_rtx (target);
4700 MEM_KEEP_ALIAS_SET_P (target) = 1;
4703 store_constructor_field
4704 (target, bitsize, bitpos, mode, value, type, cleared,
4705 get_alias_set (elttype));
4710 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4711 loop_top = gen_label_rtx ();
4712 loop_end = gen_label_rtx ();
4714 unsignedp = TREE_UNSIGNED (domain);
4716 index = build_decl (VAR_DECL, NULL_TREE, domain);
4719 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4721 SET_DECL_RTL (index, index_r);
4722 if (TREE_CODE (value) == SAVE_EXPR
4723 && SAVE_EXPR_RTL (value) == 0)
4725 /* Make sure value gets expanded once before the
4727 expand_expr (value, const0_rtx, VOIDmode, 0);
4730 store_expr (lo_index, index_r, 0);
4731 loop = expand_start_loop (0);
4733 /* Assign value to element index. */
4735 = convert (ssizetype,
4736 fold (build (MINUS_EXPR, TREE_TYPE (index),
4737 index, TYPE_MIN_VALUE (domain))));
4738 position = size_binop (MULT_EXPR, position,
4740 TYPE_SIZE_UNIT (elttype)));
4742 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4743 xtarget = offset_address (target, pos_rtx,
4744 highest_pow2_factor (position));
4745 xtarget = adjust_address (xtarget, mode, 0);
4746 if (TREE_CODE (value) == CONSTRUCTOR)
4747 store_constructor (value, xtarget, cleared,
4748 bitsize / BITS_PER_UNIT);
4750 store_expr (value, xtarget, 0);
4752 expand_exit_loop_if_false (loop,
4753 build (LT_EXPR, integer_type_node,
4756 expand_increment (build (PREINCREMENT_EXPR,
4758 index, integer_one_node), 0, 0);
4760 emit_label (loop_end);
4763 else if ((index != 0 && ! host_integerp (index, 0))
4764 || ! host_integerp (TYPE_SIZE (elttype), 1))
4769 index = ssize_int (1);
4772 index = convert (ssizetype,
4773 fold (build (MINUS_EXPR, index,
4774 TYPE_MIN_VALUE (domain))));
4776 position = size_binop (MULT_EXPR, index,
4778 TYPE_SIZE_UNIT (elttype)));
4779 xtarget = offset_address (target,
4780 expand_expr (position, 0, VOIDmode, 0),
4781 highest_pow2_factor (position));
4782 xtarget = adjust_address (xtarget, mode, 0);
4783 store_expr (value, xtarget, 0);
4788 bitpos = ((tree_low_cst (index, 0) - minelt)
4789 * tree_low_cst (TYPE_SIZE (elttype), 1));
4791 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4793 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4794 && TREE_CODE (type) == ARRAY_TYPE
4795 && TYPE_NONALIASED_COMPONENT (type))
4797 target = copy_rtx (target);
4798 MEM_KEEP_ALIAS_SET_P (target) = 1;
4801 store_constructor_field (target, bitsize, bitpos, mode, value,
4802 type, cleared, get_alias_set (elttype));
4808 /* Set constructor assignments. */
4809 else if (TREE_CODE (type) == SET_TYPE)
4811 tree elt = CONSTRUCTOR_ELTS (exp);
4812 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4813 tree domain = TYPE_DOMAIN (type);
4814 tree domain_min, domain_max, bitlength;
4816 /* The default implementation strategy is to extract the constant
4817 parts of the constructor, use that to initialize the target,
4818 and then "or" in whatever non-constant ranges we need in addition.
4820 If a large set is all zero or all ones, it is
4821 probably better to set it using memset (if available) or bzero.
4822 Also, if a large set has just a single range, it may also be
4823 better to first clear all the first clear the set (using
4824 bzero/memset), and set the bits we want. */
4826 /* Check for all zeros. */
4827 if (elt == NULL_TREE && size > 0)
4830 clear_storage (target, GEN_INT (size));
4834 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4835 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4836 bitlength = size_binop (PLUS_EXPR,
4837 size_diffop (domain_max, domain_min),
4840 nbits = tree_low_cst (bitlength, 1);
4842 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4843 are "complicated" (more than one range), initialize (the
4844 constant parts) by copying from a constant. */
4845 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4846 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4848 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4849 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4850 char *bit_buffer = (char *) alloca (nbits);
4851 HOST_WIDE_INT word = 0;
4852 unsigned int bit_pos = 0;
4853 unsigned int ibit = 0;
4854 unsigned int offset = 0; /* In bytes from beginning of set. */
4856 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4859 if (bit_buffer[ibit])
4861 if (BYTES_BIG_ENDIAN)
4862 word |= (1 << (set_word_size - 1 - bit_pos));
4864 word |= 1 << bit_pos;
4868 if (bit_pos >= set_word_size || ibit == nbits)
4870 if (word != 0 || ! cleared)
4872 rtx datum = GEN_INT (word);
4875 /* The assumption here is that it is safe to use
4876 XEXP if the set is multi-word, but not if
4877 it's single-word. */
4878 if (GET_CODE (target) == MEM)
4879 to_rtx = adjust_address (target, mode, offset);
4880 else if (offset == 0)
4884 emit_move_insn (to_rtx, datum);
4891 offset += set_word_size / BITS_PER_UNIT;
4896 /* Don't bother clearing storage if the set is all ones. */
4897 if (TREE_CHAIN (elt) != NULL_TREE
4898 || (TREE_PURPOSE (elt) == NULL_TREE
4900 : ( ! host_integerp (TREE_VALUE (elt), 0)
4901 || ! host_integerp (TREE_PURPOSE (elt), 0)
4902 || (tree_low_cst (TREE_VALUE (elt), 0)
4903 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4904 != (HOST_WIDE_INT) nbits))))
4905 clear_storage (target, expr_size (exp));
4907 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4909 /* Start of range of element or NULL. */
4910 tree startbit = TREE_PURPOSE (elt);
4911 /* End of range of element, or element value. */
4912 tree endbit = TREE_VALUE (elt);
4913 #ifdef TARGET_MEM_FUNCTIONS
4914 HOST_WIDE_INT startb, endb;
4916 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4918 bitlength_rtx = expand_expr (bitlength,
4919 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4921 /* Handle non-range tuple element like [ expr ]. */
4922 if (startbit == NULL_TREE)
4924 startbit = save_expr (endbit);
4928 startbit = convert (sizetype, startbit);
4929 endbit = convert (sizetype, endbit);
4930 if (! integer_zerop (domain_min))
4932 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4933 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4935 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4936 EXPAND_CONST_ADDRESS);
4937 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4938 EXPAND_CONST_ADDRESS);
4944 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4947 emit_move_insn (targetx, target);
4950 else if (GET_CODE (target) == MEM)
4955 #ifdef TARGET_MEM_FUNCTIONS
4956 /* Optimization: If startbit and endbit are
4957 constants divisible by BITS_PER_UNIT,
4958 call memset instead. */
4959 if (TREE_CODE (startbit) == INTEGER_CST
4960 && TREE_CODE (endbit) == INTEGER_CST
4961 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4962 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4964 emit_library_call (memset_libfunc, LCT_NORMAL,
4966 plus_constant (XEXP (targetx, 0),
4967 startb / BITS_PER_UNIT),
4969 constm1_rtx, TYPE_MODE (integer_type_node),
4970 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4971 TYPE_MODE (sizetype));
4975 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4976 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4977 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4978 startbit_rtx, TYPE_MODE (sizetype),
4979 endbit_rtx, TYPE_MODE (sizetype));
4982 emit_move_insn (target, targetx);
4990 /* Store the value of EXP (an expression tree)
4991 into a subfield of TARGET which has mode MODE and occupies
4992 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4993 If MODE is VOIDmode, it means that we are storing into a bit-field.
4995 If VALUE_MODE is VOIDmode, return nothing in particular.
4996 UNSIGNEDP is not used in this case.
4998 Otherwise, return an rtx for the value stored. This rtx
4999 has mode VALUE_MODE if that is convenient to do.
5000 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5002 TYPE is the type of the underlying object,
5004 ALIAS_SET is the alias set for the destination. This value will
5005 (in general) be different from that for TARGET, since TARGET is a
5006 reference to the containing structure. */
5009 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5012 HOST_WIDE_INT bitsize;
5013 HOST_WIDE_INT bitpos;
5014 enum machine_mode mode;
5016 enum machine_mode value_mode;
5021 HOST_WIDE_INT width_mask = 0;
5023 if (TREE_CODE (exp) == ERROR_MARK)
5026 /* If we have nothing to store, do nothing unless the expression has
5029 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5030 else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT)
5031 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5033 /* If we are storing into an unaligned field of an aligned union that is
5034 in a register, we may have the mode of TARGET being an integer mode but
5035 MODE == BLKmode. In that case, get an aligned object whose size and
5036 alignment are the same as TARGET and store TARGET into it (we can avoid
5037 the store if the field being stored is the entire width of TARGET). Then
5038 call ourselves recursively to store the field into a BLKmode version of
5039 that object. Finally, load from the object into TARGET. This is not
5040 very efficient in general, but should only be slightly more expensive
5041 than the otherwise-required unaligned accesses. Perhaps this can be
5042 cleaned up later. */
5045 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5049 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5051 rtx blk_object = adjust_address (object, BLKmode, 0);
5053 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5054 emit_move_insn (object, target);
5056 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5059 emit_move_insn (target, object);
5061 /* We want to return the BLKmode version of the data. */
5065 if (GET_CODE (target) == CONCAT)
5067 /* We're storing into a struct containing a single __complex. */
5071 return store_expr (exp, target, 0);
5074 /* If the structure is in a register or if the component
5075 is a bit field, we cannot use addressing to access it.
5076 Use bit-field techniques or SUBREG to store in it. */
5078 if (mode == VOIDmode
5079 || (mode != BLKmode && ! direct_store[(int) mode]
5080 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5081 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5082 || GET_CODE (target) == REG
5083 || GET_CODE (target) == SUBREG
5084 /* If the field isn't aligned enough to store as an ordinary memref,
5085 store it as a bit field. */
5086 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5087 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5088 || bitpos % GET_MODE_ALIGNMENT (mode)))
5089 /* If the RHS and field are a constant size and the size of the
5090 RHS isn't the same size as the bitfield, we must use bitfield
5093 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5094 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5096 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5098 /* If BITSIZE is narrower than the size of the type of EXP
5099 we will be narrowing TEMP. Normally, what's wanted are the
5100 low-order bits. However, if EXP's type is a record and this is
5101 big-endian machine, we want the upper BITSIZE bits. */
5102 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5103 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5104 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5105 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5106 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5110 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5112 if (mode != VOIDmode && mode != BLKmode
5113 && mode != TYPE_MODE (TREE_TYPE (exp)))
5114 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5116 /* If the modes of TARGET and TEMP are both BLKmode, both
5117 must be in memory and BITPOS must be aligned on a byte
5118 boundary. If so, we simply do a block copy. */
5119 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5121 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5122 || bitpos % BITS_PER_UNIT != 0)
5125 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5126 emit_block_move (target, temp,
5127 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5130 return value_mode == VOIDmode ? const0_rtx : target;
5133 /* Store the value in the bitfield. */
5134 store_bit_field (target, bitsize, bitpos, mode, temp,
5135 int_size_in_bytes (type));
5137 if (value_mode != VOIDmode)
5139 /* The caller wants an rtx for the value.
5140 If possible, avoid refetching from the bitfield itself. */
5142 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5145 enum machine_mode tmode;
5147 tmode = GET_MODE (temp);
5148 if (tmode == VOIDmode)
5152 return expand_and (tmode, temp,
5153 GEN_INT (trunc_int_for_mode (width_mask,
5157 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5158 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5159 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5162 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5163 NULL_RTX, value_mode, VOIDmode,
5164 int_size_in_bytes (type));
5170 rtx addr = XEXP (target, 0);
5171 rtx to_rtx = target;
5173 /* If a value is wanted, it must be the lhs;
5174 so make the address stable for multiple use. */
5176 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5177 && ! CONSTANT_ADDRESS_P (addr)
5178 /* A frame-pointer reference is already stable. */
5179 && ! (GET_CODE (addr) == PLUS
5180 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5181 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5182 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5183 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5185 /* Now build a reference to just the desired component. */
5187 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5189 if (to_rtx == target)
5190 to_rtx = copy_rtx (to_rtx);
5192 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5193 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5194 set_mem_alias_set (to_rtx, alias_set);
5196 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5200 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5201 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5202 codes and find the ultimate containing object, which we return.
5204 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5205 bit position, and *PUNSIGNEDP to the signedness of the field.
5206 If the position of the field is variable, we store a tree
5207 giving the variable offset (in units) in *POFFSET.
5208 This offset is in addition to the bit position.
5209 If the position is not variable, we store 0 in *POFFSET.
5211 If any of the extraction expressions is volatile,
5212 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5214 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5215 is a mode that can be used to access the field. In that case, *PBITSIZE
5218 If the field describes a variable-sized object, *PMODE is set to
5219 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5220 this case, but the address of the object can be found. */
5223 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5224 punsignedp, pvolatilep)
5226 HOST_WIDE_INT *pbitsize;
5227 HOST_WIDE_INT *pbitpos;
5229 enum machine_mode *pmode;
5234 enum machine_mode mode = VOIDmode;
5235 tree offset = size_zero_node;
5236 tree bit_offset = bitsize_zero_node;
5237 tree placeholder_ptr = 0;
5240 /* First get the mode, signedness, and size. We do this from just the
5241 outermost expression. */
5242 if (TREE_CODE (exp) == COMPONENT_REF)
5244 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5245 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5246 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5248 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5250 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5252 size_tree = TREE_OPERAND (exp, 1);
5253 *punsignedp = TREE_UNSIGNED (exp);
5257 mode = TYPE_MODE (TREE_TYPE (exp));
5258 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5260 if (mode == BLKmode)
5261 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5263 *pbitsize = GET_MODE_BITSIZE (mode);
5268 if (! host_integerp (size_tree, 1))
5269 mode = BLKmode, *pbitsize = -1;
5271 *pbitsize = tree_low_cst (size_tree, 1);
5274 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5275 and find the ultimate containing object. */
5278 if (TREE_CODE (exp) == BIT_FIELD_REF)
5279 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5280 else if (TREE_CODE (exp) == COMPONENT_REF)
5282 tree field = TREE_OPERAND (exp, 1);
5283 tree this_offset = DECL_FIELD_OFFSET (field);
5285 /* If this field hasn't been filled in yet, don't go
5286 past it. This should only happen when folding expressions
5287 made during type construction. */
5288 if (this_offset == 0)
5290 else if (! TREE_CONSTANT (this_offset)
5291 && contains_placeholder_p (this_offset))
5292 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5294 offset = size_binop (PLUS_EXPR, offset, this_offset);
5295 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5296 DECL_FIELD_BIT_OFFSET (field));
5298 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5301 else if (TREE_CODE (exp) == ARRAY_REF
5302 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5304 tree index = TREE_OPERAND (exp, 1);
5305 tree array = TREE_OPERAND (exp, 0);
5306 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5307 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5308 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5310 /* We assume all arrays have sizes that are a multiple of a byte.
5311 First subtract the lower bound, if any, in the type of the
5312 index, then convert to sizetype and multiply by the size of the
5314 if (low_bound != 0 && ! integer_zerop (low_bound))
5315 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5318 /* If the index has a self-referential type, pass it to a
5319 WITH_RECORD_EXPR; if the component size is, pass our
5320 component to one. */
5321 if (! TREE_CONSTANT (index)
5322 && contains_placeholder_p (index))
5323 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5324 if (! TREE_CONSTANT (unit_size)
5325 && contains_placeholder_p (unit_size))
5326 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5328 offset = size_binop (PLUS_EXPR, offset,
5329 size_binop (MULT_EXPR,
5330 convert (sizetype, index),
5334 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5336 tree new = find_placeholder (exp, &placeholder_ptr);
5338 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5339 We might have been called from tree optimization where we
5340 haven't set up an object yet. */
5348 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5349 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5350 && ! ((TREE_CODE (exp) == NOP_EXPR
5351 || TREE_CODE (exp) == CONVERT_EXPR)
5352 && (TYPE_MODE (TREE_TYPE (exp))
5353 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5356 /* If any reference in the chain is volatile, the effect is volatile. */
5357 if (TREE_THIS_VOLATILE (exp))
5360 exp = TREE_OPERAND (exp, 0);
5363 /* If OFFSET is constant, see if we can return the whole thing as a
5364 constant bit position. Otherwise, split it up. */
5365 if (host_integerp (offset, 0)
5366 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5368 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5369 && host_integerp (tem, 0))
5370 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5372 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5378 /* Return 1 if T is an expression that get_inner_reference handles. */
5381 handled_component_p (t)
5384 switch (TREE_CODE (t))
5389 case ARRAY_RANGE_REF:
5390 case NON_LVALUE_EXPR:
5391 case VIEW_CONVERT_EXPR:
5396 return (TYPE_MODE (TREE_TYPE (t))
5397 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5404 /* Given an rtx VALUE that may contain additions and multiplications, return
5405 an equivalent value that just refers to a register, memory, or constant.
5406 This is done by generating instructions to perform the arithmetic and
5407 returning a pseudo-register containing the value.
5409 The returned value may be a REG, SUBREG, MEM or constant. */
5412 force_operand (value, target)
5416 /* Use a temporary to force order of execution of calls to
5420 /* Use subtarget as the target for operand 0 of a binary operation. */
5421 rtx subtarget = get_subtarget (target);
5423 /* Check for a PIC address load. */
5424 if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5425 && XEXP (value, 0) == pic_offset_table_rtx
5426 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5427 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5428 || GET_CODE (XEXP (value, 1)) == CONST))
5431 subtarget = gen_reg_rtx (GET_MODE (value));
5432 emit_move_insn (subtarget, value);
5436 if (GET_CODE (value) == PLUS)
5437 binoptab = add_optab;
5438 else if (GET_CODE (value) == MINUS)
5439 binoptab = sub_optab;
5440 else if (GET_CODE (value) == MULT)
5442 op2 = XEXP (value, 1);
5443 if (!CONSTANT_P (op2)
5444 && !(GET_CODE (op2) == REG && op2 != subtarget))
5446 tmp = force_operand (XEXP (value, 0), subtarget);
5447 return expand_mult (GET_MODE (value), tmp,
5448 force_operand (op2, NULL_RTX),
5454 op2 = XEXP (value, 1);
5455 if (!CONSTANT_P (op2)
5456 && !(GET_CODE (op2) == REG && op2 != subtarget))
5458 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5460 binoptab = add_optab;
5461 op2 = negate_rtx (GET_MODE (value), op2);
5464 /* Check for an addition with OP2 a constant integer and our first
5465 operand a PLUS of a virtual register and something else. In that
5466 case, we want to emit the sum of the virtual register and the
5467 constant first and then add the other value. This allows virtual
5468 register instantiation to simply modify the constant rather than
5469 creating another one around this addition. */
5470 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5471 && GET_CODE (XEXP (value, 0)) == PLUS
5472 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5473 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5474 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5476 rtx temp = expand_binop (GET_MODE (value), binoptab,
5477 XEXP (XEXP (value, 0), 0), op2,
5478 subtarget, 0, OPTAB_LIB_WIDEN);
5479 return expand_binop (GET_MODE (value), binoptab, temp,
5480 force_operand (XEXP (XEXP (value, 0), 1), 0),
5481 target, 0, OPTAB_LIB_WIDEN);
5484 tmp = force_operand (XEXP (value, 0), subtarget);
5485 return expand_binop (GET_MODE (value), binoptab, tmp,
5486 force_operand (op2, NULL_RTX),
5487 target, 0, OPTAB_LIB_WIDEN);
5488 /* We give UNSIGNEDP = 0 to expand_binop
5489 because the only operations we are expanding here are signed ones. */
5492 #ifdef INSN_SCHEDULING
5493 /* On machines that have insn scheduling, we want all memory reference to be
5494 explicit, so we need to deal with such paradoxical SUBREGs. */
5495 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5496 && (GET_MODE_SIZE (GET_MODE (value))
5497 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5499 = simplify_gen_subreg (GET_MODE (value),
5500 force_reg (GET_MODE (SUBREG_REG (value)),
5501 force_operand (SUBREG_REG (value),
5503 GET_MODE (SUBREG_REG (value)),
5504 SUBREG_BYTE (value));
5510 /* Subroutine of expand_expr: return nonzero iff there is no way that
5511 EXP can reference X, which is being modified. TOP_P is nonzero if this
5512 call is going to be used to determine whether we need a temporary
5513 for EXP, as opposed to a recursive call to this function.
5515 It is always safe for this routine to return zero since it merely
5516 searches for optimization opportunities. */
5519 safe_from_p (x, exp, top_p)
5526 static tree save_expr_list;
5529 /* If EXP has varying size, we MUST use a target since we currently
5530 have no way of allocating temporaries of variable size
5531 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5532 So we assume here that something at a higher level has prevented a
5533 clash. This is somewhat bogus, but the best we can do. Only
5534 do this when X is BLKmode and when we are at the top level. */
5535 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5536 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5537 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5538 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5539 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5541 && GET_MODE (x) == BLKmode)
5542 /* If X is in the outgoing argument area, it is always safe. */
5543 || (GET_CODE (x) == MEM
5544 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5545 || (GET_CODE (XEXP (x, 0)) == PLUS
5546 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5549 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5550 find the underlying pseudo. */
5551 if (GET_CODE (x) == SUBREG)
5554 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5558 /* A SAVE_EXPR might appear many times in the expression passed to the
5559 top-level safe_from_p call, and if it has a complex subexpression,
5560 examining it multiple times could result in a combinatorial explosion.
5561 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5562 with optimization took about 28 minutes to compile -- even though it was
5563 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5564 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5565 we have processed. Note that the only test of top_p was above. */
5574 rtn = safe_from_p (x, exp, 0);
5576 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5577 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5582 /* Now look at our tree code and possibly recurse. */
5583 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5586 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5593 if (TREE_CODE (exp) == TREE_LIST)
5594 return ((TREE_VALUE (exp) == 0
5595 || safe_from_p (x, TREE_VALUE (exp), 0))
5596 && (TREE_CHAIN (exp) == 0
5597 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5598 else if (TREE_CODE (exp) == ERROR_MARK)
5599 return 1; /* An already-visited SAVE_EXPR? */
5604 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5608 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5609 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5613 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5614 the expression. If it is set, we conflict iff we are that rtx or
5615 both are in memory. Otherwise, we check all operands of the
5616 expression recursively. */
5618 switch (TREE_CODE (exp))
5621 /* If the operand is static or we are static, we can't conflict.
5622 Likewise if we don't conflict with the operand at all. */
5623 if (staticp (TREE_OPERAND (exp, 0))
5624 || TREE_STATIC (exp)
5625 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5628 /* Otherwise, the only way this can conflict is if we are taking
5629 the address of a DECL a that address if part of X, which is
5631 exp = TREE_OPERAND (exp, 0);
5634 if (!DECL_RTL_SET_P (exp)
5635 || GET_CODE (DECL_RTL (exp)) != MEM)
5638 exp_rtl = XEXP (DECL_RTL (exp), 0);
5643 if (GET_CODE (x) == MEM
5644 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5645 get_alias_set (exp)))
5650 /* Assume that the call will clobber all hard registers and
5652 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5653 || GET_CODE (x) == MEM)
5658 /* If a sequence exists, we would have to scan every instruction
5659 in the sequence to see if it was safe. This is probably not
5661 if (RTL_EXPR_SEQUENCE (exp))
5664 exp_rtl = RTL_EXPR_RTL (exp);
5667 case WITH_CLEANUP_EXPR:
5668 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5671 case CLEANUP_POINT_EXPR:
5672 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5675 exp_rtl = SAVE_EXPR_RTL (exp);
5679 /* If we've already scanned this, don't do it again. Otherwise,
5680 show we've scanned it and record for clearing the flag if we're
5682 if (TREE_PRIVATE (exp))
5685 TREE_PRIVATE (exp) = 1;
5686 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5688 TREE_PRIVATE (exp) = 0;
5692 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5696 /* The only operand we look at is operand 1. The rest aren't
5697 part of the expression. */
5698 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5700 case METHOD_CALL_EXPR:
5701 /* This takes an rtx argument, but shouldn't appear here. */
5708 /* If we have an rtx, we do not need to scan our operands. */
5712 nops = first_rtl_op (TREE_CODE (exp));
5713 for (i = 0; i < nops; i++)
5714 if (TREE_OPERAND (exp, i) != 0
5715 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5718 /* If this is a language-specific tree code, it may require
5719 special handling. */
5720 if ((unsigned int) TREE_CODE (exp)
5721 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5722 && !(*lang_hooks.safe_from_p) (x, exp))
5726 /* If we have an rtl, find any enclosed object. Then see if we conflict
5730 if (GET_CODE (exp_rtl) == SUBREG)
5732 exp_rtl = SUBREG_REG (exp_rtl);
5733 if (GET_CODE (exp_rtl) == REG
5734 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5738 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5739 are memory and they conflict. */
5740 return ! (rtx_equal_p (x, exp_rtl)
5741 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5742 && true_dependence (exp_rtl, VOIDmode, x,
5743 rtx_addr_varies_p)));
5746 /* If we reach here, it is safe. */
5750 /* Subroutine of expand_expr: return rtx if EXP is a
5751 variable or parameter; else return 0. */
5758 switch (TREE_CODE (exp))
5762 return DECL_RTL (exp);
5768 #ifdef MAX_INTEGER_COMPUTATION_MODE
5771 check_max_integer_computation_mode (exp)
5774 enum tree_code code;
5775 enum machine_mode mode;
5777 /* Strip any NOPs that don't change the mode. */
5779 code = TREE_CODE (exp);
5781 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5782 if (code == NOP_EXPR
5783 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5786 /* First check the type of the overall operation. We need only look at
5787 unary, binary and relational operations. */
5788 if (TREE_CODE_CLASS (code) == '1'
5789 || TREE_CODE_CLASS (code) == '2'
5790 || TREE_CODE_CLASS (code) == '<')
5792 mode = TYPE_MODE (TREE_TYPE (exp));
5793 if (GET_MODE_CLASS (mode) == MODE_INT
5794 && mode > MAX_INTEGER_COMPUTATION_MODE)
5795 internal_error ("unsupported wide integer operation");
5798 /* Check operand of a unary op. */
5799 if (TREE_CODE_CLASS (code) == '1')
5801 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5802 if (GET_MODE_CLASS (mode) == MODE_INT
5803 && mode > MAX_INTEGER_COMPUTATION_MODE)
5804 internal_error ("unsupported wide integer operation");
5807 /* Check operands of a binary/comparison op. */
5808 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5810 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5811 if (GET_MODE_CLASS (mode) == MODE_INT
5812 && mode > MAX_INTEGER_COMPUTATION_MODE)
5813 internal_error ("unsupported wide integer operation");
5815 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5816 if (GET_MODE_CLASS (mode) == MODE_INT
5817 && mode > MAX_INTEGER_COMPUTATION_MODE)
5818 internal_error ("unsupported wide integer operation");
5823 /* Return the highest power of two that EXP is known to be a multiple of.
5824 This is used in updating alignment of MEMs in array references. */
5826 static HOST_WIDE_INT
5827 highest_pow2_factor (exp)
5830 HOST_WIDE_INT c0, c1;
5832 switch (TREE_CODE (exp))
5835 /* We can find the lowest bit that's a one. If the low
5836 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5837 We need to handle this case since we can find it in a COND_EXPR,
5838 a MIN_EXPR, or a MAX_EXPR. If the constant overlows, we have an
5839 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5841 if (TREE_CONSTANT_OVERFLOW (exp))
5842 return BIGGEST_ALIGNMENT;
5845 /* Note: tree_low_cst is intentionally not used here,
5846 we don't care about the upper bits. */
5847 c0 = TREE_INT_CST_LOW (exp);
5849 return c0 ? c0 : BIGGEST_ALIGNMENT;
5853 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5854 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5855 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5856 return MIN (c0, c1);
5859 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5860 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5863 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5865 if (integer_pow2p (TREE_OPERAND (exp, 1))
5866 && host_integerp (TREE_OPERAND (exp, 1), 1))
5868 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5869 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5870 return MAX (1, c0 / c1);
5874 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5875 case SAVE_EXPR: case WITH_RECORD_EXPR:
5876 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5879 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5882 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5883 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5884 return MIN (c0, c1);
5893 /* Return an object on the placeholder list that matches EXP, a
5894 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5895 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5896 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5897 is a location which initially points to a starting location in the
5898 placeholder list (zero means start of the list) and where a pointer into
5899 the placeholder list at which the object is found is placed. */
5902 find_placeholder (exp, plist)
5906 tree type = TREE_TYPE (exp);
5907 tree placeholder_expr;
5909 for (placeholder_expr
5910 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5911 placeholder_expr != 0;
5912 placeholder_expr = TREE_CHAIN (placeholder_expr))
5914 tree need_type = TYPE_MAIN_VARIANT (type);
5917 /* Find the outermost reference that is of the type we want. If none,
5918 see if any object has a type that is a pointer to the type we
5920 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5921 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5922 || TREE_CODE (elt) == COND_EXPR)
5923 ? TREE_OPERAND (elt, 1)
5924 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5925 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5926 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5927 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5928 ? TREE_OPERAND (elt, 0) : 0))
5929 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5932 *plist = placeholder_expr;
5936 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5938 = ((TREE_CODE (elt) == COMPOUND_EXPR
5939 || TREE_CODE (elt) == COND_EXPR)
5940 ? TREE_OPERAND (elt, 1)
5941 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5942 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5943 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5944 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5945 ? TREE_OPERAND (elt, 0) : 0))
5946 if (POINTER_TYPE_P (TREE_TYPE (elt))
5947 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5951 *plist = placeholder_expr;
5952 return build1 (INDIRECT_REF, need_type, elt);
5959 /* expand_expr: generate code for computing expression EXP.
5960 An rtx for the computed value is returned. The value is never null.
5961 In the case of a void EXP, const0_rtx is returned.
5963 The value may be stored in TARGET if TARGET is nonzero.
5964 TARGET is just a suggestion; callers must assume that
5965 the rtx returned may not be the same as TARGET.
5967 If TARGET is CONST0_RTX, it means that the value will be ignored.
5969 If TMODE is not VOIDmode, it suggests generating the
5970 result in mode TMODE. But this is done only when convenient.
5971 Otherwise, TMODE is ignored and the value generated in its natural mode.
5972 TMODE is just a suggestion; callers must assume that
5973 the rtx returned may not have mode TMODE.
5975 Note that TARGET may have neither TMODE nor MODE. In that case, it
5976 probably will not be used.
5978 If MODIFIER is EXPAND_SUM then when EXP is an addition
5979 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5980 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5981 products as above, or REG or MEM, or constant.
5982 Ordinarily in such cases we would output mul or add instructions
5983 and then return a pseudo reg containing the sum.
5985 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5986 it also marks a label as absolutely required (it can't be dead).
5987 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5988 This is used for outputting expressions used in initializers.
5990 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5991 with a constant address even if that address is not normally legitimate.
5992 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5995 expand_expr (exp, target, tmode, modifier)
5998 enum machine_mode tmode;
5999 enum expand_modifier modifier;
6002 tree type = TREE_TYPE (exp);
6003 int unsignedp = TREE_UNSIGNED (type);
6004 enum machine_mode mode;
6005 enum tree_code code = TREE_CODE (exp);
6007 rtx subtarget, original_target;
6011 /* Handle ERROR_MARK before anybody tries to access its type. */
6012 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6014 op0 = CONST0_RTX (tmode);
6020 mode = TYPE_MODE (type);
6021 /* Use subtarget as the target for operand 0 of a binary operation. */
6022 subtarget = get_subtarget (target);
6023 original_target = target;
6024 ignore = (target == const0_rtx
6025 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6026 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6027 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6028 && TREE_CODE (type) == VOID_TYPE));
6030 /* If we are going to ignore this result, we need only do something
6031 if there is a side-effect somewhere in the expression. If there
6032 is, short-circuit the most common cases here. Note that we must
6033 not call expand_expr with anything but const0_rtx in case this
6034 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6038 if (! TREE_SIDE_EFFECTS (exp))
6041 /* Ensure we reference a volatile object even if value is ignored, but
6042 don't do this if all we are doing is taking its address. */
6043 if (TREE_THIS_VOLATILE (exp)
6044 && TREE_CODE (exp) != FUNCTION_DECL
6045 && mode != VOIDmode && mode != BLKmode
6046 && modifier != EXPAND_CONST_ADDRESS)
6048 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6049 if (GET_CODE (temp) == MEM)
6050 temp = copy_to_reg (temp);
6054 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6055 || code == INDIRECT_REF || code == BUFFER_REF)
6056 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6059 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6060 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6062 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6063 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6066 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6067 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6068 /* If the second operand has no side effects, just evaluate
6070 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6072 else if (code == BIT_FIELD_REF)
6074 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6075 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6076 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6083 #ifdef MAX_INTEGER_COMPUTATION_MODE
6084 /* Only check stuff here if the mode we want is different from the mode
6085 of the expression; if it's the same, check_max_integer_computiation_mode
6086 will handle it. Do we really need to check this stuff at all? */
6089 && GET_MODE (target) != mode
6090 && TREE_CODE (exp) != INTEGER_CST
6091 && TREE_CODE (exp) != PARM_DECL
6092 && TREE_CODE (exp) != ARRAY_REF
6093 && TREE_CODE (exp) != ARRAY_RANGE_REF
6094 && TREE_CODE (exp) != COMPONENT_REF
6095 && TREE_CODE (exp) != BIT_FIELD_REF
6096 && TREE_CODE (exp) != INDIRECT_REF
6097 && TREE_CODE (exp) != CALL_EXPR
6098 && TREE_CODE (exp) != VAR_DECL
6099 && TREE_CODE (exp) != RTL_EXPR)
6101 enum machine_mode mode = GET_MODE (target);
6103 if (GET_MODE_CLASS (mode) == MODE_INT
6104 && mode > MAX_INTEGER_COMPUTATION_MODE)
6105 internal_error ("unsupported wide integer operation");
6109 && TREE_CODE (exp) != INTEGER_CST
6110 && TREE_CODE (exp) != PARM_DECL
6111 && TREE_CODE (exp) != ARRAY_REF
6112 && TREE_CODE (exp) != ARRAY_RANGE_REF
6113 && TREE_CODE (exp) != COMPONENT_REF
6114 && TREE_CODE (exp) != BIT_FIELD_REF
6115 && TREE_CODE (exp) != INDIRECT_REF
6116 && TREE_CODE (exp) != VAR_DECL
6117 && TREE_CODE (exp) != CALL_EXPR
6118 && TREE_CODE (exp) != RTL_EXPR
6119 && GET_MODE_CLASS (tmode) == MODE_INT
6120 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6121 internal_error ("unsupported wide integer operation");
6123 check_max_integer_computation_mode (exp);
6126 /* If will do cse, generate all results into pseudo registers
6127 since 1) that allows cse to find more things
6128 and 2) otherwise cse could produce an insn the machine
6129 cannot support. And exception is a CONSTRUCTOR into a multi-word
6130 MEM: that's much more likely to be most efficient into the MEM. */
6132 if (! cse_not_expected && mode != BLKmode && target
6133 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6134 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6141 tree function = decl_function_context (exp);
6142 /* Handle using a label in a containing function. */
6143 if (function != current_function_decl
6144 && function != inline_function_decl && function != 0)
6146 struct function *p = find_function_data (function);
6147 p->expr->x_forced_labels
6148 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6149 p->expr->x_forced_labels);
6153 if (modifier == EXPAND_INITIALIZER)
6154 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6159 temp = gen_rtx_MEM (FUNCTION_MODE,
6160 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6161 if (function != current_function_decl
6162 && function != inline_function_decl && function != 0)
6163 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6168 if (DECL_RTL (exp) == 0)
6170 error_with_decl (exp, "prior parameter's size depends on `%s'");
6171 return CONST0_RTX (mode);
6174 /* ... fall through ... */
6177 /* If a static var's type was incomplete when the decl was written,
6178 but the type is complete now, lay out the decl now. */
6179 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6180 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6182 rtx value = DECL_RTL_IF_SET (exp);
6184 layout_decl (exp, 0);
6186 /* If the RTL was already set, update its mode and memory
6190 PUT_MODE (value, DECL_MODE (exp));
6191 SET_DECL_RTL (exp, 0);
6192 set_mem_attributes (value, exp, 1);
6193 SET_DECL_RTL (exp, value);
6197 /* ... fall through ... */
6201 if (DECL_RTL (exp) == 0)
6204 /* Ensure variable marked as used even if it doesn't go through
6205 a parser. If it hasn't be used yet, write out an external
6207 if (! TREE_USED (exp))
6209 assemble_external (exp);
6210 TREE_USED (exp) = 1;
6213 /* Show we haven't gotten RTL for this yet. */
6216 /* Handle variables inherited from containing functions. */
6217 context = decl_function_context (exp);
6219 /* We treat inline_function_decl as an alias for the current function
6220 because that is the inline function whose vars, types, etc.
6221 are being merged into the current function.
6222 See expand_inline_function. */
6224 if (context != 0 && context != current_function_decl
6225 && context != inline_function_decl
6226 /* If var is static, we don't need a static chain to access it. */
6227 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6228 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6232 /* Mark as non-local and addressable. */
6233 DECL_NONLOCAL (exp) = 1;
6234 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6236 mark_addressable (exp);
6237 if (GET_CODE (DECL_RTL (exp)) != MEM)
6239 addr = XEXP (DECL_RTL (exp), 0);
6240 if (GET_CODE (addr) == MEM)
6242 = replace_equiv_address (addr,
6243 fix_lexical_addr (XEXP (addr, 0), exp));
6245 addr = fix_lexical_addr (addr, exp);
6247 temp = replace_equiv_address (DECL_RTL (exp), addr);
6250 /* This is the case of an array whose size is to be determined
6251 from its initializer, while the initializer is still being parsed.
6254 else if (GET_CODE (DECL_RTL (exp)) == MEM
6255 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6256 temp = validize_mem (DECL_RTL (exp));
6258 /* If DECL_RTL is memory, we are in the normal case and either
6259 the address is not valid or it is not a register and -fforce-addr
6260 is specified, get the address into a register. */
6262 else if (GET_CODE (DECL_RTL (exp)) == MEM
6263 && modifier != EXPAND_CONST_ADDRESS
6264 && modifier != EXPAND_SUM
6265 && modifier != EXPAND_INITIALIZER
6266 && (! memory_address_p (DECL_MODE (exp),
6267 XEXP (DECL_RTL (exp), 0))
6269 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6270 temp = replace_equiv_address (DECL_RTL (exp),
6271 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6273 /* If we got something, return it. But first, set the alignment
6274 if the address is a register. */
6277 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6278 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6283 /* If the mode of DECL_RTL does not match that of the decl, it
6284 must be a promoted value. We return a SUBREG of the wanted mode,
6285 but mark it so that we know that it was already extended. */
6287 if (GET_CODE (DECL_RTL (exp)) == REG
6288 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6290 /* Get the signedness used for this variable. Ensure we get the
6291 same mode we got when the variable was declared. */
6292 if (GET_MODE (DECL_RTL (exp))
6293 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6296 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6297 SUBREG_PROMOTED_VAR_P (temp) = 1;
6298 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6302 return DECL_RTL (exp);
6305 return immed_double_const (TREE_INT_CST_LOW (exp),
6306 TREE_INT_CST_HIGH (exp), mode);
6309 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6312 /* If optimized, generate immediate CONST_DOUBLE
6313 which will be turned into memory by reload if necessary.
6315 We used to force a register so that loop.c could see it. But
6316 this does not allow gen_* patterns to perform optimizations with
6317 the constants. It also produces two insns in cases like "x = 1.0;".
6318 On most machines, floating-point constants are not permitted in
6319 many insns, so we'd end up copying it to a register in any case.
6321 Now, we do the copying in expand_binop, if appropriate. */
6322 return immed_real_const (exp);
6326 if (! TREE_CST_RTL (exp))
6327 output_constant_def (exp, 1);
6329 /* TREE_CST_RTL probably contains a constant address.
6330 On RISC machines where a constant address isn't valid,
6331 make some insns to get that address into a register. */
6332 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6333 && modifier != EXPAND_CONST_ADDRESS
6334 && modifier != EXPAND_INITIALIZER
6335 && modifier != EXPAND_SUM
6336 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6338 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6339 return replace_equiv_address (TREE_CST_RTL (exp),
6340 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6341 return TREE_CST_RTL (exp);
6343 case EXPR_WITH_FILE_LOCATION:
6346 const char *saved_input_filename = input_filename;
6347 int saved_lineno = lineno;
6348 input_filename = EXPR_WFL_FILENAME (exp);
6349 lineno = EXPR_WFL_LINENO (exp);
6350 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6351 emit_line_note (input_filename, lineno);
6352 /* Possibly avoid switching back and forth here. */
6353 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6354 input_filename = saved_input_filename;
6355 lineno = saved_lineno;
6360 context = decl_function_context (exp);
6362 /* If this SAVE_EXPR was at global context, assume we are an
6363 initialization function and move it into our context. */
6365 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6367 /* We treat inline_function_decl as an alias for the current function
6368 because that is the inline function whose vars, types, etc.
6369 are being merged into the current function.
6370 See expand_inline_function. */
6371 if (context == current_function_decl || context == inline_function_decl)
6374 /* If this is non-local, handle it. */
6377 /* The following call just exists to abort if the context is
6378 not of a containing function. */
6379 find_function_data (context);
6381 temp = SAVE_EXPR_RTL (exp);
6382 if (temp && GET_CODE (temp) == REG)
6384 put_var_into_stack (exp);
6385 temp = SAVE_EXPR_RTL (exp);
6387 if (temp == 0 || GET_CODE (temp) != MEM)
6390 replace_equiv_address (temp,
6391 fix_lexical_addr (XEXP (temp, 0), exp));
6393 if (SAVE_EXPR_RTL (exp) == 0)
6395 if (mode == VOIDmode)
6398 temp = assign_temp (build_qualified_type (type,
6400 | TYPE_QUAL_CONST)),
6403 SAVE_EXPR_RTL (exp) = temp;
6404 if (!optimize && GET_CODE (temp) == REG)
6405 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6408 /* If the mode of TEMP does not match that of the expression, it
6409 must be a promoted value. We pass store_expr a SUBREG of the
6410 wanted mode but mark it so that we know that it was already
6411 extended. Note that `unsignedp' was modified above in
6414 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6416 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6417 SUBREG_PROMOTED_VAR_P (temp) = 1;
6418 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6421 if (temp == const0_rtx)
6422 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6424 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6426 TREE_USED (exp) = 1;
6429 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6430 must be a promoted value. We return a SUBREG of the wanted mode,
6431 but mark it so that we know that it was already extended. */
6433 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6434 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6436 /* Compute the signedness and make the proper SUBREG. */
6437 promote_mode (type, mode, &unsignedp, 0);
6438 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6439 SUBREG_PROMOTED_VAR_P (temp) = 1;
6440 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6444 return SAVE_EXPR_RTL (exp);
6449 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6450 TREE_OPERAND (exp, 0)
6451 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6455 case PLACEHOLDER_EXPR:
6457 tree old_list = placeholder_list;
6458 tree placeholder_expr = 0;
6460 exp = find_placeholder (exp, &placeholder_expr);
6464 placeholder_list = TREE_CHAIN (placeholder_expr);
6465 temp = expand_expr (exp, original_target, tmode, modifier);
6466 placeholder_list = old_list;
6470 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6473 case WITH_RECORD_EXPR:
6474 /* Put the object on the placeholder list, expand our first operand,
6475 and pop the list. */
6476 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6478 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6480 placeholder_list = TREE_CHAIN (placeholder_list);
6484 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6485 expand_goto (TREE_OPERAND (exp, 0));
6487 expand_computed_goto (TREE_OPERAND (exp, 0));
6491 expand_exit_loop_if_false (NULL,
6492 invert_truthvalue (TREE_OPERAND (exp, 0)));
6495 case LABELED_BLOCK_EXPR:
6496 if (LABELED_BLOCK_BODY (exp))
6497 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6498 /* Should perhaps use expand_label, but this is simpler and safer. */
6499 do_pending_stack_adjust ();
6500 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6503 case EXIT_BLOCK_EXPR:
6504 if (EXIT_BLOCK_RETURN (exp))
6505 sorry ("returned value in block_exit_expr");
6506 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6511 expand_start_loop (1);
6512 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6520 tree vars = TREE_OPERAND (exp, 0);
6521 int vars_need_expansion = 0;
6523 /* Need to open a binding contour here because
6524 if there are any cleanups they must be contained here. */
6525 expand_start_bindings (2);
6527 /* Mark the corresponding BLOCK for output in its proper place. */
6528 if (TREE_OPERAND (exp, 2) != 0
6529 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6530 insert_block (TREE_OPERAND (exp, 2));
6532 /* If VARS have not yet been expanded, expand them now. */
6535 if (!DECL_RTL_SET_P (vars))
6537 vars_need_expansion = 1;
6540 expand_decl_init (vars);
6541 vars = TREE_CHAIN (vars);
6544 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6546 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6552 if (RTL_EXPR_SEQUENCE (exp))
6554 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6556 emit_insns (RTL_EXPR_SEQUENCE (exp));
6557 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6559 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6560 free_temps_for_rtl_expr (exp);
6561 return RTL_EXPR_RTL (exp);
6564 /* If we don't need the result, just ensure we evaluate any
6570 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6571 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6576 /* All elts simple constants => refer to a constant in memory. But
6577 if this is a non-BLKmode mode, let it store a field at a time
6578 since that should make a CONST_INT or CONST_DOUBLE when we
6579 fold. Likewise, if we have a target we can use, it is best to
6580 store directly into the target unless the type is large enough
6581 that memcpy will be used. If we are making an initializer and
6582 all operands are constant, put it in memory as well. */
6583 else if ((TREE_STATIC (exp)
6584 && ((mode == BLKmode
6585 && ! (target != 0 && safe_from_p (target, exp, 1)))
6586 || TREE_ADDRESSABLE (exp)
6587 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6588 && (! MOVE_BY_PIECES_P
6589 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6591 && ! mostly_zeros_p (exp))))
6592 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6594 rtx constructor = output_constant_def (exp, 1);
6596 if (modifier != EXPAND_CONST_ADDRESS
6597 && modifier != EXPAND_INITIALIZER
6598 && modifier != EXPAND_SUM)
6599 constructor = validize_mem (constructor);
6605 /* Handle calls that pass values in multiple non-contiguous
6606 locations. The Irix 6 ABI has examples of this. */
6607 if (target == 0 || ! safe_from_p (target, exp, 1)
6608 || GET_CODE (target) == PARALLEL)
6610 = assign_temp (build_qualified_type (type,
6612 | (TREE_READONLY (exp)
6613 * TYPE_QUAL_CONST))),
6614 0, TREE_ADDRESSABLE (exp), 1);
6616 store_constructor (exp, target, 0,
6617 int_size_in_bytes (TREE_TYPE (exp)));
6623 tree exp1 = TREE_OPERAND (exp, 0);
6625 tree string = string_constant (exp1, &index);
6627 /* Try to optimize reads from const strings. */
6629 && TREE_CODE (string) == STRING_CST
6630 && TREE_CODE (index) == INTEGER_CST
6631 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6632 && GET_MODE_CLASS (mode) == MODE_INT
6633 && GET_MODE_SIZE (mode) == 1
6634 && modifier != EXPAND_WRITE)
6636 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6638 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6639 op0 = memory_address (mode, op0);
6640 temp = gen_rtx_MEM (mode, op0);
6641 set_mem_attributes (temp, exp, 0);
6643 /* If we are writing to this object and its type is a record with
6644 readonly fields, we must mark it as readonly so it will
6645 conflict with readonly references to those fields. */
6646 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6647 RTX_UNCHANGING_P (temp) = 1;
6653 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6657 tree array = TREE_OPERAND (exp, 0);
6658 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6659 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6660 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6663 /* Optimize the special-case of a zero lower bound.
6665 We convert the low_bound to sizetype to avoid some problems
6666 with constant folding. (E.g. suppose the lower bound is 1,
6667 and its mode is QI. Without the conversion, (ARRAY
6668 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6669 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6671 if (! integer_zerop (low_bound))
6672 index = size_diffop (index, convert (sizetype, low_bound));
6674 /* Fold an expression like: "foo"[2].
6675 This is not done in fold so it won't happen inside &.
6676 Don't fold if this is for wide characters since it's too
6677 difficult to do correctly and this is a very rare case. */
6679 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6680 && TREE_CODE (array) == STRING_CST
6681 && TREE_CODE (index) == INTEGER_CST
6682 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6683 && GET_MODE_CLASS (mode) == MODE_INT
6684 && GET_MODE_SIZE (mode) == 1)
6686 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6688 /* If this is a constant index into a constant array,
6689 just get the value from the array. Handle both the cases when
6690 we have an explicit constructor and when our operand is a variable
6691 that was declared const. */
6693 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6694 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6695 && TREE_CODE (index) == INTEGER_CST
6696 && 0 > compare_tree_int (index,
6697 list_length (CONSTRUCTOR_ELTS
6698 (TREE_OPERAND (exp, 0)))))
6702 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6703 i = TREE_INT_CST_LOW (index);
6704 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6708 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6712 else if (optimize >= 1
6713 && modifier != EXPAND_CONST_ADDRESS
6714 && modifier != EXPAND_INITIALIZER
6715 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6716 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6717 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6719 if (TREE_CODE (index) == INTEGER_CST)
6721 tree init = DECL_INITIAL (array);
6723 if (TREE_CODE (init) == CONSTRUCTOR)
6727 for (elem = CONSTRUCTOR_ELTS (init);
6729 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6730 elem = TREE_CHAIN (elem))
6733 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6734 return expand_expr (fold (TREE_VALUE (elem)), target,
6737 else if (TREE_CODE (init) == STRING_CST
6738 && 0 > compare_tree_int (index,
6739 TREE_STRING_LENGTH (init)))
6741 tree type = TREE_TYPE (TREE_TYPE (init));
6742 enum machine_mode mode = TYPE_MODE (type);
6744 if (GET_MODE_CLASS (mode) == MODE_INT
6745 && GET_MODE_SIZE (mode) == 1)
6747 (TREE_STRING_POINTER
6748 (init)[TREE_INT_CST_LOW (index)]));
6757 case ARRAY_RANGE_REF:
6758 /* If the operand is a CONSTRUCTOR, we can just extract the
6759 appropriate field if it is present. Don't do this if we have
6760 already written the data since we want to refer to that copy
6761 and varasm.c assumes that's what we'll do. */
6762 if (code == COMPONENT_REF
6763 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6764 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6768 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6769 elt = TREE_CHAIN (elt))
6770 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6771 /* We can normally use the value of the field in the
6772 CONSTRUCTOR. However, if this is a bitfield in
6773 an integral mode that we can fit in a HOST_WIDE_INT,
6774 we must mask only the number of bits in the bitfield,
6775 since this is done implicitly by the constructor. If
6776 the bitfield does not meet either of those conditions,
6777 we can't do this optimization. */
6778 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6779 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6781 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6782 <= HOST_BITS_PER_WIDE_INT))))
6784 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6785 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6787 HOST_WIDE_INT bitsize
6788 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6789 enum machine_mode imode
6790 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6792 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6794 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6795 op0 = expand_and (imode, op0, op1, target);
6800 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6803 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6805 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6815 enum machine_mode mode1;
6816 HOST_WIDE_INT bitsize, bitpos;
6819 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6820 &mode1, &unsignedp, &volatilep);
6823 /* If we got back the original object, something is wrong. Perhaps
6824 we are evaluating an expression too early. In any event, don't
6825 infinitely recurse. */
6829 /* If TEM's type is a union of variable size, pass TARGET to the inner
6830 computation, since it will need a temporary and TARGET is known
6831 to have to do. This occurs in unchecked conversion in Ada. */
6835 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6836 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6838 ? target : NULL_RTX),
6840 (modifier == EXPAND_INITIALIZER
6841 || modifier == EXPAND_CONST_ADDRESS)
6842 ? modifier : EXPAND_NORMAL);
6844 /* If this is a constant, put it into a register if it is a
6845 legitimate constant and OFFSET is 0 and memory if it isn't. */
6846 if (CONSTANT_P (op0))
6848 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6849 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6851 op0 = force_reg (mode, op0);
6853 op0 = validize_mem (force_const_mem (mode, op0));
6858 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
6860 /* If this object is in a register, put it into memory.
6861 This case can't occur in C, but can in Ada if we have
6862 unchecked conversion of an expression from a scalar type to
6863 an array or record type. */
6864 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6865 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6867 /* If the operand is a SAVE_EXPR, we can deal with this by
6868 forcing the SAVE_EXPR into memory. */
6869 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6871 put_var_into_stack (TREE_OPERAND (exp, 0));
6872 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6877 = build_qualified_type (TREE_TYPE (tem),
6878 (TYPE_QUALS (TREE_TYPE (tem))
6879 | TYPE_QUAL_CONST));
6880 rtx memloc = assign_temp (nt, 1, 1, 1);
6882 emit_move_insn (memloc, op0);
6887 if (GET_CODE (op0) != MEM)
6890 if (GET_MODE (offset_rtx) != ptr_mode)
6891 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6893 #ifdef POINTERS_EXTEND_UNSIGNED
6894 if (GET_MODE (offset_rtx) != Pmode)
6895 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6898 /* A constant address in OP0 can have VOIDmode, we must not try
6899 to call force_reg for that case. Avoid that case. */
6900 if (GET_CODE (op0) == MEM
6901 && GET_MODE (op0) == BLKmode
6902 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6904 && (bitpos % bitsize) == 0
6905 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6906 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6908 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6912 op0 = offset_address (op0, offset_rtx,
6913 highest_pow2_factor (offset));
6916 /* Don't forget about volatility even if this is a bitfield. */
6917 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6919 if (op0 == orig_op0)
6920 op0 = copy_rtx (op0);
6922 MEM_VOLATILE_P (op0) = 1;
6925 /* In cases where an aligned union has an unaligned object
6926 as a field, we might be extracting a BLKmode value from
6927 an integer-mode (e.g., SImode) object. Handle this case
6928 by doing the extract into an object as wide as the field
6929 (which we know to be the width of a basic mode), then
6930 storing into memory, and changing the mode to BLKmode. */
6931 if (mode1 == VOIDmode
6932 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6933 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6934 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6935 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6936 && modifier != EXPAND_CONST_ADDRESS
6937 && modifier != EXPAND_INITIALIZER)
6938 /* If the field isn't aligned enough to fetch as a memref,
6939 fetch it as a bit field. */
6940 || (mode1 != BLKmode
6941 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6942 && ((TYPE_ALIGN (TREE_TYPE (tem))
6943 < GET_MODE_ALIGNMENT (mode))
6944 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6945 /* If the type and the field are a constant size and the
6946 size of the type isn't the same size as the bitfield,
6947 we must use bitfield operations. */
6949 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6951 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6954 enum machine_mode ext_mode = mode;
6956 if (ext_mode == BLKmode
6957 && ! (target != 0 && GET_CODE (op0) == MEM
6958 && GET_CODE (target) == MEM
6959 && bitpos % BITS_PER_UNIT == 0))
6960 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6962 if (ext_mode == BLKmode)
6964 /* In this case, BITPOS must start at a byte boundary and
6965 TARGET, if specified, must be a MEM. */
6966 if (GET_CODE (op0) != MEM
6967 || (target != 0 && GET_CODE (target) != MEM)
6968 || bitpos % BITS_PER_UNIT != 0)
6971 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6973 target = assign_temp (type, 0, 1, 1);
6975 emit_block_move (target, op0,
6976 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6982 op0 = validize_mem (op0);
6984 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6985 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6987 op0 = extract_bit_field (op0, bitsize, bitpos,
6988 unsignedp, target, ext_mode, ext_mode,
6989 int_size_in_bytes (TREE_TYPE (tem)));
6991 /* If the result is a record type and BITSIZE is narrower than
6992 the mode of OP0, an integral mode, and this is a big endian
6993 machine, we must put the field into the high-order bits. */
6994 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6995 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6996 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6997 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6998 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7002 if (mode == BLKmode)
7004 rtx new = assign_temp (build_qualified_type
7005 (type_for_mode (ext_mode, 0),
7006 TYPE_QUAL_CONST), 0, 1, 1);
7008 emit_move_insn (new, op0);
7009 op0 = copy_rtx (new);
7010 PUT_MODE (op0, BLKmode);
7011 set_mem_attributes (op0, exp, 1);
7017 /* If the result is BLKmode, use that to access the object
7019 if (mode == BLKmode)
7022 /* Get a reference to just this component. */
7023 if (modifier == EXPAND_CONST_ADDRESS
7024 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7025 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7027 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7029 if (op0 == orig_op0)
7030 op0 = copy_rtx (op0);
7032 set_mem_attributes (op0, exp, 0);
7033 if (GET_CODE (XEXP (op0, 0)) == REG)
7034 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7036 MEM_VOLATILE_P (op0) |= volatilep;
7037 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7038 || modifier == EXPAND_CONST_ADDRESS
7039 || modifier == EXPAND_INITIALIZER)
7041 else if (target == 0)
7042 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7044 convert_move (target, op0, unsignedp);
7050 rtx insn, before = get_last_insn (), vtbl_ref;
7052 /* Evaluate the interior expression. */
7053 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7056 /* Get or create an instruction off which to hang a note. */
7057 if (REG_P (subtarget))
7060 insn = get_last_insn ();
7063 if (! INSN_P (insn))
7064 insn = prev_nonnote_insn (insn);
7068 target = gen_reg_rtx (GET_MODE (subtarget));
7069 insn = emit_move_insn (target, subtarget);
7072 /* Collect the data for the note. */
7073 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7074 vtbl_ref = plus_constant (vtbl_ref,
7075 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7076 /* Discard the initial CONST that was added. */
7077 vtbl_ref = XEXP (vtbl_ref, 0);
7080 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7085 /* Intended for a reference to a buffer of a file-object in Pascal.
7086 But it's not certain that a special tree code will really be
7087 necessary for these. INDIRECT_REF might work for them. */
7093 /* Pascal set IN expression.
7096 rlo = set_low - (set_low%bits_per_word);
7097 the_word = set [ (index - rlo)/bits_per_word ];
7098 bit_index = index % bits_per_word;
7099 bitmask = 1 << bit_index;
7100 return !!(the_word & bitmask); */
7102 tree set = TREE_OPERAND (exp, 0);
7103 tree index = TREE_OPERAND (exp, 1);
7104 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7105 tree set_type = TREE_TYPE (set);
7106 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7107 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7108 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7109 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7110 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7111 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7112 rtx setaddr = XEXP (setval, 0);
7113 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7115 rtx diff, quo, rem, addr, bit, result;
7117 /* If domain is empty, answer is no. Likewise if index is constant
7118 and out of bounds. */
7119 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7120 && TREE_CODE (set_low_bound) == INTEGER_CST
7121 && tree_int_cst_lt (set_high_bound, set_low_bound))
7122 || (TREE_CODE (index) == INTEGER_CST
7123 && TREE_CODE (set_low_bound) == INTEGER_CST
7124 && tree_int_cst_lt (index, set_low_bound))
7125 || (TREE_CODE (set_high_bound) == INTEGER_CST
7126 && TREE_CODE (index) == INTEGER_CST
7127 && tree_int_cst_lt (set_high_bound, index))))
7131 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7133 /* If we get here, we have to generate the code for both cases
7134 (in range and out of range). */
7136 op0 = gen_label_rtx ();
7137 op1 = gen_label_rtx ();
7139 if (! (GET_CODE (index_val) == CONST_INT
7140 && GET_CODE (lo_r) == CONST_INT))
7141 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7142 GET_MODE (index_val), iunsignedp, op1);
7144 if (! (GET_CODE (index_val) == CONST_INT
7145 && GET_CODE (hi_r) == CONST_INT))
7146 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7147 GET_MODE (index_val), iunsignedp, op1);
7149 /* Calculate the element number of bit zero in the first word
7151 if (GET_CODE (lo_r) == CONST_INT)
7152 rlow = GEN_INT (INTVAL (lo_r)
7153 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7155 rlow = expand_binop (index_mode, and_optab, lo_r,
7156 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7157 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7159 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7160 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7162 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7163 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7164 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7165 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7167 addr = memory_address (byte_mode,
7168 expand_binop (index_mode, add_optab, diff,
7169 setaddr, NULL_RTX, iunsignedp,
7172 /* Extract the bit we want to examine. */
7173 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7174 gen_rtx_MEM (byte_mode, addr),
7175 make_tree (TREE_TYPE (index), rem),
7177 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7178 GET_MODE (target) == byte_mode ? target : 0,
7179 1, OPTAB_LIB_WIDEN);
7181 if (result != target)
7182 convert_move (target, result, 1);
7184 /* Output the code to handle the out-of-range case. */
7187 emit_move_insn (target, const0_rtx);
7192 case WITH_CLEANUP_EXPR:
7193 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7195 WITH_CLEANUP_EXPR_RTL (exp)
7196 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7197 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7199 /* That's it for this cleanup. */
7200 TREE_OPERAND (exp, 1) = 0;
7202 return WITH_CLEANUP_EXPR_RTL (exp);
7204 case CLEANUP_POINT_EXPR:
7206 /* Start a new binding layer that will keep track of all cleanup
7207 actions to be performed. */
7208 expand_start_bindings (2);
7210 target_temp_slot_level = temp_slot_level;
7212 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7213 /* If we're going to use this value, load it up now. */
7215 op0 = force_not_mem (op0);
7216 preserve_temp_slots (op0);
7217 expand_end_bindings (NULL_TREE, 0, 0);
7222 /* Check for a built-in function. */
7223 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7224 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7226 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7228 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7229 == BUILT_IN_FRONTEND)
7230 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7232 return expand_builtin (exp, target, subtarget, tmode, ignore);
7235 return expand_call (exp, target, ignore);
7237 case NON_LVALUE_EXPR:
7240 case REFERENCE_EXPR:
7241 if (TREE_OPERAND (exp, 0) == error_mark_node)
7244 if (TREE_CODE (type) == UNION_TYPE)
7246 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7248 /* If both input and output are BLKmode, this conversion isn't doing
7249 anything except possibly changing memory attribute. */
7250 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7252 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7255 result = copy_rtx (result);
7256 set_mem_attributes (result, exp, 0);
7261 target = assign_temp (type, 0, 1, 1);
7263 if (GET_CODE (target) == MEM)
7264 /* Store data into beginning of memory target. */
7265 store_expr (TREE_OPERAND (exp, 0),
7266 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7268 else if (GET_CODE (target) == REG)
7269 /* Store this field into a union of the proper type. */
7270 store_field (target,
7271 MIN ((int_size_in_bytes (TREE_TYPE
7272 (TREE_OPERAND (exp, 0)))
7274 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7275 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7276 VOIDmode, 0, type, 0);
7280 /* Return the entire union. */
7284 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7286 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7289 /* If the signedness of the conversion differs and OP0 is
7290 a promoted SUBREG, clear that indication since we now
7291 have to do the proper extension. */
7292 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7293 && GET_CODE (op0) == SUBREG)
7294 SUBREG_PROMOTED_VAR_P (op0) = 0;
7299 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7300 if (GET_MODE (op0) == mode)
7303 /* If OP0 is a constant, just convert it into the proper mode. */
7304 if (CONSTANT_P (op0))
7306 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7307 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7309 if (modifier == EXPAND_INITIALIZER)
7310 return simplify_gen_subreg (mode, op0, inner_mode,
7311 subreg_lowpart_offset (mode,
7314 return convert_modes (mode, inner_mode, op0,
7315 TREE_UNSIGNED (inner_type));
7318 if (modifier == EXPAND_INITIALIZER)
7319 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7323 convert_to_mode (mode, op0,
7324 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7326 convert_move (target, op0,
7327 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7330 case VIEW_CONVERT_EXPR:
7331 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7333 /* If the input and output modes are both the same, we are done.
7334 Otherwise, if neither mode is BLKmode and both are within a word, we
7335 can use gen_lowpart. If neither is true, make sure the operand is
7336 in memory and convert the MEM to the new mode. */
7337 if (TYPE_MODE (type) == GET_MODE (op0))
7339 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7340 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7341 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7342 op0 = gen_lowpart (TYPE_MODE (type), op0);
7343 else if (GET_CODE (op0) != MEM)
7345 /* If the operand is not a MEM, force it into memory. Since we
7346 are going to be be changing the mode of the MEM, don't call
7347 force_const_mem for constants because we don't allow pool
7348 constants to change mode. */
7349 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7351 if (TREE_ADDRESSABLE (exp))
7354 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7356 = assign_stack_temp_for_type
7357 (TYPE_MODE (inner_type),
7358 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7360 emit_move_insn (target, op0);
7364 /* At this point, OP0 is in the correct mode. If the output type is such
7365 that the operand is known to be aligned, indicate that it is.
7366 Otherwise, we need only be concerned about alignment for non-BLKmode
7368 if (GET_CODE (op0) == MEM)
7370 op0 = copy_rtx (op0);
7372 if (TYPE_ALIGN_OK (type))
7373 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7374 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7375 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7377 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7378 HOST_WIDE_INT temp_size
7379 = MAX (int_size_in_bytes (inner_type),
7380 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7381 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7382 temp_size, 0, type);
7383 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7385 if (TREE_ADDRESSABLE (exp))
7388 if (GET_MODE (op0) == BLKmode)
7389 emit_block_move (new_with_op0_mode, op0,
7390 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7392 emit_move_insn (new_with_op0_mode, op0);
7397 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7403 /* We come here from MINUS_EXPR when the second operand is a
7406 this_optab = ! unsignedp && flag_trapv
7407 && (GET_MODE_CLASS (mode) == MODE_INT)
7408 ? addv_optab : add_optab;
7410 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7411 something else, make sure we add the register to the constant and
7412 then to the other thing. This case can occur during strength
7413 reduction and doing it this way will produce better code if the
7414 frame pointer or argument pointer is eliminated.
7416 fold-const.c will ensure that the constant is always in the inner
7417 PLUS_EXPR, so the only case we need to do anything about is if
7418 sp, ap, or fp is our second argument, in which case we must swap
7419 the innermost first argument and our second argument. */
7421 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7422 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7423 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7424 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7425 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7426 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7428 tree t = TREE_OPERAND (exp, 1);
7430 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7431 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7434 /* If the result is to be ptr_mode and we are adding an integer to
7435 something, we might be forming a constant. So try to use
7436 plus_constant. If it produces a sum and we can't accept it,
7437 use force_operand. This allows P = &ARR[const] to generate
7438 efficient code on machines where a SYMBOL_REF is not a valid
7441 If this is an EXPAND_SUM call, always return the sum. */
7442 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7443 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7445 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7446 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7447 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7451 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7453 /* Use immed_double_const to ensure that the constant is
7454 truncated according to the mode of OP1, then sign extended
7455 to a HOST_WIDE_INT. Using the constant directly can result
7456 in non-canonical RTL in a 64x32 cross compile. */
7458 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7460 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7461 op1 = plus_constant (op1, INTVAL (constant_part));
7462 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7463 op1 = force_operand (op1, target);
7467 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7468 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7469 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7473 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7475 if (! CONSTANT_P (op0))
7477 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7478 VOIDmode, modifier);
7479 /* Don't go to both_summands if modifier
7480 says it's not right to return a PLUS. */
7481 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7485 /* Use immed_double_const to ensure that the constant is
7486 truncated according to the mode of OP1, then sign extended
7487 to a HOST_WIDE_INT. Using the constant directly can result
7488 in non-canonical RTL in a 64x32 cross compile. */
7490 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7492 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7493 op0 = plus_constant (op0, INTVAL (constant_part));
7494 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7495 op0 = force_operand (op0, target);
7500 /* No sense saving up arithmetic to be done
7501 if it's all in the wrong mode to form part of an address.
7502 And force_operand won't know whether to sign-extend or
7504 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7505 || mode != ptr_mode)
7508 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7511 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7512 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7515 /* Make sure any term that's a sum with a constant comes last. */
7516 if (GET_CODE (op0) == PLUS
7517 && CONSTANT_P (XEXP (op0, 1)))
7523 /* If adding to a sum including a constant,
7524 associate it to put the constant outside. */
7525 if (GET_CODE (op1) == PLUS
7526 && CONSTANT_P (XEXP (op1, 1)))
7528 rtx constant_term = const0_rtx;
7530 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7533 /* Ensure that MULT comes first if there is one. */
7534 else if (GET_CODE (op0) == MULT)
7535 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7537 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7539 /* Let's also eliminate constants from op0 if possible. */
7540 op0 = eliminate_constant_term (op0, &constant_term);
7542 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7543 their sum should be a constant. Form it into OP1, since the
7544 result we want will then be OP0 + OP1. */
7546 temp = simplify_binary_operation (PLUS, mode, constant_term,
7551 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7554 /* Put a constant term last and put a multiplication first. */
7555 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7556 temp = op1, op1 = op0, op0 = temp;
7558 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7559 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7562 /* For initializers, we are allowed to return a MINUS of two
7563 symbolic constants. Here we handle all cases when both operands
7565 /* Handle difference of two symbolic constants,
7566 for the sake of an initializer. */
7567 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7568 && really_constant_p (TREE_OPERAND (exp, 0))
7569 && really_constant_p (TREE_OPERAND (exp, 1)))
7571 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7573 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7576 /* If the last operand is a CONST_INT, use plus_constant of
7577 the negated constant. Else make the MINUS. */
7578 if (GET_CODE (op1) == CONST_INT)
7579 return plus_constant (op0, - INTVAL (op1));
7581 return gen_rtx_MINUS (mode, op0, op1);
7583 /* Convert A - const to A + (-const). */
7584 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7586 tree negated = fold (build1 (NEGATE_EXPR, type,
7587 TREE_OPERAND (exp, 1)));
7589 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7590 /* If we can't negate the constant in TYPE, leave it alone and
7591 expand_binop will negate it for us. We used to try to do it
7592 here in the signed version of TYPE, but that doesn't work
7593 on POINTER_TYPEs. */;
7596 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7600 this_optab = ! unsignedp && flag_trapv
7601 && (GET_MODE_CLASS(mode) == MODE_INT)
7602 ? subv_optab : sub_optab;
7606 /* If first operand is constant, swap them.
7607 Thus the following special case checks need only
7608 check the second operand. */
7609 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7611 tree t1 = TREE_OPERAND (exp, 0);
7612 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7613 TREE_OPERAND (exp, 1) = t1;
7616 /* Attempt to return something suitable for generating an
7617 indexed address, for machines that support that. */
7619 if (modifier == EXPAND_SUM && mode == ptr_mode
7620 && host_integerp (TREE_OPERAND (exp, 1), 0))
7622 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7625 /* If we knew for certain that this is arithmetic for an array
7626 reference, and we knew the bounds of the array, then we could
7627 apply the distributive law across (PLUS X C) for constant C.
7628 Without such knowledge, we risk overflowing the computation
7629 when both X and C are large, but X+C isn't. */
7630 /* ??? Could perhaps special-case EXP being unsigned and C being
7631 positive. In that case we are certain that X+C is no smaller
7632 than X and so the transformed expression will overflow iff the
7633 original would have. */
7635 if (GET_CODE (op0) != REG)
7636 op0 = force_operand (op0, NULL_RTX);
7637 if (GET_CODE (op0) != REG)
7638 op0 = copy_to_mode_reg (mode, op0);
7641 gen_rtx_MULT (mode, op0,
7642 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7645 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7648 /* Check for multiplying things that have been extended
7649 from a narrower type. If this machine supports multiplying
7650 in that narrower type with a result in the desired type,
7651 do it that way, and avoid the explicit type-conversion. */
7652 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7653 && TREE_CODE (type) == INTEGER_TYPE
7654 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7655 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7656 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7657 && int_fits_type_p (TREE_OPERAND (exp, 1),
7658 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7659 /* Don't use a widening multiply if a shift will do. */
7660 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7661 > HOST_BITS_PER_WIDE_INT)
7662 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7664 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7665 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7667 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7668 /* If both operands are extended, they must either both
7669 be zero-extended or both be sign-extended. */
7670 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7672 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7674 enum machine_mode innermode
7675 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7676 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7677 ? smul_widen_optab : umul_widen_optab);
7678 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7679 ? umul_widen_optab : smul_widen_optab);
7680 if (mode == GET_MODE_WIDER_MODE (innermode))
7682 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7684 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7685 NULL_RTX, VOIDmode, 0);
7686 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7687 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7690 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7691 NULL_RTX, VOIDmode, 0);
7694 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7695 && innermode == word_mode)
7698 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7699 NULL_RTX, VOIDmode, 0);
7700 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7701 op1 = convert_modes (innermode, mode,
7702 expand_expr (TREE_OPERAND (exp, 1),
7703 NULL_RTX, VOIDmode, 0),
7706 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7707 NULL_RTX, VOIDmode, 0);
7708 temp = expand_binop (mode, other_optab, op0, op1, target,
7709 unsignedp, OPTAB_LIB_WIDEN);
7710 htem = expand_mult_highpart_adjust (innermode,
7711 gen_highpart (innermode, temp),
7713 gen_highpart (innermode, temp),
7715 emit_move_insn (gen_highpart (innermode, temp), htem);
7720 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7721 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7722 return expand_mult (mode, op0, op1, target, unsignedp);
7724 case TRUNC_DIV_EXPR:
7725 case FLOOR_DIV_EXPR:
7727 case ROUND_DIV_EXPR:
7728 case EXACT_DIV_EXPR:
7729 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7731 /* Possible optimization: compute the dividend with EXPAND_SUM
7732 then if the divisor is constant can optimize the case
7733 where some terms of the dividend have coeffs divisible by it. */
7734 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7735 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7736 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7739 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7740 expensive divide. If not, combine will rebuild the original
7742 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7743 && !real_onep (TREE_OPERAND (exp, 0)))
7744 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7745 build (RDIV_EXPR, type,
7746 build_real (type, dconst1),
7747 TREE_OPERAND (exp, 1))),
7748 target, tmode, unsignedp);
7749 this_optab = sdiv_optab;
7752 case TRUNC_MOD_EXPR:
7753 case FLOOR_MOD_EXPR:
7755 case ROUND_MOD_EXPR:
7756 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7758 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7759 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7760 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7762 case FIX_ROUND_EXPR:
7763 case FIX_FLOOR_EXPR:
7765 abort (); /* Not used for C. */
7767 case FIX_TRUNC_EXPR:
7768 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7770 target = gen_reg_rtx (mode);
7771 expand_fix (target, op0, unsignedp);
7775 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7777 target = gen_reg_rtx (mode);
7778 /* expand_float can't figure out what to do if FROM has VOIDmode.
7779 So give it the correct mode. With -O, cse will optimize this. */
7780 if (GET_MODE (op0) == VOIDmode)
7781 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7783 expand_float (target, op0,
7784 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7788 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7789 temp = expand_unop (mode,
7790 ! unsignedp && flag_trapv
7791 && (GET_MODE_CLASS(mode) == MODE_INT)
7792 ? negv_optab : neg_optab, op0, target, 0);
7798 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7800 /* Handle complex values specially. */
7801 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7802 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7803 return expand_complex_abs (mode, op0, target, unsignedp);
7805 /* Unsigned abs is simply the operand. Testing here means we don't
7806 risk generating incorrect code below. */
7807 if (TREE_UNSIGNED (type))
7810 return expand_abs (mode, op0, target, unsignedp,
7811 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7815 target = original_target;
7816 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7817 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7818 || GET_MODE (target) != mode
7819 || (GET_CODE (target) == REG
7820 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7821 target = gen_reg_rtx (mode);
7822 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7823 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7825 /* First try to do it with a special MIN or MAX instruction.
7826 If that does not win, use a conditional jump to select the proper
7828 this_optab = (TREE_UNSIGNED (type)
7829 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7830 : (code == MIN_EXPR ? smin_optab : smax_optab));
7832 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7837 /* At this point, a MEM target is no longer useful; we will get better
7840 if (GET_CODE (target) == MEM)
7841 target = gen_reg_rtx (mode);
7844 emit_move_insn (target, op0);
7846 op0 = gen_label_rtx ();
7848 /* If this mode is an integer too wide to compare properly,
7849 compare word by word. Rely on cse to optimize constant cases. */
7850 if (GET_MODE_CLASS (mode) == MODE_INT
7851 && ! can_compare_p (GE, mode, ccp_jump))
7853 if (code == MAX_EXPR)
7854 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7855 target, op1, NULL_RTX, op0);
7857 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7858 op1, target, NULL_RTX, op0);
7862 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7863 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7864 unsignedp, mode, NULL_RTX, NULL_RTX,
7867 emit_move_insn (target, op1);
7872 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7873 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7879 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7880 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7885 /* ??? Can optimize bitwise operations with one arg constant.
7886 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7887 and (a bitwise1 b) bitwise2 b (etc)
7888 but that is probably not worth while. */
7890 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7891 boolean values when we want in all cases to compute both of them. In
7892 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7893 as actual zero-or-1 values and then bitwise anding. In cases where
7894 there cannot be any side effects, better code would be made by
7895 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7896 how to recognize those cases. */
7898 case TRUTH_AND_EXPR:
7900 this_optab = and_optab;
7905 this_optab = ior_optab;
7908 case TRUTH_XOR_EXPR:
7910 this_optab = xor_optab;
7917 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7919 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7920 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7923 /* Could determine the answer when only additive constants differ. Also,
7924 the addition of one can be handled by changing the condition. */
7931 case UNORDERED_EXPR:
7938 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7942 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7943 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7945 && GET_CODE (original_target) == REG
7946 && (GET_MODE (original_target)
7947 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7949 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7952 /* If temp is constant, we can just compute the result. */
7953 if (GET_CODE (temp) == CONST_INT)
7955 if (INTVAL (temp) != 0)
7956 emit_move_insn (target, const1_rtx);
7958 emit_move_insn (target, const0_rtx);
7963 if (temp != original_target)
7965 enum machine_mode mode1 = GET_MODE (temp);
7966 if (mode1 == VOIDmode)
7967 mode1 = tmode != VOIDmode ? tmode : mode;
7969 temp = copy_to_mode_reg (mode1, temp);
7972 op1 = gen_label_rtx ();
7973 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7974 GET_MODE (temp), unsignedp, op1);
7975 emit_move_insn (temp, const1_rtx);
7980 /* If no set-flag instruction, must generate a conditional
7981 store into a temporary variable. Drop through
7982 and handle this like && and ||. */
7984 case TRUTH_ANDIF_EXPR:
7985 case TRUTH_ORIF_EXPR:
7987 && (target == 0 || ! safe_from_p (target, exp, 1)
7988 /* Make sure we don't have a hard reg (such as function's return
7989 value) live across basic blocks, if not optimizing. */
7990 || (!optimize && GET_CODE (target) == REG
7991 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7992 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7995 emit_clr_insn (target);
7997 op1 = gen_label_rtx ();
7998 jumpifnot (exp, op1);
8001 emit_0_to_1_insn (target);
8004 return ignore ? const0_rtx : target;
8006 case TRUTH_NOT_EXPR:
8007 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8008 /* The parser is careful to generate TRUTH_NOT_EXPR
8009 only with operands that are always zero or one. */
8010 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8011 target, 1, OPTAB_LIB_WIDEN);
8017 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8019 return expand_expr (TREE_OPERAND (exp, 1),
8020 (ignore ? const0_rtx : target),
8024 /* If we would have a "singleton" (see below) were it not for a
8025 conversion in each arm, bring that conversion back out. */
8026 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8027 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8028 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8029 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8031 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8032 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8034 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8035 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8036 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8037 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8038 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8039 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8040 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8041 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8042 return expand_expr (build1 (NOP_EXPR, type,
8043 build (COND_EXPR, TREE_TYPE (iftrue),
8044 TREE_OPERAND (exp, 0),
8046 target, tmode, modifier);
8050 /* Note that COND_EXPRs whose type is a structure or union
8051 are required to be constructed to contain assignments of
8052 a temporary variable, so that we can evaluate them here
8053 for side effect only. If type is void, we must do likewise. */
8055 /* If an arm of the branch requires a cleanup,
8056 only that cleanup is performed. */
8059 tree binary_op = 0, unary_op = 0;
8061 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8062 convert it to our mode, if necessary. */
8063 if (integer_onep (TREE_OPERAND (exp, 1))
8064 && integer_zerop (TREE_OPERAND (exp, 2))
8065 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8069 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8074 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8075 if (GET_MODE (op0) == mode)
8079 target = gen_reg_rtx (mode);
8080 convert_move (target, op0, unsignedp);
8084 /* Check for X ? A + B : A. If we have this, we can copy A to the
8085 output and conditionally add B. Similarly for unary operations.
8086 Don't do this if X has side-effects because those side effects
8087 might affect A or B and the "?" operation is a sequence point in
8088 ANSI. (operand_equal_p tests for side effects.) */
8090 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8091 && operand_equal_p (TREE_OPERAND (exp, 2),
8092 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8093 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8094 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8095 && operand_equal_p (TREE_OPERAND (exp, 1),
8096 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8097 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8098 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8099 && operand_equal_p (TREE_OPERAND (exp, 2),
8100 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8101 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8102 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8103 && operand_equal_p (TREE_OPERAND (exp, 1),
8104 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8105 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8107 /* If we are not to produce a result, we have no target. Otherwise,
8108 if a target was specified use it; it will not be used as an
8109 intermediate target unless it is safe. If no target, use a
8114 else if (original_target
8115 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8116 || (singleton && GET_CODE (original_target) == REG
8117 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8118 && original_target == var_rtx (singleton)))
8119 && GET_MODE (original_target) == mode
8120 #ifdef HAVE_conditional_move
8121 && (! can_conditionally_move_p (mode)
8122 || GET_CODE (original_target) == REG
8123 || TREE_ADDRESSABLE (type))
8125 && (GET_CODE (original_target) != MEM
8126 || TREE_ADDRESSABLE (type)))
8127 temp = original_target;
8128 else if (TREE_ADDRESSABLE (type))
8131 temp = assign_temp (type, 0, 0, 1);
8133 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8134 do the test of X as a store-flag operation, do this as
8135 A + ((X != 0) << log C). Similarly for other simple binary
8136 operators. Only do for C == 1 if BRANCH_COST is low. */
8137 if (temp && singleton && binary_op
8138 && (TREE_CODE (binary_op) == PLUS_EXPR
8139 || TREE_CODE (binary_op) == MINUS_EXPR
8140 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8141 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8142 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8143 : integer_onep (TREE_OPERAND (binary_op, 1)))
8144 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8147 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8148 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8149 ? addv_optab : add_optab)
8150 : TREE_CODE (binary_op) == MINUS_EXPR
8151 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8152 ? subv_optab : sub_optab)
8153 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8156 /* If we had X ? A : A + 1, do this as A + (X == 0).
8158 We have to invert the truth value here and then put it
8159 back later if do_store_flag fails. We cannot simply copy
8160 TREE_OPERAND (exp, 0) to another variable and modify that
8161 because invert_truthvalue can modify the tree pointed to
8163 if (singleton == TREE_OPERAND (exp, 1))
8164 TREE_OPERAND (exp, 0)
8165 = invert_truthvalue (TREE_OPERAND (exp, 0));
8167 result = do_store_flag (TREE_OPERAND (exp, 0),
8168 (safe_from_p (temp, singleton, 1)
8170 mode, BRANCH_COST <= 1);
8172 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8173 result = expand_shift (LSHIFT_EXPR, mode, result,
8174 build_int_2 (tree_log2
8178 (safe_from_p (temp, singleton, 1)
8179 ? temp : NULL_RTX), 0);
8183 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8184 return expand_binop (mode, boptab, op1, result, temp,
8185 unsignedp, OPTAB_LIB_WIDEN);
8187 else if (singleton == TREE_OPERAND (exp, 1))
8188 TREE_OPERAND (exp, 0)
8189 = invert_truthvalue (TREE_OPERAND (exp, 0));
8192 do_pending_stack_adjust ();
8194 op0 = gen_label_rtx ();
8196 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8200 /* If the target conflicts with the other operand of the
8201 binary op, we can't use it. Also, we can't use the target
8202 if it is a hard register, because evaluating the condition
8203 might clobber it. */
8205 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8206 || (GET_CODE (temp) == REG
8207 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8208 temp = gen_reg_rtx (mode);
8209 store_expr (singleton, temp, 0);
8212 expand_expr (singleton,
8213 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8214 if (singleton == TREE_OPERAND (exp, 1))
8215 jumpif (TREE_OPERAND (exp, 0), op0);
8217 jumpifnot (TREE_OPERAND (exp, 0), op0);
8219 start_cleanup_deferral ();
8220 if (binary_op && temp == 0)
8221 /* Just touch the other operand. */
8222 expand_expr (TREE_OPERAND (binary_op, 1),
8223 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8225 store_expr (build (TREE_CODE (binary_op), type,
8226 make_tree (type, temp),
8227 TREE_OPERAND (binary_op, 1)),
8230 store_expr (build1 (TREE_CODE (unary_op), type,
8231 make_tree (type, temp)),
8235 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8236 comparison operator. If we have one of these cases, set the
8237 output to A, branch on A (cse will merge these two references),
8238 then set the output to FOO. */
8240 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8241 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8242 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8243 TREE_OPERAND (exp, 1), 0)
8244 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8245 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8246 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8248 if (GET_CODE (temp) == REG
8249 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8250 temp = gen_reg_rtx (mode);
8251 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8252 jumpif (TREE_OPERAND (exp, 0), op0);
8254 start_cleanup_deferral ();
8255 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8259 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8260 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8261 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8262 TREE_OPERAND (exp, 2), 0)
8263 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8264 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8265 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8267 if (GET_CODE (temp) == REG
8268 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8269 temp = gen_reg_rtx (mode);
8270 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8271 jumpifnot (TREE_OPERAND (exp, 0), op0);
8273 start_cleanup_deferral ();
8274 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8279 op1 = gen_label_rtx ();
8280 jumpifnot (TREE_OPERAND (exp, 0), op0);
8282 start_cleanup_deferral ();
8284 /* One branch of the cond can be void, if it never returns. For
8285 example A ? throw : E */
8287 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8288 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8290 expand_expr (TREE_OPERAND (exp, 1),
8291 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8292 end_cleanup_deferral ();
8294 emit_jump_insn (gen_jump (op1));
8297 start_cleanup_deferral ();
8299 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8300 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8302 expand_expr (TREE_OPERAND (exp, 2),
8303 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8306 end_cleanup_deferral ();
8317 /* Something needs to be initialized, but we didn't know
8318 where that thing was when building the tree. For example,
8319 it could be the return value of a function, or a parameter
8320 to a function which lays down in the stack, or a temporary
8321 variable which must be passed by reference.
8323 We guarantee that the expression will either be constructed
8324 or copied into our original target. */
8326 tree slot = TREE_OPERAND (exp, 0);
8327 tree cleanups = NULL_TREE;
8330 if (TREE_CODE (slot) != VAR_DECL)
8334 target = original_target;
8336 /* Set this here so that if we get a target that refers to a
8337 register variable that's already been used, put_reg_into_stack
8338 knows that it should fix up those uses. */
8339 TREE_USED (slot) = 1;
8343 if (DECL_RTL_SET_P (slot))
8345 target = DECL_RTL (slot);
8346 /* If we have already expanded the slot, so don't do
8348 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8353 target = assign_temp (type, 2, 0, 1);
8354 /* All temp slots at this level must not conflict. */
8355 preserve_temp_slots (target);
8356 SET_DECL_RTL (slot, target);
8357 if (TREE_ADDRESSABLE (slot))
8358 put_var_into_stack (slot);
8360 /* Since SLOT is not known to the called function
8361 to belong to its stack frame, we must build an explicit
8362 cleanup. This case occurs when we must build up a reference
8363 to pass the reference as an argument. In this case,
8364 it is very likely that such a reference need not be
8367 if (TREE_OPERAND (exp, 2) == 0)
8368 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8369 cleanups = TREE_OPERAND (exp, 2);
8374 /* This case does occur, when expanding a parameter which
8375 needs to be constructed on the stack. The target
8376 is the actual stack address that we want to initialize.
8377 The function we call will perform the cleanup in this case. */
8379 /* If we have already assigned it space, use that space,
8380 not target that we were passed in, as our target
8381 parameter is only a hint. */
8382 if (DECL_RTL_SET_P (slot))
8384 target = DECL_RTL (slot);
8385 /* If we have already expanded the slot, so don't do
8387 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8392 SET_DECL_RTL (slot, target);
8393 /* If we must have an addressable slot, then make sure that
8394 the RTL that we just stored in slot is OK. */
8395 if (TREE_ADDRESSABLE (slot))
8396 put_var_into_stack (slot);
8400 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8401 /* Mark it as expanded. */
8402 TREE_OPERAND (exp, 1) = NULL_TREE;
8404 store_expr (exp1, target, 0);
8406 expand_decl_cleanup (NULL_TREE, cleanups);
8413 tree lhs = TREE_OPERAND (exp, 0);
8414 tree rhs = TREE_OPERAND (exp, 1);
8416 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8422 /* If lhs is complex, expand calls in rhs before computing it.
8423 That's so we don't compute a pointer and save it over a
8424 call. If lhs is simple, compute it first so we can give it
8425 as a target if the rhs is just a call. This avoids an
8426 extra temp and copy and that prevents a partial-subsumption
8427 which makes bad code. Actually we could treat
8428 component_ref's of vars like vars. */
8430 tree lhs = TREE_OPERAND (exp, 0);
8431 tree rhs = TREE_OPERAND (exp, 1);
8435 /* Check for |= or &= of a bitfield of size one into another bitfield
8436 of size 1. In this case, (unless we need the result of the
8437 assignment) we can do this more efficiently with a
8438 test followed by an assignment, if necessary.
8440 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8441 things change so we do, this code should be enhanced to
8444 && TREE_CODE (lhs) == COMPONENT_REF
8445 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8446 || TREE_CODE (rhs) == BIT_AND_EXPR)
8447 && TREE_OPERAND (rhs, 0) == lhs
8448 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8449 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8450 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8452 rtx label = gen_label_rtx ();
8454 do_jump (TREE_OPERAND (rhs, 1),
8455 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8456 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8457 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8458 (TREE_CODE (rhs) == BIT_IOR_EXPR
8460 : integer_zero_node)),
8462 do_pending_stack_adjust ();
8467 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8473 if (!TREE_OPERAND (exp, 0))
8474 expand_null_return ();
8476 expand_return (TREE_OPERAND (exp, 0));
8479 case PREINCREMENT_EXPR:
8480 case PREDECREMENT_EXPR:
8481 return expand_increment (exp, 0, ignore);
8483 case POSTINCREMENT_EXPR:
8484 case POSTDECREMENT_EXPR:
8485 /* Faster to treat as pre-increment if result is not used. */
8486 return expand_increment (exp, ! ignore, ignore);
8489 /* Are we taking the address of a nested function? */
8490 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8491 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8492 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8493 && ! TREE_STATIC (exp))
8495 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8496 op0 = force_operand (op0, target);
8498 /* If we are taking the address of something erroneous, just
8500 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8502 /* If we are taking the address of a constant and are at the
8503 top level, we have to use output_constant_def since we can't
8504 call force_const_mem at top level. */
8506 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8507 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8509 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8512 /* We make sure to pass const0_rtx down if we came in with
8513 ignore set, to avoid doing the cleanups twice for something. */
8514 op0 = expand_expr (TREE_OPERAND (exp, 0),
8515 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8516 (modifier == EXPAND_INITIALIZER
8517 ? modifier : EXPAND_CONST_ADDRESS));
8519 /* If we are going to ignore the result, OP0 will have been set
8520 to const0_rtx, so just return it. Don't get confused and
8521 think we are taking the address of the constant. */
8525 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8526 clever and returns a REG when given a MEM. */
8527 op0 = protect_from_queue (op0, 1);
8529 /* We would like the object in memory. If it is a constant, we can
8530 have it be statically allocated into memory. For a non-constant,
8531 we need to allocate some memory and store the value into it. */
8533 if (CONSTANT_P (op0))
8534 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8536 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8537 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8538 || GET_CODE (op0) == PARALLEL)
8540 /* If the operand is a SAVE_EXPR, we can deal with this by
8541 forcing the SAVE_EXPR into memory. */
8542 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8544 put_var_into_stack (TREE_OPERAND (exp, 0));
8545 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8549 /* If this object is in a register, it can't be BLKmode. */
8550 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8551 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8553 if (GET_CODE (op0) == PARALLEL)
8554 /* Handle calls that pass values in multiple
8555 non-contiguous locations. The Irix 6 ABI has examples
8557 emit_group_store (memloc, op0,
8558 int_size_in_bytes (inner_type));
8560 emit_move_insn (memloc, op0);
8566 if (GET_CODE (op0) != MEM)
8569 mark_temp_addr_taken (op0);
8570 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8572 op0 = XEXP (op0, 0);
8573 #ifdef POINTERS_EXTEND_UNSIGNED
8574 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8575 && mode == ptr_mode)
8576 op0 = convert_memory_address (ptr_mode, op0);
8581 /* If OP0 is not aligned as least as much as the type requires, we
8582 need to make a temporary, copy OP0 to it, and take the address of
8583 the temporary. We want to use the alignment of the type, not of
8584 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8585 the test for BLKmode means that can't happen. The test for
8586 BLKmode is because we never make mis-aligned MEMs with
8589 We don't need to do this at all if the machine doesn't have
8590 strict alignment. */
8591 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8592 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8594 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8596 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8598 = assign_stack_temp_for_type
8599 (TYPE_MODE (inner_type),
8600 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8601 : int_size_in_bytes (inner_type),
8602 1, build_qualified_type (inner_type,
8603 (TYPE_QUALS (inner_type)
8604 | TYPE_QUAL_CONST)));
8606 if (TYPE_ALIGN_OK (inner_type))
8609 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8613 op0 = force_operand (XEXP (op0, 0), target);
8617 && GET_CODE (op0) != REG
8618 && modifier != EXPAND_CONST_ADDRESS
8619 && modifier != EXPAND_INITIALIZER
8620 && modifier != EXPAND_SUM)
8621 op0 = force_reg (Pmode, op0);
8623 if (GET_CODE (op0) == REG
8624 && ! REG_USERVAR_P (op0))
8625 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8627 #ifdef POINTERS_EXTEND_UNSIGNED
8628 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8629 && mode == ptr_mode)
8630 op0 = convert_memory_address (ptr_mode, op0);
8635 case ENTRY_VALUE_EXPR:
8638 /* COMPLEX type for Extended Pascal & Fortran */
8641 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8644 /* Get the rtx code of the operands. */
8645 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8646 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8649 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8653 /* Move the real (op0) and imaginary (op1) parts to their location. */
8654 emit_move_insn (gen_realpart (mode, target), op0);
8655 emit_move_insn (gen_imagpart (mode, target), op1);
8657 insns = get_insns ();
8660 /* Complex construction should appear as a single unit. */
8661 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8662 each with a separate pseudo as destination.
8663 It's not correct for flow to treat them as a unit. */
8664 if (GET_CODE (target) != CONCAT)
8665 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8673 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8674 return gen_realpart (mode, op0);
8677 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8678 return gen_imagpart (mode, op0);
8682 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8686 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8689 target = gen_reg_rtx (mode);
8693 /* Store the realpart and the negated imagpart to target. */
8694 emit_move_insn (gen_realpart (partmode, target),
8695 gen_realpart (partmode, op0));
8697 imag_t = gen_imagpart (partmode, target);
8698 temp = expand_unop (partmode,
8699 ! unsignedp && flag_trapv
8700 && (GET_MODE_CLASS(partmode) == MODE_INT)
8701 ? negv_optab : neg_optab,
8702 gen_imagpart (partmode, op0), imag_t, 0);
8704 emit_move_insn (imag_t, temp);
8706 insns = get_insns ();
8709 /* Conjugate should appear as a single unit
8710 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8711 each with a separate pseudo as destination.
8712 It's not correct for flow to treat them as a unit. */
8713 if (GET_CODE (target) != CONCAT)
8714 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8721 case TRY_CATCH_EXPR:
8723 tree handler = TREE_OPERAND (exp, 1);
8725 expand_eh_region_start ();
8727 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8729 expand_eh_region_end_cleanup (handler);
8734 case TRY_FINALLY_EXPR:
8736 tree try_block = TREE_OPERAND (exp, 0);
8737 tree finally_block = TREE_OPERAND (exp, 1);
8738 rtx finally_label = gen_label_rtx ();
8739 rtx done_label = gen_label_rtx ();
8740 rtx return_link = gen_reg_rtx (Pmode);
8741 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8742 (tree) finally_label, (tree) return_link);
8743 TREE_SIDE_EFFECTS (cleanup) = 1;
8745 /* Start a new binding layer that will keep track of all cleanup
8746 actions to be performed. */
8747 expand_start_bindings (2);
8749 target_temp_slot_level = temp_slot_level;
8751 expand_decl_cleanup (NULL_TREE, cleanup);
8752 op0 = expand_expr (try_block, target, tmode, modifier);
8754 preserve_temp_slots (op0);
8755 expand_end_bindings (NULL_TREE, 0, 0);
8756 emit_jump (done_label);
8757 emit_label (finally_label);
8758 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8759 emit_indirect_jump (return_link);
8760 emit_label (done_label);
8764 case GOTO_SUBROUTINE_EXPR:
8766 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8767 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8768 rtx return_address = gen_label_rtx ();
8769 emit_move_insn (return_link,
8770 gen_rtx_LABEL_REF (Pmode, return_address));
8772 emit_label (return_address);
8777 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8780 return get_exception_pointer (cfun);
8783 /* Function descriptors are not valid except for as
8784 initialization constants, and should not be expanded. */
8788 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8791 /* Here to do an ordinary binary operator, generating an instruction
8792 from the optab already placed in `this_optab'. */
8794 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8796 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8797 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8799 temp = expand_binop (mode, this_optab, op0, op1, target,
8800 unsignedp, OPTAB_LIB_WIDEN);
8806 /* Return the tree node if a ARG corresponds to a string constant or zero
8807 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8808 in bytes within the string that ARG is accessing. The type of the
8809 offset will be `sizetype'. */
8812 string_constant (arg, ptr_offset)
8818 if (TREE_CODE (arg) == ADDR_EXPR
8819 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8821 *ptr_offset = size_zero_node;
8822 return TREE_OPERAND (arg, 0);
8824 else if (TREE_CODE (arg) == PLUS_EXPR)
8826 tree arg0 = TREE_OPERAND (arg, 0);
8827 tree arg1 = TREE_OPERAND (arg, 1);
8832 if (TREE_CODE (arg0) == ADDR_EXPR
8833 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8835 *ptr_offset = convert (sizetype, arg1);
8836 return TREE_OPERAND (arg0, 0);
8838 else if (TREE_CODE (arg1) == ADDR_EXPR
8839 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8841 *ptr_offset = convert (sizetype, arg0);
8842 return TREE_OPERAND (arg1, 0);
8849 /* Expand code for a post- or pre- increment or decrement
8850 and return the RTX for the result.
8851 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8854 expand_increment (exp, post, ignore)
8860 tree incremented = TREE_OPERAND (exp, 0);
8861 optab this_optab = add_optab;
8863 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8864 int op0_is_copy = 0;
8865 int single_insn = 0;
8866 /* 1 means we can't store into OP0 directly,
8867 because it is a subreg narrower than a word,
8868 and we don't dare clobber the rest of the word. */
8871 /* Stabilize any component ref that might need to be
8872 evaluated more than once below. */
8874 || TREE_CODE (incremented) == BIT_FIELD_REF
8875 || (TREE_CODE (incremented) == COMPONENT_REF
8876 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8877 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8878 incremented = stabilize_reference (incremented);
8879 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8880 ones into save exprs so that they don't accidentally get evaluated
8881 more than once by the code below. */
8882 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8883 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8884 incremented = save_expr (incremented);
8886 /* Compute the operands as RTX.
8887 Note whether OP0 is the actual lvalue or a copy of it:
8888 I believe it is a copy iff it is a register or subreg
8889 and insns were generated in computing it. */
8891 temp = get_last_insn ();
8892 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8894 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8895 in place but instead must do sign- or zero-extension during assignment,
8896 so we copy it into a new register and let the code below use it as
8899 Note that we can safely modify this SUBREG since it is know not to be
8900 shared (it was made by the expand_expr call above). */
8902 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8905 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8909 else if (GET_CODE (op0) == SUBREG
8910 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8912 /* We cannot increment this SUBREG in place. If we are
8913 post-incrementing, get a copy of the old value. Otherwise,
8914 just mark that we cannot increment in place. */
8916 op0 = copy_to_reg (op0);
8921 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8922 && temp != get_last_insn ());
8923 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8925 /* Decide whether incrementing or decrementing. */
8926 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8927 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8928 this_optab = sub_optab;
8930 /* Convert decrement by a constant into a negative increment. */
8931 if (this_optab == sub_optab
8932 && GET_CODE (op1) == CONST_INT)
8934 op1 = GEN_INT (-INTVAL (op1));
8935 this_optab = add_optab;
8938 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8939 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8941 /* For a preincrement, see if we can do this with a single instruction. */
8944 icode = (int) this_optab->handlers[(int) mode].insn_code;
8945 if (icode != (int) CODE_FOR_nothing
8946 /* Make sure that OP0 is valid for operands 0 and 1
8947 of the insn we want to queue. */
8948 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8949 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8950 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8954 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8955 then we cannot just increment OP0. We must therefore contrive to
8956 increment the original value. Then, for postincrement, we can return
8957 OP0 since it is a copy of the old value. For preincrement, expand here
8958 unless we can do it with a single insn.
8960 Likewise if storing directly into OP0 would clobber high bits
8961 we need to preserve (bad_subreg). */
8962 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8964 /* This is the easiest way to increment the value wherever it is.
8965 Problems with multiple evaluation of INCREMENTED are prevented
8966 because either (1) it is a component_ref or preincrement,
8967 in which case it was stabilized above, or (2) it is an array_ref
8968 with constant index in an array in a register, which is
8969 safe to reevaluate. */
8970 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8971 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8972 ? MINUS_EXPR : PLUS_EXPR),
8975 TREE_OPERAND (exp, 1));
8977 while (TREE_CODE (incremented) == NOP_EXPR
8978 || TREE_CODE (incremented) == CONVERT_EXPR)
8980 newexp = convert (TREE_TYPE (incremented), newexp);
8981 incremented = TREE_OPERAND (incremented, 0);
8984 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8985 return post ? op0 : temp;
8990 /* We have a true reference to the value in OP0.
8991 If there is an insn to add or subtract in this mode, queue it.
8992 Queueing the increment insn avoids the register shuffling
8993 that often results if we must increment now and first save
8994 the old value for subsequent use. */
8996 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8997 op0 = stabilize (op0);
9000 icode = (int) this_optab->handlers[(int) mode].insn_code;
9001 if (icode != (int) CODE_FOR_nothing
9002 /* Make sure that OP0 is valid for operands 0 and 1
9003 of the insn we want to queue. */
9004 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9005 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9007 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9008 op1 = force_reg (mode, op1);
9010 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9012 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9014 rtx addr = (general_operand (XEXP (op0, 0), mode)
9015 ? force_reg (Pmode, XEXP (op0, 0))
9016 : copy_to_reg (XEXP (op0, 0)));
9019 op0 = replace_equiv_address (op0, addr);
9020 temp = force_reg (GET_MODE (op0), op0);
9021 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9022 op1 = force_reg (mode, op1);
9024 /* The increment queue is LIFO, thus we have to `queue'
9025 the instructions in reverse order. */
9026 enqueue_insn (op0, gen_move_insn (op0, temp));
9027 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9032 /* Preincrement, or we can't increment with one simple insn. */
9034 /* Save a copy of the value before inc or dec, to return it later. */
9035 temp = value = copy_to_reg (op0);
9037 /* Arrange to return the incremented value. */
9038 /* Copy the rtx because expand_binop will protect from the queue,
9039 and the results of that would be invalid for us to return
9040 if our caller does emit_queue before using our result. */
9041 temp = copy_rtx (value = op0);
9043 /* Increment however we can. */
9044 op1 = expand_binop (mode, this_optab, value, op1, op0,
9045 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9047 /* Make sure the value is stored into OP0. */
9049 emit_move_insn (op0, op1);
9054 /* At the start of a function, record that we have no previously-pushed
9055 arguments waiting to be popped. */
9058 init_pending_stack_adjust ()
9060 pending_stack_adjust = 0;
9063 /* When exiting from function, if safe, clear out any pending stack adjust
9064 so the adjustment won't get done.
9066 Note, if the current function calls alloca, then it must have a
9067 frame pointer regardless of the value of flag_omit_frame_pointer. */
9070 clear_pending_stack_adjust ()
9072 #ifdef EXIT_IGNORE_STACK
9074 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9075 && EXIT_IGNORE_STACK
9076 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9077 && ! flag_inline_functions)
9079 stack_pointer_delta -= pending_stack_adjust,
9080 pending_stack_adjust = 0;
9085 /* Pop any previously-pushed arguments that have not been popped yet. */
9088 do_pending_stack_adjust ()
9090 if (inhibit_defer_pop == 0)
9092 if (pending_stack_adjust != 0)
9093 adjust_stack (GEN_INT (pending_stack_adjust));
9094 pending_stack_adjust = 0;
9098 /* Expand conditional expressions. */
9100 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9101 LABEL is an rtx of code CODE_LABEL, in this function and all the
9105 jumpifnot (exp, label)
9109 do_jump (exp, label, NULL_RTX);
9112 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9119 do_jump (exp, NULL_RTX, label);
9122 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9123 the result is zero, or IF_TRUE_LABEL if the result is one.
9124 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9125 meaning fall through in that case.
9127 do_jump always does any pending stack adjust except when it does not
9128 actually perform a jump. An example where there is no jump
9129 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9131 This function is responsible for optimizing cases such as
9132 &&, || and comparison operators in EXP. */
9135 do_jump (exp, if_false_label, if_true_label)
9137 rtx if_false_label, if_true_label;
9139 enum tree_code code = TREE_CODE (exp);
9140 /* Some cases need to create a label to jump to
9141 in order to properly fall through.
9142 These cases set DROP_THROUGH_LABEL nonzero. */
9143 rtx drop_through_label = 0;
9147 enum machine_mode mode;
9149 #ifdef MAX_INTEGER_COMPUTATION_MODE
9150 check_max_integer_computation_mode (exp);
9161 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9167 /* This is not true with #pragma weak */
9169 /* The address of something can never be zero. */
9171 emit_jump (if_true_label);
9176 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9177 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9178 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9179 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9182 /* If we are narrowing the operand, we have to do the compare in the
9184 if ((TYPE_PRECISION (TREE_TYPE (exp))
9185 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9187 case NON_LVALUE_EXPR:
9188 case REFERENCE_EXPR:
9193 /* These cannot change zero->non-zero or vice versa. */
9194 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9197 case WITH_RECORD_EXPR:
9198 /* Put the object on the placeholder list, recurse through our first
9199 operand, and pop the list. */
9200 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9202 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9203 placeholder_list = TREE_CHAIN (placeholder_list);
9207 /* This is never less insns than evaluating the PLUS_EXPR followed by
9208 a test and can be longer if the test is eliminated. */
9210 /* Reduce to minus. */
9211 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9212 TREE_OPERAND (exp, 0),
9213 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9214 TREE_OPERAND (exp, 1))));
9215 /* Process as MINUS. */
9219 /* Non-zero iff operands of minus differ. */
9220 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9221 TREE_OPERAND (exp, 0),
9222 TREE_OPERAND (exp, 1)),
9223 NE, NE, if_false_label, if_true_label);
9227 /* If we are AND'ing with a small constant, do this comparison in the
9228 smallest type that fits. If the machine doesn't have comparisons
9229 that small, it will be converted back to the wider comparison.
9230 This helps if we are testing the sign bit of a narrower object.
9231 combine can't do this for us because it can't know whether a
9232 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9234 if (! SLOW_BYTE_ACCESS
9235 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9236 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9237 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9238 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9239 && (type = type_for_mode (mode, 1)) != 0
9240 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9241 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9242 != CODE_FOR_nothing))
9244 do_jump (convert (type, exp), if_false_label, if_true_label);
9249 case TRUTH_NOT_EXPR:
9250 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9253 case TRUTH_ANDIF_EXPR:
9254 if (if_false_label == 0)
9255 if_false_label = drop_through_label = gen_label_rtx ();
9256 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9257 start_cleanup_deferral ();
9258 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9259 end_cleanup_deferral ();
9262 case TRUTH_ORIF_EXPR:
9263 if (if_true_label == 0)
9264 if_true_label = drop_through_label = gen_label_rtx ();
9265 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9266 start_cleanup_deferral ();
9267 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9268 end_cleanup_deferral ();
9273 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9274 preserve_temp_slots (NULL_RTX);
9278 do_pending_stack_adjust ();
9279 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9285 case ARRAY_RANGE_REF:
9287 HOST_WIDE_INT bitsize, bitpos;
9289 enum machine_mode mode;
9294 /* Get description of this reference. We don't actually care
9295 about the underlying object here. */
9296 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9297 &unsignedp, &volatilep);
9299 type = type_for_size (bitsize, unsignedp);
9300 if (! SLOW_BYTE_ACCESS
9301 && type != 0 && bitsize >= 0
9302 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9303 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9304 != CODE_FOR_nothing))
9306 do_jump (convert (type, exp), if_false_label, if_true_label);
9313 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9314 if (integer_onep (TREE_OPERAND (exp, 1))
9315 && integer_zerop (TREE_OPERAND (exp, 2)))
9316 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9318 else if (integer_zerop (TREE_OPERAND (exp, 1))
9319 && integer_onep (TREE_OPERAND (exp, 2)))
9320 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9324 rtx label1 = gen_label_rtx ();
9325 drop_through_label = gen_label_rtx ();
9327 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9329 start_cleanup_deferral ();
9330 /* Now the THEN-expression. */
9331 do_jump (TREE_OPERAND (exp, 1),
9332 if_false_label ? if_false_label : drop_through_label,
9333 if_true_label ? if_true_label : drop_through_label);
9334 /* In case the do_jump just above never jumps. */
9335 do_pending_stack_adjust ();
9336 emit_label (label1);
9338 /* Now the ELSE-expression. */
9339 do_jump (TREE_OPERAND (exp, 2),
9340 if_false_label ? if_false_label : drop_through_label,
9341 if_true_label ? if_true_label : drop_through_label);
9342 end_cleanup_deferral ();
9348 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9350 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9351 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9353 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9354 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9357 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9358 fold (build (EQ_EXPR, TREE_TYPE (exp),
9359 fold (build1 (REALPART_EXPR,
9360 TREE_TYPE (inner_type),
9362 fold (build1 (REALPART_EXPR,
9363 TREE_TYPE (inner_type),
9365 fold (build (EQ_EXPR, TREE_TYPE (exp),
9366 fold (build1 (IMAGPART_EXPR,
9367 TREE_TYPE (inner_type),
9369 fold (build1 (IMAGPART_EXPR,
9370 TREE_TYPE (inner_type),
9372 if_false_label, if_true_label);
9375 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9376 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9378 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9379 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9380 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9382 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9388 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9390 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9391 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9393 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9394 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9397 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9398 fold (build (NE_EXPR, TREE_TYPE (exp),
9399 fold (build1 (REALPART_EXPR,
9400 TREE_TYPE (inner_type),
9402 fold (build1 (REALPART_EXPR,
9403 TREE_TYPE (inner_type),
9405 fold (build (NE_EXPR, TREE_TYPE (exp),
9406 fold (build1 (IMAGPART_EXPR,
9407 TREE_TYPE (inner_type),
9409 fold (build1 (IMAGPART_EXPR,
9410 TREE_TYPE (inner_type),
9412 if_false_label, if_true_label);
9415 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9416 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9418 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9419 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9420 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9422 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9427 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9428 if (GET_MODE_CLASS (mode) == MODE_INT
9429 && ! can_compare_p (LT, mode, ccp_jump))
9430 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9432 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9436 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9437 if (GET_MODE_CLASS (mode) == MODE_INT
9438 && ! can_compare_p (LE, mode, ccp_jump))
9439 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9441 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9445 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9446 if (GET_MODE_CLASS (mode) == MODE_INT
9447 && ! can_compare_p (GT, mode, ccp_jump))
9448 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9450 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9454 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9455 if (GET_MODE_CLASS (mode) == MODE_INT
9456 && ! can_compare_p (GE, mode, ccp_jump))
9457 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9459 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9462 case UNORDERED_EXPR:
9465 enum rtx_code cmp, rcmp;
9468 if (code == UNORDERED_EXPR)
9469 cmp = UNORDERED, rcmp = ORDERED;
9471 cmp = ORDERED, rcmp = UNORDERED;
9472 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9475 if (! can_compare_p (cmp, mode, ccp_jump)
9476 && (can_compare_p (rcmp, mode, ccp_jump)
9477 /* If the target doesn't provide either UNORDERED or ORDERED
9478 comparisons, canonicalize on UNORDERED for the library. */
9479 || rcmp == UNORDERED))
9483 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9485 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9490 enum rtx_code rcode1;
9491 enum tree_code tcode2;
9515 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9516 if (can_compare_p (rcode1, mode, ccp_jump))
9517 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9521 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9522 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9525 /* If the target doesn't support combined unordered
9526 compares, decompose into UNORDERED + comparison. */
9527 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9528 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9529 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9530 do_jump (exp, if_false_label, if_true_label);
9536 __builtin_expect (<test>, 0) and
9537 __builtin_expect (<test>, 1)
9539 We need to do this here, so that <test> is not converted to a SCC
9540 operation on machines that use condition code registers and COMPARE
9541 like the PowerPC, and then the jump is done based on whether the SCC
9542 operation produced a 1 or 0. */
9544 /* Check for a built-in function. */
9545 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9547 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9548 tree arglist = TREE_OPERAND (exp, 1);
9550 if (TREE_CODE (fndecl) == FUNCTION_DECL
9551 && DECL_BUILT_IN (fndecl)
9552 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9553 && arglist != NULL_TREE
9554 && TREE_CHAIN (arglist) != NULL_TREE)
9556 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9559 if (seq != NULL_RTX)
9566 /* fall through and generate the normal code. */
9570 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9572 /* This is not needed any more and causes poor code since it causes
9573 comparisons and tests from non-SI objects to have different code
9575 /* Copy to register to avoid generating bad insns by cse
9576 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9577 if (!cse_not_expected && GET_CODE (temp) == MEM)
9578 temp = copy_to_reg (temp);
9580 do_pending_stack_adjust ();
9581 /* Do any postincrements in the expression that was tested. */
9584 if (GET_CODE (temp) == CONST_INT
9585 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9586 || GET_CODE (temp) == LABEL_REF)
9588 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9592 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9593 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9594 /* Note swapping the labels gives us not-equal. */
9595 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9596 else if (GET_MODE (temp) != VOIDmode)
9597 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9598 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9599 GET_MODE (temp), NULL_RTX,
9600 if_false_label, if_true_label);
9605 if (drop_through_label)
9607 /* If do_jump produces code that might be jumped around,
9608 do any stack adjusts from that code, before the place
9609 where control merges in. */
9610 do_pending_stack_adjust ();
9611 emit_label (drop_through_label);
9615 /* Given a comparison expression EXP for values too wide to be compared
9616 with one insn, test the comparison and jump to the appropriate label.
9617 The code of EXP is ignored; we always test GT if SWAP is 0,
9618 and LT if SWAP is 1. */
9621 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9624 rtx if_false_label, if_true_label;
9626 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9627 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9628 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9629 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9631 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9634 /* Compare OP0 with OP1, word at a time, in mode MODE.
9635 UNSIGNEDP says to do unsigned comparison.
9636 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9639 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9640 enum machine_mode mode;
9643 rtx if_false_label, if_true_label;
9645 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9646 rtx drop_through_label = 0;
9649 if (! if_true_label || ! if_false_label)
9650 drop_through_label = gen_label_rtx ();
9651 if (! if_true_label)
9652 if_true_label = drop_through_label;
9653 if (! if_false_label)
9654 if_false_label = drop_through_label;
9656 /* Compare a word at a time, high order first. */
9657 for (i = 0; i < nwords; i++)
9659 rtx op0_word, op1_word;
9661 if (WORDS_BIG_ENDIAN)
9663 op0_word = operand_subword_force (op0, i, mode);
9664 op1_word = operand_subword_force (op1, i, mode);
9668 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9669 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9672 /* All but high-order word must be compared as unsigned. */
9673 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9674 (unsignedp || i > 0), word_mode, NULL_RTX,
9675 NULL_RTX, if_true_label);
9677 /* Consider lower words only if these are equal. */
9678 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9679 NULL_RTX, NULL_RTX, if_false_label);
9683 emit_jump (if_false_label);
9684 if (drop_through_label)
9685 emit_label (drop_through_label);
9688 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9689 with one insn, test the comparison and jump to the appropriate label. */
9692 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9694 rtx if_false_label, if_true_label;
9696 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9697 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9698 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9699 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9701 rtx drop_through_label = 0;
9703 if (! if_false_label)
9704 drop_through_label = if_false_label = gen_label_rtx ();
9706 for (i = 0; i < nwords; i++)
9707 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9708 operand_subword_force (op1, i, mode),
9709 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9710 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9713 emit_jump (if_true_label);
9714 if (drop_through_label)
9715 emit_label (drop_through_label);
9718 /* Jump according to whether OP0 is 0.
9719 We assume that OP0 has an integer mode that is too wide
9720 for the available compare insns. */
9723 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9725 rtx if_false_label, if_true_label;
9727 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9730 rtx drop_through_label = 0;
9732 /* The fastest way of doing this comparison on almost any machine is to
9733 "or" all the words and compare the result. If all have to be loaded
9734 from memory and this is a very wide item, it's possible this may
9735 be slower, but that's highly unlikely. */
9737 part = gen_reg_rtx (word_mode);
9738 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9739 for (i = 1; i < nwords && part != 0; i++)
9740 part = expand_binop (word_mode, ior_optab, part,
9741 operand_subword_force (op0, i, GET_MODE (op0)),
9742 part, 1, OPTAB_WIDEN);
9746 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9747 NULL_RTX, if_false_label, if_true_label);
9752 /* If we couldn't do the "or" simply, do this with a series of compares. */
9753 if (! if_false_label)
9754 drop_through_label = if_false_label = gen_label_rtx ();
9756 for (i = 0; i < nwords; i++)
9757 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9758 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9759 if_false_label, NULL_RTX);
9762 emit_jump (if_true_label);
9764 if (drop_through_label)
9765 emit_label (drop_through_label);
9768 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9769 (including code to compute the values to be compared)
9770 and set (CC0) according to the result.
9771 The decision as to signed or unsigned comparison must be made by the caller.
9773 We force a stack adjustment unless there are currently
9774 things pushed on the stack that aren't yet used.
9776 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9780 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9784 enum machine_mode mode;
9789 /* If one operand is constant, make it the second one. Only do this
9790 if the other operand is not constant as well. */
9792 if (swap_commutative_operands_p (op0, op1))
9797 code = swap_condition (code);
9802 op0 = force_not_mem (op0);
9803 op1 = force_not_mem (op1);
9806 do_pending_stack_adjust ();
9808 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9809 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9813 /* There's no need to do this now that combine.c can eliminate lots of
9814 sign extensions. This can be less efficient in certain cases on other
9817 /* If this is a signed equality comparison, we can do it as an
9818 unsigned comparison since zero-extension is cheaper than sign
9819 extension and comparisons with zero are done as unsigned. This is
9820 the case even on machines that can do fast sign extension, since
9821 zero-extension is easier to combine with other operations than
9822 sign-extension is. If we are comparing against a constant, we must
9823 convert it to what it would look like unsigned. */
9824 if ((code == EQ || code == NE) && ! unsignedp
9825 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9827 if (GET_CODE (op1) == CONST_INT
9828 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9829 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9834 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9836 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9839 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9840 The decision as to signed or unsigned comparison must be made by the caller.
9842 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9846 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9847 if_false_label, if_true_label)
9851 enum machine_mode mode;
9853 rtx if_false_label, if_true_label;
9856 int dummy_true_label = 0;
9858 /* Reverse the comparison if that is safe and we want to jump if it is
9860 if (! if_true_label && ! FLOAT_MODE_P (mode))
9862 if_true_label = if_false_label;
9864 code = reverse_condition (code);
9867 /* If one operand is constant, make it the second one. Only do this
9868 if the other operand is not constant as well. */
9870 if (swap_commutative_operands_p (op0, op1))
9875 code = swap_condition (code);
9880 op0 = force_not_mem (op0);
9881 op1 = force_not_mem (op1);
9884 do_pending_stack_adjust ();
9886 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9887 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9889 if (tem == const_true_rtx)
9892 emit_jump (if_true_label);
9897 emit_jump (if_false_label);
9903 /* There's no need to do this now that combine.c can eliminate lots of
9904 sign extensions. This can be less efficient in certain cases on other
9907 /* If this is a signed equality comparison, we can do it as an
9908 unsigned comparison since zero-extension is cheaper than sign
9909 extension and comparisons with zero are done as unsigned. This is
9910 the case even on machines that can do fast sign extension, since
9911 zero-extension is easier to combine with other operations than
9912 sign-extension is. If we are comparing against a constant, we must
9913 convert it to what it would look like unsigned. */
9914 if ((code == EQ || code == NE) && ! unsignedp
9915 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9917 if (GET_CODE (op1) == CONST_INT
9918 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9919 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9924 if (! if_true_label)
9926 dummy_true_label = 1;
9927 if_true_label = gen_label_rtx ();
9930 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9934 emit_jump (if_false_label);
9935 if (dummy_true_label)
9936 emit_label (if_true_label);
9939 /* Generate code for a comparison expression EXP (including code to compute
9940 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9941 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9942 generated code will drop through.
9943 SIGNED_CODE should be the rtx operation for this comparison for
9944 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9946 We force a stack adjustment unless there are currently
9947 things pushed on the stack that aren't yet used. */
9950 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9953 enum rtx_code signed_code, unsigned_code;
9954 rtx if_false_label, if_true_label;
9958 enum machine_mode mode;
9962 /* Don't crash if the comparison was erroneous. */
9963 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9964 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9967 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9968 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9971 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9972 mode = TYPE_MODE (type);
9973 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9974 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9975 || (GET_MODE_BITSIZE (mode)
9976 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9979 /* op0 might have been replaced by promoted constant, in which
9980 case the type of second argument should be used. */
9981 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9982 mode = TYPE_MODE (type);
9984 unsignedp = TREE_UNSIGNED (type);
9985 code = unsignedp ? unsigned_code : signed_code;
9987 #ifdef HAVE_canonicalize_funcptr_for_compare
9988 /* If function pointers need to be "canonicalized" before they can
9989 be reliably compared, then canonicalize them. */
9990 if (HAVE_canonicalize_funcptr_for_compare
9991 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9992 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9995 rtx new_op0 = gen_reg_rtx (mode);
9997 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10001 if (HAVE_canonicalize_funcptr_for_compare
10002 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10003 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10006 rtx new_op1 = gen_reg_rtx (mode);
10008 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10013 /* Do any postincrements in the expression that was tested. */
10016 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10018 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10019 if_false_label, if_true_label);
10022 /* Generate code to calculate EXP using a store-flag instruction
10023 and return an rtx for the result. EXP is either a comparison
10024 or a TRUTH_NOT_EXPR whose operand is a comparison.
10026 If TARGET is nonzero, store the result there if convenient.
10028 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10031 Return zero if there is no suitable set-flag instruction
10032 available on this machine.
10034 Once expand_expr has been called on the arguments of the comparison,
10035 we are committed to doing the store flag, since it is not safe to
10036 re-evaluate the expression. We emit the store-flag insn by calling
10037 emit_store_flag, but only expand the arguments if we have a reason
10038 to believe that emit_store_flag will be successful. If we think that
10039 it will, but it isn't, we have to simulate the store-flag with a
10040 set/jump/set sequence. */
10043 do_store_flag (exp, target, mode, only_cheap)
10046 enum machine_mode mode;
10049 enum rtx_code code;
10050 tree arg0, arg1, type;
10052 enum machine_mode operand_mode;
10056 enum insn_code icode;
10057 rtx subtarget = target;
10060 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10061 result at the end. We can't simply invert the test since it would
10062 have already been inverted if it were valid. This case occurs for
10063 some floating-point comparisons. */
10065 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10066 invert = 1, exp = TREE_OPERAND (exp, 0);
10068 arg0 = TREE_OPERAND (exp, 0);
10069 arg1 = TREE_OPERAND (exp, 1);
10071 /* Don't crash if the comparison was erroneous. */
10072 if (arg0 == error_mark_node || arg1 == error_mark_node)
10075 type = TREE_TYPE (arg0);
10076 operand_mode = TYPE_MODE (type);
10077 unsignedp = TREE_UNSIGNED (type);
10079 /* We won't bother with BLKmode store-flag operations because it would mean
10080 passing a lot of information to emit_store_flag. */
10081 if (operand_mode == BLKmode)
10084 /* We won't bother with store-flag operations involving function pointers
10085 when function pointers must be canonicalized before comparisons. */
10086 #ifdef HAVE_canonicalize_funcptr_for_compare
10087 if (HAVE_canonicalize_funcptr_for_compare
10088 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10089 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10091 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10092 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10093 == FUNCTION_TYPE))))
10100 /* Get the rtx comparison code to use. We know that EXP is a comparison
10101 operation of some type. Some comparisons against 1 and -1 can be
10102 converted to comparisons with zero. Do so here so that the tests
10103 below will be aware that we have a comparison with zero. These
10104 tests will not catch constants in the first operand, but constants
10105 are rarely passed as the first operand. */
10107 switch (TREE_CODE (exp))
10116 if (integer_onep (arg1))
10117 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10119 code = unsignedp ? LTU : LT;
10122 if (! unsignedp && integer_all_onesp (arg1))
10123 arg1 = integer_zero_node, code = LT;
10125 code = unsignedp ? LEU : LE;
10128 if (! unsignedp && integer_all_onesp (arg1))
10129 arg1 = integer_zero_node, code = GE;
10131 code = unsignedp ? GTU : GT;
10134 if (integer_onep (arg1))
10135 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10137 code = unsignedp ? GEU : GE;
10140 case UNORDERED_EXPR:
10166 /* Put a constant second. */
10167 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10169 tem = arg0; arg0 = arg1; arg1 = tem;
10170 code = swap_condition (code);
10173 /* If this is an equality or inequality test of a single bit, we can
10174 do this by shifting the bit being tested to the low-order bit and
10175 masking the result with the constant 1. If the condition was EQ,
10176 we xor it with 1. This does not require an scc insn and is faster
10177 than an scc insn even if we have it. */
10179 if ((code == NE || code == EQ)
10180 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10181 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10183 tree inner = TREE_OPERAND (arg0, 0);
10184 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10187 /* If INNER is a right shift of a constant and it plus BITNUM does
10188 not overflow, adjust BITNUM and INNER. */
10190 if (TREE_CODE (inner) == RSHIFT_EXPR
10191 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10192 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10193 && bitnum < TYPE_PRECISION (type)
10194 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10195 bitnum - TYPE_PRECISION (type)))
10197 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10198 inner = TREE_OPERAND (inner, 0);
10201 /* If we are going to be able to omit the AND below, we must do our
10202 operations as unsigned. If we must use the AND, we have a choice.
10203 Normally unsigned is faster, but for some machines signed is. */
10204 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10205 #ifdef LOAD_EXTEND_OP
10206 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10212 if (! get_subtarget (subtarget)
10213 || GET_MODE (subtarget) != operand_mode
10214 || ! safe_from_p (subtarget, inner, 1))
10217 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10220 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10221 size_int (bitnum), subtarget, ops_unsignedp);
10223 if (GET_MODE (op0) != mode)
10224 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10226 if ((code == EQ && ! invert) || (code == NE && invert))
10227 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10228 ops_unsignedp, OPTAB_LIB_WIDEN);
10230 /* Put the AND last so it can combine with more things. */
10231 if (bitnum != TYPE_PRECISION (type) - 1)
10232 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10237 /* Now see if we are likely to be able to do this. Return if not. */
10238 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10241 icode = setcc_gen_code[(int) code];
10242 if (icode == CODE_FOR_nothing
10243 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10245 /* We can only do this if it is one of the special cases that
10246 can be handled without an scc insn. */
10247 if ((code == LT && integer_zerop (arg1))
10248 || (! only_cheap && code == GE && integer_zerop (arg1)))
10250 else if (BRANCH_COST >= 0
10251 && ! only_cheap && (code == NE || code == EQ)
10252 && TREE_CODE (type) != REAL_TYPE
10253 && ((abs_optab->handlers[(int) operand_mode].insn_code
10254 != CODE_FOR_nothing)
10255 || (ffs_optab->handlers[(int) operand_mode].insn_code
10256 != CODE_FOR_nothing)))
10262 if (! get_subtarget (target)
10263 || GET_MODE (subtarget) != operand_mode
10264 || ! safe_from_p (subtarget, arg1, 1))
10267 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10268 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10271 target = gen_reg_rtx (mode);
10273 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10274 because, if the emit_store_flag does anything it will succeed and
10275 OP0 and OP1 will not be used subsequently. */
10277 result = emit_store_flag (target, code,
10278 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10279 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10280 operand_mode, unsignedp, 1);
10285 result = expand_binop (mode, xor_optab, result, const1_rtx,
10286 result, 0, OPTAB_LIB_WIDEN);
10290 /* If this failed, we have to do this with set/compare/jump/set code. */
10291 if (GET_CODE (target) != REG
10292 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10293 target = gen_reg_rtx (GET_MODE (target));
10295 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10296 result = compare_from_rtx (op0, op1, code, unsignedp,
10297 operand_mode, NULL_RTX);
10298 if (GET_CODE (result) == CONST_INT)
10299 return (((result == const0_rtx && ! invert)
10300 || (result != const0_rtx && invert))
10301 ? const0_rtx : const1_rtx);
10303 /* The code of RESULT may not match CODE if compare_from_rtx
10304 decided to swap its operands and reverse the original code.
10306 We know that compare_from_rtx returns either a CONST_INT or
10307 a new comparison code, so it is safe to just extract the
10308 code from RESULT. */
10309 code = GET_CODE (result);
10311 label = gen_label_rtx ();
10312 if (bcc_gen_fctn[(int) code] == 0)
10315 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10316 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10317 emit_label (label);
10323 /* Stubs in case we haven't got a casesi insn. */
10324 #ifndef HAVE_casesi
10325 # define HAVE_casesi 0
10326 # define gen_casesi(a, b, c, d, e) (0)
10327 # define CODE_FOR_casesi CODE_FOR_nothing
10330 /* If the machine does not have a case insn that compares the bounds,
10331 this means extra overhead for dispatch tables, which raises the
10332 threshold for using them. */
10333 #ifndef CASE_VALUES_THRESHOLD
10334 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10335 #endif /* CASE_VALUES_THRESHOLD */
10338 case_values_threshold ()
10340 return CASE_VALUES_THRESHOLD;
10343 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10344 0 otherwise (i.e. if there is no casesi instruction). */
10346 try_casesi (index_type, index_expr, minval, range,
10347 table_label, default_label)
10348 tree index_type, index_expr, minval, range;
10349 rtx table_label ATTRIBUTE_UNUSED;
10352 enum machine_mode index_mode = SImode;
10353 int index_bits = GET_MODE_BITSIZE (index_mode);
10354 rtx op1, op2, index;
10355 enum machine_mode op_mode;
10360 /* Convert the index to SImode. */
10361 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10363 enum machine_mode omode = TYPE_MODE (index_type);
10364 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10366 /* We must handle the endpoints in the original mode. */
10367 index_expr = build (MINUS_EXPR, index_type,
10368 index_expr, minval);
10369 minval = integer_zero_node;
10370 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10371 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10372 omode, 1, default_label);
10373 /* Now we can safely truncate. */
10374 index = convert_to_mode (index_mode, index, 0);
10378 if (TYPE_MODE (index_type) != index_mode)
10380 index_expr = convert (type_for_size (index_bits, 0),
10382 index_type = TREE_TYPE (index_expr);
10385 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10388 index = protect_from_queue (index, 0);
10389 do_pending_stack_adjust ();
10391 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10392 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10394 index = copy_to_mode_reg (op_mode, index);
10396 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10398 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10399 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10400 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10401 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10403 op1 = copy_to_mode_reg (op_mode, op1);
10405 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10407 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10408 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10409 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10410 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10412 op2 = copy_to_mode_reg (op_mode, op2);
10414 emit_jump_insn (gen_casesi (index, op1, op2,
10415 table_label, default_label));
10419 /* Attempt to generate a tablejump instruction; same concept. */
10420 #ifndef HAVE_tablejump
10421 #define HAVE_tablejump 0
10422 #define gen_tablejump(x, y) (0)
10425 /* Subroutine of the next function.
10427 INDEX is the value being switched on, with the lowest value
10428 in the table already subtracted.
10429 MODE is its expected mode (needed if INDEX is constant).
10430 RANGE is the length of the jump table.
10431 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10433 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10434 index value is out of range. */
10437 do_tablejump (index, mode, range, table_label, default_label)
10438 rtx index, range, table_label, default_label;
10439 enum machine_mode mode;
10443 /* Do an unsigned comparison (in the proper mode) between the index
10444 expression and the value which represents the length of the range.
10445 Since we just finished subtracting the lower bound of the range
10446 from the index expression, this comparison allows us to simultaneously
10447 check that the original index expression value is both greater than
10448 or equal to the minimum value of the range and less than or equal to
10449 the maximum value of the range. */
10451 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10454 /* If index is in range, it must fit in Pmode.
10455 Convert to Pmode so we can index with it. */
10457 index = convert_to_mode (Pmode, index, 1);
10459 /* Don't let a MEM slip thru, because then INDEX that comes
10460 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10461 and break_out_memory_refs will go to work on it and mess it up. */
10462 #ifdef PIC_CASE_VECTOR_ADDRESS
10463 if (flag_pic && GET_CODE (index) != REG)
10464 index = copy_to_mode_reg (Pmode, index);
10467 /* If flag_force_addr were to affect this address
10468 it could interfere with the tricky assumptions made
10469 about addresses that contain label-refs,
10470 which may be valid only very near the tablejump itself. */
10471 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10472 GET_MODE_SIZE, because this indicates how large insns are. The other
10473 uses should all be Pmode, because they are addresses. This code
10474 could fail if addresses and insns are not the same size. */
10475 index = gen_rtx_PLUS (Pmode,
10476 gen_rtx_MULT (Pmode, index,
10477 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10478 gen_rtx_LABEL_REF (Pmode, table_label));
10479 #ifdef PIC_CASE_VECTOR_ADDRESS
10481 index = PIC_CASE_VECTOR_ADDRESS (index);
10484 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10485 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10486 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10487 RTX_UNCHANGING_P (vector) = 1;
10488 convert_move (temp, vector, 0);
10490 emit_jump_insn (gen_tablejump (temp, table_label));
10492 /* If we are generating PIC code or if the table is PC-relative, the
10493 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10494 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10499 try_tablejump (index_type, index_expr, minval, range,
10500 table_label, default_label)
10501 tree index_type, index_expr, minval, range;
10502 rtx table_label, default_label;
10506 if (! HAVE_tablejump)
10509 index_expr = fold (build (MINUS_EXPR, index_type,
10510 convert (index_type, index_expr),
10511 convert (index_type, minval)));
10512 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10514 index = protect_from_queue (index, 0);
10515 do_pending_stack_adjust ();
10517 do_tablejump (index, TYPE_MODE (index_type),
10518 convert_modes (TYPE_MODE (index_type),
10519 TYPE_MODE (TREE_TYPE (range)),
10520 expand_expr (range, NULL_RTX,
10522 TREE_UNSIGNED (TREE_TYPE (range))),
10523 table_label, default_label);