/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.
This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
extern struct obstack permanent_obstack;

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */
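/* Illustrative sketch (not part of the original source): to queue the
   increment performed by a C post-increment "i++", where VAR is the
   rtx holding I, a caller might write

     rtx q = enqueue_insn (var, gen_move_insn (var,
					       plus_constant (var, 1)));

   Q then stands for the pre-increment value of VAR until the queue
   is flushed by emit_queue.  VAR and Q are hypothetical names.  */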
static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
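/* Illustrative sketch (not part of the original source): a caller that
   may be handed QUEUED rtx's writes, e.g.,

     op0 = protect_from_queue (op0, 0);
     op1 = protect_from_queue (op1, 0);
     emit_insn (gen_addsi3 (target, op0, op1));

   where gen_addsi3 stands in for any insn generator.  Holding the
   protected values across an intervening emit_queue would be unsafe,
   exactly as the warning above describes.  */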
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
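/* Illustrative sketch (not part of the original source): to widen an
   unsigned QImode value FROM into an existing SImode register TO:

     convert_move (to, from, 1);

   Since UNSIGNEDP is 1 this zero-extends; passing 0 would sign-extend.
   TO and FROM are hypothetical names.  */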
void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);
  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
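      /* Comment added for clarity (not in the original source): when
	 none of the truncation insns above matched, the code that
	 follows falls back on a library call; LIBCALL is picked from
	 the extend/trunc libfuncs according to the FROM_MODE/TO_MODE
	 pair, e.g. extendsfdf2_libfunc for SFmode -> DFmode.  */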
	  libcall = extendsfdf2_libfunc;

	  libcall = extendsfxf2_libfunc;

	  libcall = extendsftf2_libfunc;

	  libcall = truncdfsf2_libfunc;

	  libcall = extenddfxf2_libfunc;

	  libcall = extenddftf2_libfunc;

	  libcall = truncxfsf2_libfunc;

	  libcall = truncxfdf2_libfunc;

	  libcall = trunctfsf2_libfunc;

	  libcall = trunctfdf2_libfunc;
      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
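/* Illustrative sketch (not part of the original source): constants
   carry VOIDmode, so a caller that knows X originated as a signed
   HImode value writes

     x = convert_modes (SImode, HImode, x, 0);

   and gets back either X reinterpreted in SImode, when that is safe,
   or a new pseudo holding the converted value.  */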
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
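  /* Worked example (added for illustration): with a 32-bit
     HOST_WIDE_INT, converting the CONST_INT -1 to an unsigned 64-bit
     mode must yield 0x00000000ffffffff, i.e. a low word of all ones
     and a high word of zero; gen_lowpart alone would produce all ones
     in both words.  immed_double_const (val, 0, mode) builds exactly
     the zero-extended constant.  */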
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
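/* Illustrative sketch (not part of the original source): copying a
   12-byte BLKmode object whose alignment is 32 bits:

     move_by_pieces (to, from, 12, 32);

   On a 32-bit target this expands to three SImode moves.  TO and FROM
   must already be protected from the queue, per the comment above.  */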
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
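/* Worked example (added for illustration): on a 32-bit target with
   L == 7 and word alignment, the loop below counts one SImode move
   (4 bytes), one HImode move (2 bytes) and one QImode move (1 byte),
   so move_by_pieces_ninsns returns 3.  */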
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
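/* Illustrative sketch (not part of the original source): with X and Y
   BLKmode MEMs and SIZE an rtx,

     rtx ret = emit_block_move (x, y, size);

   RET is the address returned by memcpy when a library call was
   emitted, and 0 when a movstr pattern or move_by_pieces was used.  */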
rtx
emit_block_move (x, y, size)
     rtx x, y, size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
1650 move_by_pieces (x, y, INTVAL (size), align);
1653 /* Try the most limited insn first, because there's no point
1654 including more than one in the machine description unless
1655 the more limited one has some advantage. */
1657 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1658 enum machine_mode mode;
1660 /* Since this is a move insn, we don't care about volatility. */
1663 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1664 mode = GET_MODE_WIDER_MODE (mode))
1666 enum insn_code code = movstr_optab[(int) mode];
1667 insn_operand_predicate_fn pred;
1669 if (code != CODE_FOR_nothing
1670 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1671 here because if SIZE is less than the mode mask, as it is
1672 returned by the macro, it will definitely be less than the
1673 actual mode mask. */
1674 && ((GET_CODE (size) == CONST_INT
1675 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1676 <= (GET_MODE_MASK (mode) >> 1)))
1677 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1678 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1679 || (*pred) (x, BLKmode))
1680 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1681 || (*pred) (y, BLKmode))
1682 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1683 || (*pred) (opalign, VOIDmode)))
1686 rtx last = get_last_insn ();
1689 op2 = convert_to_mode (mode, size, 1);
1690 pred = insn_data[(int) code].operand[2].predicate;
1691 if (pred != 0 && ! (*pred) (op2, mode))
1692 op2 = copy_to_mode_reg (mode, op2);
1694 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1702 delete_insns_since (last);
      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif
#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      /* If we are initializing a readonly value, show the above call
	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
	 from a loop.  */
      if (RTX_UNCHANGING_P (x))
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be necessary.  */
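/* Illustrative sketch (not part of the original source): a PARALLEL
   describing a value split between two registers might look like

     (parallel [(expr_list (reg:SI 3) (const_int 0))
		(expr_list (reg:SI 4) (const_int 4))])

   i.e. bytes 0-3 live in register 3 and bytes 4-7 in register 4;
   emit_group_load copies each such piece out of ORIG_SRC in turn.  */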
void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if ((bytepos == 0
	       && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	      || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
	    {
	      tmps[i] = XEXP (src, bytepos != 0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     0, 1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */
void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */
rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
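  /* Worked example (added for illustration): for a 3-byte structure on
     a 32-bit BYTES_BIG_ENDIAN target, bytes % UNITS_PER_WORD == 3, so
     big_endian_correction == 32 - 3 * 8 == 8: the copy below skips the
     unused high-order byte of the source register.  */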
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  BITS_PER_WORD),
		       BITS_PER_WORD);
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
2285 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2286 PARALLEL REGS. This is for calls that pass values in multiple
2287 non-contiguous locations. The Irix 6 ABI has examples of this. */
2290 use_group_regs (call_fusage, regs)
2296 for (i = 0; i < XVECLEN (regs, 0); i++)
2298 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2300 /* A NULL entry means the parameter goes both on the stack and in
2301 registers. This can also be a MEM for targets that pass values
2302 partially on the stack and partially in registers. */
2303 if (reg != 0 && GET_CODE (reg) == REG)
2304 use_reg (call_fusage, reg);
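/* Illustration only, not part of the original file: a value whose two
   halves travel in non-adjacent hard registers might be described by a
   PARALLEL such as

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 6) (const_int 8))])

   for which the loop above adds a USE of (reg:DI 4) and (reg:DI 6).  */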
2310 can_store_by_pieces (len, constfun, constfundata, align)
2311 unsigned HOST_WIDE_INT len;
2312 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2316 unsigned HOST_WIDE_INT max_size, l;
2317 HOST_WIDE_INT offset = 0;
2318 enum machine_mode mode, tmode;
2319 enum insn_code icode;
2323 if (! MOVE_BY_PIECES_P (len, align))
2326 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2327 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2328 align = MOVE_MAX * BITS_PER_UNIT;
2330 /* We would first store what we can in the largest integer mode, then go to
2331 successively smaller modes. */
2334 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2339 max_size = MOVE_MAX_PIECES + 1;
2340 while (max_size > 1)
2342 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2343 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2344 if (GET_MODE_SIZE (tmode) < max_size)
2347 if (mode == VOIDmode)
2350 icode = mov_optab->handlers[(int) mode].insn_code;
2351 if (icode != CODE_FOR_nothing
2352 && align >= GET_MODE_ALIGNMENT (mode))
2354 unsigned int size = GET_MODE_SIZE (mode);
2361 cst = (*constfun) (constfundata, offset, mode);
2362 if (!LEGITIMATE_CONSTANT_P (cst))
2372 max_size = GET_MODE_SIZE (mode);
2375 /* The code above should have handled everything. */
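/* A minimal sketch, not part of the original file, of the CONSTFUN
   contract that can_store_by_pieces checks: given the opaque
   CONSTFUNDATA pointer, a byte OFFSET and a MODE, return an rtx of
   that mode holding the bytes to store there.  A memset-style callback
   (hypothetical name) might look like this: */
#if 0
static rtx
fill_byte_constfun (data, offset, mode)
     PTR data;				/* Points at the fill byte.  */
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  unsigned HOST_WIDE_INT c = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  /* Replicate the byte across the mode; this assumes the mode fits in
     a HOST_WIDE_INT, which holds for the integer modes tried above.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | c;

  return GEN_INT (trunc_int_for_mode ((HOST_WIDE_INT) val, mode));
}
#endif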
2383 /* Generate several move instructions to store LEN bytes generated by
2384 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2385 pointer which will be passed as argument in every CONSTFUN call.
2386 ALIGN is maximum alignment we can assume. */
2389 store_by_pieces (to, len, constfun, constfundata, align)
2391 unsigned HOST_WIDE_INT len;
2392 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2396 struct store_by_pieces data;
2398 if (! MOVE_BY_PIECES_P (len, align))
2400 to = protect_from_queue (to, 1);
2401 data.constfun = constfun;
2402 data.constfundata = constfundata;
2405 store_by_pieces_1 (&data, align);
2408 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2409 rtx with BLKmode). The caller must pass TO through protect_from_queue
2410 before calling. ALIGN is maximum alignment we can assume. */
2413 clear_by_pieces (to, len, align)
2415 unsigned HOST_WIDE_INT len;
2418 struct store_by_pieces data;
2420 data.constfun = clear_by_pieces_1;
2421 data.constfundata = NULL;
2424 store_by_pieces_1 (&data, align);
2427 /* Callback routine for clear_by_pieces.
2428 Return const0_rtx unconditionally. */
2431 clear_by_pieces_1 (data, offset, mode)
2432 PTR data ATTRIBUTE_UNUSED;
2433 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2434 enum machine_mode mode ATTRIBUTE_UNUSED;
2439 /* Subroutine of clear_by_pieces and store_by_pieces.
2440 Generate several move instructions to store LEN bytes of block TO. (A MEM
2441 rtx with BLKmode). The caller must pass TO through protect_from_queue
2442 before calling. ALIGN is maximum alignment we can assume. */
2445 store_by_pieces_1 (data, align)
2446 struct store_by_pieces *data;
2449 rtx to_addr = XEXP (data->to, 0);
2450 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2451 enum machine_mode mode = VOIDmode, tmode;
2452 enum insn_code icode;
2455 data->to_addr = to_addr;
2457 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2458 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2460 data->explicit_inc_to = 0;
2462 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2464 data->offset = data->len;
2466 /* If storing requires more than two move insns,
2467 copy addresses to registers (to make displacements shorter)
2468 and use post-increment if available. */
2469 if (!data->autinc_to
2470 && move_by_pieces_ninsns (data->len, align) > 2)
2472 /* Determine the main mode we'll be using. */
2473 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2474 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2475 if (GET_MODE_SIZE (tmode) < max_size)
2478 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2480 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2481 data->autinc_to = 1;
2482 data->explicit_inc_to = -1;
2485 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2486 && ! data->autinc_to)
2488 data->to_addr = copy_addr_to_reg (to_addr);
2489 data->autinc_to = 1;
2490 data->explicit_inc_to = 1;
2493 if (!data->autinc_to && CONSTANT_P (to_addr))
2494 data->to_addr = copy_addr_to_reg (to_addr);
2497 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2498 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2499 align = MOVE_MAX * BITS_PER_UNIT;
2501 /* First store what we can in the largest integer mode, then go to
2502 successively smaller modes. */
2504 while (max_size > 1)
2506 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2507 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2508 if (GET_MODE_SIZE (tmode) < max_size)
2511 if (mode == VOIDmode)
2514 icode = mov_optab->handlers[(int) mode].insn_code;
2515 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2516 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2518 max_size = GET_MODE_SIZE (mode);
2521 /* The code above should have handled everything. */
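/* Worked example, illustrative only: with MOVE_MAX_PIECES == 4 and
   data->len == 7, the loop above first picks SImode (one 4-byte store,
   3 bytes left), max_size then drops to 4 so HImode covers 2 more, and
   finally QImode stores the last byte: three stores in all.  */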
2526 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2527 with move instructions for mode MODE. GENFUN is the gen_... function
2528 to make a move insn for that mode. DATA has all the other info. */
2531 store_by_pieces_2 (genfun, mode, data)
2532 rtx (*genfun) PARAMS ((rtx, ...));
2533 enum machine_mode mode;
2534 struct store_by_pieces *data;
2536 unsigned int size = GET_MODE_SIZE (mode);
2539 while (data->len >= size)
2542 data->offset -= size;
2544 if (data->autinc_to)
2545 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2548 to1 = adjust_address (data->to, mode, data->offset);
2550 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2551 emit_insn (gen_add2_insn (data->to_addr,
2552 GEN_INT (-(HOST_WIDE_INT) size)));
2554 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2555 emit_insn ((*genfun) (to1, cst));
2557 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2558 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2560 if (! data->reverse)
2561 data->offset += size;
2567 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2568 its length in bytes. */
2571 clear_storage (object, size)
2575 #ifdef TARGET_MEM_FUNCTIONS
2577 tree call_expr, arg_list;
2580 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2581 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2583 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2584 just move a zero. Otherwise, do this a piece at a time. */
2585 if (GET_MODE (object) != BLKmode
2586 && GET_CODE (size) == CONST_INT
2587 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2588 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2591 object = protect_from_queue (object, 1);
2592 size = protect_from_queue (size, 0);
2594 if (GET_CODE (size) == CONST_INT
2595 && MOVE_BY_PIECES_P (INTVAL (size), align))
2596 clear_by_pieces (object, INTVAL (size), align);
2599 /* Try the most limited insn first, because there's no point
2600 including more than one in the machine description unless
2601 the more limited one has some advantage. */
2603 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2604 enum machine_mode mode;
2606 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2607 mode = GET_MODE_WIDER_MODE (mode))
2609 enum insn_code code = clrstr_optab[(int) mode];
2610 insn_operand_predicate_fn pred;
2612 if (code != CODE_FOR_nothing
2613 /* We don't need MODE to be narrower than
2614 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2615 the mode mask, as it is returned by the macro, it will
2616 definitely be less than the actual mode mask. */
2617 && ((GET_CODE (size) == CONST_INT
2618 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2619 <= (GET_MODE_MASK (mode) >> 1)))
2620 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2621 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2622 || (*pred) (object, BLKmode))
2623 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2624 || (*pred) (opalign, VOIDmode)))
2627 rtx last = get_last_insn ();
2630 op1 = convert_to_mode (mode, size, 1);
2631 pred = insn_data[(int) code].operand[1].predicate;
2632 if (pred != 0 && ! (*pred) (op1, mode))
2633 op1 = copy_to_mode_reg (mode, op1);
2635 pat = GEN_FCN ((int) code) (object, op1, opalign);
2642 delete_insns_since (last);
2646 /* OBJECT or SIZE may have been passed through protect_from_queue.
2648 It is unsafe to save the value generated by protect_from_queue
2649 and reuse it later. Consider what happens if emit_queue is
2650 called before the return value from protect_from_queue is used.
2652 Expansion of the CALL_EXPR below will call emit_queue before
2653 we are finished emitting RTL for argument setup. So if we are
2654 not careful we could get the wrong value for an argument.
2656 To avoid this problem we go ahead and emit code to copy OBJECT
2657 and SIZE into new pseudos. We can then place those new pseudos
2658 into an RTL_EXPR and use them later, even after a call to emit_queue.
2661 Note this is not strictly needed for library calls since they
2662 do not call emit_queue before loading their arguments. However,
2663 we may need to have library calls call emit_queue in the future
2664 since failing to do so could cause problems for targets which
2665 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2666 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2668 #ifdef TARGET_MEM_FUNCTIONS
2669 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2671 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2672 TREE_UNSIGNED (integer_type_node));
2673 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2676 #ifdef TARGET_MEM_FUNCTIONS
2677 /* It is incorrect to use the libcall calling conventions to call
2678 memset in this context.
2680 This could be a user call to memset and the user may wish to
2681 examine the return value from memset.
2683 For targets where libcalls and normal calls have different
2684 conventions for returning pointers, we could end up generating incorrect code.
2687 So instead of using a libcall sequence we build up a suitable
2688 CALL_EXPR and expand the call in the normal fashion. */
2689 if (fn == NULL_TREE)
2693 /* This was copied from except.c; I don't know whether all of this is
2694 necessary in this context. */
2695 fn = get_identifier ("memset");
2696 fntype = build_pointer_type (void_type_node);
2697 fntype = build_function_type (fntype, NULL_TREE);
2698 fn = build_decl (FUNCTION_DECL, fn, fntype);
2699 ggc_add_tree_root (&fn, 1);
2700 DECL_EXTERNAL (fn) = 1;
2701 TREE_PUBLIC (fn) = 1;
2702 DECL_ARTIFICIAL (fn) = 1;
2703 TREE_NOTHROW (fn) = 1;
2704 make_decl_rtl (fn, NULL);
2705 assemble_external (fn);
2708 /* We need to make an argument list for the function call.
2710 memset has three arguments: the first is a void * address, the
2711 second an integer with the initialization value, and the last is a
2712 size_t byte count for the store. */
2714 = build_tree_list (NULL_TREE,
2715 make_tree (build_pointer_type (void_type_node),
2717 TREE_CHAIN (arg_list)
2718 = build_tree_list (NULL_TREE,
2719 make_tree (integer_type_node, const0_rtx));
2720 TREE_CHAIN (TREE_CHAIN (arg_list))
2721 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2722 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2724 /* Now we have to build up the CALL_EXPR itself. */
2725 call_expr = build1 (ADDR_EXPR,
2726 build_pointer_type (TREE_TYPE (fn)), fn);
2727 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2728 call_expr, arg_list, NULL_TREE);
2729 TREE_SIDE_EFFECTS (call_expr) = 1;
2731 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2733 emit_library_call (bzero_libfunc, LCT_NORMAL,
2734 VOIDmode, 2, object, Pmode, size,
2735 TYPE_MODE (integer_type_node));
2738 /* If we are initializing a readonly value, show the above call
2739 clobbered it. Otherwise, a load from it may erroneously be
2740 hoisted from a loop. */
2741 if (RTX_UNCHANGING_P (object))
2742 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
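/* A hedged usage sketch, not in the original file: a caller clearing a
   64-byte BLKmode stack temporary might write

     rtx mem = assign_stack_temp (BLKmode, 64, 0);
     clear_storage (mem, GEN_INT (64));

   letting the code above choose between clear_by_pieces, a clrstr
   pattern, and a memset/bzero call.  */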
2749 /* Generate code to copy Y into X.
2750 Both Y and X must have the same mode, except that
2751 Y can be a constant with VOIDmode.
2752 This mode cannot be BLKmode; use emit_block_move for that.
2754 Return the last instruction emitted. */
2757 emit_move_insn (x, y)
2760 enum machine_mode mode = GET_MODE (x);
2761 rtx y_cst = NULL_RTX;
2764 x = protect_from_queue (x, 1);
2765 y = protect_from_queue (y, 0);
2767 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2770 /* Never force constant_p_rtx to memory. */
2771 if (GET_CODE (y) == CONSTANT_P_RTX)
2773 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2776 y = force_const_mem (mode, y);
2779 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2781 if (GET_CODE (x) == MEM
2782 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2783 && ! push_operand (x, GET_MODE (x)))
2785 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2786 x = validize_mem (x);
2788 if (GET_CODE (y) == MEM
2789 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2791 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2792 y = validize_mem (y);
2794 if (mode == BLKmode)
2797 last_insn = emit_move_insn_1 (x, y);
2799 if (y_cst && GET_CODE (x) == REG)
2800 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2805 /* Low level part of emit_move_insn.
2806 Called just like emit_move_insn, but assumes X and Y
2807 are basically valid. */
2810 emit_move_insn_1 (x, y)
2813 enum machine_mode mode = GET_MODE (x);
2814 enum machine_mode submode;
2815 enum mode_class class = GET_MODE_CLASS (mode);
2817 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2820 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2822 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2824 /* Expand complex moves by moving real part and imag part, if possible. */
2825 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2826 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2828 (class == MODE_COMPLEX_INT
2829 ? MODE_INT : MODE_FLOAT),
2831 && (mov_optab->handlers[(int) submode].insn_code
2832 != CODE_FOR_nothing))
2834 /* Don't split destination if it is a stack push. */
2835 int stack = push_operand (x, GET_MODE (x));
2837 #ifdef PUSH_ROUNDING
2838 /* In case we output to the stack, but the size is smaller than the
2839 machine can push exactly, we need to use move instructions. */
2841 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2842 != GET_MODE_SIZE (submode)))
2845 HOST_WIDE_INT offset1, offset2;
2847 /* Do not use anti_adjust_stack, since we don't want to update
2848 stack_pointer_delta. */
2849 temp = expand_binop (Pmode,
2850 #ifdef STACK_GROWS_DOWNWARD
2858 (GET_MODE_SIZE (GET_MODE (x)))),
2859 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2861 if (temp != stack_pointer_rtx)
2862 emit_move_insn (stack_pointer_rtx, temp);
2864 #ifdef STACK_GROWS_DOWNWARD
2866 offset2 = GET_MODE_SIZE (submode);
2868 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2869 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2870 + GET_MODE_SIZE (submode));
2873 emit_move_insn (change_address (x, submode,
2874 gen_rtx_PLUS (Pmode,
2876 GEN_INT (offset1))),
2877 gen_realpart (submode, y));
2878 emit_move_insn (change_address (x, submode,
2879 gen_rtx_PLUS (Pmode,
2881 GEN_INT (offset2))),
2882 gen_imagpart (submode, y));
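          /* Worked example, not from the original source: pushing an
             SCmode value (two SFmode words) on a downward-growing stack
             where PUSH_ROUNDING is the identity gives offset1 == 0 and
             offset2 == 4, so the real part lands at the new stack
             pointer and the imaginary part 4 bytes above it.  */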
2886 /* If this is a stack push, push the highpart first, so it
2887 will be in the argument order.
2889 In that case, change_address is used only to convert
2890 the mode, not to change the address. */
2893 /* Note that the real part always precedes the imag part in memory
2894 regardless of machine's endianness. */
2895 #ifdef STACK_GROWS_DOWNWARD
2896 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2897 (gen_rtx_MEM (submode, XEXP (x, 0)),
2898 gen_imagpart (submode, y)));
2899 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2900 (gen_rtx_MEM (submode, XEXP (x, 0)),
2901 gen_realpart (submode, y)));
2903 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2904 (gen_rtx_MEM (submode, XEXP (x, 0)),
2905 gen_realpart (submode, y)));
2906 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2907 (gen_rtx_MEM (submode, XEXP (x, 0)),
2908 gen_imagpart (submode, y)));
2913 rtx realpart_x, realpart_y;
2914 rtx imagpart_x, imagpart_y;
2916 /* If this is a complex value with each part being smaller than a
2917 word, the usual calling sequence will likely pack the pieces into
2918 a single register. Unfortunately, SUBREG of hard registers only
2919 deals in terms of words, so we have a problem converting input
2920 arguments to the CONCAT of two registers that is used elsewhere
2921 for complex values. If this is before reload, we can copy it into
2922 memory and reload. FIXME, we should see about using extract and
2923 insert on integer registers, but complex short and complex char
2924 variables should be rarely used. */
2925 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2926 && (reload_in_progress | reload_completed) == 0)
2929 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2931 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2933 if (packed_dest_p || packed_src_p)
2935 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2936 ? MODE_FLOAT : MODE_INT);
2938 enum machine_mode reg_mode
2939 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2941 if (reg_mode != BLKmode)
2943 rtx mem = assign_stack_temp (reg_mode,
2944 GET_MODE_SIZE (mode), 0);
2945 rtx cmem = adjust_address (mem, mode, 0);
2948 = N_("function using short complex types cannot be inline");
2952 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2954 emit_move_insn_1 (cmem, y);
2955 return emit_move_insn_1 (sreg, mem);
2959 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2961 emit_move_insn_1 (mem, sreg);
2962 return emit_move_insn_1 (x, cmem);
2968 realpart_x = gen_realpart (submode, x);
2969 realpart_y = gen_realpart (submode, y);
2970 imagpart_x = gen_imagpart (submode, x);
2971 imagpart_y = gen_imagpart (submode, y);
2973 /* Show the output dies here. This is necessary for SUBREGs
2974 of pseudos since we cannot track their lifetimes correctly;
2975 hard regs shouldn't appear here except as return values.
2976 We never want to emit such a clobber after reload. */
2978 && ! (reload_in_progress || reload_completed)
2979 && (GET_CODE (realpart_x) == SUBREG
2980 || GET_CODE (imagpart_x) == SUBREG))
2981 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2983 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2984 (realpart_x, realpart_y));
2985 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2986 (imagpart_x, imagpart_y));
2989 return get_last_insn ();
2992 /* This will handle any multi-word mode that lacks a move_insn pattern.
2993 However, you will get better code if you define such patterns,
2994 even if they must turn into multiple assembler instructions. */
2995 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3002 #ifdef PUSH_ROUNDING
3004 /* If X is a push on the stack, do the push now and replace
3005 X with a reference to the stack pointer. */
3006 if (push_operand (x, GET_MODE (x)))
3011 /* Do not use anti_adjust_stack, since we don't want to update
3012 stack_pointer_delta. */
3013 temp = expand_binop (Pmode,
3014 #ifdef STACK_GROWS_DOWNWARD
3022 (GET_MODE_SIZE (GET_MODE (x)))),
3023 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3025 if (temp != stack_pointer_rtx)
3026 emit_move_insn (stack_pointer_rtx, temp);
3028 code = GET_CODE (XEXP (x, 0));
3030 /* Just hope that small offsets off SP are OK. */
3031 if (code == POST_INC)
3032 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3033 GEN_INT (-((HOST_WIDE_INT)
3034 GET_MODE_SIZE (GET_MODE (x)))));
3035 else if (code == POST_DEC)
3036 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3037 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3039 temp = stack_pointer_rtx;
3041 x = change_address (x, VOIDmode, temp);
3045 /* If we are in reload, see if either operand is a MEM whose address
3046 is scheduled for replacement. */
3047 if (reload_in_progress && GET_CODE (x) == MEM
3048 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3049 x = replace_equiv_address_nv (x, inner);
3050 if (reload_in_progress && GET_CODE (y) == MEM
3051 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3052 y = replace_equiv_address_nv (y, inner);
3058 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3061 rtx xpart = operand_subword (x, i, 1, mode);
3062 rtx ypart = operand_subword (y, i, 1, mode);
3064 /* If we can't get a part of Y, put Y into memory if it is a
3065 constant. Otherwise, force it into a register. If we still
3066 can't get a part of Y, abort. */
3067 if (ypart == 0 && CONSTANT_P (y))
3069 y = force_const_mem (mode, y);
3070 ypart = operand_subword (y, i, 1, mode);
3072 else if (ypart == 0)
3073 ypart = operand_subword_force (y, i, mode);
3075 if (xpart == 0 || ypart == 0)
3078 need_clobber |= (GET_CODE (xpart) == SUBREG);
3080 last_insn = emit_move_insn (xpart, ypart);
3083 seq = gen_sequence ();
3086 /* Show the output dies here. This is necessary for SUBREGs
3087 of pseudos since we cannot track their lifetimes correctly;
3088 hard regs shouldn't appear here except as return values.
3089 We never want to emit such a clobber after reload. */
3091 && ! (reload_in_progress || reload_completed)
3092 && need_clobber != 0)
3093 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3103 /* Pushing data onto the stack. */
3105 /* Push a block of length SIZE (perhaps variable)
3106 and return an rtx to address the beginning of the block.
3107 Note that it is not possible for the value returned to be a QUEUED.
3108 The value may be virtual_outgoing_args_rtx.
3110 EXTRA is the number of bytes of padding to push in addition to SIZE.
3111 BELOW nonzero means this padding comes at low addresses;
3112 otherwise, the padding comes at high addresses. */
3115 push_block (size, extra, below)
3121 size = convert_modes (Pmode, ptr_mode, size, 1);
3122 if (CONSTANT_P (size))
3123 anti_adjust_stack (plus_constant (size, extra));
3124 else if (GET_CODE (size) == REG && extra == 0)
3125 anti_adjust_stack (size);
3128 temp = copy_to_mode_reg (Pmode, size);
3130 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3131 temp, 0, OPTAB_LIB_WIDEN);
3132 anti_adjust_stack (temp);
3135 #ifndef STACK_GROWS_DOWNWARD
3141 temp = virtual_outgoing_args_rtx;
3142 if (extra != 0 && below)
3143 temp = plus_constant (temp, extra);
3147 if (GET_CODE (size) == CONST_INT)
3148 temp = plus_constant (virtual_outgoing_args_rtx,
3149 -INTVAL (size) - (below ? 0 : extra));
3150 else if (extra != 0 && !below)
3151 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3152 negate_rtx (Pmode, plus_constant (size, extra)));
3154 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3155 negate_rtx (Pmode, size));
3158 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
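/* Illustrative usage, not part of the original file:

     rtx addr = push_block (GEN_INT (32), 4, 1);

   reserves 32 bytes plus 4 bytes of padding at the low end and returns
   the address of the 32-byte block itself, just above the padding on a
   downward-growing stack.  */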
3161 #ifdef PUSH_ROUNDING
3163 /* Emit single push insn. */
3166 emit_single_push_insn (mode, x, type)
3168 enum machine_mode mode;
3172 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
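  /* Illustration, not in the original source: if PUSH_ROUNDING rounds
     every push up to 4 bytes, an HImode push has GET_MODE_SIZE == 2 but
     rounded_size == 4, so the simple STACK_PUSH_CODE address below is
     skipped in favor of a PRE_MODIFY that moves the stack pointer by
     the full rounded amount.  */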
3174 enum insn_code icode;
3175 insn_operand_predicate_fn pred;
3177 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3178 /* If there is a push pattern, use it. Otherwise try the old way of
3179 throwing a MEM representing the push operation to the move expander. */
3180 icode = push_optab->handlers[(int) mode].insn_code;
3181 if (icode != CODE_FOR_nothing)
3183 if (((pred = insn_data[(int) icode].operand[0].predicate)
3184 && !((*pred) (x, mode))))
3185 x = force_reg (mode, x);
3186 emit_insn (GEN_FCN (icode) (x));
3189 if (GET_MODE_SIZE (mode) == rounded_size)
3190 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3193 #ifdef STACK_GROWS_DOWNWARD
3194 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3195 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3197 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3198 GEN_INT (rounded_size));
3200 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3203 dest = gen_rtx_MEM (mode, dest_addr);
3207 set_mem_attributes (dest, type, 1);
3209 if (flag_optimize_sibling_calls)
3210 /* Function incoming arguments may overlap with sibling call
3211 outgoing arguments and we cannot allow reordering of reads
3212 from function arguments with stores to outgoing arguments
3213 of sibling calls. */
3214 set_mem_alias_set (dest, 0);
3216 emit_move_insn (dest, x);
3220 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3222 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3224 SIZE is an rtx for the size of data to be copied (in bytes),
3225 needed only if X is BLKmode.
3227 ALIGN (in bits) is maximum alignment we can assume.
3229 If PARTIAL and REG are both nonzero, then copy that many of the first
3230 words of X into registers starting with REG, and push the rest of X.
3231 The amount of space pushed is decreased by PARTIAL words,
3232 rounded *down* to a multiple of PARM_BOUNDARY.
3233 REG must be a hard register in this case.
3234 If REG is zero but PARTIAL is not, take all other actions for an
3235 argument partially in registers, but do not actually load any registers.
3238 EXTRA is the amount in bytes of extra space to leave next to this arg.
3239 This is ignored if an argument block has already been allocated.
3241 On a machine that lacks real push insns, ARGS_ADDR is the address of
3242 the bottom of the argument block for this call. We use indexing off there
3243 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3244 argument block has not been preallocated.
3246 ARGS_SO_FAR is the size of args previously pushed for this call.
3248 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3249 for arguments passed in registers. If nonzero, it will be the number
3250 of bytes required. */
3253 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3254 args_addr, args_so_far, reg_parm_stack_space,
3257 enum machine_mode mode;
3266 int reg_parm_stack_space;
3270 enum direction stack_direction
3271 #ifdef STACK_GROWS_DOWNWARD
3277 /* Decide where to pad the argument: `downward' for below,
3278 `upward' for above, or `none' for don't pad it.
3279 Default is below for small data on big-endian machines; else above. */
3280 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3282 /* Invert direction if stack is post-decrement.
3284 if (STACK_PUSH_CODE == POST_DEC)
3285 if (where_pad != none)
3286 where_pad = (where_pad == downward ? upward : downward);
3288 xinner = x = protect_from_queue (x, 0);
3290 if (mode == BLKmode)
3292 /* Copy a block into the stack, entirely or partially. */
3295 int used = partial * UNITS_PER_WORD;
3296 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3304 /* USED is now the # of bytes we need not copy to the stack
3305 because registers will take care of them. */
3308 xinner = adjust_address (xinner, BLKmode, used);
3310 /* If the partial register-part of the arg counts in its stack size,
3311 skip the part of stack space corresponding to the registers.
3312 Otherwise, start copying to the beginning of the stack space,
3313 by setting SKIP to 0. */
3314 skip = (reg_parm_stack_space == 0) ? 0 : used;
3316 #ifdef PUSH_ROUNDING
3317 /* Do it with several push insns if that doesn't take lots of insns
3318 and if there is no difficulty with push insns that skip bytes
3319 on the stack for alignment purposes. */
3322 && GET_CODE (size) == CONST_INT
3324 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3325 /* Here we avoid the case of a structure whose weak alignment
3326 forces many pushes of a small amount of data,
3327 and such small pushes do rounding that causes trouble. */
3328 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3329 || align >= BIGGEST_ALIGNMENT
3330 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3331 == (align / BITS_PER_UNIT)))
3332 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3334 /* Push padding now if padding above and stack grows down,
3335 or if padding below and stack grows up.
3336 But if space already allocated, this has already been done. */
3337 if (extra && args_addr == 0
3338 && where_pad != none && where_pad != stack_direction)
3339 anti_adjust_stack (GEN_INT (extra));
3341 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3344 #endif /* PUSH_ROUNDING */
3348 /* Otherwise make space on the stack and copy the data
3349 to the address of that space. */
3351 /* Deduct words put into registers from the size we must copy. */
3354 if (GET_CODE (size) == CONST_INT)
3355 size = GEN_INT (INTVAL (size) - used);
3357 size = expand_binop (GET_MODE (size), sub_optab, size,
3358 GEN_INT (used), NULL_RTX, 0,
3362 /* Get the address of the stack space.
3363 In this case, we do not deal with EXTRA separately.
3364 A single stack adjust will do. */
3367 temp = push_block (size, extra, where_pad == downward);
3370 else if (GET_CODE (args_so_far) == CONST_INT)
3371 temp = memory_address (BLKmode,
3372 plus_constant (args_addr,
3373 skip + INTVAL (args_so_far)));
3375 temp = memory_address (BLKmode,
3376 plus_constant (gen_rtx_PLUS (Pmode,
3380 target = gen_rtx_MEM (BLKmode, temp);
3384 set_mem_attributes (target, type, 1);
3385 /* Function incoming arguments may overlap with sibling call
3386 outgoing arguments and we cannot allow reordering of reads
3387 from function arguments with stores to outgoing arguments
3388 of sibling calls. */
3389 set_mem_alias_set (target, 0);
3392 set_mem_align (target, align);
3394 /* TEMP is the address of the block. Copy the data there. */
3395 if (GET_CODE (size) == CONST_INT
3396 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3398 move_by_pieces (target, xinner, INTVAL (size), align);
3403 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3404 enum machine_mode mode;
3406 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3408 mode = GET_MODE_WIDER_MODE (mode))
3410 enum insn_code code = movstr_optab[(int) mode];
3411 insn_operand_predicate_fn pred;
3413 if (code != CODE_FOR_nothing
3414 && ((GET_CODE (size) == CONST_INT
3415 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3416 <= (GET_MODE_MASK (mode) >> 1)))
3417 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3418 && (!(pred = insn_data[(int) code].operand[0].predicate)
3419 || ((*pred) (target, BLKmode)))
3420 && (!(pred = insn_data[(int) code].operand[1].predicate)
3421 || ((*pred) (xinner, BLKmode)))
3422 && (!(pred = insn_data[(int) code].operand[3].predicate)
3423 || ((*pred) (opalign, VOIDmode))))
3425 rtx op2 = convert_to_mode (mode, size, 1);
3426 rtx last = get_last_insn ();
3429 pred = insn_data[(int) code].operand[2].predicate;
3430 if (pred != 0 && ! (*pred) (op2, mode))
3431 op2 = copy_to_mode_reg (mode, op2);
3433 pat = GEN_FCN ((int) code) (target, xinner,
3441 delete_insns_since (last);
3446 if (!ACCUMULATE_OUTGOING_ARGS)
3448 /* If the source is referenced relative to the stack pointer,
3449 copy it to another register to stabilize it. We do not need
3450 to do this if we know that we won't be changing sp. */
3452 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3453 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3454 temp = copy_to_reg (temp);
3457 /* Make inhibit_defer_pop nonzero around the library call
3458 to force it to pop the bcopy-arguments right away. */
3460 #ifdef TARGET_MEM_FUNCTIONS
3461 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3462 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3463 convert_to_mode (TYPE_MODE (sizetype),
3464 size, TREE_UNSIGNED (sizetype)),
3465 TYPE_MODE (sizetype));
3467 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3468 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3469 convert_to_mode (TYPE_MODE (integer_type_node),
3471 TREE_UNSIGNED (integer_type_node)),
3472 TYPE_MODE (integer_type_node));
3477 else if (partial > 0)
3479 /* Scalar partly in registers. */
3481 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3484 /* # words of start of argument
3485 that we must make space for but need not store. */
3486 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3487 int args_offset = INTVAL (args_so_far);
3490 /* Push padding now if padding above and stack grows down,
3491 or if padding below and stack grows up.
3492 But if space already allocated, this has already been done. */
3493 if (extra && args_addr == 0
3494 && where_pad != none && where_pad != stack_direction)
3495 anti_adjust_stack (GEN_INT (extra));
3497 /* If we make space by pushing it, we might as well push
3498 the real data. Otherwise, we can leave OFFSET nonzero
3499 and leave the space uninitialized. */
3503 /* Now NOT_STACK gets the number of words that we don't need to
3504 allocate on the stack. */
3505 not_stack = partial - offset;
3507 /* If the partial register-part of the arg counts in its stack size,
3508 skip the part of stack space corresponding to the registers.
3509 Otherwise, start copying to the beginning of the stack space,
3510 by setting SKIP to 0. */
3511 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3513 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3514 x = validize_mem (force_const_mem (mode, x));
3516 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3517 SUBREGs of such registers are not allowed. */
3518 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3519 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3520 x = copy_to_reg (x);
3522 /* Loop over all the words allocated on the stack for this arg. */
3523 /* We can do it by words, because any scalar bigger than a word
3524 has a size a multiple of a word. */
3525 #ifndef PUSH_ARGS_REVERSED
3526 for (i = not_stack; i < size; i++)
3528 for (i = size - 1; i >= not_stack; i--)
3530 if (i >= not_stack + offset)
3531 emit_push_insn (operand_subword_force (x, i, mode),
3532 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3534 GEN_INT (args_offset + ((i - not_stack + skip)
3536 reg_parm_stack_space, alignment_pad);
3541 rtx target = NULL_RTX;
3544 /* Push padding now if padding above and stack grows down,
3545 or if padding below and stack grows up.
3546 But if space already allocated, this has already been done. */
3547 if (extra && args_addr == 0
3548 && where_pad != none && where_pad != stack_direction)
3549 anti_adjust_stack (GEN_INT (extra));
3551 #ifdef PUSH_ROUNDING
3552 if (args_addr == 0 && PUSH_ARGS)
3553 emit_single_push_insn (mode, x, type);
3557 if (GET_CODE (args_so_far) == CONST_INT)
3559 = memory_address (mode,
3560 plus_constant (args_addr,
3561 INTVAL (args_so_far)));
3563 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3566 dest = gen_rtx_MEM (mode, addr);
3569 set_mem_attributes (dest, type, 1);
3570 /* Function incoming arguments may overlap with sibling call
3571 outgoing arguments and we cannot allow reordering of reads
3572 from function arguments with stores to outgoing arguments
3573 of sibling calls. */
3574 set_mem_alias_set (dest, 0);
3577 emit_move_insn (dest, x);
3583 /* If part should go in registers, copy that part
3584 into the appropriate registers. Do this now, at the end,
3585 since mem-to-mem copies above may do function calls. */
3586 if (partial > 0 && reg != 0)
3588 /* Handle calls that pass values in multiple non-contiguous locations.
3589 The Irix 6 ABI has examples of this. */
3590 if (GET_CODE (reg) == PARALLEL)
3591 emit_group_load (reg, x, -1); /* ??? size? */
3593 move_block_to_reg (REGNO (reg), x, partial, mode);
3596 if (extra && args_addr == 0 && where_pad == stack_direction)
3597 anti_adjust_stack (GEN_INT (extra));
3599 if (alignment_pad && args_addr == 0)
3600 anti_adjust_stack (alignment_pad);
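/* Worked example, not from the original source: a 12-byte BLKmode
   argument with partial == 1 on a 32-bit target has one word loaded
   into REG at the end and the remaining 8 bytes pushed; when
   reg_parm_stack_space is nonzero, SKIP makes the stack copy also
   leave room for the 4 bytes the register word occupies.  */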
3603 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3611 /* Only registers can be subtargets. */
3612 || GET_CODE (x) != REG
3613 /* If the register is readonly, it can't be set more than once. */
3614 || RTX_UNCHANGING_P (x)
3615 /* Don't use hard regs to avoid extending their life. */
3616 || REGNO (x) < FIRST_PSEUDO_REGISTER
3617 /* Avoid subtargets inside loops,
3618 since they hide some invariant expressions. */
3619 || preserve_subexpressions_p ())
3623 /* Expand an assignment that stores the value of FROM into TO.
3624 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3625 (This may contain a QUEUED rtx;
3626 if the value is constant, this rtx is a constant.)
3627 Otherwise, the returned value is NULL_RTX.
3629 SUGGEST_REG is no longer actually used.
3630 It used to mean, copy the value through a register
3631 and return that register, if that is possible.
3632 We now use WANT_VALUE to decide whether to do this. */
3635 expand_assignment (to, from, want_value, suggest_reg)
3638 int suggest_reg ATTRIBUTE_UNUSED;
3643 /* Don't crash if the lhs of the assignment was erroneous. */
3645 if (TREE_CODE (to) == ERROR_MARK)
3647 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3648 return want_value ? result : NULL_RTX;
3651 /* Assignment of a structure component needs special treatment
3652 if the structure component's rtx is not simply a MEM.
3653 Assignment of an array element at a constant index, and assignment of
3654 an array element in an unaligned packed structure field, has the same
3657 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3658 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3660 enum machine_mode mode1;
3661 HOST_WIDE_INT bitsize, bitpos;
3669 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3670 &unsignedp, &volatilep);
3672 /* If we are going to use store_bit_field and extract_bit_field,
3673 make sure to_rtx will be safe for multiple use. */
3675 if (mode1 == VOIDmode && want_value)
3676 tem = stabilize_reference (tem);
3678 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3682 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3684 if (GET_CODE (to_rtx) != MEM)
3687 if (GET_MODE (offset_rtx) != ptr_mode)
3688 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3690 #ifdef POINTERS_EXTEND_UNSIGNED
3691 if (GET_MODE (offset_rtx) != Pmode)
3692 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3695 /* A constant address in TO_RTX can have VOIDmode; we must not try
3696 to call force_reg for that case. Avoid that case. */
3697 if (GET_CODE (to_rtx) == MEM
3698 && GET_MODE (to_rtx) == BLKmode
3699 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3701 && (bitpos % bitsize) == 0
3702 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3703 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3705 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3709 to_rtx = offset_address (to_rtx, offset_rtx,
3710 highest_pow2_factor (offset));
3713 if (GET_CODE (to_rtx) == MEM)
3715 tree old_expr = MEM_EXPR (to_rtx);
3717 /* If the field is at offset zero, we could have been given the
3718 DECL_RTX of the parent struct. Don't munge it. */
3719 to_rtx = shallow_copy_rtx (to_rtx);
3721 set_mem_attributes (to_rtx, to, 0);
3723 /* If we changed MEM_EXPR, that means we're now referencing
3724 the COMPONENT_REF, which means that MEM_OFFSET must be
3725 relative to that field. But we've not yet reflected BITPOS
3726 in TO_RTX. This will be done in store_field. Adjust for
3727 that by biasing MEM_OFFSET by -bitpos. */
3728 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3729 && (bitpos / BITS_PER_UNIT) != 0)
3730 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3731 - (bitpos / BITS_PER_UNIT)));
3734 /* Deal with volatile and readonly fields. The former is only done
3735 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3736 if (volatilep && GET_CODE (to_rtx) == MEM)
3738 if (to_rtx == orig_to_rtx)
3739 to_rtx = copy_rtx (to_rtx);
3740 MEM_VOLATILE_P (to_rtx) = 1;
3743 if (TREE_CODE (to) == COMPONENT_REF
3744 && TREE_READONLY (TREE_OPERAND (to, 1)))
3746 if (to_rtx == orig_to_rtx)
3747 to_rtx = copy_rtx (to_rtx);
3748 RTX_UNCHANGING_P (to_rtx) = 1;
3751 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3753 if (to_rtx == orig_to_rtx)
3754 to_rtx = copy_rtx (to_rtx);
3755 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3758 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3760 /* Spurious cast for HPUX compiler. */
3761 ? ((enum machine_mode)
3762 TYPE_MODE (TREE_TYPE (to)))
3764 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3766 preserve_temp_slots (result);
3770 /* If the value is meaningful, convert RESULT to the proper mode.
3771 Otherwise, return nothing. */
3772 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3773 TYPE_MODE (TREE_TYPE (from)),
3775 TREE_UNSIGNED (TREE_TYPE (to)))
3779 /* If the rhs is a function call and its value is not an aggregate,
3780 call the function before we start to compute the lhs.
3781 This is needed for correct code for cases such as
3782 val = setjmp (buf) on machines where reference to val
3783 requires loading up part of an address in a separate insn.
3785 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3786 since it might be a promoted variable where the zero- or sign- extension
3787 needs to be done. Handling this in the normal way is safe because no
3788 computation is done before the call. */
3789 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3790 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3791 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3792 && GET_CODE (DECL_RTL (to)) == REG))
3797 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3799 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3801 /* Handle calls that return values in multiple non-contiguous locations.
3802 The Irix 6 ABI has examples of this. */
3803 if (GET_CODE (to_rtx) == PARALLEL)
3804 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3805 else if (GET_MODE (to_rtx) == BLKmode)
3806 emit_block_move (to_rtx, value, expr_size (from));
3809 #ifdef POINTERS_EXTEND_UNSIGNED
3810 if (POINTER_TYPE_P (TREE_TYPE (to))
3811 && GET_MODE (to_rtx) != GET_MODE (value))
3812 value = convert_memory_address (GET_MODE (to_rtx), value);
3814 emit_move_insn (to_rtx, value);
3816 preserve_temp_slots (to_rtx);
3819 return want_value ? to_rtx : NULL_RTX;
3822 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3823 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3826 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3828 /* Don't move directly into a return register. */
3829 if (TREE_CODE (to) == RESULT_DECL
3830 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3835 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3837 if (GET_CODE (to_rtx) == PARALLEL)
3838 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3840 emit_move_insn (to_rtx, temp);
3842 preserve_temp_slots (to_rtx);
3845 return want_value ? to_rtx : NULL_RTX;
3848 /* In case we are returning the contents of an object which overlaps
3849 the place the value is being stored, use a safe function when copying
3850 a value through a pointer into a structure value return block. */
3851 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3852 && current_function_returns_struct
3853 && !current_function_returns_pcc_struct)
3858 size = expr_size (from);
3859 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3861 #ifdef TARGET_MEM_FUNCTIONS
3862 emit_library_call (memmove_libfunc, LCT_NORMAL,
3863 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3864 XEXP (from_rtx, 0), Pmode,
3865 convert_to_mode (TYPE_MODE (sizetype),
3866 size, TREE_UNSIGNED (sizetype)),
3867 TYPE_MODE (sizetype));
3869 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3870 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3871 XEXP (to_rtx, 0), Pmode,
3872 convert_to_mode (TYPE_MODE (integer_type_node),
3873 size, TREE_UNSIGNED (integer_type_node)),
3874 TYPE_MODE (integer_type_node));
3877 preserve_temp_slots (to_rtx);
3880 return want_value ? to_rtx : NULL_RTX;
3883 /* Compute FROM and store the value in the rtx we got. */
3886 result = store_expr (from, to_rtx, want_value);
3887 preserve_temp_slots (result);
3890 return want_value ? result : NULL_RTX;
3893 /* Generate code for computing expression EXP,
3894 and storing the value into TARGET.
3895 TARGET may contain a QUEUED rtx.
3897 If WANT_VALUE is nonzero, return a copy of the value
3898 not in TARGET, so that we can be sure to use the proper
3899 value in a containing expression even if TARGET has something
3900 else stored in it. If possible, we copy the value through a pseudo
3901 and return that pseudo. Or, if the value is constant, we try to
3902 return the constant. In some cases, we return a pseudo
3903 copied *from* TARGET.
3905 If the mode is BLKmode then we may return TARGET itself.
3906 It turns out that in BLKmode it doesn't cause a problem,
3907 because C has no operators that could combine two different
3908 assignments into the same BLKmode object with different values
3909 with no sequence point. Will other languages need this to be more thorough?
3912 If WANT_VALUE is 0, we return NULL, to make sure
3913 to catch quickly any cases where the caller uses the value
3914 and fails to set WANT_VALUE. */
3917 store_expr (exp, target, want_value)
3923 int dont_return_target = 0;
3924 int dont_store_target = 0;
3926 if (TREE_CODE (exp) == COMPOUND_EXPR)
3928 /* Perform first part of compound expression, then assign from second part. */
3930 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3932 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3934 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3936 /* For conditional expression, get safe form of the target. Then
3937 test the condition, doing the appropriate assignment on either
3938 side. This avoids the creation of unnecessary temporaries.
3939 For non-BLKmode, it is more efficient not to do this. */
3941 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3944 target = protect_from_queue (target, 1);
3946 do_pending_stack_adjust ();
3948 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3949 start_cleanup_deferral ();
3950 store_expr (TREE_OPERAND (exp, 1), target, 0);
3951 end_cleanup_deferral ();
3953 emit_jump_insn (gen_jump (lab2));
3956 start_cleanup_deferral ();
3957 store_expr (TREE_OPERAND (exp, 2), target, 0);
3958 end_cleanup_deferral ();
3963 return want_value ? target : NULL_RTX;
3965 else if (queued_subexp_p (target))
3966 /* If target contains a postincrement, let's not risk
3967 using it as the place to generate the rhs. */
3969 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3971 /* Expand EXP into a new pseudo. */
3972 temp = gen_reg_rtx (GET_MODE (target));
3973 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3976 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3978 /* If target is volatile, ANSI requires accessing the value
3979 *from* the target, if it is accessed. So make that happen.
3980 In no case return the target itself. */
3981 if (! MEM_VOLATILE_P (target) && want_value)
3982 dont_return_target = 1;
3984 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3985 && GET_MODE (target) != BLKmode)
3986 /* If target is in memory and caller wants value in a register instead,
3987 arrange that. Pass TARGET as target for expand_expr so that,
3988 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3989 We know expand_expr will not use the target in that case.
3990 Don't do this if TARGET is volatile because we are supposed
3991 to write it and then read it. */
3993 temp = expand_expr (exp, target, GET_MODE (target), 0);
3994 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3996 /* If TEMP is already in the desired TARGET, only copy it from
3997 memory and don't store it there again. */
3999 || (rtx_equal_p (temp, target)
4000 && ! side_effects_p (temp) && ! side_effects_p (target)))
4001 dont_store_target = 1;
4002 temp = copy_to_reg (temp);
4004 dont_return_target = 1;
4006 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4007 /* If this is a scalar in a register that is stored in a wider mode
4008 than the declared mode, compute the result into its declared mode
4009 and then convert to the wider mode. Our value is the computed expression. */
4012 rtx inner_target = 0;
4014 /* If we don't want a value, we can do the conversion inside EXP,
4015 which will often result in some optimizations. Do the conversion
4016 in two steps: first change the signedness, if needed, then
4017 the extend. But don't do this if the type of EXP is a subtype
4018 of something else since then the conversion might involve
4019 more than just converting modes. */
4020 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4021 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4023 if (TREE_UNSIGNED (TREE_TYPE (exp))
4024 != SUBREG_PROMOTED_UNSIGNED_P (target))
4026 ((*lang_hooks.types.signed_or_unsigned_type)
4027 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4029 exp = convert ((*lang_hooks.types.type_for_mode)
4030 (GET_MODE (SUBREG_REG (target)),
4031 SUBREG_PROMOTED_UNSIGNED_P (target)),
4034 inner_target = SUBREG_REG (target);
4037 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4039 /* If TEMP is a volatile MEM and we want a result value, make
4040 the access now so it gets done only once. Likewise if
4041 it contains TARGET. */
4042 if (GET_CODE (temp) == MEM && want_value
4043 && (MEM_VOLATILE_P (temp)
4044 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4045 temp = copy_to_reg (temp);
4047 /* If TEMP is a VOIDmode constant, use convert_modes to make
4048 sure that we properly convert it. */
4049 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4051 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4052 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4053 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4054 GET_MODE (target), temp,
4055 SUBREG_PROMOTED_UNSIGNED_P (target));
4058 convert_move (SUBREG_REG (target), temp,
4059 SUBREG_PROMOTED_UNSIGNED_P (target));
4061 /* If we promoted a constant, change the mode back down to match
4062 target. Otherwise, the caller might get confused by a result whose
4063 mode is larger than expected. */
4065 if (want_value && GET_MODE (temp) != GET_MODE (target))
4067 if (GET_MODE (temp) != VOIDmode)
4069 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4070 SUBREG_PROMOTED_VAR_P (temp) = 1;
4071 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4072 SUBREG_PROMOTED_UNSIGNED_P (target));
4075 temp = convert_modes (GET_MODE (target),
4076 GET_MODE (SUBREG_REG (target)),
4077 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4080 return want_value ? temp : NULL_RTX;
4084 temp = expand_expr (exp, target, GET_MODE (target), 0);
4085 /* Return TARGET if it's a specified hardware register.
4086 If TARGET is a volatile mem ref, either return TARGET
4087 or return a reg copied *from* TARGET; ANSI requires this.
4089 Otherwise, if TEMP is not TARGET, return TEMP
4090 if it is constant (for efficiency),
4091 or if we really want the correct value. */
4092 if (!(target && GET_CODE (target) == REG
4093 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4094 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4095 && ! rtx_equal_p (temp, target)
4096 && (CONSTANT_P (temp) || want_value))
4097 dont_return_target = 1;
4100 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4101 the same as that of TARGET, adjust the constant. This is needed, for
4102 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4104 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4105 && TREE_CODE (exp) != ERROR_MARK
4106 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4107 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4108 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4110 /* If value was not generated in the target, store it there.
4111 Convert the value to TARGET's type first if necessary.
4112 If TEMP and TARGET compare equal according to rtx_equal_p, but
4113 one or both of them are volatile memory refs, we have to distinguish
4115 - expand_expr has used TARGET. In this case, we must not generate
4116 another copy. This can be detected by TARGET being equal according to ==.
4118 - expand_expr has not used TARGET - that means that the source just
4119 happens to have the same RTX form. Since temp will have been created
4120 by expand_expr, it will compare unequal according to ==.
4121 We must generate a copy in this case, to reach the correct number
4122 of volatile memory references. */
4124 if ((! rtx_equal_p (temp, target)
4125 || (temp != target && (side_effects_p (temp)
4126 || side_effects_p (target))))
4127 && TREE_CODE (exp) != ERROR_MARK
4128 && ! dont_store_target)
4130 target = protect_from_queue (target, 1);
4131 if (GET_MODE (temp) != GET_MODE (target)
4132 && GET_MODE (temp) != VOIDmode)
4134 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4135 if (dont_return_target)
4137 /* In this case, we will return TEMP,
4138 so make sure it has the proper mode.
4139 But don't forget to store the value into TARGET. */
4140 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4141 emit_move_insn (target, temp);
4144 convert_move (target, temp, unsignedp);
4147 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4149 /* Handle copying a string constant into an array. The string
4150 constant may be shorter than the array. So copy just the string's
4151 actual length, and clear the rest. First get the size of the data
4152 type of the string, which is actually the size of the target. */
4153 rtx size = expr_size (exp);
4155 if (GET_CODE (size) == CONST_INT
4156 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4157 emit_block_move (target, temp, size);
4160 /* Compute the size of the data to copy from the string. */
4162 = size_binop (MIN_EXPR,
4163 make_tree (sizetype, size),
4164 size_int (TREE_STRING_LENGTH (exp)));
4165 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4169 /* Copy that much. */
4170 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4171 emit_block_move (target, temp, copy_size_rtx);
4173 /* Figure out how much is left in TARGET that we have to clear.
4174 Do all calculations in ptr_mode. */
4175 if (GET_CODE (copy_size_rtx) == CONST_INT)
4177 size = plus_constant (size, -INTVAL (copy_size_rtx));
4178 target = adjust_address (target, BLKmode,
4179 INTVAL (copy_size_rtx));
4183 size = expand_binop (ptr_mode, sub_optab, size,
4184 copy_size_rtx, NULL_RTX, 0,
4187 #ifdef POINTERS_EXTEND_UNSIGNED
4188 if (GET_MODE (copy_size_rtx) != Pmode)
4189 copy_size_rtx = convert_memory_address (Pmode,
4193 target = offset_address (target, copy_size_rtx,
4194 highest_pow2_factor (copy_size));
4195 label = gen_label_rtx ();
4196 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4197 GET_MODE (size), 0, label);
4200 if (size != const0_rtx)
4201 clear_storage (target, size);
4207 /* Handle calls that return values in multiple non-contiguous locations.
4208 The Irix 6 ABI has examples of this. */
4209 else if (GET_CODE (target) == PARALLEL)
4210 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4211 else if (GET_MODE (temp) == BLKmode)
4212 emit_block_move (target, temp, expr_size (exp));
4214 emit_move_insn (target, temp);
4217 /* If we don't want a value, return NULL_RTX. */
4221 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4222 ??? The latter test doesn't seem to make sense. */
4223 else if (dont_return_target && GET_CODE (temp) != MEM)
4226 /* Return TARGET itself if it is a hard register. */
4227 else if (want_value && GET_MODE (target) != BLKmode
4228 && ! (GET_CODE (target) == REG
4229 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4230 return copy_to_reg (target);
4236 /* Return 1 if EXP just contains zeros. */
4244 switch (TREE_CODE (exp))
4248 case NON_LVALUE_EXPR:
4249 case VIEW_CONVERT_EXPR:
4250 return is_zeros_p (TREE_OPERAND (exp, 0));
4253 return integer_zerop (exp);
4257 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4260 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4263 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4264 elt = TREE_CHAIN (elt))
4265 if (!is_zeros_p (TREE_VALUE (elt)))
4271 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4272 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4273 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4274 if (! is_zeros_p (TREE_VALUE (elt)))
4284 /* Return 1 if EXP contains mostly (3/4) zeros. */
4287 mostly_zeros_p (exp)
4290 if (TREE_CODE (exp) == CONSTRUCTOR)
4292 int elts = 0, zeros = 0;
4293 tree elt = CONSTRUCTOR_ELTS (exp);
4294 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4296 /* If there are no ranges of true bits, it is all zero. */
4297 return elt == NULL_TREE;
4299 for (; elt; elt = TREE_CHAIN (elt))
4301 /* We do not handle the case where the index is a RANGE_EXPR,
4302 so the statistic will be somewhat inaccurate.
4303 We do make a more accurate count in store_constructor itself,
4304 and since this function is only used for nested array elements,
4305 this should be close enough. */
4306 if (mostly_zeros_p (TREE_VALUE (elt)))
4311 return 4 * zeros >= 3 * elts;
4314 return is_zeros_p (exp);
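/* Worked example of the 3/4 heuristic above (illustrative only):
   for the constructor { 0, 0, 0, 5 } we get elts == 4 and zeros == 3,
   and 4 * 3 >= 3 * 4 holds, so the value counts as mostly zero.  */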
4317 /* Helper function for store_constructor.
4318 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4319 TYPE is the type of the CONSTRUCTOR, not the element type.
4320 CLEARED is as for store_constructor.
4321 ALIAS_SET is the alias set to use for any stores.
4323 This provides a recursive shortcut back to store_constructor when it isn't
4324 necessary to go through store_field. This is so that we can pass through
4325 the cleared field to let store_constructor know that we may not have to
4326 clear a substructure if the outer structure has already been cleared. */
4329 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4332 unsigned HOST_WIDE_INT bitsize;
4333 HOST_WIDE_INT bitpos;
4334 enum machine_mode mode;
4339 if (TREE_CODE (exp) == CONSTRUCTOR
4340 && bitpos % BITS_PER_UNIT == 0
4341 /* If we have a non-zero bitpos for a register target, then we just
4342 let store_field do the bitfield handling. This is unlikely to
4343 generate unnecessary clear instructions anyway.  */
4344 && (bitpos == 0 || GET_CODE (target) == MEM))
4346 if (GET_CODE (target) == MEM)
4348 = adjust_address (target,
4349 GET_MODE (target) == BLKmode
4351 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4352 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4355 /* Update the alias set, if required. */
4356 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4357 && MEM_ALIAS_SET (target) != 0)
4359 target = copy_rtx (target);
4360 set_mem_alias_set (target, alias_set);
4363 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4366 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4370 /* Store the value of constructor EXP into the rtx TARGET.
4371 TARGET is either a REG or a MEM; we know it cannot conflict, since
4372 safe_from_p has been called.
4373 CLEARED is true if TARGET is known to have been zero'd.
4374 SIZE is the number of bytes of TARGET we are allowed to modify: this
4375 may not be the same as the size of EXP if we are assigning to a field
4376 which has been packed to exclude padding bits. */
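/* Hedged illustration of the clearing strategy used below (our
   example, not from the original sources): given

       struct s { int a, b, c; } x = { 1 };

   the constructor supplies fewer fields than the type has, so the
   whole object is cleared first and only the explicit field is
   stored afterwards.  */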
4379 store_constructor (exp, target, cleared, size)
4385 tree type = TREE_TYPE (exp);
4386 #ifdef WORD_REGISTER_OPERATIONS
4387 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4390 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4391 || TREE_CODE (type) == QUAL_UNION_TYPE)
4395 /* We either clear the aggregate or indicate the value is dead. */
4396 if ((TREE_CODE (type) == UNION_TYPE
4397 || TREE_CODE (type) == QUAL_UNION_TYPE)
4399 && ! CONSTRUCTOR_ELTS (exp))
4400 /* If the constructor is empty, clear the union. */
4402 clear_storage (target, expr_size (exp));
4406 /* If we are building a static constructor into a register,
4407 set the initial value as zero so we can fold the value into
4408 a constant. But if more than one register is involved,
4409 this probably loses. */
4410 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4411 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4413 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4417 /* If the constructor has fewer fields than the structure
4418 or if we are initializing the structure to mostly zeros,
4419 clear the whole structure first. Don't do this if TARGET is a
4420 register whose mode size isn't equal to SIZE since clear_storage
4421 can't handle this case. */
4422 else if (! cleared && size > 0
4423 && ((list_length (CONSTRUCTOR_ELTS (exp))
4424 != fields_length (type))
4425 || mostly_zeros_p (exp))
4426 && (GET_CODE (target) != REG
4427 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4430 clear_storage (target, GEN_INT (size));
4435 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4437 /* Store each element of the constructor into
4438 the corresponding field of TARGET. */
4440 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4442 tree field = TREE_PURPOSE (elt);
4443 tree value = TREE_VALUE (elt);
4444 enum machine_mode mode;
4445 HOST_WIDE_INT bitsize;
4446 HOST_WIDE_INT bitpos = 0;
4449 rtx to_rtx = target;
4451 /* Just ignore missing fields.
4452 We cleared the whole structure, above,
4453 if any fields are missing. */
4457 if (cleared && is_zeros_p (value))
4460 if (host_integerp (DECL_SIZE (field), 1))
4461 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4465 unsignedp = TREE_UNSIGNED (field);
4466 mode = DECL_MODE (field);
4467 if (DECL_BIT_FIELD (field))
4470 offset = DECL_FIELD_OFFSET (field);
4471 if (host_integerp (offset, 0)
4472 && host_integerp (bit_position (field), 0))
4474 bitpos = int_bit_position (field);
4478 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4484 if (contains_placeholder_p (offset))
4485 offset = build (WITH_RECORD_EXPR, sizetype,
4486 offset, make_tree (TREE_TYPE (exp), target));
4488 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4489 if (GET_CODE (to_rtx) != MEM)
4492 if (GET_MODE (offset_rtx) != ptr_mode)
4493 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4495 #ifdef POINTERS_EXTEND_UNSIGNED
4496 if (GET_MODE (offset_rtx) != Pmode)
4497 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4500 to_rtx = offset_address (to_rtx, offset_rtx,
4501 highest_pow2_factor (offset));
4504 if (TREE_READONLY (field))
4506 if (GET_CODE (to_rtx) == MEM)
4507 to_rtx = copy_rtx (to_rtx);
4509 RTX_UNCHANGING_P (to_rtx) = 1;
4512 #ifdef WORD_REGISTER_OPERATIONS
4513 /* If this initializes a field that is smaller than a word, at the
4514 start of a word, try to widen it to a full word.
4515 This special case allows us to output C++ member function
4516 initializations in a form that the optimizers can understand. */
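/* Hedged example (ours): on a 32-bit word target, storing the QImode
   constant 5 into byte 0 of a register-held structure is widened to
   a full SImode store of 5 (shifted into the top byte when
   BYTES_BIG_ENDIAN), avoiding a read-modify-write bit-field
   insertion.  */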
4517 if (GET_CODE (target) == REG
4518 && bitsize < BITS_PER_WORD
4519 && bitpos % BITS_PER_WORD == 0
4520 && GET_MODE_CLASS (mode) == MODE_INT
4521 && TREE_CODE (value) == INTEGER_CST
4523 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4525 tree type = TREE_TYPE (value);
4527 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4529 type = (*lang_hooks.types.type_for_size)
4530 (BITS_PER_WORD, TREE_UNSIGNED (type));
4531 value = convert (type, value);
4534 if (BYTES_BIG_ENDIAN)
4535 value
4536 = fold (build (LSHIFT_EXPR, type, value,
4537 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4538 bitsize = BITS_PER_WORD;
4543 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4544 && DECL_NONADDRESSABLE_P (field))
4546 to_rtx = copy_rtx (to_rtx);
4547 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4550 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4551 value, type, cleared,
4552 get_alias_set (TREE_TYPE (field)));
4555 else if (TREE_CODE (type) == ARRAY_TYPE
4556 || TREE_CODE (type) == VECTOR_TYPE)
4561 tree domain = TYPE_DOMAIN (type);
4562 tree elttype = TREE_TYPE (type);
4564 HOST_WIDE_INT minelt = 0;
4565 HOST_WIDE_INT maxelt = 0;
4567 /* Vectors are like arrays, but the domain is stored via an array
4568 type indirectly.  */
4569 if (TREE_CODE (type) == VECTOR_TYPE)
4571 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4572 the same field as TYPE_DOMAIN, we are not guaranteed that
4573 it always will.  */
4574 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4575 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4578 const_bounds_p = (TYPE_MIN_VALUE (domain)
4579 && TYPE_MAX_VALUE (domain)
4580 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4581 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4583 /* If we have constant bounds for the range of the type, get them. */
4586 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4587 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4590 /* If the constructor has fewer elements than the array,
4591 clear the whole array first. Similarly if this is
4592 a static constructor of a non-BLKmode object.  */
4593 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4597 HOST_WIDE_INT count = 0, zero_count = 0;
4598 need_to_clear = ! const_bounds_p;
4600 /* This loop is a more accurate version of the loop in
4601 mostly_zeros_p (it handles RANGE_EXPR in an index).
4602 It is also needed to check for missing elements. */
4603 for (elt = CONSTRUCTOR_ELTS (exp);
4604 elt != NULL_TREE && ! need_to_clear;
4605 elt = TREE_CHAIN (elt))
4607 tree index = TREE_PURPOSE (elt);
4608 HOST_WIDE_INT this_node_count;
4610 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4612 tree lo_index = TREE_OPERAND (index, 0);
4613 tree hi_index = TREE_OPERAND (index, 1);
4615 if (! host_integerp (lo_index, 1)
4616 || ! host_integerp (hi_index, 1))
4622 this_node_count = (tree_low_cst (hi_index, 1)
4623 - tree_low_cst (lo_index, 1) + 1);
4626 this_node_count = 1;
4628 count += this_node_count;
4629 if (mostly_zeros_p (TREE_VALUE (elt)))
4630 zero_count += this_node_count;
4633 /* Clear the entire array first if there are any missing elements,
4634 or if the incidence of zero elements is >= 75%. */
4635 if (! need_to_clear
4636 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4637 need_to_clear = 1;
4640 if (need_to_clear && size > 0)
4645 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4647 clear_storage (target, GEN_INT (size));
4651 else if (REG_P (target))
4652 /* Inform later passes that the old value is dead. */
4653 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4655 /* Store each element of the constructor into
4656 the corresponding element of TARGET, determined
4657 by counting the elements. */
4658 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4659 elt;
4660 elt = TREE_CHAIN (elt), i++)
4662 enum machine_mode mode;
4663 HOST_WIDE_INT bitsize;
4664 HOST_WIDE_INT bitpos;
4666 tree value = TREE_VALUE (elt);
4667 tree index = TREE_PURPOSE (elt);
4668 rtx xtarget = target;
4670 if (cleared && is_zeros_p (value))
4673 unsignedp = TREE_UNSIGNED (elttype);
4674 mode = TYPE_MODE (elttype);
4675 if (mode == BLKmode)
4676 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4677 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4680 bitsize = GET_MODE_BITSIZE (mode);
4682 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4684 tree lo_index = TREE_OPERAND (index, 0);
4685 tree hi_index = TREE_OPERAND (index, 1);
4686 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4687 struct nesting *loop;
4688 HOST_WIDE_INT lo, hi, count;
4691 /* If the range is constant and "small", unroll the loop. */
4693 && host_integerp (lo_index, 0)
4694 && host_integerp (hi_index, 0)
4695 && (lo = tree_low_cst (lo_index, 0),
4696 hi = tree_low_cst (hi_index, 0),
4697 count = hi - lo + 1,
4698 (GET_CODE (target) != MEM
4700 || (host_integerp (TYPE_SIZE (elttype), 1)
4701 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4704 lo -= minelt; hi -= minelt;
4705 for (; lo <= hi; lo++)
4707 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4709 if (GET_CODE (target) == MEM
4710 && !MEM_KEEP_ALIAS_SET_P (target)
4711 && TREE_CODE (type) == ARRAY_TYPE
4712 && TYPE_NONALIASED_COMPONENT (type))
4714 target = copy_rtx (target);
4715 MEM_KEEP_ALIAS_SET_P (target) = 1;
4718 store_constructor_field
4719 (target, bitsize, bitpos, mode, value, type, cleared,
4720 get_alias_set (elttype));
4725 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4726 loop_top = gen_label_rtx ();
4727 loop_end = gen_label_rtx ();
4729 unsignedp = TREE_UNSIGNED (domain);
4731 index = build_decl (VAR_DECL, NULL_TREE, domain);
4733 index_r
4734 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4736 SET_DECL_RTL (index, index_r);
4737 if (TREE_CODE (value) == SAVE_EXPR
4738 && SAVE_EXPR_RTL (value) == 0)
4740 /* Make sure value gets expanded once before the
4741 loop.  */
4742 expand_expr (value, const0_rtx, VOIDmode, 0);
4745 store_expr (lo_index, index_r, 0);
4746 loop = expand_start_loop (0);
4748 /* Assign value to element index. */
4749 position
4750 = convert (ssizetype,
4751 fold (build (MINUS_EXPR, TREE_TYPE (index),
4752 index, TYPE_MIN_VALUE (domain))));
4753 position = size_binop (MULT_EXPR, position,
4754 convert (ssizetype,
4755 TYPE_SIZE_UNIT (elttype)));
4757 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4758 xtarget = offset_address (target, pos_rtx,
4759 highest_pow2_factor (position));
4760 xtarget = adjust_address (xtarget, mode, 0);
4761 if (TREE_CODE (value) == CONSTRUCTOR)
4762 store_constructor (value, xtarget, cleared,
4763 bitsize / BITS_PER_UNIT);
4765 store_expr (value, xtarget, 0);
4767 expand_exit_loop_if_false (loop,
4768 build (LT_EXPR, integer_type_node,
4771 expand_increment (build (PREINCREMENT_EXPR,
4773 index, integer_one_node), 0, 0);
4775 emit_label (loop_end);
4778 else if ((index != 0 && ! host_integerp (index, 0))
4779 || ! host_integerp (TYPE_SIZE (elttype), 1))
4784 index = ssize_int (i);
4787 index = convert (ssizetype,
4788 fold (build (MINUS_EXPR, index,
4789 TYPE_MIN_VALUE (domain))));
4791 position = size_binop (MULT_EXPR, index,
4792 convert (ssizetype,
4793 TYPE_SIZE_UNIT (elttype)));
4794 xtarget = offset_address (target,
4795 expand_expr (position, 0, VOIDmode, 0),
4796 highest_pow2_factor (position));
4797 xtarget = adjust_address (xtarget, mode, 0);
4798 store_expr (value, xtarget, 0);
4803 bitpos = ((tree_low_cst (index, 0) - minelt)
4804 * tree_low_cst (TYPE_SIZE (elttype), 1));
4806 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4808 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4809 && TREE_CODE (type) == ARRAY_TYPE
4810 && TYPE_NONALIASED_COMPONENT (type))
4812 target = copy_rtx (target);
4813 MEM_KEEP_ALIAS_SET_P (target) = 1;
4816 store_constructor_field (target, bitsize, bitpos, mode, value,
4817 type, cleared, get_alias_set (elttype));
4823 /* Set constructor assignments. */
4824 else if (TREE_CODE (type) == SET_TYPE)
4826 tree elt = CONSTRUCTOR_ELTS (exp);
4827 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4828 tree domain = TYPE_DOMAIN (type);
4829 tree domain_min, domain_max, bitlength;
4831 /* The default implementation strategy is to extract the constant
4832 parts of the constructor, use that to initialize the target,
4833 and then "or" in whatever non-constant ranges we need in addition.
4835 If a large set is all zero or all ones, it is
4836 probably better to set it using memset (if available) or bzero.
4837 Also, if a large set has just a single range, it may also be
4838 better to first clear the whole set (using
4839 bzero/memset) and then set the bits we want.  */
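/* Hedged illustration (ours): for a Pascal-style set constructor
   such as [2, 4..7] over a small constant domain, the code below
   builds an immediate mask with bits 2 and 4-7 set and stores it
   with a single move; the exact bit numbering depends on
   BYTES_BIG_ENDIAN.  */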
4841 /* Check for all zeros. */
4842 if (elt == NULL_TREE && size > 0)
4845 clear_storage (target, GEN_INT (size));
4849 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4850 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4851 bitlength = size_binop (PLUS_EXPR,
4852 size_diffop (domain_max, domain_min),
4855 nbits = tree_low_cst (bitlength, 1);
4857 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4858 are "complicated" (more than one range), initialize (the
4859 constant parts) by copying from a constant. */
4860 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4861 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4863 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4864 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4865 char *bit_buffer = (char *) alloca (nbits);
4866 HOST_WIDE_INT word = 0;
4867 unsigned int bit_pos = 0;
4868 unsigned int ibit = 0;
4869 unsigned int offset = 0; /* In bytes from beginning of set. */
4871 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4874 if (bit_buffer[ibit])
4876 if (BYTES_BIG_ENDIAN)
4877 word |= (1 << (set_word_size - 1 - bit_pos));
4879 word |= 1 << bit_pos;
4883 if (bit_pos >= set_word_size || ibit == nbits)
4885 if (word != 0 || ! cleared)
4887 rtx datum = GEN_INT (word);
4890 /* The assumption here is that it is safe to use
4891 XEXP if the set is multi-word, but not if
4892 it's single-word. */
4893 if (GET_CODE (target) == MEM)
4894 to_rtx = adjust_address (target, mode, offset);
4895 else if (offset == 0)
4899 emit_move_insn (to_rtx, datum);
4906 offset += set_word_size / BITS_PER_UNIT;
4911 /* Don't bother clearing storage if the set is all ones. */
4912 if (TREE_CHAIN (elt) != NULL_TREE
4913 || (TREE_PURPOSE (elt) == NULL_TREE
4914 ? nbits != 1
4915 : ( ! host_integerp (TREE_VALUE (elt), 0)
4916 || ! host_integerp (TREE_PURPOSE (elt), 0)
4917 || (tree_low_cst (TREE_VALUE (elt), 0)
4918 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4919 != (HOST_WIDE_INT) nbits))))
4920 clear_storage (target, expr_size (exp));
4922 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4924 /* Start of range of element or NULL. */
4925 tree startbit = TREE_PURPOSE (elt);
4926 /* End of range of element, or element value. */
4927 tree endbit = TREE_VALUE (elt);
4928 #ifdef TARGET_MEM_FUNCTIONS
4929 HOST_WIDE_INT startb, endb;
4931 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4933 bitlength_rtx = expand_expr (bitlength,
4934 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4936 /* Handle non-range tuple element like [ expr ]. */
4937 if (startbit == NULL_TREE)
4939 startbit = save_expr (endbit);
4943 startbit = convert (sizetype, startbit);
4944 endbit = convert (sizetype, endbit);
4945 if (! integer_zerop (domain_min))
4947 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4948 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4950 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4951 EXPAND_CONST_ADDRESS);
4952 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4953 EXPAND_CONST_ADDRESS);
4959 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
4960 (GET_MODE (target), 0),
4963 emit_move_insn (targetx, target);
4966 else if (GET_CODE (target) == MEM)
4971 #ifdef TARGET_MEM_FUNCTIONS
4972 /* Optimization: If startbit and endbit are
4973 constants divisible by BITS_PER_UNIT,
4974 call memset instead. */
4975 if (TREE_CODE (startbit) == INTEGER_CST
4976 && TREE_CODE (endbit) == INTEGER_CST
4977 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4978 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4980 emit_library_call (memset_libfunc, LCT_NORMAL,
4982 plus_constant (XEXP (targetx, 0),
4983 startb / BITS_PER_UNIT),
4985 constm1_rtx, TYPE_MODE (integer_type_node),
4986 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4987 TYPE_MODE (sizetype));
4991 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4992 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4993 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4994 startbit_rtx, TYPE_MODE (sizetype),
4995 endbit_rtx, TYPE_MODE (sizetype));
4998 emit_move_insn (target, targetx);
5006 /* Store the value of EXP (an expression tree)
5007 into a subfield of TARGET which has mode MODE and occupies
5008 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5009 If MODE is VOIDmode, it means that we are storing into a bit-field.
5011 If VALUE_MODE is VOIDmode, return nothing in particular.
5012 UNSIGNEDP is not used in this case.
5014 Otherwise, return an rtx for the value stored. This rtx
5015 has mode VALUE_MODE if that is convenient to do.
5016 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5018 TYPE is the type of the underlying object.
5020 ALIAS_SET is the alias set for the destination. This value will
5021 (in general) be different from that for TARGET, since TARGET is a
5022 reference to the containing structure. */
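/* Hedged usage sketch (ours): for an assignment to a bit-field
   member, e.g.

       struct { unsigned f : 3; } *p;  ...  p->f = 5;

   the caller would pass BITSIZE == 3, BITPOS == the field's bit
   offset, and MODE == VOIDmode, so the store is performed by
   store_bit_field below.  */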
5025 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5028 HOST_WIDE_INT bitsize;
5029 HOST_WIDE_INT bitpos;
5030 enum machine_mode mode;
5032 enum machine_mode value_mode;
5037 HOST_WIDE_INT width_mask = 0;
5039 if (TREE_CODE (exp) == ERROR_MARK)
5042 /* If we have nothing to store, do nothing unless the expression has
5043 side-effects.  */
5044 if (bitsize == 0)
5045 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5046 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5047 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5049 /* If we are storing into an unaligned field of an aligned union that is
5050 in a register, we may have the mode of TARGET being an integer mode but
5051 MODE == BLKmode. In that case, get an aligned object whose size and
5052 alignment are the same as TARGET and store TARGET into it (we can avoid
5053 the store if the field being stored is the entire width of TARGET). Then
5054 call ourselves recursively to store the field into a BLKmode version of
5055 that object. Finally, load from the object into TARGET. This is not
5056 very efficient in general, but should only be slightly more expensive
5057 than the otherwise-required unaligned accesses. Perhaps this can be
5058 cleaned up later. */
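/* Hedged illustration (ours): think of a union held in a DImode
   register whose member is stored in BLKmode.  OBJECT below is a
   stack temporary of the union's type, BLK_OBJECT is the same
   memory viewed in BLKmode, and the store becomes
   register -> temporary -> register copies.  */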
5060 if (mode == BLKmode
5061 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5065 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5067 rtx blk_object = adjust_address (object, BLKmode, 0);
5069 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5070 emit_move_insn (object, target);
5072 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5075 emit_move_insn (target, object);
5077 /* We want to return the BLKmode version of the data. */
5081 if (GET_CODE (target) == CONCAT)
5083 /* We're storing into a struct containing a single __complex. */
5087 return store_expr (exp, target, 0);
5090 /* If the structure is in a register or if the component
5091 is a bit field, we cannot use addressing to access it.
5092 Use bit-field techniques or SUBREG to store in it. */
5094 if (mode == VOIDmode
5095 || (mode != BLKmode && ! direct_store[(int) mode]
5096 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5097 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5098 || GET_CODE (target) == REG
5099 || GET_CODE (target) == SUBREG
5100 /* If the field isn't aligned enough to store as an ordinary memref,
5101 store it as a bit field. */
5102 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5103 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5104 || bitpos % GET_MODE_ALIGNMENT (mode)))
5105 /* If the RHS and field are a constant size and the size of the
5106 RHS isn't the same size as the bitfield, we must use bitfield
5107 operations.  */
5108 || (bitsize >= 0
5109 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5110 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5112 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5114 /* If BITSIZE is narrower than the size of the type of EXP
5115 we will be narrowing TEMP. Normally, what's wanted are the
5116 low-order bits. However, if EXP's type is a record and this is
5117 a big-endian machine, we want the upper BITSIZE bits.  */
5118 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5119 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5120 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5121 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5122 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5126 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5127 MODE.  */
5128 if (mode != VOIDmode && mode != BLKmode
5129 && mode != TYPE_MODE (TREE_TYPE (exp)))
5130 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5132 /* If the modes of TARGET and TEMP are both BLKmode, both
5133 must be in memory and BITPOS must be aligned on a byte
5134 boundary. If so, we simply do a block copy. */
5135 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5137 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5138 || bitpos % BITS_PER_UNIT != 0)
5141 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5142 emit_block_move (target, temp,
5143 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5146 return value_mode == VOIDmode ? const0_rtx : target;
5149 /* Store the value in the bitfield. */
5150 store_bit_field (target, bitsize, bitpos, mode, temp,
5151 int_size_in_bytes (type));
5153 if (value_mode != VOIDmode)
5155 /* The caller wants an rtx for the value.
5156 If possible, avoid refetching from the bitfield itself. */
5157 if (width_mask != 0
5158 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5161 enum machine_mode tmode;
5163 tmode = GET_MODE (temp);
5164 if (tmode == VOIDmode)
5168 return expand_and (tmode, temp,
5169 gen_int_mode (width_mask, tmode),
5172 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5173 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5174 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
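/* The two shifts just above sign-extend the BITSIZE low-order bits:
   with a 32-bit TMODE and BITSIZE == 8 (illustrative numbers only),
   shifting left by 24 and then arithmetically right by 24 replicates
   bit 7 through the upper bits.  */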
5177 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5178 NULL_RTX, value_mode, VOIDmode,
5179 int_size_in_bytes (type));
5185 rtx addr = XEXP (target, 0);
5186 rtx to_rtx = target;
5188 /* If a value is wanted, it must be the lhs;
5189 so make the address stable for multiple use. */
5191 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5192 && ! CONSTANT_ADDRESS_P (addr)
5193 /* A frame-pointer reference is already stable. */
5194 && ! (GET_CODE (addr) == PLUS
5195 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5196 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5197 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5198 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5200 /* Now build a reference to just the desired component. */
5202 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5204 if (to_rtx == target)
5205 to_rtx = copy_rtx (to_rtx);
5207 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5208 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5209 set_mem_alias_set (to_rtx, alias_set);
5211 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5215 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5216 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5217 codes and find the ultimate containing object, which we return.
5219 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5220 bit position, and *PUNSIGNEDP to the signedness of the field.
5221 If the position of the field is variable, we store a tree
5222 giving the variable offset (in units) in *POFFSET.
5223 This offset is in addition to the bit position.
5224 If the position is not variable, we store 0 in *POFFSET.
5226 If any of the extraction expressions is volatile,
5227 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5229 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5230 is a mode that can be used to access the field.  In that case, *PBITSIZE
5231 is redundant.
5233 If the field describes a variable-sized object, *PMODE is set to
5234 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5235 this case, but the address of the object can be found. */
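/* Worked illustration (ours, hedged): for s.x.b[i], with b an array
   of 4-byte ints at constant byte offset 8, the walk below leaves
   *PBITPOS == 64 and *POFFSET == the tree i * 4, keeping the
   variable part of the address separate from the constant bit
   position.  */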
5238 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5239 punsignedp, pvolatilep)
5241 HOST_WIDE_INT *pbitsize;
5242 HOST_WIDE_INT *pbitpos;
5244 enum machine_mode *pmode;
5249 enum machine_mode mode = VOIDmode;
5250 tree offset = size_zero_node;
5251 tree bit_offset = bitsize_zero_node;
5252 tree placeholder_ptr = 0;
5255 /* First get the mode, signedness, and size. We do this from just the
5256 outermost expression. */
5257 if (TREE_CODE (exp) == COMPONENT_REF)
5259 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5260 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5261 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5263 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5265 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5267 size_tree = TREE_OPERAND (exp, 1);
5268 *punsignedp = TREE_UNSIGNED (exp);
5272 mode = TYPE_MODE (TREE_TYPE (exp));
5273 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5275 if (mode == BLKmode)
5276 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5278 *pbitsize = GET_MODE_BITSIZE (mode);
5283 if (! host_integerp (size_tree, 1))
5284 mode = BLKmode, *pbitsize = -1;
5286 *pbitsize = tree_low_cst (size_tree, 1);
5289 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5290 and find the ultimate containing object. */
5293 if (TREE_CODE (exp) == BIT_FIELD_REF)
5294 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5295 else if (TREE_CODE (exp) == COMPONENT_REF)
5297 tree field = TREE_OPERAND (exp, 1);
5298 tree this_offset = DECL_FIELD_OFFSET (field);
5300 /* If this field hasn't been filled in yet, don't go
5301 past it. This should only happen when folding expressions
5302 made during type construction. */
5303 if (this_offset == 0)
5305 else if (! TREE_CONSTANT (this_offset)
5306 && contains_placeholder_p (this_offset))
5307 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5309 offset = size_binop (PLUS_EXPR, offset, this_offset);
5310 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5311 DECL_FIELD_BIT_OFFSET (field));
5313 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5316 else if (TREE_CODE (exp) == ARRAY_REF
5317 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5319 tree index = TREE_OPERAND (exp, 1);
5320 tree array = TREE_OPERAND (exp, 0);
5321 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5322 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5323 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5325 /* We assume all arrays have sizes that are a multiple of a byte.
5326 First subtract the lower bound, if any, in the type of the
5327 index, then convert to sizetype and multiply by the size of the
5328 array element.  */
5329 if (low_bound != 0 && ! integer_zerop (low_bound))
5330 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5333 /* If the index has a self-referential type, pass it to a
5334 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5335 component to one. */
5336 if (! TREE_CONSTANT (index)
5337 && contains_placeholder_p (index))
5338 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5339 if (! TREE_CONSTANT (unit_size)
5340 && contains_placeholder_p (unit_size))
5341 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5343 offset = size_binop (PLUS_EXPR, offset,
5344 size_binop (MULT_EXPR,
5345 convert (sizetype, index),
5349 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5351 tree new = find_placeholder (exp, &placeholder_ptr);
5353 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5354 We might have been called from tree optimization where we
5355 haven't set up an object yet. */
5363 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5364 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5365 && ! ((TREE_CODE (exp) == NOP_EXPR
5366 || TREE_CODE (exp) == CONVERT_EXPR)
5367 && (TYPE_MODE (TREE_TYPE (exp))
5368 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5371 /* If any reference in the chain is volatile, the effect is volatile. */
5372 if (TREE_THIS_VOLATILE (exp))
5375 exp = TREE_OPERAND (exp, 0);
5378 /* If OFFSET is constant, see if we can return the whole thing as a
5379 constant bit position. Otherwise, split it up. */
5380 if (host_integerp (offset, 0)
5381 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5383 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5384 && host_integerp (tem, 0))
5385 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5387 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5393 /* Return 1 if T is an expression that get_inner_reference handles. */
5396 handled_component_p (t)
5399 switch (TREE_CODE (t))
5404 case ARRAY_RANGE_REF:
5405 case NON_LVALUE_EXPR:
5406 case VIEW_CONVERT_EXPR:
5411 return (TYPE_MODE (TREE_TYPE (t))
5412 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5419 /* Given an rtx VALUE that may contain additions and multiplications, return
5420 an equivalent value that just refers to a register, memory, or constant.
5421 This is done by generating instructions to perform the arithmetic and
5422 returning a pseudo-register containing the value.
5424 The returned value may be a REG, SUBREG, MEM or constant. */
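/* Hedged example (ours): given VALUE == (plus (mult (reg 60)
   (const_int 4)) (reg 61)), force_operand emits the multiply and the
   add and returns a pseudo register holding the sum, so the caller
   sees a plain register rather than arithmetic RTL.  */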
5427 force_operand (value, target)
5431 /* Use a temporary to force order of execution of calls to
5432 `force_operand'.  */
5435 /* Use subtarget as the target for operand 0 of a binary operation. */
5436 rtx subtarget = get_subtarget (target);
5438 /* Check for a PIC address load. */
5439 if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5440 && XEXP (value, 0) == pic_offset_table_rtx
5441 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5442 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5443 || GET_CODE (XEXP (value, 1)) == CONST))
5446 subtarget = gen_reg_rtx (GET_MODE (value));
5447 emit_move_insn (subtarget, value);
5451 if (GET_CODE (value) == PLUS)
5452 binoptab = add_optab;
5453 else if (GET_CODE (value) == MINUS)
5454 binoptab = sub_optab;
5455 else if (GET_CODE (value) == MULT)
5457 op2 = XEXP (value, 1);
5458 if (!CONSTANT_P (op2)
5459 && !(GET_CODE (op2) == REG && op2 != subtarget))
5461 tmp = force_operand (XEXP (value, 0), subtarget);
5462 return expand_mult (GET_MODE (value), tmp,
5463 force_operand (op2, NULL_RTX),
5469 op2 = XEXP (value, 1);
5470 if (!CONSTANT_P (op2)
5471 && !(GET_CODE (op2) == REG && op2 != subtarget))
5473 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5475 binoptab = add_optab;
5476 op2 = negate_rtx (GET_MODE (value), op2);
5479 /* Check for an addition with OP2 a constant integer and our first
5480 operand a PLUS of a virtual register and something else. In that
5481 case, we want to emit the sum of the virtual register and the
5482 constant first and then add the other value. This allows virtual
5483 register instantiation to simply modify the constant rather than
5484 creating another one around this addition. */
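/* Hedged illustration of the case tested below (ours): for

       (plus (plus (reg virtual-stack-vars) (reg 65)) (const_int 8))

   we form virtual-stack-vars + 8 first, which instantiation can
   later fold into a single frame-pointer offset, and only then add
   reg 65.  */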
5485 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5486 && GET_CODE (XEXP (value, 0)) == PLUS
5487 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5488 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5489 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5491 rtx temp = expand_binop (GET_MODE (value), binoptab,
5492 XEXP (XEXP (value, 0), 0), op2,
5493 subtarget, 0, OPTAB_LIB_WIDEN);
5494 return expand_binop (GET_MODE (value), binoptab, temp,
5495 force_operand (XEXP (XEXP (value, 0), 1), 0),
5496 target, 0, OPTAB_LIB_WIDEN);
5499 tmp = force_operand (XEXP (value, 0), subtarget);
5500 return expand_binop (GET_MODE (value), binoptab, tmp,
5501 force_operand (op2, NULL_RTX),
5502 target, 0, OPTAB_LIB_WIDEN);
5503 /* We give UNSIGNEDP = 0 to expand_binop
5504 because the only operations we are expanding here are signed ones. */
5507 #ifdef INSN_SCHEDULING
5508 /* On machines that have insn scheduling, we want all memory references to be
5509 explicit, so we need to deal with such paradoxical SUBREGs. */
5510 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5511 && (GET_MODE_SIZE (GET_MODE (value))
5512 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5513 value
5514 = simplify_gen_subreg (GET_MODE (value),
5515 force_reg (GET_MODE (SUBREG_REG (value)),
5516 force_operand (SUBREG_REG (value),
5518 GET_MODE (SUBREG_REG (value)),
5519 SUBREG_BYTE (value));
5525 /* Subroutine of expand_expr: return nonzero iff there is no way that
5526 EXP can reference X, which is being modified. TOP_P is nonzero if this
5527 call is going to be used to determine whether we need a temporary
5528 for EXP, as opposed to a recursive call to this function.
5530 It is always safe for this routine to return zero since it merely
5531 searches for optimization opportunities. */
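/* Hedged example of the contract (ours): if X is the MEM holding `a'
   and EXP is the tree for `b + 1', safe_from_p may return nonzero
   and the caller can expand EXP directly into X; answering zero
   merely forces a temporary, so zero is always a safe answer.  */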
5534 safe_from_p (x, exp, top_p)
5541 static tree save_expr_list;
5544 /* If EXP has varying size, we MUST use a target since we currently
5545 have no way of allocating temporaries of variable size
5546 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5547 So we assume here that something at a higher level has prevented a
5548 clash. This is somewhat bogus, but the best we can do. Only
5549 do this when X is BLKmode and when we are at the top level. */
5550 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5551 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5552 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5553 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5554 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5556 && GET_MODE (x) == BLKmode)
5557 /* If X is in the outgoing argument area, it is always safe. */
5558 || (GET_CODE (x) == MEM
5559 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5560 || (GET_CODE (XEXP (x, 0)) == PLUS
5561 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5564 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5565 find the underlying pseudo. */
5566 if (GET_CODE (x) == SUBREG)
5569 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5573 /* A SAVE_EXPR might appear many times in the expression passed to the
5574 top-level safe_from_p call, and if it has a complex subexpression,
5575 examining it multiple times could result in a combinatorial explosion.
5576 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5577 with optimization took about 28 minutes to compile -- even though it was
5578 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5579 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5580 we have processed. Note that the only test of top_p was above. */
5589 rtn = safe_from_p (x, exp, 0);
5591 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5592 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5597 /* Now look at our tree code and possibly recurse. */
5598 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5601 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5608 if (TREE_CODE (exp) == TREE_LIST)
5609 return ((TREE_VALUE (exp) == 0
5610 || safe_from_p (x, TREE_VALUE (exp), 0))
5611 && (TREE_CHAIN (exp) == 0
5612 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5613 else if (TREE_CODE (exp) == ERROR_MARK)
5614 return 1; /* An already-visited SAVE_EXPR? */
5619 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5623 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5624 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5628 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5629 the expression. If it is set, we conflict iff we are that rtx or
5630 both are in memory. Otherwise, we check all operands of the
5631 expression recursively. */
5633 switch (TREE_CODE (exp))
5636 /* If the operand is static or we are static, we can't conflict.
5637 Likewise if we don't conflict with the operand at all. */
5638 if (staticp (TREE_OPERAND (exp, 0))
5639 || TREE_STATIC (exp)
5640 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5643 /* Otherwise, the only way this can conflict is if we are taking
5644 the address of a DECL whose address is part of X, which is
5645 very rare.  */
5646 exp = TREE_OPERAND (exp, 0);
5649 if (!DECL_RTL_SET_P (exp)
5650 || GET_CODE (DECL_RTL (exp)) != MEM)
5653 exp_rtl = XEXP (DECL_RTL (exp), 0);
5658 if (GET_CODE (x) == MEM
5659 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5660 get_alias_set (exp)))
5665 /* Assume that the call will clobber all hard registers and
5666 all of memory.  */
5667 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5668 || GET_CODE (x) == MEM)
5673 /* If a sequence exists, we would have to scan every instruction
5674 in the sequence to see if it was safe.  This is probably not
5675 worthwhile.  */
5676 if (RTL_EXPR_SEQUENCE (exp))
5679 exp_rtl = RTL_EXPR_RTL (exp);
5682 case WITH_CLEANUP_EXPR:
5683 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5686 case CLEANUP_POINT_EXPR:
5687 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5690 exp_rtl = SAVE_EXPR_RTL (exp);
5694 /* If we've already scanned this, don't do it again. Otherwise,
5695 show we've scanned it and record for clearing the flag if we're
5696 going on.  */
5697 if (TREE_PRIVATE (exp))
5700 TREE_PRIVATE (exp) = 1;
5701 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5703 TREE_PRIVATE (exp) = 0;
5707 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5711 /* The only operand we look at is operand 1. The rest aren't
5712 part of the expression. */
5713 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5715 case METHOD_CALL_EXPR:
5716 /* This takes an rtx argument, but shouldn't appear here. */
5723 /* If we have an rtx, we do not need to scan our operands. */
5727 nops = first_rtl_op (TREE_CODE (exp));
5728 for (i = 0; i < nops; i++)
5729 if (TREE_OPERAND (exp, i) != 0
5730 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5733 /* If this is a language-specific tree code, it may require
5734 special handling. */
5735 if ((unsigned int) TREE_CODE (exp)
5736 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5737 && !(*lang_hooks.safe_from_p) (x, exp))
5741 /* If we have an rtl, find any enclosed object.  Then see if we conflict
5742 with it.  */
5745 if (GET_CODE (exp_rtl) == SUBREG)
5747 exp_rtl = SUBREG_REG (exp_rtl);
5748 if (GET_CODE (exp_rtl) == REG
5749 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5753 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5754 are memory and they conflict. */
5755 return ! (rtx_equal_p (x, exp_rtl)
5756 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5757 && true_dependence (exp_rtl, VOIDmode, x,
5758 rtx_addr_varies_p)));
5761 /* If we reach here, it is safe. */
5765 /* Subroutine of expand_expr: return rtx if EXP is a
5766 variable or parameter; else return 0. */
5773 switch (TREE_CODE (exp))
5777 return DECL_RTL (exp);
5783 #ifdef MAX_INTEGER_COMPUTATION_MODE
5786 check_max_integer_computation_mode (exp)
5789 enum tree_code code;
5790 enum machine_mode mode;
5792 /* Strip any NOPs that don't change the mode. */
5794 code = TREE_CODE (exp);
5796 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5797 if (code == NOP_EXPR
5798 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5801 /* First check the type of the overall operation. We need only look at
5802 unary, binary and relational operations. */
5803 if (TREE_CODE_CLASS (code) == '1'
5804 || TREE_CODE_CLASS (code) == '2'
5805 || TREE_CODE_CLASS (code) == '<')
5807 mode = TYPE_MODE (TREE_TYPE (exp));
5808 if (GET_MODE_CLASS (mode) == MODE_INT
5809 && mode > MAX_INTEGER_COMPUTATION_MODE)
5810 internal_error ("unsupported wide integer operation");
5813 /* Check operand of a unary op. */
5814 if (TREE_CODE_CLASS (code) == '1')
5816 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5817 if (GET_MODE_CLASS (mode) == MODE_INT
5818 && mode > MAX_INTEGER_COMPUTATION_MODE)
5819 internal_error ("unsupported wide integer operation");
5822 /* Check operands of a binary/comparison op. */
5823 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5825 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5826 if (GET_MODE_CLASS (mode) == MODE_INT
5827 && mode > MAX_INTEGER_COMPUTATION_MODE)
5828 internal_error ("unsupported wide integer operation");
5830 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5831 if (GET_MODE_CLASS (mode) == MODE_INT
5832 && mode > MAX_INTEGER_COMPUTATION_MODE)
5833 internal_error ("unsupported wide integer operation");
5838 /* Return the highest power of two that EXP is known to be a multiple of.
5839 This is used in updating alignment of MEMs in array references. */
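/* Worked examples (illustrative only): an INTEGER_CST of 24 yields
   24 & -24 == 8; a PLUS whose operands have factors 8 and 4 yields
   MIN (8, 4) == 4; a MULT of the same operands yields 8 * 4 == 32.  */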
5841 static HOST_WIDE_INT
5842 highest_pow2_factor (exp)
5845 HOST_WIDE_INT c0, c1;
5847 switch (TREE_CODE (exp))
5850 /* We can find the lowest bit that's a one. If the low
5851 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5852 We need to handle this case since we can find it in a COND_EXPR,
5853 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
5854 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5855 later ICE.  */
5856 if (TREE_CONSTANT_OVERFLOW (exp))
5857 return BIGGEST_ALIGNMENT;
5860 /* Note: tree_low_cst is intentionally not used here,
5861 we don't care about the upper bits. */
5862 c0 = TREE_INT_CST_LOW (exp);
5864 return c0 ? c0 : BIGGEST_ALIGNMENT;
5868 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5869 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5870 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5871 return MIN (c0, c1);
5874 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5875 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5878 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5880 if (integer_pow2p (TREE_OPERAND (exp, 1))
5881 && host_integerp (TREE_OPERAND (exp, 1), 1))
5883 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5884 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5885 return MAX (1, c0 / c1);
5889 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5890 case SAVE_EXPR: case WITH_RECORD_EXPR:
5891 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5894 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5897 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5898 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5899 return MIN (c0, c1);
5908 /* Return an object on the placeholder list that matches EXP, a
5909 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5910 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5911 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5912 is a location which initially points to a starting location in the
5913 placeholder list (zero means start of the list) and where a pointer into
5914 the placeholder list at which the object is found is placed. */
5917 find_placeholder (exp, plist)
5921 tree type = TREE_TYPE (exp);
5922 tree placeholder_expr;
5924 for (placeholder_expr
5925 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5926 placeholder_expr != 0;
5927 placeholder_expr = TREE_CHAIN (placeholder_expr))
5929 tree need_type = TYPE_MAIN_VARIANT (type);
5932 /* Find the outermost reference that is of the type we want. If none,
5933 see if any object has a type that is a pointer to the type we
5935 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5936 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5937 || TREE_CODE (elt) == COND_EXPR)
5938 ? TREE_OPERAND (elt, 1)
5939 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5940 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5941 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5942 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5943 ? TREE_OPERAND (elt, 0) : 0))
5944 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5947 *plist = placeholder_expr;
5951 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5953 = ((TREE_CODE (elt) == COMPOUND_EXPR
5954 || TREE_CODE (elt) == COND_EXPR)
5955 ? TREE_OPERAND (elt, 1)
5956 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5957 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5958 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5959 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5960 ? TREE_OPERAND (elt, 0) : 0))
5961 if (POINTER_TYPE_P (TREE_TYPE (elt))
5962 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5966 *plist = placeholder_expr;
5967 return build1 (INDIRECT_REF, need_type, elt);
5974 /* expand_expr: generate code for computing expression EXP.
5975 An rtx for the computed value is returned. The value is never null.
5976 In the case of a void EXP, const0_rtx is returned.
5978 The value may be stored in TARGET if TARGET is nonzero.
5979 TARGET is just a suggestion; callers must assume that
5980 the rtx returned may not be the same as TARGET.
5982 If TARGET is CONST0_RTX, it means that the value will be ignored.
5984 If TMODE is not VOIDmode, it suggests generating the
5985 result in mode TMODE. But this is done only when convenient.
5986 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5987 TMODE is just a suggestion; callers must assume that
5988 the rtx returned may not have mode TMODE.
5990 Note that TARGET may have neither TMODE nor MODE. In that case, it
5991 probably will not be used.
5993 If MODIFIER is EXPAND_SUM then when EXP is an addition
5994 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5995 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5996 products as above, or REG or MEM, or constant.
5997 Ordinarily in such cases we would output mul or add instructions
5998 and then return a pseudo reg containing the sum.
6000 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6001 it also marks a label as absolutely required (it can't be dead).
6002 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6003 This is used for outputting expressions used in initializers.
6005 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6006 with a constant address even if that address is not normally legitimate.
6007 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
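/* Hedged illustration (ours): expanding `p + 4' with MODIFIER ==
   EXPAND_SUM may return (plus (reg ...) (const_int 4)) unchanged,
   letting the caller fold the constant into an address, whereas the
   normal modifier would emit the addition and return the pseudo
   register holding its result.  */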
6010 expand_expr (exp, target, tmode, modifier)
6013 enum machine_mode tmode;
6014 enum expand_modifier modifier;
6017 tree type = TREE_TYPE (exp);
6018 int unsignedp = TREE_UNSIGNED (type);
6019 enum machine_mode mode;
6020 enum tree_code code = TREE_CODE (exp);
6022 rtx subtarget, original_target;
6026 /* Handle ERROR_MARK before anybody tries to access its type. */
6027 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6029 op0 = CONST0_RTX (tmode);
6035 mode = TYPE_MODE (type);
6036 /* Use subtarget as the target for operand 0 of a binary operation. */
6037 subtarget = get_subtarget (target);
6038 original_target = target;
6039 ignore = (target == const0_rtx
6040 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6041 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6042 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6043 && TREE_CODE (type) == VOID_TYPE));
6045 /* If we are going to ignore this result, we need only do something
6046 if there is a side-effect somewhere in the expression. If there
6047 is, short-circuit the most common cases here. Note that we must
6048 not call expand_expr with anything but const0_rtx in case this
6049 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6053 if (! TREE_SIDE_EFFECTS (exp))
6056 /* Ensure we reference a volatile object even if value is ignored, but
6057 don't do this if all we are doing is taking its address. */
6058 if (TREE_THIS_VOLATILE (exp)
6059 && TREE_CODE (exp) != FUNCTION_DECL
6060 && mode != VOIDmode && mode != BLKmode
6061 && modifier != EXPAND_CONST_ADDRESS)
6063 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6064 if (GET_CODE (temp) == MEM)
6065 temp = copy_to_reg (temp);
6069 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6070 || code == INDIRECT_REF || code == BUFFER_REF)
6071 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6074 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6075 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6077 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6078 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6081 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6082 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6083 /* If the second operand has no side effects, just evaluate
6084 the first.  */
6085 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6087 else if (code == BIT_FIELD_REF)
6089 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6090 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6091 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6098 #ifdef MAX_INTEGER_COMPUTATION_MODE
6099 /* Only check stuff here if the mode we want is different from the mode
6100 of the expression; if it's the same, check_max_integer_computation_mode
6101 will handle it. Do we really need to check this stuff at all? */
6104 && GET_MODE (target) != mode
6105 && TREE_CODE (exp) != INTEGER_CST
6106 && TREE_CODE (exp) != PARM_DECL
6107 && TREE_CODE (exp) != ARRAY_REF
6108 && TREE_CODE (exp) != ARRAY_RANGE_REF
6109 && TREE_CODE (exp) != COMPONENT_REF
6110 && TREE_CODE (exp) != BIT_FIELD_REF
6111 && TREE_CODE (exp) != INDIRECT_REF
6112 && TREE_CODE (exp) != CALL_EXPR
6113 && TREE_CODE (exp) != VAR_DECL
6114 && TREE_CODE (exp) != RTL_EXPR)
6116 enum machine_mode mode = GET_MODE (target);
6118 if (GET_MODE_CLASS (mode) == MODE_INT
6119 && mode > MAX_INTEGER_COMPUTATION_MODE)
6120 internal_error ("unsupported wide integer operation");
6124 && TREE_CODE (exp) != INTEGER_CST
6125 && TREE_CODE (exp) != PARM_DECL
6126 && TREE_CODE (exp) != ARRAY_REF
6127 && TREE_CODE (exp) != ARRAY_RANGE_REF
6128 && TREE_CODE (exp) != COMPONENT_REF
6129 && TREE_CODE (exp) != BIT_FIELD_REF
6130 && TREE_CODE (exp) != INDIRECT_REF
6131 && TREE_CODE (exp) != VAR_DECL
6132 && TREE_CODE (exp) != CALL_EXPR
6133 && TREE_CODE (exp) != RTL_EXPR
6134 && GET_MODE_CLASS (tmode) == MODE_INT
6135 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6136 internal_error ("unsupported wide integer operation");
6138 check_max_integer_computation_mode (exp);
6141 /* If we will do cse, generate all results into pseudo registers
6142 since 1) that allows cse to find more things
6143 and 2) otherwise cse could produce an insn the machine
6144 cannot support.  An exception is a CONSTRUCTOR into a multi-word
6145 MEM: that's much more likely to be most efficient into the MEM. */
6147 if (! cse_not_expected && mode != BLKmode && target
6148 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6149 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6156 tree function = decl_function_context (exp);
6157 /* Handle using a label in a containing function. */
6158 if (function != current_function_decl
6159 && function != inline_function_decl && function != 0)
6161 struct function *p = find_function_data (function);
6162 p->expr->x_forced_labels
6163 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6164 p->expr->x_forced_labels);
6168 if (modifier == EXPAND_INITIALIZER)
6169 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6174 temp = gen_rtx_MEM (FUNCTION_MODE,
6175 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6176 if (function != current_function_decl
6177 && function != inline_function_decl && function != 0)
6178 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6183 if (DECL_RTL (exp) == 0)
6185 error_with_decl (exp, "prior parameter's size depends on `%s'");
6186 return CONST0_RTX (mode);
6189 /* ... fall through ... */
6192 /* If a static var's type was incomplete when the decl was written,
6193 but the type is complete now, lay out the decl now. */
6194 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6195 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6197 rtx value = DECL_RTL_IF_SET (exp);
6199 layout_decl (exp, 0);
6201 /* If the RTL was already set, update its mode and memory
6202 attributes.  */
6205 PUT_MODE (value, DECL_MODE (exp));
6206 SET_DECL_RTL (exp, 0);
6207 set_mem_attributes (value, exp, 1);
6208 SET_DECL_RTL (exp, value);
6212 /* ... fall through ... */
6216 if (DECL_RTL (exp) == 0)
6219 /* Ensure the variable is marked as used even if it doesn't go through
6220 a parser.  If it hasn't been used yet, write out an external
6221 definition.  */
6222 if (! TREE_USED (exp))
6224 assemble_external (exp);
6225 TREE_USED (exp) = 1;
6228 /* Show we haven't gotten RTL for this yet. */
6231 /* Handle variables inherited from containing functions. */
6232 context = decl_function_context (exp);
6234 /* We treat inline_function_decl as an alias for the current function
6235 because that is the inline function whose vars, types, etc.
6236 are being merged into the current function.
6237 See expand_inline_function. */
6239 if (context != 0 && context != current_function_decl
6240 && context != inline_function_decl
6241 /* If var is static, we don't need a static chain to access it. */
6242 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6243 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6247 /* Mark as non-local and addressable. */
6248 DECL_NONLOCAL (exp) = 1;
6249 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6251 (*lang_hooks.mark_addressable) (exp);
6252 if (GET_CODE (DECL_RTL (exp)) != MEM)
6254 addr = XEXP (DECL_RTL (exp), 0);
6255 if (GET_CODE (addr) == MEM)
6257 = replace_equiv_address (addr,
6258 fix_lexical_addr (XEXP (addr, 0), exp));
6260 addr = fix_lexical_addr (addr, exp);
6262 temp = replace_equiv_address (DECL_RTL (exp), addr);
6265 /* This is the case of an array whose size is to be determined
6266 from its initializer, while the initializer is still being parsed.
6267 See expand_decl.  */
6269 else if (GET_CODE (DECL_RTL (exp)) == MEM
6270 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6271 temp = validize_mem (DECL_RTL (exp));
6273 /* If DECL_RTL is memory, we are in the normal case and either
6274 the address is not valid or it is not a register and -fforce-addr
6275 is specified, get the address into a register. */
6277 else if (GET_CODE (DECL_RTL (exp)) == MEM
6278 && modifier != EXPAND_CONST_ADDRESS
6279 && modifier != EXPAND_SUM
6280 && modifier != EXPAND_INITIALIZER
6281 && (! memory_address_p (DECL_MODE (exp),
6282 XEXP (DECL_RTL (exp), 0))
|| (flag_force_addr
6284 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6285 temp = replace_equiv_address (DECL_RTL (exp),
6286 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6288 /* If we got something, return it. But first, set the alignment
6289 if the address is a register. */
if (temp != 0)
{
6292 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6293 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

return temp;
}
6298 /* If the mode of DECL_RTL does not match that of the decl, it
6299 must be a promoted value. We return a SUBREG of the wanted mode,
6300 but mark it so that we know that it was already extended. */
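/* Illustrative sketch (not part of the original source): on a target
   that promotes QImode locals, a `signed char' variable may live in
   (reg:SI 60); the code below hands back (subreg:QI (reg:SI 60) 0)
   with SUBREG_PROMOTED_VAR_P set so later consumers can skip the
   re-extension.  */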
6302 if (GET_CODE (DECL_RTL (exp)) == REG
6303 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
{
6305 /* Get the signedness used for this variable. Ensure we get the
6306 same mode we got when the variable was declared. */
6307 if (GET_MODE (DECL_RTL (exp))
6308 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6309 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
abort ();
6312 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6313 SUBREG_PROMOTED_VAR_P (temp) = 1;
6314 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
return temp;
}
6318 return DECL_RTL (exp);
case INTEGER_CST:
6321 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6322 TREE_INT_CST_HIGH (exp), mode);
6324 /* ??? If overflow is set, fold will have done an incomplete job,
6325 which can result in (plus xx (const_int 0)), which can get
6326 simplified by validate_replace_rtx during virtual register
6327 instantiation, which can result in unrecognizable insns.
6328 Avoid this by forcing all overflows into registers. */
6329 if (TREE_CONSTANT_OVERFLOW (exp))
6330 temp = force_reg (mode, temp);

return temp;

case CONST_DECL:
6335 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);

case REAL_CST:
6338 /* If optimized, generate immediate CONST_DOUBLE
6339 which will be turned into memory by reload if necessary.
6341 We used to force a register so that loop.c could see it. But
6342 this does not allow gen_* patterns to perform optimizations with
6343 the constants. It also produces two insns in cases like "x = 1.0;".
6344 On most machines, floating-point constants are not permitted in
6345 many insns, so we'd end up copying it to a register in any case.
6347 Now, we do the copying in expand_binop, if appropriate. */
6348 return immed_real_const (exp);
6352 if (! TREE_CST_RTL (exp))
6353 output_constant_def (exp, 1);
6355 /* TREE_CST_RTL probably contains a constant address.
6356 On RISC machines where a constant address isn't valid,
6357 make some insns to get that address into a register. */
6358 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6359 && modifier != EXPAND_CONST_ADDRESS
6360 && modifier != EXPAND_INITIALIZER
6361 && modifier != EXPAND_SUM
6362 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
|| (flag_force_addr
6364 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6365 return replace_equiv_address (TREE_CST_RTL (exp),
6366 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6367 return TREE_CST_RTL (exp);
6369 case EXPR_WITH_FILE_LOCATION:
{
rtx to_return;
6372 const char *saved_input_filename = input_filename;
6373 int saved_lineno = lineno;
6374 input_filename = EXPR_WFL_FILENAME (exp);
6375 lineno = EXPR_WFL_LINENO (exp);
6376 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6377 emit_line_note (input_filename, lineno);
6378 /* Possibly avoid switching back and forth here. */
6379 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6380 input_filename = saved_input_filename;
6381 lineno = saved_lineno;
return to_return;
}
6386 context = decl_function_context (exp);
6388 /* If this SAVE_EXPR was at global context, assume we are an
6389 initialization function and move it into our context.  */
if (context == 0)
6391 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6393 /* We treat inline_function_decl as an alias for the current function
6394 because that is the inline function whose vars, types, etc.
6395 are being merged into the current function.
6396 See expand_inline_function. */
6397 if (context == current_function_decl || context == inline_function_decl)
context = 0;

6400 /* If this is non-local, handle it.  */
if (context)
{
6403 /* The following call just exists to abort if the context is
6404 not of a containing function. */
6405 find_function_data (context);
6407 temp = SAVE_EXPR_RTL (exp);
6408 if (temp && GET_CODE (temp) == REG)
6410 put_var_into_stack (exp);
6411 temp = SAVE_EXPR_RTL (exp);
6413 if (temp == 0 || GET_CODE (temp) != MEM)
abort ();
SAVE_EXPR_RTL (exp)
6416 = replace_equiv_address (temp,
6417 fix_lexical_addr (XEXP (temp, 0), exp));
}
6419 if (SAVE_EXPR_RTL (exp) == 0)
{
6421 if (mode == VOIDmode)
temp = const0_rtx;
else
6424 temp = assign_temp (build_qualified_type (type,
(TYPE_QUALS (type)
6426 | TYPE_QUAL_CONST)),
3, 0, 0);

6429 SAVE_EXPR_RTL (exp) = temp;
6430 if (!optimize && GET_CODE (temp) == REG)
6431 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
save_expr_regs);
6434 /* If the mode of TEMP does not match that of the expression, it
6435 must be a promoted value. We pass store_expr a SUBREG of the
6436 wanted mode but mark it so that we know that it was already
6437 extended.  Note that `unsignedp' was modified above in
this case.  */
6440 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
{
6442 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6443 SUBREG_PROMOTED_VAR_P (temp) = 1;
6444 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
}
6447 if (temp == const0_rtx)
6448 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
else
6450 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6452 TREE_USED (exp) = 1;
6455 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6456 must be a promoted value. We return a SUBREG of the wanted mode,
6457 but mark it so that we know that it was already extended. */
6459 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6460 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
{
6462 /* Compute the signedness and make the proper SUBREG.  */
6463 promote_mode (type, mode, &unsignedp, 0);
6464 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6465 SUBREG_PROMOTED_VAR_P (temp) = 1;
6466 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
return temp;
}
6470 return SAVE_EXPR_RTL (exp);

case UNSAVE_EXPR:
{
6475 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6476 TREE_OPERAND (exp, 0)
6477 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
return temp;
}
6481 case PLACEHOLDER_EXPR:
{
6483 tree old_list = placeholder_list;
6484 tree placeholder_expr = 0;
6486 exp = find_placeholder (exp, &placeholder_expr);

if (exp != 0)
{
6490 placeholder_list = TREE_CHAIN (placeholder_expr);
6491 temp = expand_expr (exp, original_target, tmode, modifier);
6492 placeholder_list = old_list;
return temp;
}
}

6496 /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
abort ();
6499 case WITH_RECORD_EXPR:
6500 /* Put the object on the placeholder list, expand our first operand,
6501 and pop the list. */
6502 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
placeholder_list);
6504 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
modifier);
6506 placeholder_list = TREE_CHAIN (placeholder_list);
return target;

case GOTO_EXPR:
6510 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6511 expand_goto (TREE_OPERAND (exp, 0));
else
6513 expand_computed_goto (TREE_OPERAND (exp, 0));
return const0_rtx;

case EXIT_EXPR:
6517 expand_exit_loop_if_false (NULL,
6518 invert_truthvalue (TREE_OPERAND (exp, 0)));
return const0_rtx;
6521 case LABELED_BLOCK_EXPR:
6522 if (LABELED_BLOCK_BODY (exp))
6523 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6524 /* Should perhaps use expand_label, but this is simpler and safer. */
6525 do_pending_stack_adjust ();
6526 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
return const0_rtx;
6529 case EXIT_BLOCK_EXPR:
6530 if (EXIT_BLOCK_RETURN (exp))
6531 sorry ("returned value in block_exit_expr");
6532 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
return const0_rtx;

case LOOP_EXPR:
push_temp_slots ();
6537 expand_start_loop (1);
6538 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
expand_end_loop ();
pop_temp_slots ();

return const0_rtx;
case BIND_EXPR:
{
6546 tree vars = TREE_OPERAND (exp, 0);
6547 int vars_need_expansion = 0;
6549 /* Need to open a binding contour here because
6550 if there are any cleanups they must be contained here. */
6551 expand_start_bindings (2);
6553 /* Mark the corresponding BLOCK for output in its proper place. */
6554 if (TREE_OPERAND (exp, 2) != 0
6555 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6556 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6558 /* If VARS have not yet been expanded, expand them now.  */
while (vars)
{
6561 if (!DECL_RTL_SET_P (vars))
{
6563 vars_need_expansion = 1;
expand_decl (vars);
}
6566 expand_decl_init (vars);
6567 vars = TREE_CHAIN (vars);
}
6570 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6572 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

return temp;
}

case RTL_EXPR:
6578 if (RTL_EXPR_SEQUENCE (exp))
{
6580 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
abort ();
6582 emit_insns (RTL_EXPR_SEQUENCE (exp));
6583 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
}
6585 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6586 free_temps_for_rtl_expr (exp);
6587 return RTL_EXPR_RTL (exp);
case CONSTRUCTOR:
6590 /* If we don't need the result, just ensure we evaluate any
subexpressions.  */
if (ignore)
{
tree elt;

6596 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6597 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

return const0_rtx;
}
6602 /* All elts simple constants => refer to a constant in memory. But
6603 if this is a non-BLKmode mode, let it store a field at a time
6604 since that should make a CONST_INT or CONST_DOUBLE when we
6605 fold. Likewise, if we have a target we can use, it is best to
6606 store directly into the target unless the type is large enough
6607 that memcpy will be used. If we are making an initializer and
6608 all operands are constant, put it in memory as well. */
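/* For instance (illustrative note): `static struct s x = { 1, 2 };'
   with all-constant elements is emitted as initialized data via
   output_constant_def below, while a small automatic aggregate falls
   through to store_constructor and is stored field by field.  */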
6609 else if ((TREE_STATIC (exp)
6610 && ((mode == BLKmode
6611 && ! (target != 0 && safe_from_p (target, exp, 1)))
6612 || TREE_ADDRESSABLE (exp)
6613 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6614 && (! MOVE_BY_PIECES_P
6615 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
TYPE_ALIGN (type)))
6617 && ! mostly_zeros_p (exp))))
6618 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
{
6620 rtx constructor = output_constant_def (exp, 1);
6622 if (modifier != EXPAND_CONST_ADDRESS
6623 && modifier != EXPAND_INITIALIZER
6624 && modifier != EXPAND_SUM)
6625 constructor = validize_mem (constructor);

return constructor;
}
else
{
6631 /* Handle calls that pass values in multiple non-contiguous
6632 locations. The Irix 6 ABI has examples of this. */
6633 if (target == 0 || ! safe_from_p (target, exp, 1)
6634 || GET_CODE (target) == PARALLEL)
target
6636 = assign_temp (build_qualified_type (type,
(TYPE_QUALS (type)
6638 | (TREE_READONLY (exp)
6639 * TYPE_QUAL_CONST))),
6640 0, TREE_ADDRESSABLE (exp), 1);
6642 store_constructor (exp, target, 0,
6643 int_size_in_bytes (TREE_TYPE (exp)));
return target;
}

case INDIRECT_REF:
{
6649 tree exp1 = TREE_OPERAND (exp, 0);
tree index;
6651 tree string = string_constant (exp1, &index);
6653 /* Try to optimize reads from const strings.  */
if (string
6655 && TREE_CODE (string) == STRING_CST
6656 && TREE_CODE (index) == INTEGER_CST
6657 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6658 && GET_MODE_CLASS (mode) == MODE_INT
6659 && GET_MODE_SIZE (mode) == 1
6660 && modifier != EXPAND_WRITE)
6661 return gen_int_mode (TREE_STRING_POINTER (string)
6662 [TREE_INT_CST_LOW (index)], mode);
6664 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6665 op0 = memory_address (mode, op0);
6666 temp = gen_rtx_MEM (mode, op0);
6667 set_mem_attributes (temp, exp, 0);
6669 /* If we are writing to this object and its type is a record with
6670 readonly fields, we must mark it as readonly so it will
6671 conflict with readonly references to those fields. */
6672 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6673 RTX_UNCHANGING_P (temp) = 1;

return temp;
}

case ARRAY_REF:
6679 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
abort ();

{
6683 tree array = TREE_OPERAND (exp, 0);
6684 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6685 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6686 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6689 /* Optimize the special-case of a zero lower bound.
6691 We convert the low_bound to sizetype to avoid some problems
6692 with constant folding. (E.g. suppose the lower bound is 1,
6693 and its mode is QI. Without the conversion, (ARRAY
6694 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6695 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6697 if (! integer_zerop (low_bound))
6698 index = size_diffop (index, convert (sizetype, low_bound));
6700 /* Fold an expression like: "foo"[2].
6701 This is not done in fold so it won't happen inside &.
6702 Don't fold if this is for wide characters since it's too
6703 difficult to do correctly and this is a very rare case. */
6705 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6706 && TREE_CODE (array) == STRING_CST
6707 && TREE_CODE (index) == INTEGER_CST
6708 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6709 && GET_MODE_CLASS (mode) == MODE_INT
6710 && GET_MODE_SIZE (mode) == 1)
6711 return gen_int_mode (TREE_STRING_POINTER (array)
6712 [TREE_INT_CST_LOW (index)], mode);
6714 /* If this is a constant index into a constant array,
6715 just get the value from the array. Handle both the cases when
6716 we have an explicit constructor and when our operand is a variable
6717 that was declared const. */
6719 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6720 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6721 && TREE_CODE (index) == INTEGER_CST
6722 && 0 > compare_tree_int (index,
6723 list_length (CONSTRUCTOR_ELTS
6724 (TREE_OPERAND (exp, 0)))))
{
tree elem;
unsigned HOST_WIDE_INT i;

6728 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6729 i = TREE_INT_CST_LOW (index);
6730 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
;

if (elem)
6734 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
modifier);
}
6738 else if (optimize >= 1
6739 && modifier != EXPAND_CONST_ADDRESS
6740 && modifier != EXPAND_INITIALIZER
6741 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6742 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6743 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
{
6745 if (TREE_CODE (index) == INTEGER_CST)
{
6747 tree init = DECL_INITIAL (array);
6749 if (TREE_CODE (init) == CONSTRUCTOR)
{
tree elem;

6753 for (elem = CONSTRUCTOR_ELTS (init);
(elem
6755 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6756 elem = TREE_CHAIN (elem))
;
6759 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6760 return expand_expr (fold (TREE_VALUE (elem)), target,
tmode, modifier);
}
6763 else if (TREE_CODE (init) == STRING_CST
6764 && 0 > compare_tree_int (index,
6765 TREE_STRING_LENGTH (init)))
{
6767 tree type = TREE_TYPE (TREE_TYPE (init));
6768 enum machine_mode mode = TYPE_MODE (type);
6770 if (GET_MODE_CLASS (mode) == MODE_INT
6771 && GET_MODE_SIZE (mode) == 1)
6772 return gen_int_mode (TREE_STRING_POINTER (init)
6773 [TREE_INT_CST_LOW (index)], mode);
}
}
}
}
/* Fall through.  */

case COMPONENT_REF:
case BIT_FIELD_REF:
6782 case ARRAY_RANGE_REF:
6783 /* If the operand is a CONSTRUCTOR, we can just extract the
6784 appropriate field if it is present. Don't do this if we have
6785 already written the data since we want to refer to that copy
6786 and varasm.c assumes that's what we'll do. */
6787 if (code == COMPONENT_REF
6788 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6789 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6793 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6794 elt = TREE_CHAIN (elt))
6795 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6796 /* We can normally use the value of the field in the
6797 CONSTRUCTOR. However, if this is a bitfield in
6798 an integral mode that we can fit in a HOST_WIDE_INT,
6799 we must mask only the number of bits in the bitfield,
6800 since this is done implicitly by the constructor. If
6801 the bitfield does not meet either of those conditions,
6802 we can't do this optimization. */
6803 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6804 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
== MODE_INT)
6806 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6807 <= HOST_BITS_PER_WIDE_INT))))
{
6809 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6810 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
{
6812 HOST_WIDE_INT bitsize
6813 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6814 enum machine_mode imode
6815 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

6817 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
{
6819 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6820 op0 = expand_and (imode, op0, op1, target);
}
else
{
tree count
6825 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
0);

6828 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
target, 0);
6830 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
target, 0);
}
}

return op0;
}
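/* Worked illustration of the masking above (not original text): for a
   3-bit bitfield in SImode, the unsigned branch ANDs with GEN_INT (7),
   while the signed branch shifts left by 29 and then arithmetic-shifts
   right by 29 so the sign bit is replicated.  */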
{
6840 enum machine_mode mode1;
6841 HOST_WIDE_INT bitsize, bitpos;
tree offset;
int volatilep = 0;
6844 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6845 &mode1, &unsignedp, &volatilep);
rtx orig_op0;
6848 /* If we got back the original object, something is wrong. Perhaps
6849 we are evaluating an expression too early. In any event, don't
6850 infinitely recurse.  */
if (tem == exp)
abort ();
6854 /* If TEM's type is a union of variable size, pass TARGET to the inner
6855 computation, since it will need a temporary and TARGET is known
6856 to have to do. This occurs in unchecked conversion in Ada. */
orig_op0 = op0
= expand_expr (tem,
6860 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6861 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
!= INTEGER_CST)
6863 ? target : NULL_RTX),
VOIDmode,
6865 (modifier == EXPAND_INITIALIZER
6866 || modifier == EXPAND_CONST_ADDRESS)
6867 ? modifier : EXPAND_NORMAL);
6869 /* If this is a constant, put it into a register if it is a
6870 legitimate constant and OFFSET is 0 and memory if it isn't. */
6871 if (CONSTANT_P (op0))
{
6873 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));

6874 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
&& offset == 0)
6876 op0 = force_reg (mode, op0);
else
6878 op0 = validize_mem (force_const_mem (mode, op0));
}

if (offset != 0)
{
6883 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
6885 /* If this object is in a register, put it into memory.
6886 This case can't occur in C, but can in Ada if we have
6887 unchecked conversion of an expression from a scalar type to
6888 an array or record type. */
6889 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6890 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
{
6892 /* If the operand is a SAVE_EXPR, we can deal with this by
6893 forcing the SAVE_EXPR into memory.  */
6894 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
{
6896 put_var_into_stack (TREE_OPERAND (exp, 0));
6897 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
}
else
{
tree nt
6902 = build_qualified_type (TREE_TYPE (tem),
6903 (TYPE_QUALS (TREE_TYPE (tem))
6904 | TYPE_QUAL_CONST));
6905 rtx memloc = assign_temp (nt, 1, 1, 1);

6907 emit_move_insn (memloc, op0);
op0 = memloc;
}
}
6912 if (GET_CODE (op0) != MEM)
abort ();
6915 if (GET_MODE (offset_rtx) != ptr_mode)
6916 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6918 #ifdef POINTERS_EXTEND_UNSIGNED
6919 if (GET_MODE (offset_rtx) != Pmode)
6920 offset_rtx = convert_memory_address (Pmode, offset_rtx);
#endif
6923 /* A constant address in OP0 can have VOIDmode, we must not try
6924 to call force_reg for that case. Avoid that case. */
6925 if (GET_CODE (op0) == MEM
6926 && GET_MODE (op0) == BLKmode
6927 && GET_MODE (XEXP (op0, 0)) != VOIDmode
&& bitsize != 0
6929 && (bitpos % bitsize) == 0
6930 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6931 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
{
6933 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
bitpos = 0;
}

6937 op0 = offset_address (op0, offset_rtx,
6938 highest_pow2_factor (offset));
}
6941 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6942 record its alignment as BIGGEST_ALIGNMENT. */
6943 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
6944 && is_aligning_offset (offset, tem))
6945 set_mem_align (op0, BIGGEST_ALIGNMENT);
6947 /* Don't forget about volatility even if this is a bitfield. */
6948 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
{
6950 if (op0 == orig_op0)
6951 op0 = copy_rtx (op0);

6953 MEM_VOLATILE_P (op0) = 1;
}
6956 /* In cases where an aligned union has an unaligned object
6957 as a field, we might be extracting a BLKmode value from
6958 an integer-mode (e.g., SImode) object. Handle this case
6959 by doing the extract into an object as wide as the field
6960 (which we know to be the width of a basic mode), then
6961 storing into memory, and changing the mode to BLKmode. */
6962 if (mode1 == VOIDmode
6963 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6964 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6965 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6966 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6967 && modifier != EXPAND_CONST_ADDRESS
6968 && modifier != EXPAND_INITIALIZER)
6969 /* If the field isn't aligned enough to fetch as a memref,
6970 fetch it as a bit field. */
6971 || (mode1 != BLKmode
6972 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6973 && ((TYPE_ALIGN (TREE_TYPE (tem))
6974 < GET_MODE_ALIGNMENT (mode))
6975 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6976 /* If the type and the field are a constant size and the
6977 size of the type isn't the same size as the bitfield,
6978 we must use bitfield operations. */
|| (bitsize >= 0
6980 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
== INTEGER_CST)
6982 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
bitsize)))
{
6985 enum machine_mode ext_mode = mode;
6987 if (ext_mode == BLKmode
6988 && ! (target != 0 && GET_CODE (op0) == MEM
6989 && GET_CODE (target) == MEM
6990 && bitpos % BITS_PER_UNIT == 0))
6991 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6993 if (ext_mode == BLKmode)
{
6995 /* In this case, BITPOS must start at a byte boundary and
6996 TARGET, if specified, must be a MEM.  */
6997 if (GET_CODE (op0) != MEM
6998 || (target != 0 && GET_CODE (target) != MEM)
6999 || bitpos % BITS_PER_UNIT != 0)
abort ();

7002 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
if (target == 0)
7004 target = assign_temp (type, 0, 1, 1);

7006 emit_block_move (target, op0,
7007 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
/ BITS_PER_UNIT));

return target;
}
7013 op0 = validize_mem (op0);
7015 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7016 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7018 op0 = extract_bit_field (op0, bitsize, bitpos,
7019 unsignedp, target, ext_mode, ext_mode,
7020 int_size_in_bytes (TREE_TYPE (tem)));
7022 /* If the result is a record type and BITSIZE is narrower than
7023 the mode of OP0, an integral mode, and this is a big endian
7024 machine, we must put the field into the high-order bits. */
7025 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7026 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7027 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7028 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7029 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
- bitsize),
op0, 1);
7033 if (mode == BLKmode)
{
7035 rtx new = assign_temp (build_qualified_type
7036 ((*lang_hooks.types.type_for_mode)
(ext_mode, 0),
7038 TYPE_QUAL_CONST), 0, 1, 1);

7040 emit_move_insn (new, op0);
7041 op0 = copy_rtx (new);
7042 PUT_MODE (op0, BLKmode);
7043 set_mem_attributes (op0, exp, 1);
}

return op0;
}
7049 /* If the result is BLKmode, use that to access the object
now as well.  */
7051 if (mode == BLKmode)
mode1 = BLKmode;
7054 /* Get a reference to just this component. */
7055 if (modifier == EXPAND_CONST_ADDRESS
7056 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7057 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7059 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7061 if (op0 == orig_op0)
7062 op0 = copy_rtx (op0);
7064 set_mem_attributes (op0, exp, 0);
7065 if (GET_CODE (XEXP (op0, 0)) == REG)
7066 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7068 MEM_VOLATILE_P (op0) |= volatilep;
7069 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7070 || modifier == EXPAND_CONST_ADDRESS
7071 || modifier == EXPAND_INITIALIZER)
return op0;
7073 else if (target == 0)
7074 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

7076 convert_move (target, op0, unsignedp);
return target;
}

case VTABLE_REF:
{
7082 rtx insn, before = get_last_insn (), vtbl_ref;
7084 /* Evaluate the interior expression. */
7085 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
tmode, modifier);
7088 /* Get or create an instruction off which to hang a note. */
7089 if (REG_P (subtarget))
{
target = subtarget;
7092 insn = get_last_insn ();
if (insn == before)
abort ();
7095 if (! INSN_P (insn))
7096 insn = prev_nonnote_insn (insn);
}
else
{
7100 target = gen_reg_rtx (GET_MODE (subtarget));
7101 insn = emit_move_insn (target, subtarget);
}
7104 /* Collect the data for the note. */
7105 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7106 vtbl_ref = plus_constant (vtbl_ref,
7107 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7108 /* Discard the initial CONST that was added. */
7109 vtbl_ref = XEXP (vtbl_ref, 0);
REG_NOTES (insn)
7112 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

return target;
}
7117 /* Intended for a reference to a buffer of a file-object in Pascal.
7118 But it's not certain that a special tree code will really be
7119 necessary for these.  INDIRECT_REF might work for them.  */
case BUFFER_REF:
abort ();

case IN_EXPR:
{
7125 /* Pascal set IN expression.

Algorithm:
7128 rlo = set_low - (set_low%bits_per_word);
7129 the_word = set [ (index - rlo)/bits_per_word ];
7130 bit_index = index % bits_per_word;
7131 bitmask = 1 << bit_index;
7132 return !!(the_word & bitmask); */
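/* Hand-expanded sketch of the algorithm above (illustrative only):
   with bits_per_word == 8 and set_low == 3, testing `i IN s' is
   roughly
       rlo  = 3 - (3 % 8);              which is 0
       word = s[(i - rlo) / 8];
       return (word >> (i % 8)) & 1;
   i.e. the masking form above, expressed as a shift-and-AND.  */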
7134 tree set = TREE_OPERAND (exp, 0);
7135 tree index = TREE_OPERAND (exp, 1);
7136 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7137 tree set_type = TREE_TYPE (set);
7138 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7139 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7140 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7141 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7142 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7143 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7144 rtx setaddr = XEXP (setval, 0);
7145 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
rtx rlow;
7147 rtx diff, quo, rem, addr, bit, result;
7149 /* If domain is empty, answer is no. Likewise if index is constant
7150 and out of bounds. */
7151 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7152 && TREE_CODE (set_low_bound) == INTEGER_CST
7153 && tree_int_cst_lt (set_high_bound, set_low_bound))
7154 || (TREE_CODE (index) == INTEGER_CST
7155 && TREE_CODE (set_low_bound) == INTEGER_CST
7156 && tree_int_cst_lt (index, set_low_bound))
7157 || (TREE_CODE (set_high_bound) == INTEGER_CST
7158 && TREE_CODE (index) == INTEGER_CST
7159 && tree_int_cst_lt (set_high_bound, index))))
return const0_rtx;

if (target == 0)
7163 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7165 /* If we get here, we have to generate the code for both cases
7166 (in range and out of range). */
7168 op0 = gen_label_rtx ();
7169 op1 = gen_label_rtx ();
7171 if (! (GET_CODE (index_val) == CONST_INT
7172 && GET_CODE (lo_r) == CONST_INT))
7173 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7174 GET_MODE (index_val), iunsignedp, op1);
7176 if (! (GET_CODE (index_val) == CONST_INT
7177 && GET_CODE (hi_r) == CONST_INT))
7178 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7179 GET_MODE (index_val), iunsignedp, op1);
7181 /* Calculate the element number of bit zero in the first word
of the set.  */
7183 if (GET_CODE (lo_r) == CONST_INT)
7184 rlow = GEN_INT (INTVAL (lo_r)
7185 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
else
7187 rlow = expand_binop (index_mode, and_optab, lo_r,
7188 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7189 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7191 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7192 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7194 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7195 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7196 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7197 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7199 addr = memory_address (byte_mode,
7200 expand_binop (index_mode, add_optab, diff,
7201 setaddr, NULL_RTX, iunsignedp,
OPTAB_LIB_WIDEN));
7204 /* Extract the bit we want to examine. */
7205 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7206 gen_rtx_MEM (byte_mode, addr),
7207 make_tree (TREE_TYPE (index), rem),
NULL_RTX, 1);
7209 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7210 GET_MODE (target) == byte_mode ? target : 0,
7211 1, OPTAB_LIB_WIDEN);
7213 if (result != target)
7214 convert_move (target, result, 1);
7216 /* Output the code to handle the out-of-range case.  */
emit_jump (op0);
emit_label (op1);
7219 emit_move_insn (target, const0_rtx);
emit_label (op0);
return target;
}
7224 case WITH_CLEANUP_EXPR:
7225 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
{
7227 WITH_CLEANUP_EXPR_RTL (exp)
7228 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7229 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7230 CLEANUP_EH_ONLY (exp));

7232 /* That's it for this cleanup.  */
7233 TREE_OPERAND (exp, 1) = 0;
}
7235 return WITH_CLEANUP_EXPR_RTL (exp);
7237 case CLEANUP_POINT_EXPR:
{
7239 /* Start a new binding layer that will keep track of all cleanup
7240 actions to be performed. */
7241 expand_start_bindings (2);
7243 target_temp_slot_level = temp_slot_level;
7245 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7246 /* If we're going to use this value, load it up now.  */
if (! ignore)
7248 op0 = force_not_mem (op0);
7249 preserve_temp_slots (op0);
7250 expand_end_bindings (NULL_TREE, 0, 0);
return op0;
}

case CALL_EXPR:
7255 /* Check for a built-in function.  */
7256 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7257 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
== FUNCTION_DECL)
7259 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7261 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7262 == BUILT_IN_FRONTEND)
7263 return (*lang_hooks.expand_expr)
7264 (exp, original_target, tmode, modifier);
else
7266 return expand_builtin (exp, target, subtarget, tmode, ignore);
7269 return expand_call (exp, target, ignore);
7271 case NON_LVALUE_EXPR:
case NOP_EXPR:
case CONVERT_EXPR:
7274 case REFERENCE_EXPR:
7275 if (TREE_OPERAND (exp, 0) == error_mark_node)
return const0_rtx;
7278 if (TREE_CODE (type) == UNION_TYPE)
{
7280 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7282 /* If both input and output are BLKmode, this conversion isn't doing
7283 anything except possibly changing memory attribute. */
7284 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
{
7286 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
modifier);

7289 result = copy_rtx (result);
7290 set_mem_attributes (result, exp, 0);
return result;
}

if (target == 0)
7295 target = assign_temp (type, 0, 1, 1);
7297 if (GET_CODE (target) == MEM)
7298 /* Store data into beginning of memory target. */
7299 store_expr (TREE_OPERAND (exp, 0),
7300 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7302 else if (GET_CODE (target) == REG)
7303 /* Store this field into a union of the proper type. */
7304 store_field (target,
7305 MIN ((int_size_in_bytes (TREE_TYPE
7306 (TREE_OPERAND (exp, 0)))
* BITS_PER_UNIT),
7308 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7309 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7310 VOIDmode, 0, type, 0);
else
abort ();

7314 /* Return the entire union.  */
return target;
}
7318 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
{
7320 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
modifier);
7323 /* If the signedness of the conversion differs and OP0 is
7324 a promoted SUBREG, clear that indication since we now
7325 have to do the proper extension. */
7326 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7327 && GET_CODE (op0) == SUBREG)
7328 SUBREG_PROMOTED_VAR_P (op0) = 0;

return op0;
}

7333 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7334 if (GET_MODE (op0) == mode)
return op0;
7337 /* If OP0 is a constant, just convert it into the proper mode. */
7338 if (CONSTANT_P (op0))
7340 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7341 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7343 if (modifier == EXPAND_INITIALIZER)
7344 return simplify_gen_subreg (mode, op0, inner_mode,
7345 subreg_lowpart_offset (mode,
inner_mode));
7348 return convert_modes (mode, inner_mode, op0,
7349 TREE_UNSIGNED (inner_type));
7352 if (modifier == EXPAND_INITIALIZER)
7353 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

if (target == 0)
return
7357 convert_to_mode (mode, op0,
7358 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
else
7360 convert_move (target, op0,
7361 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
return target;
7364 case VIEW_CONVERT_EXPR:
7365 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7367 /* If the input and output modes are both the same, we are done.
7368 Otherwise, if neither mode is BLKmode and both are within a word, we
7369 can use gen_lowpart. If neither is true, make sure the operand is
7370 in memory and convert the MEM to the new mode. */
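/* Sketch (illustrative, not original text): VIEW_CONVERT_EXPR<float>(i)
   with SImode and SFmode both word-sized just relabels the bits via
   gen_lowpart; only a BLKmode operand, or one not already in memory,
   takes the store-and-reread path below.  */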
7371 if (TYPE_MODE (type) == GET_MODE (op0))
7373 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7374 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7375 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7376 op0 = gen_lowpart (TYPE_MODE (type), op0);
7377 else if (GET_CODE (op0) != MEM)
{
7379 /* If the operand is not a MEM, force it into memory.  Since we
7380 are going to be changing the mode of the MEM, don't call
7381 force_const_mem for constants because we don't allow pool
7382 constants to change mode. */
7383 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7385 if (TREE_ADDRESSABLE (exp))
abort ();

7388 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
target
7390 = assign_stack_temp_for_type
7391 (TYPE_MODE (inner_type),
7392 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7394 emit_move_insn (target, op0);
op0 = target;
}
7398 /* At this point, OP0 is in the correct mode. If the output type is such
7399 that the operand is known to be aligned, indicate that it is.
7400 Otherwise, we need only be concerned about alignment for non-BLKmode
results.  */
7402 if (GET_CODE (op0) == MEM)
{
7404 op0 = copy_rtx (op0);
7406 if (TYPE_ALIGN_OK (type))
7407 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7408 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7409 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
{
7411 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7412 HOST_WIDE_INT temp_size
7413 = MAX (int_size_in_bytes (inner_type),
7414 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7415 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7416 temp_size, 0, type);
7417 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7419 if (TREE_ADDRESSABLE (exp))
abort ();

7422 if (GET_MODE (op0) == BLKmode)
7423 emit_block_move (new_with_op0_mode, op0,
7424 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
else
7426 emit_move_insn (new_with_op0_mode, op0);

op0 = new;
}
7431 op0 = adjust_address (op0, TYPE_MODE (type), 0);
}

return op0;

case PLUS_EXPR:
7437 /* We come here from MINUS_EXPR when the second operand is a
constant.  */
plus_expr:
7440 this_optab = ! unsignedp && flag_trapv
7441 && (GET_MODE_CLASS (mode) == MODE_INT)
7442 ? addv_optab : add_optab;
7444 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7445 something else, make sure we add the register to the constant and
7446 then to the other thing. This case can occur during strength
7447 reduction and doing it this way will produce better code if the
7448 frame pointer or argument pointer is eliminated.
7450 fold-const.c will ensure that the constant is always in the inner
7451 PLUS_EXPR, so the only case we need to do anything about is if
7452 sp, ap, or fp is our second argument, in which case we must swap
7453 the innermost first argument and our second argument. */
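/* Example of the swap described above (illustrative): fold-const.c
   leaves `fp + 4 + x' as (PLUS (PLUS x 4) fp); exchanging x with the
   fp RTL_EXPR yields (PLUS (PLUS fp 4) x), so the register+constant
   pair can fold away when the frame pointer is eliminated.  */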
7455 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7456 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7457 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7458 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7459 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7460 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
{
7462 tree t = TREE_OPERAND (exp, 1);

7464 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7465 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
}
7468 /* If the result is to be ptr_mode and we are adding an integer to
7469 something, we might be forming a constant. So try to use
7470 plus_constant. If it produces a sum and we can't accept it,
7471 use force_operand. This allows P = &ARR[const] to generate
7472 efficient code on machines where a SYMBOL_REF is not a valid
7475 If this is an EXPAND_SUM call, always return the sum. */
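/* Hedged illustration: for `p = &arr[4]' with int elements,
   plus_constant folds the offset into the symbol, giving
   (const (plus (symbol_ref "arr") (const_int 16))) rather than an
   explicit add insn.  */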
7476 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7477 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7479 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7480 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7481 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
{
rtx constant_part;

7485 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
EXPAND_SUM);
7487 /* Use immed_double_const to ensure that the constant is
7488 truncated according to the mode of OP1, then sign extended
7489 to a HOST_WIDE_INT.  Using the constant directly can result
7490 in non-canonical RTL in a 64x32 cross compile.  */
constant_part
7492 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
(HOST_WIDE_INT) 0,
7494 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7495 op1 = plus_constant (op1, INTVAL (constant_part));
7496 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7497 op1 = force_operand (op1, target);
return op1;
}
7501 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7502 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7503 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
{
rtx constant_part;

7507 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7508 (modifier == EXPAND_INITIALIZER
7509 ? EXPAND_INITIALIZER : EXPAND_SUM));
7510 if (! CONSTANT_P (op0))
{
7512 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7513 VOIDmode, modifier);
7514 /* Don't go to both_summands if modifier
7515 says it's not right to return a PLUS.  */
7516 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
goto binop2;
goto both_summands;
}
7520 /* Use immed_double_const to ensure that the constant is
7521 truncated according to the mode of OP1, then sign extended
7522 to a HOST_WIDE_INT. Using the constant directly can result
7523 in non-canonical RTL in a 64x32 cross compile. */
constant_part
7525 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
(HOST_WIDE_INT) 0,
7527 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7528 op0 = plus_constant (op0, INTVAL (constant_part));
7529 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7530 op0 = force_operand (op0, target);
return op0;
}
7535 /* No sense saving up arithmetic to be done
7536 if it's all in the wrong mode to form part of an address.
7537 And force_operand won't know whether to sign-extend or
zero-extend.  */
7539 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7540 || mode != ptr_mode)
goto binop;

7543 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
subtarget = 0;
7546 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7547 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

both_summands:
7550 /* Make sure any term that's a sum with a constant comes last.  */
7551 if (GET_CODE (op0) == PLUS
7552 && CONSTANT_P (XEXP (op0, 1)))
{
temp = op0;
op0 = op1;
op1 = temp;
}
7558 /* If adding to a sum including a constant,
7559 associate it to put the constant outside. */
7560 if (GET_CODE (op1) == PLUS
7561 && CONSTANT_P (XEXP (op1, 1)))
{
7563 rtx constant_term = const0_rtx;
7565 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
if (temp != 0)
op0 = temp;
7568 /* Ensure that MULT comes first if there is one.  */
7569 else if (GET_CODE (op0) == MULT)
7570 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
else
7572 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7574 /* Let's also eliminate constants from op0 if possible. */
7575 op0 = eliminate_constant_term (op0, &constant_term);
7577 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7578 their sum should be a constant. Form it into OP1, since the
7579 result we want will then be OP0 + OP1. */
7581 temp = simplify_binary_operation (PLUS, mode, constant_term,
XEXP (op1, 1));
if (temp != 0)
op1 = temp;
else
7586 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
}
7589 /* Put a constant term last and put a multiplication first. */
7590 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7591 temp = op1, op1 = op0, op0 = temp;
7593 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7594 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
case MINUS_EXPR:
7597 /* For initializers, we are allowed to return a MINUS of two
7598 symbolic constants.  Here we handle all cases when both operands
are constant.  */
7600 /* Handle difference of two symbolic constants,
7601 for the sake of an initializer. */
7602 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7603 && really_constant_p (TREE_OPERAND (exp, 0))
7604 && really_constant_p (TREE_OPERAND (exp, 1)))
7606 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
modifier);
7608 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
modifier);
7611 /* If the last operand is a CONST_INT, use plus_constant of
7612 the negated constant. Else make the MINUS. */
7613 if (GET_CODE (op1) == CONST_INT)
7614 return plus_constant (op0, - INTVAL (op1));
else
7616 return gen_rtx_MINUS (mode, op0, op1);
}
7618 /* Convert A - const to A + (-const). */
7619 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
{
7621 tree negated = fold (build1 (NEGATE_EXPR, type,
7622 TREE_OPERAND (exp, 1)));
7624 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7625 /* If we can't negate the constant in TYPE, leave it alone and
7626 expand_binop will negate it for us.  We used to try to do it
7627 here in the signed version of TYPE, but that doesn't work
7628 on POINTER_TYPEs.  */;
else
{
7631 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
goto plus_expr;
}
}
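/* E.g. `a - 5' becomes `a + (-5)' and is re-dispatched through the
   PLUS_EXPR code above; an unsigned or overflowing negation is left
   for expand_binop to handle (illustrative note, not original text).  */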
7635 this_optab = ! unsignedp && flag_trapv
7636 && (GET_MODE_CLASS(mode) == MODE_INT)
7637 ? subv_optab : sub_optab;
goto binop;

case MULT_EXPR:
7641 /* If first operand is constant, swap them.
7642 Thus the following special case checks need only
7643 check the second operand. */
7644 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
{
7646 tree t1 = TREE_OPERAND (exp, 0);
7647 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7648 TREE_OPERAND (exp, 1) = t1;
}
7651 /* Attempt to return something suitable for generating an
7652 indexed address, for machines that support that. */
7654 if (modifier == EXPAND_SUM && mode == ptr_mode
7655 && host_integerp (TREE_OPERAND (exp, 1), 0))
{
7657 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
EXPAND_SUM);
7660 /* If we knew for certain that this is arithmetic for an array
7661 reference, and we knew the bounds of the array, then we could
7662 apply the distributive law across (PLUS X C) for constant C.
7663 Without such knowledge, we risk overflowing the computation
7664 when both X and C are large, but X+C isn't. */
7665 /* ??? Could perhaps special-case EXP being unsigned and C being
7666 positive. In that case we are certain that X+C is no smaller
7667 than X and so the transformed expression will overflow iff the
7668 original would have. */
7670 if (GET_CODE (op0) != REG)
7671 op0 = force_operand (op0, NULL_RTX);
7672 if (GET_CODE (op0) != REG)
7673 op0 = copy_to_mode_reg (mode, op0);
return
7676 gen_rtx_MULT (mode, op0,
7677 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
}
7680 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
subtarget = 0;
7683 /* Check for multiplying things that have been extended
7684 from a narrower type. If this machine supports multiplying
7685 in that narrower type with a result in the desired type,
7686 do it that way, and avoid the explicit type-conversion. */
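/* For instance (illustrative): with `short' operands that were
   extended to `int', a target providing a mulhisi3 pattern can issue
   one widening multiply instead of two extensions plus a full SImode
   multiply; the optab checks below make that choice.  */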
7687 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7688 && TREE_CODE (type) == INTEGER_TYPE
7689 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7690 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7691 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7692 && int_fits_type_p (TREE_OPERAND (exp, 1),
7693 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7694 /* Don't use a widening multiply if a shift will do. */
7695 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7696 > HOST_BITS_PER_WIDE_INT)
7697 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
||
7699 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7700 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
==
7702 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7703 /* If both operands are extended, they must either both
7704 be zero-extended or both be sign-extended. */
7705 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7707 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7709 enum machine_mode innermode
7710 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7711 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7712 ? smul_widen_optab : umul_widen_optab);
7713 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7714 ? umul_widen_optab : smul_widen_optab);
7715 if (mode == GET_MODE_WIDER_MODE (innermode))
{
7717 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
{
7719 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7720 NULL_RTX, VOIDmode, 0);
7721 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7722 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
VOIDmode, 0);
else
7725 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7726 NULL_RTX, VOIDmode, 0);
goto binop2;
}
7729 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7730 && innermode == word_mode)
{
rtx htem;

7733 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7734 NULL_RTX, VOIDmode, 0);
7735 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7736 op1 = convert_modes (innermode, mode,
7737 expand_expr (TREE_OPERAND (exp, 1),
7738 NULL_RTX, VOIDmode, 0),
unsignedp);
else
7741 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7742 NULL_RTX, VOIDmode, 0);
7743 temp = expand_binop (mode, other_optab, op0, op1, target,
7744 unsignedp, OPTAB_LIB_WIDEN);
7745 htem = expand_mult_highpart_adjust (innermode,
7746 gen_highpart (innermode, temp),
op0, op1,
7748 gen_highpart (innermode, temp),
unsignedp);
7750 emit_move_insn (gen_highpart (innermode, temp), htem);
return temp;
}
}
}
7755 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7756 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7757 return expand_mult (mode, op0, op1, target, unsignedp);
7759 case TRUNC_DIV_EXPR:
7760 case FLOOR_DIV_EXPR:
case CEIL_DIV_EXPR:
7762 case ROUND_DIV_EXPR:
7763 case EXACT_DIV_EXPR:
7764 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
subtarget = 0;
7766 /* Possible optimization: compute the dividend with EXPAND_SUM
7767 then if the divisor is constant can optimize the case
7768 where some terms of the dividend have coeffs divisible by it. */
7769 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7770 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7771 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
case RDIV_EXPR:
7774 /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
7775 saving an expensive divide.  If not, combine will rebuild the
original computation.  */
7777 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7778 && TREE_CODE (type) == REAL_TYPE
7779 && !real_onep (TREE_OPERAND (exp, 0)))
7780 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7781 build (RDIV_EXPR, type,
7782 build_real (type, dconst1),
7783 TREE_OPERAND (exp, 1))),
7784 target, tmode, unsignedp);
7785 this_optab = sdiv_optab;
goto binop;
7788 case TRUNC_MOD_EXPR:
7789 case FLOOR_MOD_EXPR:
case CEIL_MOD_EXPR:
7791 case ROUND_MOD_EXPR:
7792 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
subtarget = 0;
7794 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7795 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7796 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7798 case FIX_ROUND_EXPR:
7799 case FIX_FLOOR_EXPR:
case FIX_CEIL_EXPR:
7801 abort (); /* Not used for C. */
7803 case FIX_TRUNC_EXPR:
7804 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
if (target == 0)
7806 target = gen_reg_rtx (mode);
7807 expand_fix (target, op0, unsignedp);
return target;

case FLOAT_EXPR:
7811 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
if (target == 0)
7813 target = gen_reg_rtx (mode);
7814 /* expand_float can't figure out what to do if FROM has VOIDmode.
7815 So give it the correct mode. With -O, cse will optimize this. */
7816 if (GET_MODE (op0) == VOIDmode)
7817 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
op0);
7819 expand_float (target, op0,
7820 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
return target;

case NEG_EXPR:
7824 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7825 temp = expand_unop (mode,
7826 ! unsignedp && flag_trapv
7827 && (GET_MODE_CLASS(mode) == MODE_INT)
7828 ? negv_optab : neg_optab, op0, target, 0);
if (temp == 0)
abort ();
return temp;

case ABS_EXPR:
7834 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7836 /* Handle complex values specially. */
7837 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7838 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7839 return expand_complex_abs (mode, op0, target, unsignedp);
7841 /* Unsigned abs is simply the operand. Testing here means we don't
7842 risk generating incorrect code below. */
7843 if (TREE_UNSIGNED (type))
return op0;

7846 return expand_abs (mode, op0, target, unsignedp,
7847 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

case MAX_EXPR:
case MIN_EXPR:
7851 target = original_target;
7852 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7853 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7854 || GET_MODE (target) != mode
7855 || (GET_CODE (target) == REG
7856 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7857 target = gen_reg_rtx (mode);
7858 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7859 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7861 /* First try to do it with a special MIN or MAX instruction.
7862 If that does not win, use a conditional jump to select the proper
value.  */
7864 this_optab = (TREE_UNSIGNED (type)
7865 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7866 : (code == MIN_EXPR ? smin_optab : smax_optab));

7868 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
OPTAB_WIDEN);
if (temp != 0)
return temp;
7873 /* At this point, a MEM target is no longer useful; we will get better
code without it.  */
7876 if (GET_CODE (target) == MEM)
7877 target = gen_reg_rtx (mode);

if (target != op0)
7880 emit_move_insn (target, op0);
7882 op0 = gen_label_rtx ();
7884 /* If this mode is an integer too wide to compare properly,
7885 compare word by word. Rely on cse to optimize constant cases. */
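/* E.g. a DImode MAX_EXPR on a 32-bit target has no single comparison,
   so do_jump_by_parts_greater_rtx compares the high words first and
   falls back to the low words on equality (a sketch of the strategy,
   not original text).  */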
7886 if (GET_MODE_CLASS (mode) == MODE_INT
7887 && ! can_compare_p (GE, mode, ccp_jump))
{
7889 if (code == MAX_EXPR)
7890 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7891 target, op1, NULL_RTX, op0);
else
7893 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7894 op1, target, NULL_RTX, op0);
}
else
{
7898 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7899 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7900 unsignedp, mode, NULL_RTX, NULL_RTX,
op0);
}
7903 emit_move_insn (target, op1);
emit_label (op0);
return target;

case BIT_NOT_EXPR:
7908 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7909 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
if (temp == 0)
abort ();
return temp;

case FFS_EXPR:
7915 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7916 temp = expand_unop (mode, ffs_optab, op0, target, 1);
if (temp == 0)
abort ();
return temp;
7921 /* ??? Can optimize bitwise operations with one arg constant.
7922 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7923 and (a bitwise1 b) bitwise2 b (etc)
7924 but that is probably not worth while. */
7926 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7927 boolean values when we want in all cases to compute both of them. In
7928 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7929 as actual zero-or-1 values and then bitwise anding. In cases where
7930 there cannot be any side effects, better code would be made by
7931 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7932 how to recognize those cases. */
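/* Illustration (not original text): for `p && q' where both are
   already 0-or-1 and free of side effects, `p & q' computes the same
   value without a branch; the TRUTH_ANDIF_EXPR form below instead
   short-circuits `q'.  */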
7934 case TRUTH_AND_EXPR:
case BIT_AND_EXPR:
7936 this_optab = and_optab;
goto binop;

case TRUTH_OR_EXPR:
case BIT_IOR_EXPR:
7941 this_optab = ior_optab;
goto binop;

7944 case TRUTH_XOR_EXPR:
case BIT_XOR_EXPR:
7946 this_optab = xor_optab;
goto binop;

case LSHIFT_EXPR:
case RSHIFT_EXPR:
case LROTATE_EXPR:
case RROTATE_EXPR:
7953 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
subtarget = 0;
7955 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7956 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
unsignedp);
7959 /* Could determine the answer when only additive constants differ. Also,
7960 the addition of one can be handled by changing the condition. */
7967 case UNORDERED_EXPR:
7974 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
if (temp != 0)
return temp;
7978 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7979 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
&& original_target
7981 && GET_CODE (original_target) == REG
7982 && (GET_MODE (original_target)
7983 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
{
7985 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
VOIDmode, 0);
7989 if (GET_CODE (temp) == CONST_INT)
{
7991 if (INTVAL (temp) != 0)
7992 emit_move_insn (target, const1_rtx);
else
7994 emit_move_insn (target, const0_rtx);

return target;
}
7999 if (temp != original_target)
{
8001 enum machine_mode mode1 = GET_MODE (temp);

8002 if (mode1 == VOIDmode)
8003 mode1 = tmode != VOIDmode ? tmode : mode;

8005 temp = copy_to_mode_reg (mode1, temp);
}
8008 op1 = gen_label_rtx ();
8009 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8010 GET_MODE (temp), unsignedp, op1);
8011 emit_move_insn (temp, const1_rtx);
emit_label (op1);
return temp;
}
8016 /* If no set-flag instruction, must generate a conditional
8017 store into a temporary variable. Drop through
8018 and handle this like && and ||. */
8020 case TRUTH_ANDIF_EXPR:
8021 case TRUTH_ORIF_EXPR:
if (! ignore
8023 && (target == 0 || ! safe_from_p (target, exp, 1)
8024 /* Make sure we don't have a hard reg (such as function's return
8025 value) live across basic blocks, if not optimizing. */
8026 || (!optimize && GET_CODE (target) == REG
8027 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8028 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
if (target)
8031 emit_clr_insn (target);
8033 op1 = gen_label_rtx ();
8034 jumpifnot (exp, op1);

if (target)
8037 emit_0_to_1_insn (target);

emit_label (op1);
8040 return ignore ? const0_rtx : target;
8042 case TRUTH_NOT_EXPR:
8043 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8044 /* The parser is careful to generate TRUTH_NOT_EXPR
8045 only with operands that are always zero or one. */
8046 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8047 target, 1, OPTAB_LIB_WIDEN);
if (temp == 0)
abort ();
return temp;

case COMPOUND_EXPR:
8053 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
emit_queue ();
8055 return expand_expr (TREE_OPERAND (exp, 1),
8056 (ignore ? const0_rtx : target),
VOIDmode, modifier);

case COND_EXPR:
8060 /* If we would have a "singleton" (see below) were it not for a
8061 conversion in each arm, bring that conversion back out. */
8062 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8063 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8064 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8065 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8067 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8068 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8070 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8071 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8072 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8073 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8074 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8075 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8076 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8077 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8078 return expand_expr (build1 (NOP_EXPR, type,
8079 build (COND_EXPR, TREE_TYPE (iftrue),
8080 TREE_OPERAND (exp, 0),
iftrue, iffalse)),
8082 target, tmode, modifier);
8086 /* Note that COND_EXPRs whose type is a structure or union
8087 are required to be constructed to contain assignments of
8088 a temporary variable, so that we can evaluate them here
8089 for side effect only. If type is void, we must do likewise. */
8091 /* If an arm of the branch requires a cleanup,
8092 only that cleanup is performed. */
{
tree singleton = 0;
8095 tree binary_op = 0, unary_op = 0;
8097 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8098 convert it to our mode, if necessary. */
8099 if (integer_onep (TREE_OPERAND (exp, 1))
8100 && integer_zerop (TREE_OPERAND (exp, 2))
8101 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
{
if (ignore)
{
8105 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
modifier);
return const0_rtx;
}

8110 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8111 if (GET_MODE (op0) == mode)
return op0;

if (target == 0)
8115 target = gen_reg_rtx (mode);
8116 convert_move (target, op0, unsignedp);
return target;
}
8120 /* Check for X ? A + B : A. If we have this, we can copy A to the
8121 output and conditionally add B. Similarly for unary operations.
8122 Don't do this if X has side-effects because those side effects
8123 might affect A or B and the "?" operation is a sequence point in
8124 ANSI. (operand_equal_p tests for side effects.) */
8126 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8127 && operand_equal_p (TREE_OPERAND (exp, 2),
8128 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8129 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8130 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8131 && operand_equal_p (TREE_OPERAND (exp, 1),
8132 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8133 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8134 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8135 && operand_equal_p (TREE_OPERAND (exp, 2),
8136 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8137 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8138 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8139 && operand_equal_p (TREE_OPERAND (exp, 1),
8140 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8141 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8143 /* If we are not to produce a result, we have no target. Otherwise,
8144 if a target was specified use it; it will not be used as an
8145 intermediate target unless it is safe. If no target, use a temporary. */
8150 else if (original_target
8151 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8152 || (singleton && GET_CODE (original_target) == REG
8153 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8154 && original_target == var_rtx (singleton)))
8155 && GET_MODE (original_target) == mode
8156 #ifdef HAVE_conditional_move
8157 && (! can_conditionally_move_p (mode)
8158 || GET_CODE (original_target) == REG
8159 || TREE_ADDRESSABLE (type))
8161 && (GET_CODE (original_target) != MEM
8162 || TREE_ADDRESSABLE (type)))
8163 temp = original_target;
8164 else if (TREE_ADDRESSABLE (type))
8167 temp = assign_temp (type, 0, 0, 1);
8169 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8170 do the test of X as a store-flag operation, do this as
8171 A + ((X != 0) << log C). Similarly for other simple binary
8172 operators. Only do for C == 1 if BRANCH_COST is low. */
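/* Worked example (hypothetical): with BRANCH_COST >= 3,
       y = x ? a + 4 : a;
   can be emitted branch-free as
       y = a + ((x != 0) << 2);
   since 4 == 1 << 2 and X != 0 can be computed by a store-flag insn.  */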
8173 if (temp && singleton && binary_op
8174 && (TREE_CODE (binary_op) == PLUS_EXPR
8175 || TREE_CODE (binary_op) == MINUS_EXPR
8176 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8177 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8178 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8179 : integer_onep (TREE_OPERAND (binary_op, 1)))
8180 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8183 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8184 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8185 ? addv_optab : add_optab)
8186 : TREE_CODE (binary_op) == MINUS_EXPR
8187 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8188 ? subv_optab : sub_optab)
8189 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8192 /* If we had X ? A : A + 1, do this as A + (X == 0).
8194 We have to invert the truth value here and then put it
8195 back later if do_store_flag fails. We cannot simply copy
8196 TREE_OPERAND (exp, 0) to another variable and modify that
8197 because invert_truthvalue can modify the tree pointed to
8199 if (singleton == TREE_OPERAND (exp, 1))
8200 TREE_OPERAND (exp, 0)
8201 = invert_truthvalue (TREE_OPERAND (exp, 0));
8203 result = do_store_flag (TREE_OPERAND (exp, 0),
8204 (safe_from_p (temp, singleton, 1)
8206 mode, BRANCH_COST <= 1);
8208 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8209 result = expand_shift (LSHIFT_EXPR, mode, result,
8210 build_int_2 (tree_log2
8214 (safe_from_p (temp, singleton, 1)
8215 ? temp : NULL_RTX), 0);
8219 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8220 return expand_binop (mode, boptab, op1, result, temp,
8221 unsignedp, OPTAB_LIB_WIDEN);
8223 else if (singleton == TREE_OPERAND (exp, 1))
8224 TREE_OPERAND (exp, 0)
8225 = invert_truthvalue (TREE_OPERAND (exp, 0));
8228 do_pending_stack_adjust ();
8230 op0 = gen_label_rtx ();
8232 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8236 /* If the target conflicts with the other operand of the
8237 binary op, we can't use it. Also, we can't use the target
8238 if it is a hard register, because evaluating the condition
8239 might clobber it. */
8241 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8242 || (GET_CODE (temp) == REG
8243 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8244 temp = gen_reg_rtx (mode);
8245 store_expr (singleton, temp, 0);
8248 expand_expr (singleton,
8249 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8250 if (singleton == TREE_OPERAND (exp, 1))
8251 jumpif (TREE_OPERAND (exp, 0), op0);
8253 jumpifnot (TREE_OPERAND (exp, 0), op0);
8255 start_cleanup_deferral ();
8256 if (binary_op && temp == 0)
8257 /* Just touch the other operand. */
8258 expand_expr (TREE_OPERAND (binary_op, 1),
8259 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8261 store_expr (build (TREE_CODE (binary_op), type,
8262 make_tree (type, temp),
8263 TREE_OPERAND (binary_op, 1)),
8266 store_expr (build1 (TREE_CODE (unary_op), type,
8267 make_tree (type, temp)),
8271 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8272 comparison operator. If we have one of these cases, set the
8273 output to A, branch on A (cse will merge these two references),
8274 then set the output to FOO. */
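/* Illustration (hypothetical): for
       r = a > 0 ? a : b;
   we store A into the result, branch past the else-arm on A > 0 (cse
   merges the two references to A), and otherwise overwrite the result
   with B.  */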
8276 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8277 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8278 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8279 TREE_OPERAND (exp, 1), 0)
8280 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8281 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8282 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8284 if (GET_CODE (temp) == REG
8285 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8286 temp = gen_reg_rtx (mode);
8287 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8288 jumpif (TREE_OPERAND (exp, 0), op0);
8290 start_cleanup_deferral ();
8291 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8295 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8296 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8297 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8298 TREE_OPERAND (exp, 2), 0)
8299 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8300 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8301 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8303 if (GET_CODE (temp) == REG
8304 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8305 temp = gen_reg_rtx (mode);
8306 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8307 jumpifnot (TREE_OPERAND (exp, 0), op0);
8309 start_cleanup_deferral ();
8310 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8315 op1 = gen_label_rtx ();
8316 jumpifnot (TREE_OPERAND (exp, 0), op0);
8318 start_cleanup_deferral ();
8320 /* One branch of the cond can be void, if it never returns. For
8321 example A ? throw : E */
8323 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8324 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8326 expand_expr (TREE_OPERAND (exp, 1),
8327 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8328 end_cleanup_deferral ();
8330 emit_jump_insn (gen_jump (op1));
8333 start_cleanup_deferral ();
8335 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8336 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8338 expand_expr (TREE_OPERAND (exp, 2),
8339 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8342 end_cleanup_deferral ();
8353 /* Something needs to be initialized, but we didn't know
8354 where that thing was when building the tree. For example,
8355 it could be the return value of a function, or a parameter
8356 to a function which is laid out on the stack, or a temporary
8357 variable which must be passed by reference.
8359 We guarantee that the expression will either be constructed
8360 or copied into our original target. */
8362 tree slot = TREE_OPERAND (exp, 0);
8363 tree cleanups = NULL_TREE;
8366 if (TREE_CODE (slot) != VAR_DECL)
8370 target = original_target;
8372 /* Set this here so that if we get a target that refers to a
8373 register variable that's already been used, put_reg_into_stack
8374 knows that it should fix up those uses. */
8375 TREE_USED (slot) = 1;
8379 if (DECL_RTL_SET_P (slot))
8381 target = DECL_RTL (slot);
8382 /* If we have already expanded the slot, don't do it again. */
8384 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8389 target = assign_temp (type, 2, 0, 1);
8390 /* All temp slots at this level must not conflict. */
8391 preserve_temp_slots (target);
8392 SET_DECL_RTL (slot, target);
8393 if (TREE_ADDRESSABLE (slot))
8394 put_var_into_stack (slot);
8396 /* Since SLOT is not known to the called function
8397 to belong to its stack frame, we must build an explicit
8398 cleanup. This case occurs when we must build up a reference
8399 to pass the reference as an argument. In this case,
8400 it is very likely that such a reference need not be built here. */
8403 if (TREE_OPERAND (exp, 2) == 0)
8404 TREE_OPERAND (exp, 2)
8405 = (*lang_hooks.maybe_build_cleanup) (slot);
8406 cleanups = TREE_OPERAND (exp, 2);
8411 /* This case does occur, when expanding a parameter which
8412 needs to be constructed on the stack. The target
8413 is the actual stack address that we want to initialize.
8414 The function we call will perform the cleanup in this case. */
8416 /* If we have already assigned it space, use that space,
8417 not the target that we were passed in, as our target
8418 parameter is only a hint. */
8419 if (DECL_RTL_SET_P (slot))
8421 target = DECL_RTL (slot);
8422 /* If we have already expanded the slot, don't do it again. */
8424 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8429 SET_DECL_RTL (slot, target);
8430 /* If we must have an addressable slot, then make sure that
8431 the RTL that we just stored in slot is OK. */
8432 if (TREE_ADDRESSABLE (slot))
8433 put_var_into_stack (slot);
8437 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8438 /* Mark it as expanded. */
8439 TREE_OPERAND (exp, 1) = NULL_TREE;
8441 store_expr (exp1, target, 0);
8443 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8450 tree lhs = TREE_OPERAND (exp, 0);
8451 tree rhs = TREE_OPERAND (exp, 1);
8453 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8459 /* If lhs is complex, expand calls in rhs before computing it.
8460 That's so we don't compute a pointer and save it over a
8461 call. If lhs is simple, compute it first so we can give it
8462 as a target if the rhs is just a call. This avoids an
8463 extra temp and copy and that prevents a partial-subsumption
8464 which makes bad code. Actually we could treat
8465 component_ref's of vars like vars. */
8467 tree lhs = TREE_OPERAND (exp, 0);
8468 tree rhs = TREE_OPERAND (exp, 1);
8472 /* Check for |= or &= of a bitfield of size one into another bitfield
8473 of size 1. In this case, (unless we need the result of the
8474 assignment) we can do this more efficiently with a
8475 test followed by an assignment, if necessary.
8477 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8478 things change so we do, this code should be enhanced to catch it. */
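/* Illustration (hypothetical one-bit fields A and B of struct S):
       s.a |= s.b;    can be done as    if (s.b) s.a = 1;
       s.a &= s.b;    can be done as    if (!s.b) s.a = 0;
   avoiding a read-modify-write of the destination bitfield.  */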
8481 && TREE_CODE (lhs) == COMPONENT_REF
8482 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8483 || TREE_CODE (rhs) == BIT_AND_EXPR)
8484 && TREE_OPERAND (rhs, 0) == lhs
8485 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8486 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8487 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8489 rtx label = gen_label_rtx ();
8491 do_jump (TREE_OPERAND (rhs, 1),
8492 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8493 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8494 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8495 (TREE_CODE (rhs) == BIT_IOR_EXPR
8497 : integer_zero_node)),
8499 do_pending_stack_adjust ();
8504 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8510 if (!TREE_OPERAND (exp, 0))
8511 expand_null_return ();
8513 expand_return (TREE_OPERAND (exp, 0));
8516 case PREINCREMENT_EXPR:
8517 case PREDECREMENT_EXPR:
8518 return expand_increment (exp, 0, ignore);
8520 case POSTINCREMENT_EXPR:
8521 case POSTDECREMENT_EXPR:
8522 /* Faster to treat as pre-increment if result is not used. */
8523 return expand_increment (exp, ! ignore, ignore);
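/* E.g. a bare statement `i++;' ignores the result, so it is expanded
   exactly like `++i', saving the copy of the old value.  */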
8526 /* Are we taking the address of a nested function? */
8527 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8528 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8529 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8530 && ! TREE_STATIC (exp))
8532 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8533 op0 = force_operand (op0, target);
8535 /* If we are taking the address of something erroneous, just use zero. */
8537 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8539 /* If we are taking the address of a constant and are at the
8540 top level, we have to use output_constant_def since we can't
8541 call force_const_mem at top level. */
8543 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8544 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8546 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8549 /* We make sure to pass const0_rtx down if we came in with
8550 ignore set, to avoid doing the cleanups twice for something. */
8551 op0 = expand_expr (TREE_OPERAND (exp, 0),
8552 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8553 (modifier == EXPAND_INITIALIZER
8554 ? modifier : EXPAND_CONST_ADDRESS));
8556 /* If we are going to ignore the result, OP0 will have been set
8557 to const0_rtx, so just return it. Don't get confused and
8558 think we are taking the address of the constant. */
8562 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8563 clever and return a REG when given a MEM. */
8564 op0 = protect_from_queue (op0, 1);
8566 /* We would like the object in memory. If it is a constant, we can
8567 have it be statically allocated into memory. For a non-constant,
8568 we need to allocate some memory and store the value into it. */
8570 if (CONSTANT_P (op0))
8571 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8573 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8574 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8575 || GET_CODE (op0) == PARALLEL)
8577 /* If the operand is a SAVE_EXPR, we can deal with this by
8578 forcing the SAVE_EXPR into memory. */
8579 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8581 put_var_into_stack (TREE_OPERAND (exp, 0));
8582 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8586 /* If this object is in a register, it can't be BLKmode. */
8587 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8588 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8590 if (GET_CODE (op0) == PARALLEL)
8591 /* Handle calls that pass values in multiple
8592 non-contiguous locations. The Irix 6 ABI has examples of this. */
8594 emit_group_store (memloc, op0,
8595 int_size_in_bytes (inner_type));
8597 emit_move_insn (memloc, op0);
8603 if (GET_CODE (op0) != MEM)
8606 mark_temp_addr_taken (op0);
8607 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8609 op0 = XEXP (op0, 0);
8610 #ifdef POINTERS_EXTEND_UNSIGNED
8611 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8612 && mode == ptr_mode)
8613 op0 = convert_memory_address (ptr_mode, op0);
8618 /* If OP0 is not aligned as least as much as the type requires, we
8619 need to make a temporary, copy OP0 to it, and take the address of
8620 the temporary. We want to use the alignment of the type, not of
8621 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8622 the test for BLKmode means that can't happen. The test for
8623 BLKmode is because we never make mis-aligned MEMs with non-BLKmode.
8626 We don't need to do this at all if the machine doesn't have
8627 strict alignment. */
8628 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8629 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8631 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8633 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8635 = assign_stack_temp_for_type
8636 (TYPE_MODE (inner_type),
8637 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8638 : int_size_in_bytes (inner_type),
8639 1, build_qualified_type (inner_type,
8640 (TYPE_QUALS (inner_type)
8641 | TYPE_QUAL_CONST)));
8643 if (TYPE_ALIGN_OK (inner_type))
8646 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8650 op0 = force_operand (XEXP (op0, 0), target);
8654 && GET_CODE (op0) != REG
8655 && modifier != EXPAND_CONST_ADDRESS
8656 && modifier != EXPAND_INITIALIZER
8657 && modifier != EXPAND_SUM)
8658 op0 = force_reg (Pmode, op0);
8660 if (GET_CODE (op0) == REG
8661 && ! REG_USERVAR_P (op0))
8662 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8664 #ifdef POINTERS_EXTEND_UNSIGNED
8665 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8666 && mode == ptr_mode)
8667 op0 = convert_memory_address (ptr_mode, op0);
8672 case ENTRY_VALUE_EXPR:
8675 /* COMPLEX type for Extended Pascal & Fortran */
8678 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8681 /* Get the rtx code of the operands. */
8682 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8683 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8686 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8690 /* Move the real (op0) and imaginary (op1) parts to their location. */
8691 emit_move_insn (gen_realpart (mode, target), op0);
8692 emit_move_insn (gen_imagpart (mode, target), op1);
8694 insns = get_insns ();
8697 /* Complex construction should appear as a single unit. */
8698 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8699 each with a separate pseudo as destination.
8700 It's not correct for flow to treat them as a unit. */
8701 if (GET_CODE (target) != CONCAT)
8702 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8710 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8711 return gen_realpart (mode, op0);
8714 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8715 return gen_imagpart (mode, op0);
8719 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8723 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8726 target = gen_reg_rtx (mode);
8730 /* Store the realpart and the negated imagpart to target. */
8731 emit_move_insn (gen_realpart (partmode, target),
8732 gen_realpart (partmode, op0));
8734 imag_t = gen_imagpart (partmode, target);
8735 temp = expand_unop (partmode,
8736 ! unsignedp && flag_trapv
8737 && (GET_MODE_CLASS(partmode) == MODE_INT)
8738 ? negv_optab : neg_optab,
8739 gen_imagpart (partmode, op0), imag_t, 0);
8741 emit_move_insn (imag_t, temp);
8743 insns = get_insns ();
8746 /* Conjugate should appear as a single unit.
8747 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8748 each with a separate pseudo as destination.
8749 It's not correct for flow to treat them as a unit. */
8750 if (GET_CODE (target) != CONCAT)
8751 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8758 case TRY_CATCH_EXPR:
8760 tree handler = TREE_OPERAND (exp, 1);
8762 expand_eh_region_start ();
8764 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8766 expand_eh_region_end_cleanup (handler);
8771 case TRY_FINALLY_EXPR:
8773 tree try_block = TREE_OPERAND (exp, 0);
8774 tree finally_block = TREE_OPERAND (exp, 1);
8775 rtx finally_label = gen_label_rtx ();
8776 rtx done_label = gen_label_rtx ();
8777 rtx return_link = gen_reg_rtx (Pmode);
8778 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8779 (tree) finally_label, (tree) return_link);
8780 TREE_SIDE_EFFECTS (cleanup) = 1;
8782 /* Start a new binding layer that will keep track of all cleanup
8783 actions to be performed. */
8784 expand_start_bindings (2);
8786 target_temp_slot_level = temp_slot_level;
8788 expand_decl_cleanup (NULL_TREE, cleanup);
8789 op0 = expand_expr (try_block, target, tmode, modifier);
8791 preserve_temp_slots (op0);
8792 expand_end_bindings (NULL_TREE, 0, 0);
8793 emit_jump (done_label);
8794 emit_label (finally_label);
8795 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8796 emit_indirect_jump (return_link);
8797 emit_label (done_label);
8801 case GOTO_SUBROUTINE_EXPR:
8803 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8804 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8805 rtx return_address = gen_label_rtx ();
8806 emit_move_insn (return_link,
8807 gen_rtx_LABEL_REF (Pmode, return_address));
8809 emit_label (return_address);
8814 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8817 return get_exception_pointer (cfun);
8820 /* Function descriptors are not valid except as
8821 initialization constants, and should not be expanded. */
8825 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
8828 /* Here to do an ordinary binary operator, generating an instruction
8829 from the optab already placed in `this_optab'. */
8831 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8833 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8834 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8836 temp = expand_binop (mode, this_optab, op0, op1, target,
8837 unsignedp, OPTAB_LIB_WIDEN);
8843 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8844 when applied to the address of EXP produces an address known to be
8845 aligned more than BIGGEST_ALIGNMENT. */
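/* Illustration (hypothetical): an OFFSET tree of the shape
       (- (sizetype) &exp) & 511
   rounds the address of EXP up to a 512-byte boundary; when 511 exceeds
   BIGGEST_ALIGNMENT, the sum is therefore known to be more aligned than
   BIGGEST_ALIGNMENT.  */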
8848 is_aligning_offset (offset, exp)
8852 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
8853 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8854 || TREE_CODE (offset) == NOP_EXPR
8855 || TREE_CODE (offset) == CONVERT_EXPR
8856 || TREE_CODE (offset) == WITH_RECORD_EXPR)
8857 offset = TREE_OPERAND (offset, 0);
8859 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8860 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8861 if (TREE_CODE (offset) != BIT_AND_EXPR
8862 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8863 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
8864 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8867 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8868 It must be NEGATE_EXPR. Then strip any more conversions. */
8869 offset = TREE_OPERAND (offset, 0);
8870 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8871 || TREE_CODE (offset) == NOP_EXPR
8872 || TREE_CODE (offset) == CONVERT_EXPR)
8873 offset = TREE_OPERAND (offset, 0);
8875 if (TREE_CODE (offset) != NEGATE_EXPR)
8878 offset = TREE_OPERAND (offset, 0);
8879 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8880 || TREE_CODE (offset) == NOP_EXPR
8881 || TREE_CODE (offset) == CONVERT_EXPR)
8882 offset = TREE_OPERAND (offset, 0);
8884 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
8885 whose type is the same as EXP. */
8886 return (TREE_CODE (offset) == ADDR_EXPR
8887 && (TREE_OPERAND (offset, 0) == exp
8888 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
8889 && (TREE_TYPE (TREE_OPERAND (offset, 0))
8890 == TREE_TYPE (exp)))));
8893 /* Return the tree node if ARG corresponds to a string constant or zero
8894 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8895 in bytes within the string that ARG is accessing. The type of the
8896 offset will be `sizetype'. */
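/* E.g. (hypothetical): for ARG of the form &"hello" + 3 this returns
   the STRING_CST "hello" and sets *PTR_OFFSET to (sizetype) 3.  */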
8899 string_constant (arg, ptr_offset)
8905 if (TREE_CODE (arg) == ADDR_EXPR
8906 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8908 *ptr_offset = size_zero_node;
8909 return TREE_OPERAND (arg, 0);
8911 else if (TREE_CODE (arg) == PLUS_EXPR)
8913 tree arg0 = TREE_OPERAND (arg, 0);
8914 tree arg1 = TREE_OPERAND (arg, 1);
8919 if (TREE_CODE (arg0) == ADDR_EXPR
8920 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8922 *ptr_offset = convert (sizetype, arg1);
8923 return TREE_OPERAND (arg0, 0);
8925 else if (TREE_CODE (arg1) == ADDR_EXPR
8926 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8928 *ptr_offset = convert (sizetype, arg0);
8929 return TREE_OPERAND (arg1, 0);
8936 /* Expand code for a post- or pre- increment or decrement
8937 and return the RTX for the result.
8938 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
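/* E.g. for `i++' POST is 1 and the rtx for the old value of I is
   returned; for `--i' POST is 0 and the decremented value is returned.  */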
8941 expand_increment (exp, post, ignore)
8947 tree incremented = TREE_OPERAND (exp, 0);
8948 optab this_optab = add_optab;
8950 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8951 int op0_is_copy = 0;
8952 int single_insn = 0;
8953 /* 1 means we can't store into OP0 directly,
8954 because it is a subreg narrower than a word,
8955 and we don't dare clobber the rest of the word. */
8958 /* Stabilize any component ref that might need to be
8959 evaluated more than once below. */
8961 || TREE_CODE (incremented) == BIT_FIELD_REF
8962 || (TREE_CODE (incremented) == COMPONENT_REF
8963 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8964 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8965 incremented = stabilize_reference (incremented);
8966 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8967 ones into save exprs so that they don't accidentally get evaluated
8968 more than once by the code below. */
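/* E.g. (C++): in `++(++x)' the inner increment is wrapped in a
   SAVE_EXPR, so the code below evaluates it exactly once.  */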
8969 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8970 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8971 incremented = save_expr (incremented);
8973 /* Compute the operands as RTX.
8974 Note whether OP0 is the actual lvalue or a copy of it:
8975 I believe it is a copy iff it is a register or subreg
8976 and insns were generated in computing it. */
8978 temp = get_last_insn ();
8979 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8981 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8982 in place but instead must do sign- or zero-extension during assignment,
8983 so we copy it into a new register and let the code below use it as
8986 Note that we can safely modify this SUBREG since it is known not to be
8987 shared (it was made by the expand_expr call above). */
8989 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8992 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8996 else if (GET_CODE (op0) == SUBREG
8997 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8999 /* We cannot increment this SUBREG in place. If we are
9000 post-incrementing, get a copy of the old value. Otherwise,
9001 just mark that we cannot increment in place. */
9003 op0 = copy_to_reg (op0);
9008 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9009 && temp != get_last_insn ());
9010 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9012 /* Decide whether incrementing or decrementing. */
9013 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9014 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9015 this_optab = sub_optab;
9017 /* Convert decrement by a constant into a negative increment. */
9018 if (this_optab == sub_optab
9019 && GET_CODE (op1) == CONST_INT)
9021 op1 = GEN_INT (-INTVAL (op1));
9022 this_optab = add_optab;
9025 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9026 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9028 /* For a preincrement, see if we can do this with a single instruction. */
9031 icode = (int) this_optab->handlers[(int) mode].insn_code;
9032 if (icode != (int) CODE_FOR_nothing
9033 /* Make sure that OP0 is valid for operands 0 and 1
9034 of the insn we want to queue. */
9035 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9036 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9037 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9041 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9042 then we cannot just increment OP0. We must therefore contrive to
9043 increment the original value. Then, for postincrement, we can return
9044 OP0 since it is a copy of the old value. For preincrement, expand here
9045 unless we can do it with a single insn.
9047 Likewise if storing directly into OP0 would clobber high bits
9048 we need to preserve (bad_subreg). */
9049 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9051 /* This is the easiest way to increment the value wherever it is.
9052 Problems with multiple evaluation of INCREMENTED are prevented
9053 because either (1) it is a component_ref or preincrement,
9054 in which case it was stabilized above, or (2) it is an array_ref
9055 with constant index in an array in a register, which is
9056 safe to reevaluate. */
9057 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9058 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9059 ? MINUS_EXPR : PLUS_EXPR),
9062 TREE_OPERAND (exp, 1));
9064 while (TREE_CODE (incremented) == NOP_EXPR
9065 || TREE_CODE (incremented) == CONVERT_EXPR)
9067 newexp = convert (TREE_TYPE (incremented), newexp);
9068 incremented = TREE_OPERAND (incremented, 0);
9071 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9072 return post ? op0 : temp;
9077 /* We have a true reference to the value in OP0.
9078 If there is an insn to add or subtract in this mode, queue it.
9079 Queueing the increment insn avoids the register shuffling
9080 that often results if we must increment now and first save
9081 the old value for subsequent use. */
9083 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9084 op0 = stabilize (op0);
9087 icode = (int) this_optab->handlers[(int) mode].insn_code;
9088 if (icode != (int) CODE_FOR_nothing
9089 /* Make sure that OP0 is valid for operands 0 and 1
9090 of the insn we want to queue. */
9091 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9092 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9094 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9095 op1 = force_reg (mode, op1);
9097 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9099 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9101 rtx addr = (general_operand (XEXP (op0, 0), mode)
9102 ? force_reg (Pmode, XEXP (op0, 0))
9103 : copy_to_reg (XEXP (op0, 0)));
9106 op0 = replace_equiv_address (op0, addr);
9107 temp = force_reg (GET_MODE (op0), op0);
9108 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9109 op1 = force_reg (mode, op1);
9111 /* The increment queue is LIFO, thus we have to `queue'
9112 the instructions in reverse order. */
9113 enqueue_insn (op0, gen_move_insn (op0, temp));
9114 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9119 /* Preincrement, or we can't increment with one simple insn. */
9121 /* Save a copy of the value before inc or dec, to return it later. */
9122 temp = value = copy_to_reg (op0);
9124 /* Arrange to return the incremented value. */
9125 /* Copy the rtx because expand_binop will protect from the queue,
9126 and the results of that would be invalid for us to return
9127 if our caller does emit_queue before using our result. */
9128 temp = copy_rtx (value = op0);
9130 /* Increment however we can. */
9131 op1 = expand_binop (mode, this_optab, value, op1, op0,
9132 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9134 /* Make sure the value is stored into OP0. */
9136 emit_move_insn (op0, op1);
9141 /* At the start of a function, record that we have no previously-pushed
9142 arguments waiting to be popped. */
9145 init_pending_stack_adjust ()
9147 pending_stack_adjust = 0;
9150 /* When exiting from function, if safe, clear out any pending stack adjust
9151 so the adjustment won't get done.
9153 Note, if the current function calls alloca, then it must have a
9154 frame pointer regardless of the value of flag_omit_frame_pointer. */
9157 clear_pending_stack_adjust ()
9159 #ifdef EXIT_IGNORE_STACK
9161 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9162 && EXIT_IGNORE_STACK
9163 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9164 && ! flag_inline_functions)
9166 stack_pointer_delta -= pending_stack_adjust,
9167 pending_stack_adjust = 0;
9172 /* Pop any previously-pushed arguments that have not been popped yet. */
9175 do_pending_stack_adjust ()
9177 if (inhibit_defer_pop == 0)
9179 if (pending_stack_adjust != 0)
9180 adjust_stack (GEN_INT (pending_stack_adjust));
9181 pending_stack_adjust = 0;
9185 /* Expand conditional expressions. */
9187 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9188 LABEL is an rtx of code CODE_LABEL, in this function. */
9192 jumpifnot (exp, label)
9196 do_jump (exp, label, NULL_RTX);
9199 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9206 do_jump (exp, NULL_RTX, label);
9209 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9210 the result is zero, or IF_TRUE_LABEL if the result is one.
9211 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9212 meaning fall through in that case.
9214 do_jump always does any pending stack adjust except when it does not
9215 actually perform a jump. An example where there is no jump
9216 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9218 This function is responsible for optimizing cases such as
9219 &&, || and comparison operators in EXP. */
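/* Illustration (hypothetical): for EXP == `a && b' we emit a test of A
   that jumps to IF_FALSE_LABEL when A is zero, then a test of B against
   both labels, so the boolean value of the conjunction is never
   materialized in a register.  */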
9222 do_jump (exp, if_false_label, if_true_label)
9224 rtx if_false_label, if_true_label;
9226 enum tree_code code = TREE_CODE (exp);
9227 /* Some cases need to create a label to jump to
9228 in order to properly fall through.
9229 These cases set DROP_THROUGH_LABEL nonzero. */
9230 rtx drop_through_label = 0;
9234 enum machine_mode mode;
9236 #ifdef MAX_INTEGER_COMPUTATION_MODE
9237 check_max_integer_computation_mode (exp);
9248 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9254 /* This is not true with #pragma weak */
9256 /* The address of something can never be zero. */
9258 emit_jump (if_true_label);
9263 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9264 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9265 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9266 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9269 /* If we are narrowing the operand, we have to do the compare in the narrower type. */
9271 if ((TYPE_PRECISION (TREE_TYPE (exp))
9272 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9274 case NON_LVALUE_EXPR:
9275 case REFERENCE_EXPR:
9280 /* These cannot change zero->non-zero or vice versa. */
9281 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9284 case WITH_RECORD_EXPR:
9285 /* Put the object on the placeholder list, recurse through our first
9286 operand, and pop the list. */
9287 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9289 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9290 placeholder_list = TREE_CHAIN (placeholder_list);
9294 /* This is never less insns than evaluating the PLUS_EXPR followed by
9295 a test and can be longer if the test is eliminated. */
9297 /* Reduce to minus. */
9298 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9299 TREE_OPERAND (exp, 0),
9300 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9301 TREE_OPERAND (exp, 1))));
9302 /* Process as MINUS. */
9306 /* Non-zero iff operands of minus differ. */
9307 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9308 TREE_OPERAND (exp, 0),
9309 TREE_OPERAND (exp, 1)),
9310 NE, NE, if_false_label, if_true_label);
9314 /* If we are AND'ing with a small constant, do this comparison in the
9315 smallest type that fits. If the machine doesn't have comparisons
9316 that small, it will be converted back to the wider comparison.
9317 This helps if we are testing the sign bit of a narrower object.
9318 combine can't do this for us because it can't know whether a
9319 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
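/* Illustration (hypothetical, 32-bit int X): `x & 0x80' only examines
   the low byte, so the test can be done as a QImode comparison, turning
   a sign-bit test of a byte-sized value into a byte compare.  */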
9321 if (! SLOW_BYTE_ACCESS
9322 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9323 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9324 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9325 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9326 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9327 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9328 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9329 != CODE_FOR_nothing))
9331 do_jump (convert (type, exp), if_false_label, if_true_label);
9336 case TRUTH_NOT_EXPR:
9337 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9340 case TRUTH_ANDIF_EXPR:
9341 if (if_false_label == 0)
9342 if_false_label = drop_through_label = gen_label_rtx ();
9343 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9344 start_cleanup_deferral ();
9345 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9346 end_cleanup_deferral ();
9349 case TRUTH_ORIF_EXPR:
9350 if (if_true_label == 0)
9351 if_true_label = drop_through_label = gen_label_rtx ();
9352 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9353 start_cleanup_deferral ();
9354 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9355 end_cleanup_deferral ();
9360 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9361 preserve_temp_slots (NULL_RTX);
9365 do_pending_stack_adjust ();
9366 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9372 case ARRAY_RANGE_REF:
9374 HOST_WIDE_INT bitsize, bitpos;
9376 enum machine_mode mode;
9381 /* Get description of this reference. We don't actually care
9382 about the underlying object here. */
9383 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9384 &unsignedp, &volatilep);
9386 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9387 if (! SLOW_BYTE_ACCESS
9388 && type != 0 && bitsize >= 0
9389 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9390 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9391 != CODE_FOR_nothing))
9393 do_jump (convert (type, exp), if_false_label, if_true_label);
9400 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9401 if (integer_onep (TREE_OPERAND (exp, 1))
9402 && integer_zerop (TREE_OPERAND (exp, 2)))
9403 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9405 else if (integer_zerop (TREE_OPERAND (exp, 1))
9406 && integer_onep (TREE_OPERAND (exp, 2)))
9407 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9411 rtx label1 = gen_label_rtx ();
9412 drop_through_label = gen_label_rtx ();
9414 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9416 start_cleanup_deferral ();
9417 /* Now the THEN-expression. */
9418 do_jump (TREE_OPERAND (exp, 1),
9419 if_false_label ? if_false_label : drop_through_label,
9420 if_true_label ? if_true_label : drop_through_label);
9421 /* In case the do_jump just above never jumps. */
9422 do_pending_stack_adjust ();
9423 emit_label (label1);
9425 /* Now the ELSE-expression. */
9426 do_jump (TREE_OPERAND (exp, 2),
9427 if_false_label ? if_false_label : drop_through_label,
9428 if_true_label ? if_true_label : drop_through_label);
9429 end_cleanup_deferral ();
9435 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9437 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9438 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9440 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9441 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9444 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9445 fold (build (EQ_EXPR, TREE_TYPE (exp),
9446 fold (build1 (REALPART_EXPR,
9447 TREE_TYPE (inner_type),
9449 fold (build1 (REALPART_EXPR,
9450 TREE_TYPE (inner_type),
9452 fold (build (EQ_EXPR, TREE_TYPE (exp),
9453 fold (build1 (IMAGPART_EXPR,
9454 TREE_TYPE (inner_type),
9456 fold (build1 (IMAGPART_EXPR,
9457 TREE_TYPE (inner_type),
9459 if_false_label, if_true_label);
9462 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9463 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9465 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9466 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9467 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9469 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9475 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9477 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9478 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9480 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9481 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9484 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9485 fold (build (NE_EXPR, TREE_TYPE (exp),
9486 fold (build1 (REALPART_EXPR,
9487 TREE_TYPE (inner_type),
9489 fold (build1 (REALPART_EXPR,
9490 TREE_TYPE (inner_type),
9492 fold (build (NE_EXPR, TREE_TYPE (exp),
9493 fold (build1 (IMAGPART_EXPR,
9494 TREE_TYPE (inner_type),
9496 fold (build1 (IMAGPART_EXPR,
9497 TREE_TYPE (inner_type),
9499 if_false_label, if_true_label);
9502 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9503 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9505 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9506 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9507 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9509 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9514 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9515 if (GET_MODE_CLASS (mode) == MODE_INT
9516 && ! can_compare_p (LT, mode, ccp_jump))
9517 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9519 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9523 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9524 if (GET_MODE_CLASS (mode) == MODE_INT
9525 && ! can_compare_p (LE, mode, ccp_jump))
9526 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9528 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9532 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9533 if (GET_MODE_CLASS (mode) == MODE_INT
9534 && ! can_compare_p (GT, mode, ccp_jump))
9535 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9537 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9541 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9542 if (GET_MODE_CLASS (mode) == MODE_INT
9543 && ! can_compare_p (GE, mode, ccp_jump))
9544 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9546 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9549 case UNORDERED_EXPR:
9552 enum rtx_code cmp, rcmp;
9555 if (code == UNORDERED_EXPR)
9556 cmp = UNORDERED, rcmp = ORDERED;
9558 cmp = ORDERED, rcmp = UNORDERED;
9559 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9562 if (! can_compare_p (cmp, mode, ccp_jump)
9563 && (can_compare_p (rcmp, mode, ccp_jump)
9564 /* If the target doesn't provide either UNORDERED or ORDERED
9565 comparisons, canonicalize on UNORDERED for the library. */
9566 || rcmp == UNORDERED))
9570 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9572 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9577 enum rtx_code rcode1;
9578 enum tree_code tcode2;
9602 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9603 if (can_compare_p (rcode1, mode, ccp_jump))
9604 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9608 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9609 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9612 /* If the target doesn't support combined unordered
9613 compares, decompose into UNORDERED + comparison. */
9614 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9615 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9616 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9617 do_jump (exp, if_false_label, if_true_label);
9623 __builtin_expect (<test>, 0) and
9624 __builtin_expect (<test>, 1)
9626 We need to do this here, so that <test> is not converted to a SCC
9627 operation on machines that use condition code registers and COMPARE
9628 like the PowerPC, and then the jump is done based on whether the SCC
9629 operation produced a 1 or 0. */
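/* Illustration (hypothetical): for
       if (__builtin_expect (x == 0, 0)) ...
   we want the jump emitted directly from the comparison of X, with the
   "unlikely" hint attached, rather than from an SCC value computed into
   a register and then tested against zero.  */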
9631 /* Check for a built-in function. */
9632 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9634 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9635 tree arglist = TREE_OPERAND (exp, 1);
9637 if (TREE_CODE (fndecl) == FUNCTION_DECL
9638 && DECL_BUILT_IN (fndecl)
9639 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9640 && arglist != NULL_TREE
9641 && TREE_CHAIN (arglist) != NULL_TREE)
9643 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9646 if (seq != NULL_RTX)
9653 /* fall through and generate the normal code. */
9657 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9659 /* This is not needed any more and causes poor code since it causes
9660 comparisons and tests from non-SI objects to have different code PATTERNs. */
9662 /* Copy to register to avoid generating bad insns by cse
9663 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9664 if (!cse_not_expected && GET_CODE (temp) == MEM)
9665 temp = copy_to_reg (temp);
9667 do_pending_stack_adjust ();
9668 /* Do any postincrements in the expression that was tested. */
9671 if (GET_CODE (temp) == CONST_INT
9672 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9673 || GET_CODE (temp) == LABEL_REF)
9675 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9679 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9680 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9681 /* Note swapping the labels gives us not-equal. */
9682 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9683 else if (GET_MODE (temp) != VOIDmode)
9684 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9685 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9686 GET_MODE (temp), NULL_RTX,
9687 if_false_label, if_true_label);
9692 if (drop_through_label)
9694 /* If do_jump produces code that might be jumped around,
9695 do any stack adjusts from that code, before the place
9696 where control merges in. */
9697 do_pending_stack_adjust ();
9698 emit_label (drop_through_label);
9702 /* Given a comparison expression EXP for values too wide to be compared
9703 with one insn, test the comparison and jump to the appropriate label.
9704 The code of EXP is ignored; we always test GT if SWAP is 0,
9705 and LT if SWAP is 1. */
9708 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9711 rtx if_false_label, if_true_label;
9713 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9714 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9715 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9716 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9718 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9721 /* Compare OP0 with OP1, word at a time, in mode MODE.
9722 UNSIGNEDP says to do unsigned comparison.
9723 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9726 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9727 enum machine_mode mode;
9730 rtx if_false_label, if_true_label;
9732 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9733 rtx drop_through_label = 0;
9736 if (! if_true_label || ! if_false_label)
9737 drop_through_label = gen_label_rtx ();
9738 if (! if_true_label)
9739 if_true_label = drop_through_label;
9740 if (! if_false_label)
9741 if_false_label = drop_through_label;
9743 /* Compare a word at a time, high order first. */
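/* Illustration (hypothetical, DImode operands on a 32-bit target): the
   high words are compared first; GT jumps to IF_TRUE_LABEL, NE jumps to
   IF_FALSE_LABEL, and only when the high words are equal do we fall
   through to the unsigned comparison of the low words.  */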
9744 for (i = 0; i < nwords; i++)
9746 rtx op0_word, op1_word;
9748 if (WORDS_BIG_ENDIAN)
9750 op0_word = operand_subword_force (op0, i, mode);
9751 op1_word = operand_subword_force (op1, i, mode);
9755 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9756 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9759 /* All but high-order word must be compared as unsigned. */
9760 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9761 (unsignedp || i > 0), word_mode, NULL_RTX,
9762 NULL_RTX, if_true_label);
9764 /* Consider lower words only if these are equal. */
9765 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9766 NULL_RTX, NULL_RTX, if_false_label);
9770 emit_jump (if_false_label);
9771 if (drop_through_label)
9772 emit_label (drop_through_label);
9775 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9776 with one insn, test the comparison and jump to the appropriate label. */
9779 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9781 rtx if_false_label, if_true_label;
9783 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9784 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9785 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9786 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9788 rtx drop_through_label = 0;
9790 if (! if_false_label)
9791 drop_through_label = if_false_label = gen_label_rtx ();
9793 for (i = 0; i < nwords; i++)
9794 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9795 operand_subword_force (op1, i, mode),
9796 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9797 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9800 emit_jump (if_true_label);
9801 if (drop_through_label)
9802 emit_label (drop_through_label);
9805 /* Jump according to whether OP0 is 0.
9806 We assume that OP0 has an integer mode that is too wide
9807 for the available compare insns. */
9810 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9812 rtx if_false_label, if_true_label;
9814 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9817 rtx drop_through_label = 0;
9819 /* The fastest way of doing this comparison on almost any machine is to
9820 "or" all the words and compare the result. If all have to be loaded
9821 from memory and this is a very wide item, it's possible this may
9822 be slower, but that's highly unlikely. */
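/* Illustration (hypothetical, DImode on a 32-bit machine):
       part = low_word | high_word;
   and a single comparison of PART against zero then decides the jump,
   in place of two separate word comparisons.  */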
9824 part = gen_reg_rtx (word_mode);
9825 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9826 for (i = 1; i < nwords && part != 0; i++)
9827 part = expand_binop (word_mode, ior_optab, part,
9828 operand_subword_force (op0, i, GET_MODE (op0)),
9829 part, 1, OPTAB_WIDEN);
9833 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9834 NULL_RTX, if_false_label, if_true_label);
9839 /* If we couldn't do the "or" simply, do this with a series of compares. */
9840 if (! if_false_label)
9841 drop_through_label = if_false_label = gen_label_rtx ();
9843 for (i = 0; i < nwords; i++)
9844 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9845 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9846 if_false_label, NULL_RTX);
9849 emit_jump (if_true_label);
9851 if (drop_through_label)
9852 emit_label (drop_through_label);
9855 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9856 (including code to compute the values to be compared)
9857 and set (CC0) according to the result.
9858 The decision as to signed or unsigned comparison must be made by the caller.
9860 We force a stack adjustment unless there are currently
9861 things pushed on the stack that aren't yet used.
9863 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared. */
9867 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9871 enum machine_mode mode;
9876 /* If one operand is constant, make it the second one. Only do this
9877 if the other operand is not constant as well. */
9879 if (swap_commutative_operands_p (op0, op1))
9884 code = swap_condition (code);
9889 op0 = force_not_mem (op0);
9890 op1 = force_not_mem (op1);
9893 do_pending_stack_adjust ();
9895 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9896 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9900 /* There's no need to do this now that combine.c can eliminate lots of
9901 sign extensions. This can be less efficient in certain cases on other machines. */
9904 /* If this is a signed equality comparison, we can do it as an
9905 unsigned comparison since zero-extension is cheaper than sign
9906 extension and comparisons with zero are done as unsigned. This is
9907 the case even on machines that can do fast sign extension, since
9908 zero-extension is easier to combine with other operations than
9909 sign-extension is. If we are comparing against a constant, we must
9910 convert it to what it would look like unsigned. */
9911 if ((code == EQ || code == NE) && ! unsignedp
9912 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9914 if (GET_CODE (op1) == CONST_INT
9915 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9916 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9921 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9923 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9926 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9927 The decision as to signed or unsigned comparison must be made by the caller.
9929 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared. */
9933 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9934 if_false_label, if_true_label)
9938 enum machine_mode mode;
9940 rtx if_false_label, if_true_label;
9943 int dummy_true_label = 0;
9945 /* Reverse the comparison if that is safe and we want to jump if it is false. */
9947 if (! if_true_label && ! FLOAT_MODE_P (mode))
9949 if_true_label = if_false_label;
9951 code = reverse_condition (code);
9954 /* If one operand is constant, make it the second one. Only do this
9955 if the other operand is not constant as well. */
9957 if (swap_commutative_operands_p (op0, op1))
9962 code = swap_condition (code);
9967 op0 = force_not_mem (op0);
9968 op1 = force_not_mem (op1);
9971 do_pending_stack_adjust ();
9973 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9974 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9976 if (tem == const_true_rtx)
9979 emit_jump (if_true_label);
9984 emit_jump (if_false_label);
9990 /* There's no need to do this now that combine.c can eliminate lots of
9991 sign extensions. This can be less efficient in certain cases on other machines. */
9994 /* If this is a signed equality comparison, we can do it as an
9995 unsigned comparison since zero-extension is cheaper than sign
9996 extension and comparisons with zero are done as unsigned. This is
9997 the case even on machines that can do fast sign extension, since
9998 zero-extension is easier to combine with other operations than
9999 sign-extension is. If we are comparing against a constant, we must
10000 convert it to what it would look like unsigned. */
10001 if ((code == EQ || code == NE) && ! unsignedp
10002 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10004 if (GET_CODE (op1) == CONST_INT
10005 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10006 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10011 if (! if_true_label)
10013 dummy_true_label = 1;
10014 if_true_label = gen_label_rtx ();
10017 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10020 if (if_false_label)
10021 emit_jump (if_false_label);
10022 if (dummy_true_label)
10023 emit_label (if_true_label);
10026 /* Generate code for a comparison expression EXP (including code to compute
10027 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10028 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10029 generated code will drop through.
10030 SIGNED_CODE should be the rtx operation for this comparison for
10031 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10033 We force a stack adjustment unless there are currently
10034 things pushed on the stack that aren't yet used. */
10037 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10040 enum rtx_code signed_code, unsigned_code;
10041 rtx if_false_label, if_true_label;
10045 enum machine_mode mode;
10047 enum rtx_code code;
10049 /* Don't crash if the comparison was erroneous. */
10050 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10051 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10054 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10055 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10058 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10059 mode = TYPE_MODE (type);
10060 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10061 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10062 || (GET_MODE_BITSIZE (mode)
10063 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10066 /* op0 might have been replaced by a promoted constant, in which
10067 case the type of the second argument should be used. */
10068 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10069 mode = TYPE_MODE (type);
10071 unsignedp = TREE_UNSIGNED (type);
10072 code = unsignedp ? unsigned_code : signed_code;
10074 #ifdef HAVE_canonicalize_funcptr_for_compare
10075 /* If function pointers need to be "canonicalized" before they can
10076 be reliably compared, then canonicalize them. */
10077 if (HAVE_canonicalize_funcptr_for_compare
10078 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10079 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10082 rtx new_op0 = gen_reg_rtx (mode);
10084 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10088 if (HAVE_canonicalize_funcptr_for_compare
10089 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10090 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10093 rtx new_op1 = gen_reg_rtx (mode);
10095 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10100 /* Do any postincrements in the expression that was tested. */
10103 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10105 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10106 if_false_label, if_true_label);
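#if 0
/* Usage sketch (hypothetical, not part of this file): how a front end
   might expand "if (a < b) ... else ...".  COND and ELSE_LABEL are
   assumed to be supplied by the caller; the signed (LT) and unsigned
   (LTU) rtx codes are both passed so that the signedness of the
   operands' type selects the right one.  */
static void
example_expand_if_less (cond, else_label)
     tree cond;
     rtx else_label;
{
  /* Jump to ELSE_LABEL when the comparison is false; drop through
     into the "then" code when it is true.  */
  do_compare_and_jump (cond, LT, LTU, else_label, NULL_RTX);
}
#endif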
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
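  /* Worked example (illustrative): for signed operands, "x < 1" becomes
     "x <= 0" above (code LE, arg1 = integer_zero_node), which lets the
     zero-based special cases later in this function apply.  */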
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
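  /* Worked example (illustrative): for "(x & 8) != 0" we have
     bitnum = 3, so the block above emits

       op0 = x >> 3;        (expand_shift)
       op0 = op0 & 1;       (expand_and, unless bit 3 is the sign bit)

     and for the EQ form "(x & 8) == 0" an XOR with 1 is emitted
     between the shift and the final AND.  */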
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);
  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
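/* Illustrative shape of the set/jump/set fallback above, assuming
   invert == 0 and an SImode target (a sketch, not literal output):

     (set (reg:SI target) (const_int 1))
     (jump_insn (set (pc) (if_then_else (cond) (label_ref L) (pc))))
     (set (reg:SI target) (const_int 0))
     L:
  */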
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

/* Return the number of case values at which it becomes worthwhile to
   use a dispatch table rather than a tree of conditional branches.  */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
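/* Usage sketch (hypothetical values): for "switch (x)" with cases
   3 through 7, the caller passes minval = 3 and range = 4 (7 - 3);
   the casesi pattern then compares x - 3 against 4 unsigned and jumps
   to DEFAULT_LABEL when the index is out of range.  */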
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
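/* Illustrative address computed above for a table with 4-byte entries
   (i.e. GET_MODE_SIZE (CASE_VECTOR_MODE) == 4) on a 32-bit target:

     (plus:SI (mult:SI (reg:SI index) (const_int 4))
              (label_ref:SI table_label))

   that is, table_label + index * entry-size, through which the jump
   is made after loading the table entry.  */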
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
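#if 0
/* Usage sketch (hypothetical, mirroring how the switch-statement
   expander uses these helpers): prefer the machine's casesi pattern,
   then fall back to an explicit table jump.  All arguments are assumed
   to be supplied by the caller.  */
static void
example_expand_dispatch (index_type, index_expr, minval, range,
                         table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  if (! try_casesi (index_type, index_expr, minval, range,
                    table_label, default_label)
      && ! try_tablejump (index_type, index_expr, minval, range,
                          table_label, default_label))
    abort ();  /* Caller must emit the dispatch some other way.  */
}
#endif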