/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;
static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
                                PARAMS ((unsigned HOST_WIDE_INT,
                                         unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
                                      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
                                      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
                                       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
                                       enum machine_mode,
                                       struct store_by_pieces *));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                             HOST_WIDE_INT, enum machine_mode,
                                             tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
                                HOST_WIDE_INT, enum machine_mode,
                                tree, enum machine_mode, int, tree,
                                int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
                                         rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }

  end_sequence ();
}
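
/* Illustrative sketch (added for exposition; not part of the original
   file): a later consumer such as compress_float_constant can consult
   the table built above before deciding to keep a float constant in a
   narrower mode and extend it on load.  The helper name is hypothetical.  */
#if 0
static bool
example_float_load_extends_p (dstmode, srcmode)
     enum machine_mode dstmode, srcmode;
{
  /* True when a (float_extend:DSTMODE (mem:SRCMODE ...)) operand is
     accepted by the extend insn's predicate.  */
  return float_extend_from_mem[(int) dstmode][(int) srcmode];
}
#endif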
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
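
/* Usage sketch (hypothetical, for exposition only): expanding V++ in a
   context that still needs V's old value queues the increment and hands
   back a QUEUED rtx to stand in for V.  */
#if 0
static rtx
example_queue_post_increment (var)
     rtx var;
{
  rtx inc = gen_move_insn (var, plus_constant (var, 1));

  /* The caller uses the returned QUEUED where the pre-increment value
     of VAR is required; emit_queue later emits INC.  */
  return enqueue_insn (var, inc);
}
#endif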
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
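
/* Usage sketch (hypothetical): every operand that might contain a QUEUED
   is filtered immediately before being placed in an insn, never earlier.  */
#if 0
static void
example_move_through_queue (target, source)
     rtx target, source;
{
  target = protect_from_queue (target, 1);   /* will be written */
  source = protect_from_queue (source, 0);   /* only read */
  emit_move_insn (target, source);           /* safe: no QUEUED inside */
}
#endif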
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
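
/* Sketch of the usual expand-then-flush pattern (illustrative only):
   increments queued while expanding EXP are emitted once its value has
   been captured.  */
#if 0
static rtx
example_expand_and_flush (exp)
     tree exp;
{
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  emit_queue ();   /* perform any queued post-increments now */
  return val;
}
#endif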
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif
      /* Now try a library call in this mode.  */
      libcall = (rtx) 0;
      if (from_mode == SFmode && to_mode == DFmode)
        libcall = extendsfdf2_libfunc;
      else if (from_mode == SFmode && to_mode == XFmode)
        libcall = extendsfxf2_libfunc;
      else if (from_mode == SFmode && to_mode == TFmode)
        libcall = extendsftf2_libfunc;
      else if (from_mode == DFmode && to_mode == SFmode)
        libcall = truncdfsf2_libfunc;
      else if (from_mode == DFmode && to_mode == XFmode)
        libcall = extenddfxf2_libfunc;
      else if (from_mode == DFmode && to_mode == TFmode)
        libcall = extenddftf2_libfunc;
      else if (from_mode == XFmode && to_mode == SFmode)
        libcall = truncxfsf2_libfunc;
      else if (from_mode == XFmode && to_mode == DFmode)
        libcall = truncxfdf2_libfunc;
      else if (from_mode == TFmode && to_mode == SFmode)
        libcall = trunctfsf2_libfunc;
      else if (from_mode == TFmode && to_mode == DFmode)
        libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_zero_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
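
/* Example (illustrative, not from the original source): widen a SImode
   value into a fresh DImode register with zero extension.  */
#if 0
static rtx
example_zero_extend_si_to_di (si_val)
     rtx si_val;
{
  rtx di_reg = gen_reg_rtx (DImode);

  convert_move (di_reg, si_val, 1);   /* 1 = treat SI_VAL as unsigned */
  return di_reg;
}
#endif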
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
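
/* Worked example: with a 32-bit HOST_WIDE_INT, converting (const_int -1)
   taken as unsigned SImode into a 64-bit mode must yield the double-word
   constant 0x00000000ffffffff, whereas gen_lowpart would hand back the
   sign-extended 0xffffffffffffffff.  */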
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.

   MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
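
/* Usage sketch (hypothetical helper): copy a small constant-sized block
   once MOVE_BY_PIECES_P approves; both MEMs must already be BLKmode and
   protected from the queue, exactly as emit_block_move arranges below.  */
#if 0
static void
example_copy_small_block (dst_mem, src_mem, nbytes, align)
     rtx dst_mem, src_mem;
     unsigned HOST_WIDE_INT nbytes;
     unsigned int align;
{
  if (MOVE_BY_PIECES_P (nbytes, align))
    move_by_pieces (dst_mem, src_mem, nbytes, align);
}
#endif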
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
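
/* Worked example: with word-aligned operands on a 32-bit target where
   MOVE_MAX == 4, L == 10 decomposes as two SImode moves plus one HImode
   move, so move_by_pieces_ninsns returns 3.  */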
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   The alignment that can be assumed is taken from the MEM_ALIGN of X and Y.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
rtx
emit_block_move (x, y, size)
     rtx x, y, size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];
          insn_operand_predicate_fn pred;

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                  || (*pred) (x, BLKmode))
              && ((pred = insn_data[(int) code].operand[1].predicate) == 0
                  || (*pred) (y, BLKmode))
              && ((pred = insn_data[(int) code].operand[3].predicate) == 0
                  || (*pred) (opalign, VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  volatile_ok = 0;
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

      volatile_ok = 0;
      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to
         emit_queue.

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          ggc_add_tree_root (&fn, 1);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          TREE_NOTHROW (fn) = 1;
          make_decl_rtl (fn, NULL);
          assemble_external (fn);
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif

      /* If we are initializing a readonly value, show the above call
         clobbered it.  Otherwise, a load from it may erroneously be hoisted
         from a loop.  */
      if (RTX_UNCHANGING_P (x))
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
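
/* Example call (illustrative): copy NBYTES bytes between two BLKmode
   MEMs; the nonzero return value, if any, is memcpy's return value from
   the libcall path.  */
#if 0
static rtx
example_block_copy (x_blk, y_blk, nbytes)
     rtx x_blk, y_blk;
     HOST_WIDE_INT nbytes;
{
  return emit_block_move (x_blk, y_blk, GEN_INT (nbytes));
}
#endif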
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
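
/* For orientation (an illustrative shape, not taken from any particular
   target): a DST such as
     (parallel [(expr_list (reg:SI 3) (const_int 0))
                (expr_list (reg:SI 4) (const_int 4))])
   asks for bytes 0-3 of SRC in register 3 and bytes 4-7 in register 4.  */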
void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          if (bytelen <= 0)
            abort ();
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          if ((bytepos == 0
               && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
              || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
                  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
            {
              tmps[i] = XEXP (src, bytepos != 0);
              if (! CONSTANT_P (tmps[i])
                  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             0, 1, NULL_RTX, mode, mode, ssize);
            }
          else if (bytepos == 0)
            {
              rtx mem = assign_stack_temp (GET_MODE (src),
                                           GET_MODE_SIZE (GET_MODE (src)), 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
          else
            abort ();
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */
rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
        srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
        srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
2295 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2296 starting at REGNO. All of these registers must be hard registers. */
2299 use_regs (call_fusage, regno, nregs)
2306 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2309 for (i = 0; i < nregs; i++)
2310 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2313 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2314 PARALLEL REGS. This is for calls that pass values in multiple
2315 non-contiguous locations. The Irix 6 ABI has examples of this. */
2318 use_group_regs (call_fusage, regs)
2324 for (i = 0; i < XVECLEN (regs, 0); i++)
2326 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2328 /* A NULL entry means the parameter goes both on the stack and in
2329 registers. This can also be a MEM for targets that pass values
2330 partially on the stack and partially in registers. */
2331 if (reg != 0 && GET_CODE (reg) == REG)
2332 use_reg (call_fusage, reg);
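/* A sketch of typical use (the register choice is hypothetical):
   build up the USE list for a call, then hand it to the call emitter.

     rtx fusage = NULL_RTX;
     use_reg (&fusage, gen_rtx_REG (SImode, 0));

   The resulting EXPR_LIST of USEs is what ends up in
   CALL_INSN_FUNCTION_USAGE when the call insn is emitted.  */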
2338 can_store_by_pieces (len, constfun, constfundata, align)
2339 unsigned HOST_WIDE_INT len;
2340 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2344 unsigned HOST_WIDE_INT max_size, l;
2345 HOST_WIDE_INT offset = 0;
2346 enum machine_mode mode, tmode;
2347 enum insn_code icode;
2351 if (! MOVE_BY_PIECES_P (len, align))
2354 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2355 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2356 align = MOVE_MAX * BITS_PER_UNIT;
2358 /* We would first store what we can in the largest integer mode, then go to
2359 successively smaller modes. */
2362 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2367 max_size = MOVE_MAX_PIECES + 1;
2368 while (max_size > 1)
2370 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2371 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2372 if (GET_MODE_SIZE (tmode) < max_size)
2375 if (mode == VOIDmode)
2378 icode = mov_optab->handlers[(int) mode].insn_code;
2379 if (icode != CODE_FOR_nothing
2380 && align >= GET_MODE_ALIGNMENT (mode))
2382 unsigned int size = GET_MODE_SIZE (mode);
2389 cst = (*constfun) (constfundata, offset, mode);
2390 if (!LEGITIMATE_CONSTANT_P (cst))
2400 max_size = GET_MODE_SIZE (mode);
2403 /* The code above should have handled everything. */
2411 /* Generate several move instructions to store LEN bytes generated by
2412 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2413 pointer which will be passed as argument in every CONSTFUN call.
2414 ALIGN is maximum alignment we can assume. */
2417 store_by_pieces (to, len, constfun, constfundata, align)
2419 unsigned HOST_WIDE_INT len;
2420 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2424 struct store_by_pieces data;
2426 if (! MOVE_BY_PIECES_P (len, align))
2428 to = protect_from_queue (to, 1);
2429 data.constfun = constfun;
2430 data.constfundata = constfundata;
2433 store_by_pieces_1 (&data, align);
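/* An illustrative CONSTFUN, not compiled here: return MODE's worth of
   a memset-style fill byte.  Callbacks of this shape are what
   builtins.c passes to store_by_pieces (cf. builtin_memcpy_read_str);
   the name, the fill value, and the body below are a sketch only, and
   it assumes MODE fits in a HOST_WIDE_INT.  */
#if 0
static rtx
example_fill_piece (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* Replicate the byte 0x2a into every byte of MODE.  */
  unsigned HOST_WIDE_INT v = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    v = (v << BITS_PER_UNIT) | 0x2a;
  return GEN_INT (trunc_int_for_mode (v, mode));
}
#endif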
2436 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2437 rtx with BLKmode). The caller must pass TO through protect_from_queue
2438 before calling. ALIGN is maximum alignment we can assume. */
2441 clear_by_pieces (to, len, align)
2443 unsigned HOST_WIDE_INT len;
2446 struct store_by_pieces data;
2448 data.constfun = clear_by_pieces_1;
2449 data.constfundata = NULL;
2452 store_by_pieces_1 (&data, align);
2455 /* Callback routine for clear_by_pieces.
2456 Return const0_rtx unconditionally. */
2459 clear_by_pieces_1 (data, offset, mode)
2460 PTR data ATTRIBUTE_UNUSED;
2461 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2462 enum machine_mode mode ATTRIBUTE_UNUSED;
2467 /* Subroutine of clear_by_pieces and store_by_pieces.
2468 Generate several move instructions to store LEN bytes of block TO. (A MEM
2469 rtx with BLKmode). The caller must pass TO through protect_from_queue
2470 before calling. ALIGN is maximum alignment we can assume. */
2473 store_by_pieces_1 (data, align)
2474 struct store_by_pieces *data;
2477 rtx to_addr = XEXP (data->to, 0);
2478 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2479 enum machine_mode mode = VOIDmode, tmode;
2480 enum insn_code icode;
2483 data->to_addr = to_addr;
2485 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2486 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2488 data->explicit_inc_to = 0;
2490 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2492 data->offset = data->len;
2494 /* If storing requires more than two move insns,
2495 copy addresses to registers (to make displacements shorter)
2496 and use post-increment if available. */
2497 if (!data->autinc_to
2498 && move_by_pieces_ninsns (data->len, align) > 2)
2500 /* Determine the main mode we'll be using. */
2501 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2502 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2503 if (GET_MODE_SIZE (tmode) < max_size)
2506 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2508 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2509 data->autinc_to = 1;
2510 data->explicit_inc_to = -1;
2513 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2514 && ! data->autinc_to)
2516 data->to_addr = copy_addr_to_reg (to_addr);
2517 data->autinc_to = 1;
2518 data->explicit_inc_to = 1;
2521 if (!data->autinc_to && CONSTANT_P (to_addr))
2522 data->to_addr = copy_addr_to_reg (to_addr);
2525 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2526 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2527 align = MOVE_MAX * BITS_PER_UNIT;
2529 /* First store what we can in the largest integer mode, then go to
2530 successively smaller modes. */
2532 while (max_size > 1)
2534 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2535 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2536 if (GET_MODE_SIZE (tmode) < max_size)
2539 if (mode == VOIDmode)
2542 icode = mov_optab->handlers[(int) mode].insn_code;
2543 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2544 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2546 max_size = GET_MODE_SIZE (mode);
2549 /* The code above should have handled everything. */
2554 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2555 with move instructions for mode MODE. GENFUN is the gen_... function
2556 to make a move insn for that mode. DATA has all the other info. */
2559 store_by_pieces_2 (genfun, mode, data)
2560 rtx (*genfun) PARAMS ((rtx, ...));
2561 enum machine_mode mode;
2562 struct store_by_pieces *data;
2564 unsigned int size = GET_MODE_SIZE (mode);
2567 while (data->len >= size)
2570 data->offset -= size;
2572 if (data->autinc_to)
2573 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2576 to1 = adjust_address (data->to, mode, data->offset);
2578 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2579 emit_insn (gen_add2_insn (data->to_addr,
2580 GEN_INT (-(HOST_WIDE_INT) size)));
2582 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2583 emit_insn ((*genfun) (to1, cst));
2585 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2586 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2588 if (! data->reverse)
2589 data->offset += size;
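/* Illustration (assuming SImode pieces and an address register A):
   with explicit_inc_to > 0 each iteration emits
     (set (mem:SI (reg A)) (const_int ...))
     (set (reg A) (plus (reg A) (const_int 4)))
   i.e. the address bump is a separate add insn, not an embedded
   {PRE,POST}_{INC,DEC} side effect in the MEM.  */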
2595 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2596 its length in bytes. */
2599 clear_storage (object, size)
2603 #ifdef TARGET_MEM_FUNCTIONS
2605 tree call_expr, arg_list;
2608 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2609 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2611 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2612 just move a zero. Otherwise, do this a piece at a time. */
2613 if (GET_MODE (object) != BLKmode
2614 && GET_CODE (size) == CONST_INT
2615 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2616 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2619 object = protect_from_queue (object, 1);
2620 size = protect_from_queue (size, 0);
2622 if (GET_CODE (size) == CONST_INT
2623 && MOVE_BY_PIECES_P (INTVAL (size), align))
2624 clear_by_pieces (object, INTVAL (size), align);
2627 /* Try the most limited insn first, because there's no point
2628 including more than one in the machine description unless
2629 the more limited one has some advantage. */
2631 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2632 enum machine_mode mode;
2634 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2635 mode = GET_MODE_WIDER_MODE (mode))
2637 enum insn_code code = clrstr_optab[(int) mode];
2638 insn_operand_predicate_fn pred;
2640 if (code != CODE_FOR_nothing
2641 /* We don't need MODE to be narrower than
2642 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2643 the mode mask, as it is returned by the macro, it will
2644 definitely be less than the actual mode mask. */
2645 && ((GET_CODE (size) == CONST_INT
2646 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2647 <= (GET_MODE_MASK (mode) >> 1)))
2648 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2649 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2650 || (*pred) (object, BLKmode))
2651 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2652 || (*pred) (opalign, VOIDmode)))
2655 rtx last = get_last_insn ();
2658 op1 = convert_to_mode (mode, size, 1);
2659 pred = insn_data[(int) code].operand[1].predicate;
2660 if (pred != 0 && ! (*pred) (op1, mode))
2661 op1 = copy_to_mode_reg (mode, op1);
2663 pat = GEN_FCN ((int) code) (object, op1, opalign);
2670 delete_insns_since (last);
2674 /* OBJECT or SIZE may have been passed through protect_from_queue.
2676 It is unsafe to save the value generated by protect_from_queue
2677 and reuse it later. Consider what happens if emit_queue is
2678 called before the return value from protect_from_queue is used.
2680 Expansion of the CALL_EXPR below will call emit_queue before
2681 we are finished emitting RTL for argument setup. So if we are
2682 not careful we could get the wrong value for an argument.
2684 To avoid this problem we go ahead and emit code to copy OBJECT
2685 and SIZE into new pseudos. We can then place those new pseudos
2686 into an RTL_EXPR and use them later, even after a call to
2689 Note this is not strictly needed for library calls since they
2690 do not call emit_queue before loading their arguments. However,
2691 we may need to have library calls call emit_queue in the future
2692 since failing to do so could cause problems for targets which
2693 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2694 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2696 #ifdef TARGET_MEM_FUNCTIONS
2697 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2699 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2700 TREE_UNSIGNED (integer_type_node));
2701 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2704 #ifdef TARGET_MEM_FUNCTIONS
2705 /* It is incorrect to use the libcall calling conventions to call
2706 memset in this context.
2708 This could be a user call to memset and the user may wish to
2709 examine the return value from memset.
2711 For targets where libcalls and normal calls have different
2712 conventions for returning pointers, we could end up generating incorrect code.
2715 So instead of using a libcall sequence we build up a suitable
2716 CALL_EXPR and expand the call in the normal fashion. */
2717 if (fn == NULL_TREE)
2721 /* This was copied from except.c; I don't know whether all of this is
2722 necessary in this context or not. */
2723 fn = get_identifier ("memset");
2724 fntype = build_pointer_type (void_type_node);
2725 fntype = build_function_type (fntype, NULL_TREE);
2726 fn = build_decl (FUNCTION_DECL, fn, fntype);
2727 ggc_add_tree_root (&fn, 1);
2728 DECL_EXTERNAL (fn) = 1;
2729 TREE_PUBLIC (fn) = 1;
2730 DECL_ARTIFICIAL (fn) = 1;
2731 TREE_NOTHROW (fn) = 1;
2732 make_decl_rtl (fn, NULL);
2733 assemble_external (fn);
2736 /* We need to make an argument list for the function call.
2738 memset has three arguments: the first is a void * address, the
2739 second an integer with the initialization value, and the last a
2740 size_t byte count for the copy. */
2742 = build_tree_list (NULL_TREE,
2743 make_tree (build_pointer_type (void_type_node),
2745 TREE_CHAIN (arg_list)
2746 = build_tree_list (NULL_TREE,
2747 make_tree (integer_type_node, const0_rtx));
2748 TREE_CHAIN (TREE_CHAIN (arg_list))
2749 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2750 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2752 /* Now we have to build up the CALL_EXPR itself. */
2753 call_expr = build1 (ADDR_EXPR,
2754 build_pointer_type (TREE_TYPE (fn)), fn);
2755 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2756 call_expr, arg_list, NULL_TREE);
2757 TREE_SIDE_EFFECTS (call_expr) = 1;
2759 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2761 emit_library_call (bzero_libfunc, LCT_NORMAL,
2762 VOIDmode, 2, object, Pmode, size,
2763 TYPE_MODE (integer_type_node));
2766 /* If we are initializing a readonly value, show the above call
2767 clobbered it. Otherwise, a load from it may erroneously be
2768 hoisted from a loop. */
2769 if (RTX_UNCHANGING_P (object))
2770 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
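/* Usage sketch (the temporary is hypothetical): zero a 64-byte
   BLKmode block.

     rtx blk = assign_stack_temp (BLKmode, 64, 0);
     clear_storage (blk, GEN_INT (64));

   For a constant size this small, clear_by_pieces typically handles
   it, subject to MOVE_BY_PIECES_P, with no library call emitted.  */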
2777 /* Generate code to copy Y into X.
2778 Both Y and X must have the same mode, except that
2779 Y can be a constant with VOIDmode.
2780 This mode cannot be BLKmode; use emit_block_move for that.
2782 Return the last instruction emitted. */
2785 emit_move_insn (x, y)
2788 enum machine_mode mode = GET_MODE (x);
2789 rtx y_cst = NULL_RTX;
2792 x = protect_from_queue (x, 1);
2793 y = protect_from_queue (y, 0);
2795 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2798 /* Never force constant_p_rtx to memory. */
2799 if (GET_CODE (y) == CONSTANT_P_RTX)
2801 else if (CONSTANT_P (y))
2804 && FLOAT_MODE_P (GET_MODE (x))
2805 && (last_insn = compress_float_constant (x, y)))
2808 if (!LEGITIMATE_CONSTANT_P (y))
2811 y = force_const_mem (mode, y);
2815 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2817 if (GET_CODE (x) == MEM
2818 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2819 && ! push_operand (x, GET_MODE (x)))
2821 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2822 x = validize_mem (x);
2824 if (GET_CODE (y) == MEM
2825 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2827 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2828 y = validize_mem (y);
2830 if (mode == BLKmode)
2833 last_insn = emit_move_insn_1 (x, y);
2835 if (y_cst && GET_CODE (x) == REG)
2836 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
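/* Usage sketch: load the constant 42 into a fresh SImode pseudo.

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   If the constant were not LEGITIMATE_CONSTANT_P for the target, the
   code above would have forced it into the constant pool first.  */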
2841 /* Low level part of emit_move_insn.
2842 Called just like emit_move_insn, but assumes X and Y
2843 are basically valid. */
2846 emit_move_insn_1 (x, y)
2849 enum machine_mode mode = GET_MODE (x);
2850 enum machine_mode submode;
2851 enum mode_class class = GET_MODE_CLASS (mode);
2853 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2856 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2858 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2860 /* Expand complex moves by moving real part and imag part, if possible. */
2861 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2862 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2864 (class == MODE_COMPLEX_INT
2865 ? MODE_INT : MODE_FLOAT),
2867 && (mov_optab->handlers[(int) submode].insn_code
2868 != CODE_FOR_nothing))
2870 /* Don't split destination if it is a stack push. */
2871 int stack = push_operand (x, GET_MODE (x));
2873 #ifdef PUSH_ROUNDING
2874 /* In case we output to the stack, but the size is smaller than the machine
2875 can push exactly, we need to use move instructions. */
2877 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2878 != GET_MODE_SIZE (submode)))
2881 HOST_WIDE_INT offset1, offset2;
2883 /* Do not use anti_adjust_stack, since we don't want to update
2884 stack_pointer_delta. */
2885 temp = expand_binop (Pmode,
2886 #ifdef STACK_GROWS_DOWNWARD
2894 (GET_MODE_SIZE (GET_MODE (x)))),
2895 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2897 if (temp != stack_pointer_rtx)
2898 emit_move_insn (stack_pointer_rtx, temp);
2900 #ifdef STACK_GROWS_DOWNWARD
2902 offset2 = GET_MODE_SIZE (submode);
2904 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2905 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2906 + GET_MODE_SIZE (submode));
2909 emit_move_insn (change_address (x, submode,
2910 gen_rtx_PLUS (Pmode,
2912 GEN_INT (offset1))),
2913 gen_realpart (submode, y));
2914 emit_move_insn (change_address (x, submode,
2915 gen_rtx_PLUS (Pmode,
2917 GEN_INT (offset2))),
2918 gen_imagpart (submode, y));
2922 /* If this is a stack push, push the highpart first, so it
2923 will be in the argument order.
2925 In that case, change_address is used only to convert
2926 the mode, not to change the address. */
2929 /* Note that the real part always precedes the imag part in memory
2930 regardless of machine's endianness. */
2931 #ifdef STACK_GROWS_DOWNWARD
2932 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2933 (gen_rtx_MEM (submode, XEXP (x, 0)),
2934 gen_imagpart (submode, y)));
2935 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2936 (gen_rtx_MEM (submode, XEXP (x, 0)),
2937 gen_realpart (submode, y)));
2939 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2940 (gen_rtx_MEM (submode, XEXP (x, 0)),
2941 gen_realpart (submode, y)));
2942 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2943 (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_imagpart (submode, y)));
2949 rtx realpart_x, realpart_y;
2950 rtx imagpart_x, imagpart_y;
2952 /* If this is a complex value with each part being smaller than a
2953 word, the usual calling sequence will likely pack the pieces into
2954 a single register. Unfortunately, SUBREG of hard registers only
2955 deals in terms of words, so we have a problem converting input
2956 arguments to the CONCAT of two registers that is used elsewhere
2957 for complex values. If this is before reload, we can copy it into
2958 memory and reload. FIXME, we should see about using extract and
2959 insert on integer registers, but complex short and complex char
2960 variables should be rarely used. */
2961 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2962 && (reload_in_progress | reload_completed) == 0)
2965 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2967 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2969 if (packed_dest_p || packed_src_p)
2971 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2972 ? MODE_FLOAT : MODE_INT);
2974 enum machine_mode reg_mode
2975 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2977 if (reg_mode != BLKmode)
2979 rtx mem = assign_stack_temp (reg_mode,
2980 GET_MODE_SIZE (mode), 0);
2981 rtx cmem = adjust_address (mem, mode, 0);
2984 = N_("function using short complex types cannot be inline");
2988 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2990 emit_move_insn_1 (cmem, y);
2991 return emit_move_insn_1 (sreg, mem);
2995 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2997 emit_move_insn_1 (mem, sreg);
2998 return emit_move_insn_1 (x, cmem);
3004 realpart_x = gen_realpart (submode, x);
3005 realpart_y = gen_realpart (submode, y);
3006 imagpart_x = gen_imagpart (submode, x);
3007 imagpart_y = gen_imagpart (submode, y);
3009 /* Show the output dies here. This is necessary for SUBREGs
3010 of pseudos since we cannot track their lifetimes correctly;
3011 hard regs shouldn't appear here except as return values.
3012 We never want to emit such a clobber after reload. */
3014 && ! (reload_in_progress || reload_completed)
3015 && (GET_CODE (realpart_x) == SUBREG
3016 || GET_CODE (imagpart_x) == SUBREG))
3017 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3019 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3020 (realpart_x, realpart_y));
3021 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3022 (imagpart_x, imagpart_y));
3025 return get_last_insn ();
3028 /* This will handle any multi-word mode that lacks a move_insn pattern.
3029 However, you will get better code if you define such patterns,
3030 even if they must turn into multiple assembler instructions. */
3031 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3038 #ifdef PUSH_ROUNDING
3040 /* If X is a push on the stack, do the push now and replace
3041 X with a reference to the stack pointer. */
3042 if (push_operand (x, GET_MODE (x)))
3047 /* Do not use anti_adjust_stack, since we don't want to update
3048 stack_pointer_delta. */
3049 temp = expand_binop (Pmode,
3050 #ifdef STACK_GROWS_DOWNWARD
3058 (GET_MODE_SIZE (GET_MODE (x)))),
3059 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3061 if (temp != stack_pointer_rtx)
3062 emit_move_insn (stack_pointer_rtx, temp);
3064 code = GET_CODE (XEXP (x, 0));
3066 /* Just hope that small offsets off SP are OK. */
3067 if (code == POST_INC)
3068 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3069 GEN_INT (-((HOST_WIDE_INT)
3070 GET_MODE_SIZE (GET_MODE (x)))));
3071 else if (code == POST_DEC)
3072 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3073 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3075 temp = stack_pointer_rtx;
3077 x = change_address (x, VOIDmode, temp);
3081 /* If we are in reload, see if either operand is a MEM whose address
3082 is scheduled for replacement. */
3083 if (reload_in_progress && GET_CODE (x) == MEM
3084 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3085 x = replace_equiv_address_nv (x, inner);
3086 if (reload_in_progress && GET_CODE (y) == MEM
3087 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3088 y = replace_equiv_address_nv (y, inner);
3094 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3097 rtx xpart = operand_subword (x, i, 1, mode);
3098 rtx ypart = operand_subword (y, i, 1, mode);
3100 /* If we can't get a part of Y, put Y into memory if it is a
3101 constant. Otherwise, force it into a register. If we still
3102 can't get a part of Y, abort. */
3103 if (ypart == 0 && CONSTANT_P (y))
3105 y = force_const_mem (mode, y);
3106 ypart = operand_subword (y, i, 1, mode);
3108 else if (ypart == 0)
3109 ypart = operand_subword_force (y, i, mode);
3111 if (xpart == 0 || ypart == 0)
3114 need_clobber |= (GET_CODE (xpart) == SUBREG);
3116 last_insn = emit_move_insn (xpart, ypart);
3119 seq = gen_sequence ();
3122 /* Show the output dies here. This is necessary for SUBREGs
3123 of pseudos since we cannot track their lifetimes correctly;
3124 hard regs shouldn't appear here except as return values.
3125 We never want to emit such a clobber after reload. */
3127 && ! (reload_in_progress || reload_completed)
3128 && need_clobber != 0)
3129 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3139 /* If Y is representable exactly in a narrower mode, and the target can
3140 perform the extension directly from constant or memory, then emit the
3141 move as an extension. */
3144 compress_float_constant (x, y)
3147 enum machine_mode dstmode = GET_MODE (x);
3148 enum machine_mode orig_srcmode = GET_MODE (y);
3149 enum machine_mode srcmode;
3152 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3154 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3155 srcmode != orig_srcmode;
3156 srcmode = GET_MODE_WIDER_MODE (srcmode))
3159 rtx trunc_y, last_insn;
3161 /* Skip if the target can't extend this way. */
3162 ic = can_extend_p (dstmode, srcmode, 0);
3163 if (ic == CODE_FOR_nothing)
3166 /* Skip if the narrowed value isn't exact. */
3167 if (! exact_real_truncate (srcmode, &r))
3170 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3172 if (LEGITIMATE_CONSTANT_P (trunc_y))
3174 /* Skip if the target needs extra instructions to perform the extension. */
3176 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3179 else if (float_extend_from_mem[dstmode][srcmode])
3180 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3184 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3185 last_insn = get_last_insn ();
3187 if (GET_CODE (x) == REG)
3188 REG_NOTES (last_insn)
3189 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
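/* Example: moving (const_double:DF 1.0) this way works because 1.0
   truncates to SFmode exactly, so a target with extendsfdf2 can load
   the narrower constant and extend it; a value such as 0.1 fails the
   exact_real_truncate test above and is moved the ordinary way.  */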
3197 /* Pushing data onto the stack. */
3199 /* Push a block of length SIZE (perhaps variable)
3200 and return an rtx to address the beginning of the block.
3201 Note that it is not possible for the value returned to be a QUEUED.
3202 The value may be virtual_outgoing_args_rtx.
3204 EXTRA is the number of bytes of padding to push in addition to SIZE.
3205 BELOW nonzero means this padding comes at low addresses;
3206 otherwise, the padding comes at high addresses. */
3209 push_block (size, extra, below)
3215 size = convert_modes (Pmode, ptr_mode, size, 1);
3216 if (CONSTANT_P (size))
3217 anti_adjust_stack (plus_constant (size, extra));
3218 else if (GET_CODE (size) == REG && extra == 0)
3219 anti_adjust_stack (size);
3222 temp = copy_to_mode_reg (Pmode, size);
3224 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3225 temp, 0, OPTAB_LIB_WIDEN);
3226 anti_adjust_stack (temp);
3229 #ifndef STACK_GROWS_DOWNWARD
3235 temp = virtual_outgoing_args_rtx;
3236 if (extra != 0 && below)
3237 temp = plus_constant (temp, extra);
3241 if (GET_CODE (size) == CONST_INT)
3242 temp = plus_constant (virtual_outgoing_args_rtx,
3243 -INTVAL (size) - (below ? 0 : extra));
3244 else if (extra != 0 && !below)
3245 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3246 negate_rtx (Pmode, plus_constant (size, extra)));
3248 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3249 negate_rtx (Pmode, size));
3252 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3255 #ifdef PUSH_ROUNDING
3257 /* Emit single push insn. */
3260 emit_single_push_insn (mode, x, type)
3262 enum machine_mode mode;
3266 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3268 enum insn_code icode;
3269 insn_operand_predicate_fn pred;
3271 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3272 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3273 a MEM representing the push operation to the move expander. */
3274 icode = push_optab->handlers[(int) mode].insn_code;
3275 if (icode != CODE_FOR_nothing)
3277 if (((pred = insn_data[(int) icode].operand[0].predicate)
3278 && !((*pred) (x, mode))))
3279 x = force_reg (mode, x);
3280 emit_insn (GEN_FCN (icode) (x));
3283 if (GET_MODE_SIZE (mode) == rounded_size)
3284 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3287 #ifdef STACK_GROWS_DOWNWARD
3288 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3289 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3291 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3292 GEN_INT (rounded_size));
3294 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3297 dest = gen_rtx_MEM (mode, dest_addr);
3301 set_mem_attributes (dest, type, 1);
3303 if (flag_optimize_sibling_calls)
3304 /* Function incoming arguments may overlap with sibling call
3305 outgoing arguments and we cannot allow reordering of reads
3306 from function arguments with stores to outgoing arguments
3307 of sibling calls. */
3308 set_mem_alias_set (dest, 0);
3310 emit_move_insn (dest, x);
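/* For instance, pushing an HImode value when PUSH_ROUNDING rounds 2
   bytes up to 4 takes the branch above: on a STACK_GROWS_DOWNWARD
   target the destination address becomes
     (pre_modify (reg sp) (plus (reg sp) (const_int -4)))
   whereas an exactly-sized push would have used STACK_PUSH_CODE
   directly.  (The rounding values here are hypothetical.)  */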
3314 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3316 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3318 SIZE is an rtx for the size of data to be copied (in bytes),
3319 needed only if X is BLKmode.
3321 ALIGN (in bits) is maximum alignment we can assume.
3323 If PARTIAL and REG are both nonzero, then copy that many of the first
3324 words of X into registers starting with REG, and push the rest of X.
3325 The amount of space pushed is decreased by PARTIAL words,
3326 rounded *down* to a multiple of PARM_BOUNDARY.
3327 REG must be a hard register in this case.
3328 If REG is zero but PARTIAL is not, take all other actions for an
3329 argument partially in registers, but do not actually load any registers.
3332 EXTRA is the amount in bytes of extra space to leave next to this arg.
3333 This is ignored if an argument block has already been allocated.
3335 On a machine that lacks real push insns, ARGS_ADDR is the address of
3336 the bottom of the argument block for this call. We use indexing off there
3337 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3338 argument block has not been preallocated.
3340 ARGS_SO_FAR is the size of args previously pushed for this call.
3342 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3343 for arguments passed in registers. If nonzero, it will be the number
3344 of bytes required. */
3347 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3348 args_addr, args_so_far, reg_parm_stack_space,
3351 enum machine_mode mode;
3360 int reg_parm_stack_space;
3364 enum direction stack_direction
3365 #ifdef STACK_GROWS_DOWNWARD
3371 /* Decide where to pad the argument: `downward' for below,
3372 `upward' for above, or `none' for don't pad it.
3373 Default is below for small data on big-endian machines; else above. */
3374 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3376 /* Invert direction if stack is post-decrement. FIXME: why? */
3378 if (STACK_PUSH_CODE == POST_DEC)
3379 if (where_pad != none)
3380 where_pad = (where_pad == downward ? upward : downward);
3382 xinner = x = protect_from_queue (x, 0);
3384 if (mode == BLKmode)
3386 /* Copy a block into the stack, entirely or partially. */
3389 int used = partial * UNITS_PER_WORD;
3390 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
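/* Worked example (hypothetical target): PARTIAL == 3 words of 4 bytes
   gives used == 12; with PARM_BOUNDARY == 64 the divisor is 8 bytes,
   so offset == 12 % 8 == 4 bytes of the register part that intrude
   into the last stack alignment unit.  */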
3398 /* USED is now the # of bytes we need not copy to the stack
3399 because registers will take care of them. */
3402 xinner = adjust_address (xinner, BLKmode, used);
3404 /* If the partial register-part of the arg counts in its stack size,
3405 skip the part of stack space corresponding to the registers.
3406 Otherwise, start copying to the beginning of the stack space,
3407 by setting SKIP to 0. */
3408 skip = (reg_parm_stack_space == 0) ? 0 : used;
3410 #ifdef PUSH_ROUNDING
3411 /* Do it with several push insns if that doesn't take lots of insns
3412 and if there is no difficulty with push insns that skip bytes
3413 on the stack for alignment purposes. */
3416 && GET_CODE (size) == CONST_INT
3418 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3419 /* Here we avoid the case of a structure whose weak alignment
3420 forces many pushes of a small amount of data,
3421 and such small pushes do rounding that causes trouble. */
3422 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3423 || align >= BIGGEST_ALIGNMENT
3424 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3425 == (align / BITS_PER_UNIT)))
3426 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3428 /* Push padding now if padding above and stack grows down,
3429 or if padding below and stack grows up.
3430 But if space already allocated, this has already been done. */
3431 if (extra && args_addr == 0
3432 && where_pad != none && where_pad != stack_direction)
3433 anti_adjust_stack (GEN_INT (extra));
3435 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3438 #endif /* PUSH_ROUNDING */
3442 /* Otherwise make space on the stack and copy the data
3443 to the address of that space. */
3445 /* Deduct words put into registers from the size we must copy. */
3448 if (GET_CODE (size) == CONST_INT)
3449 size = GEN_INT (INTVAL (size) - used);
3451 size = expand_binop (GET_MODE (size), sub_optab, size,
3452 GEN_INT (used), NULL_RTX, 0,
3456 /* Get the address of the stack space.
3457 In this case, we do not deal with EXTRA separately.
3458 A single stack adjust will do. */
3461 temp = push_block (size, extra, where_pad == downward);
3464 else if (GET_CODE (args_so_far) == CONST_INT)
3465 temp = memory_address (BLKmode,
3466 plus_constant (args_addr,
3467 skip + INTVAL (args_so_far)));
3469 temp = memory_address (BLKmode,
3470 plus_constant (gen_rtx_PLUS (Pmode,
3474 target = gen_rtx_MEM (BLKmode, temp);
3478 set_mem_attributes (target, type, 1);
3479 /* Function incoming arguments may overlap with sibling call
3480 outgoing arguments and we cannot allow reordering of reads
3481 from function arguments with stores to outgoing arguments
3482 of sibling calls. */
3483 set_mem_alias_set (target, 0);
3486 set_mem_align (target, align);
3488 /* TEMP is the address of the block. Copy the data there. */
3489 if (GET_CODE (size) == CONST_INT
3490 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3492 move_by_pieces (target, xinner, INTVAL (size), align);
3497 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3498 enum machine_mode mode;
3500 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3502 mode = GET_MODE_WIDER_MODE (mode))
3504 enum insn_code code = movstr_optab[(int) mode];
3505 insn_operand_predicate_fn pred;
3507 if (code != CODE_FOR_nothing
3508 && ((GET_CODE (size) == CONST_INT
3509 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3510 <= (GET_MODE_MASK (mode) >> 1)))
3511 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3512 && (!(pred = insn_data[(int) code].operand[0].predicate)
3513 || ((*pred) (target, BLKmode)))
3514 && (!(pred = insn_data[(int) code].operand[1].predicate)
3515 || ((*pred) (xinner, BLKmode)))
3516 && (!(pred = insn_data[(int) code].operand[3].predicate)
3517 || ((*pred) (opalign, VOIDmode))))
3519 rtx op2 = convert_to_mode (mode, size, 1);
3520 rtx last = get_last_insn ();
3523 pred = insn_data[(int) code].operand[2].predicate;
3524 if (pred != 0 && ! (*pred) (op2, mode))
3525 op2 = copy_to_mode_reg (mode, op2);
3527 pat = GEN_FCN ((int) code) (target, xinner,
3535 delete_insns_since (last);
3540 if (!ACCUMULATE_OUTGOING_ARGS)
3542 /* If the source is referenced relative to the stack pointer,
3543 copy it to another register to stabilize it. We do not need
3544 to do this if we know that we won't be changing sp. */
3546 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3547 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3548 temp = copy_to_reg (temp);
3551 /* Make inhibit_defer_pop nonzero around the library call
3552 to force it to pop the bcopy-arguments right away. */
3554 #ifdef TARGET_MEM_FUNCTIONS
3555 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3556 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3557 convert_to_mode (TYPE_MODE (sizetype),
3558 size, TREE_UNSIGNED (sizetype)),
3559 TYPE_MODE (sizetype));
3561 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3562 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3563 convert_to_mode (TYPE_MODE (integer_type_node),
3565 TREE_UNSIGNED (integer_type_node)),
3566 TYPE_MODE (integer_type_node));
3571 else if (partial > 0)
3573 /* Scalar partly in registers. */
3575 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3578 /* # words of start of argument
3579 that we must make space for but need not store. */
3580 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3581 int args_offset = INTVAL (args_so_far);
3584 /* Push padding now if padding above and stack grows down,
3585 or if padding below and stack grows up.
3586 But if space already allocated, this has already been done. */
3587 if (extra && args_addr == 0
3588 && where_pad != none && where_pad != stack_direction)
3589 anti_adjust_stack (GEN_INT (extra));
3591 /* If we make space by pushing it, we might as well push
3592 the real data. Otherwise, we can leave OFFSET nonzero
3593 and leave the space uninitialized. */
3597 /* Now NOT_STACK gets the number of words that we don't need to
3598 allocate on the stack. */
3599 not_stack = partial - offset;
3601 /* If the partial register-part of the arg counts in its stack size,
3602 skip the part of stack space corresponding to the registers.
3603 Otherwise, start copying to the beginning of the stack space,
3604 by setting SKIP to 0. */
3605 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3607 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3608 x = validize_mem (force_const_mem (mode, x));
3610 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3611 SUBREGs of such registers are not allowed. */
3612 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3613 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3614 x = copy_to_reg (x);
3616 /* Loop over all the words allocated on the stack for this arg. */
3617 /* We can do it by words, because any scalar bigger than a word
3618 has a size a multiple of a word. */
3619 #ifndef PUSH_ARGS_REVERSED
3620 for (i = not_stack; i < size; i++)
3622 for (i = size - 1; i >= not_stack; i--)
3624 if (i >= not_stack + offset)
3625 emit_push_insn (operand_subword_force (x, i, mode),
3626 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3628 GEN_INT (args_offset + ((i - not_stack + skip)
3630 reg_parm_stack_space, alignment_pad);
3635 rtx target = NULL_RTX;
3638 /* Push padding now if padding above and stack grows down,
3639 or if padding below and stack grows up.
3640 But if space already allocated, this has already been done. */
3641 if (extra && args_addr == 0
3642 && where_pad != none && where_pad != stack_direction)
3643 anti_adjust_stack (GEN_INT (extra));
3645 #ifdef PUSH_ROUNDING
3646 if (args_addr == 0 && PUSH_ARGS)
3647 emit_single_push_insn (mode, x, type);
3651 if (GET_CODE (args_so_far) == CONST_INT)
3653 = memory_address (mode,
3654 plus_constant (args_addr,
3655 INTVAL (args_so_far)));
3657 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3660 dest = gen_rtx_MEM (mode, addr);
3663 set_mem_attributes (dest, type, 1);
3664 /* Function incoming arguments may overlap with sibling call
3665 outgoing arguments and we cannot allow reordering of reads
3666 from function arguments with stores to outgoing arguments
3667 of sibling calls. */
3668 set_mem_alias_set (dest, 0);
3671 emit_move_insn (dest, x);
3677 /* If part should go in registers, copy that part
3678 into the appropriate registers. Do this now, at the end,
3679 since mem-to-mem copies above may do function calls. */
3680 if (partial > 0 && reg != 0)
3682 /* Handle calls that pass values in multiple non-contiguous locations.
3683 The Irix 6 ABI has examples of this. */
3684 if (GET_CODE (reg) == PARALLEL)
3685 emit_group_load (reg, x, -1); /* ??? size? */
3687 move_block_to_reg (REGNO (reg), x, partial, mode);
3690 if (extra && args_addr == 0 && where_pad == stack_direction)
3691 anti_adjust_stack (GEN_INT (extra));
3693 if (alignment_pad && args_addr == 0)
3694 anti_adjust_stack (alignment_pad);
3697 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3705 /* Only registers can be subtargets. */
3706 || GET_CODE (x) != REG
3707 /* If the register is readonly, it can't be set more than once. */
3708 || RTX_UNCHANGING_P (x)
3709 /* Don't use hard regs to avoid extending their life. */
3710 || REGNO (x) < FIRST_PSEUDO_REGISTER
3711 /* Avoid subtargets inside loops,
3712 since they hide some invariant expressions. */
3713 || preserve_subexpressions_p ())
3717 /* Expand an assignment that stores the value of FROM into TO.
3718 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3719 (This may contain a QUEUED rtx;
3720 if the value is constant, this rtx is a constant.)
3721 Otherwise, the returned value is NULL_RTX.
3723 SUGGEST_REG is no longer actually used.
3724 It used to mean, copy the value through a register
3725 and return that register, if that is possible.
3726 We now use WANT_VALUE to decide whether to do this. */
3729 expand_assignment (to, from, want_value, suggest_reg)
3732 int suggest_reg ATTRIBUTE_UNUSED;
3737 /* Don't crash if the lhs of the assignment was erroneous. */
3739 if (TREE_CODE (to) == ERROR_MARK)
3741 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3742 return want_value ? result : NULL_RTX;
3745 /* Assignment of a structure component needs special treatment
3746 if the structure component's rtx is not simply a MEM.
3747 Assignment of an array element at a constant index, and assignment of
3748 an array element in an unaligned packed structure field, have the same problem. */
3751 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3752 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3754 enum machine_mode mode1;
3755 HOST_WIDE_INT bitsize, bitpos;
3763 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3764 &unsignedp, &volatilep);
3766 /* If we are going to use store_bit_field and extract_bit_field,
3767 make sure to_rtx will be safe for multiple use. */
3769 if (mode1 == VOIDmode && want_value)
3770 tem = stabilize_reference (tem);
3772 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3776 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3778 if (GET_CODE (to_rtx) != MEM)
3781 #ifdef POINTERS_EXTEND_UNSIGNED
3782 if (GET_MODE (offset_rtx) != Pmode)
3783 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3785 if (GET_MODE (offset_rtx) != ptr_mode)
3786 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3789 /* A constant address in TO_RTX can have VOIDmode; we must not try
3790 to call force_reg in that case, so we avoid it here. */
3791 if (GET_CODE (to_rtx) == MEM
3792 && GET_MODE (to_rtx) == BLKmode
3793 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3795 && (bitpos % bitsize) == 0
3796 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3797 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3799 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3803 to_rtx = offset_address (to_rtx, offset_rtx,
3804 highest_pow2_factor_for_type (TREE_TYPE (to),
3808 if (GET_CODE (to_rtx) == MEM)
3810 tree old_expr = MEM_EXPR (to_rtx);
3812 /* If the field is at offset zero, we could have been given the
3813 DECL_RTX of the parent struct. Don't munge it. */
3814 to_rtx = shallow_copy_rtx (to_rtx);
3816 set_mem_attributes (to_rtx, to, 0);
3818 /* If we changed MEM_EXPR, that means we're now referencing
3819 the COMPONENT_REF, which means that MEM_OFFSET must be
3820 relative to that field. But we've not yet reflected BITPOS
3821 in TO_RTX. This will be done in store_field. Adjust for
3822 that by biasing MEM_OFFSET by -bitpos. */
3823 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3824 && (bitpos / BITS_PER_UNIT) != 0)
3825 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3826 - (bitpos / BITS_PER_UNIT)));
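/* E.g. storing to s.f where F sits at byte offset 4 (a hypothetical
   layout): a MEM_OFFSET of 0 becomes -4 here, and store_field later
   applies the 32-bit BITPOS when it forms the final address, so the
   two cancel out.  */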
3829 /* Deal with volatile and readonly fields. The former is only done
3830 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3831 if (volatilep && GET_CODE (to_rtx) == MEM)
3833 if (to_rtx == orig_to_rtx)
3834 to_rtx = copy_rtx (to_rtx);
3835 MEM_VOLATILE_P (to_rtx) = 1;
3838 if (TREE_CODE (to) == COMPONENT_REF
3839 && TREE_READONLY (TREE_OPERAND (to, 1)))
3841 if (to_rtx == orig_to_rtx)
3842 to_rtx = copy_rtx (to_rtx);
3843 RTX_UNCHANGING_P (to_rtx) = 1;
3846 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3848 if (to_rtx == orig_to_rtx)
3849 to_rtx = copy_rtx (to_rtx);
3850 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3853 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3855 /* Spurious cast for HPUX compiler. */
3856 ? ((enum machine_mode)
3857 TYPE_MODE (TREE_TYPE (to)))
3859 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3861 preserve_temp_slots (result);
3865 /* If the value is meaningful, convert RESULT to the proper mode.
3866 Otherwise, return nothing. */
3867 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3868 TYPE_MODE (TREE_TYPE (from)),
3870 TREE_UNSIGNED (TREE_TYPE (to)))
3874 /* If the rhs is a function call and its value is not an aggregate,
3875 call the function before we start to compute the lhs.
3876 This is needed for correct code for cases such as
3877 val = setjmp (buf) on machines where reference to val
3878 requires loading up part of an address in a separate insn.
3880 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3881 since it might be a promoted variable where the zero- or sign- extension
3882 needs to be done. Handling this in the normal way is safe because no
3883 computation is done before the call. */
3884 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3885 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3886 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3887 && GET_CODE (DECL_RTL (to)) == REG))
3892 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3894 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3896 /* Handle calls that return values in multiple non-contiguous locations.
3897 The Irix 6 ABI has examples of this. */
3898 if (GET_CODE (to_rtx) == PARALLEL)
3899 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3900 else if (GET_MODE (to_rtx) == BLKmode)
3901 emit_block_move (to_rtx, value, expr_size (from));
3904 #ifdef POINTERS_EXTEND_UNSIGNED
3905 if (POINTER_TYPE_P (TREE_TYPE (to))
3906 && GET_MODE (to_rtx) != GET_MODE (value))
3907 value = convert_memory_address (GET_MODE (to_rtx), value);
3909 emit_move_insn (to_rtx, value);
3911 preserve_temp_slots (to_rtx);
3914 return want_value ? to_rtx : NULL_RTX;
3917 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3918 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3921 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3923 /* Don't move directly into a return register. */
3924 if (TREE_CODE (to) == RESULT_DECL
3925 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3930 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3932 if (GET_CODE (to_rtx) == PARALLEL)
3933 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3935 emit_move_insn (to_rtx, temp);
3937 preserve_temp_slots (to_rtx);
3940 return want_value ? to_rtx : NULL_RTX;
3943 /* In case we are returning the contents of an object which overlaps
3944 the place the value is being stored, use a safe function when copying
3945 a value through a pointer into a structure value return block. */
3946 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3947 && current_function_returns_struct
3948 && !current_function_returns_pcc_struct)
3953 size = expr_size (from);
3954 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3956 #ifdef TARGET_MEM_FUNCTIONS
3957 emit_library_call (memmove_libfunc, LCT_NORMAL,
3958 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3959 XEXP (from_rtx, 0), Pmode,
3960 convert_to_mode (TYPE_MODE (sizetype),
3961 size, TREE_UNSIGNED (sizetype)),
3962 TYPE_MODE (sizetype));
3964 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3965 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3966 XEXP (to_rtx, 0), Pmode,
3967 convert_to_mode (TYPE_MODE (integer_type_node),
3968 size, TREE_UNSIGNED (integer_type_node)),
3969 TYPE_MODE (integer_type_node));
3972 preserve_temp_slots (to_rtx);
3975 return want_value ? to_rtx : NULL_RTX;
3978 /* Compute FROM and store the value in the rtx we got. */
3981 result = store_expr (from, to_rtx, want_value);
3982 preserve_temp_slots (result);
3985 return want_value ? result : NULL_RTX;
3988 /* Generate code for computing expression EXP,
3989 and storing the value into TARGET.
3990 TARGET may contain a QUEUED rtx.
3992 If WANT_VALUE is nonzero, return a copy of the value
3993 not in TARGET, so that we can be sure to use the proper
3994 value in a containing expression even if TARGET has something
3995 else stored in it. If possible, we copy the value through a pseudo
3996 and return that pseudo. Or, if the value is constant, we try to
3997 return the constant. In some cases, we return a pseudo
3998 copied *from* TARGET.
4000 If the mode is BLKmode then we may return TARGET itself.
4001 It turns out that in BLKmode it doesn't cause a problem,
4002 because C has no operators that could combine two different
4003 assignments into the same BLKmode object with different values
4004 with no sequence point. Will other languages need this to be more thorough? */
4007 If WANT_VALUE is 0, we return NULL, to make sure
4008 to catch quickly any cases where the caller uses the value
4009 and fails to set WANT_VALUE. */
4012 store_expr (exp, target, want_value)
4018 int dont_return_target = 0;
4019 int dont_store_target = 0;
4021 if (TREE_CODE (exp) == COMPOUND_EXPR)
4023 /* Perform first part of compound expression, then assign from second part. */
4025 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4027 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4029 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4031 /* For a conditional expression, get a safe form of the target. Then
4032 test the condition, doing the appropriate assignment on either
4033 side. This avoids the creation of unnecessary temporaries.
4034 For non-BLKmode, it is more efficient not to do this. */
4036 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4039 target = protect_from_queue (target, 1);
4041 do_pending_stack_adjust ();
4043 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4044 start_cleanup_deferral ();
4045 store_expr (TREE_OPERAND (exp, 1), target, 0);
4046 end_cleanup_deferral ();
4048 emit_jump_insn (gen_jump (lab2));
4051 start_cleanup_deferral ();
4052 store_expr (TREE_OPERAND (exp, 2), target, 0);
4053 end_cleanup_deferral ();
4058 return want_value ? target : NULL_RTX;
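/* The control flow emitted above is, in outline:
     if (!cond) goto lab1;  <store arm 1 into TARGET>  goto lab2;
   lab1:                    <store arm 2 into TARGET>
   lab2:
   so both arms write TARGET directly and no temporary is created.  */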
4060 else if (queued_subexp_p (target))
4061 /* If target contains a postincrement, let's not risk
4062 using it as the place to generate the rhs. */
4064 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4066 /* Expand EXP into a new pseudo. */
4067 temp = gen_reg_rtx (GET_MODE (target));
4068 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4071 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4073 /* If target is volatile, ANSI requires accessing the value
4074 *from* the target, if it is accessed. So make that happen.
4075 In no case return the target itself. */
4076 if (! MEM_VOLATILE_P (target) && want_value)
4077 dont_return_target = 1;
4079 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4080 && GET_MODE (target) != BLKmode)
4081 /* If target is in memory and caller wants value in a register instead,
4082 arrange that. Pass TARGET as target for expand_expr so that,
4083 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4084 We know expand_expr will not use the target in that case.
4085 Don't do this if TARGET is volatile because we are supposed
4086 to write it and then read it. */
4088 temp = expand_expr (exp, target, GET_MODE (target), 0);
4089 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4091 /* If TEMP is already in the desired TARGET, only copy it from
4092 memory and don't store it there again. */
4094 || (rtx_equal_p (temp, target)
4095 && ! side_effects_p (temp) && ! side_effects_p (target)))
4096 dont_store_target = 1;
4097 temp = copy_to_reg (temp);
4099 dont_return_target = 1;
4101 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4102 /* If this is a scalar in a register that is stored in a wider mode
4103 than the declared mode, compute the result into its declared mode
4104 and then convert to the wider mode. Our value is the computed expression. */
4107 rtx inner_target = 0;
4109 /* If we don't want a value, we can do the conversion inside EXP,
4110 which will often result in some optimizations. Do the conversion
4111 in two steps: first change the signedness, if needed, then
4112 the extend. But don't do this if the type of EXP is a subtype
4113 of something else since then the conversion might involve
4114 more than just converting modes. */
4115 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4116 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4118 if (TREE_UNSIGNED (TREE_TYPE (exp))
4119 != SUBREG_PROMOTED_UNSIGNED_P (target))
4121 ((*lang_hooks.types.signed_or_unsigned_type)
4122 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4124 exp = convert ((*lang_hooks.types.type_for_mode)
4125 (GET_MODE (SUBREG_REG (target)),
4126 SUBREG_PROMOTED_UNSIGNED_P (target)),
4129 inner_target = SUBREG_REG (target);
4132 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4134 /* If TEMP is a volatile MEM and we want a result value, make
4135 the access now so it gets done only once. Likewise if
4136 it contains TARGET. */
4137 if (GET_CODE (temp) == MEM && want_value
4138 && (MEM_VOLATILE_P (temp)
4139 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4140 temp = copy_to_reg (temp);
4142 /* If TEMP is a VOIDmode constant, use convert_modes to make
4143 sure that we properly convert it. */
4144 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4146 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4147 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4148 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4149 GET_MODE (target), temp,
4150 SUBREG_PROMOTED_UNSIGNED_P (target));
4153 convert_move (SUBREG_REG (target), temp,
4154 SUBREG_PROMOTED_UNSIGNED_P (target));
4156 /* If we promoted a constant, change the mode back down to match
4157 target. Otherwise, the caller might get confused by a result whose
4158 mode is larger than expected. */
4160 if (want_value && GET_MODE (temp) != GET_MODE (target))
4162 if (GET_MODE (temp) != VOIDmode)
4164 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4165 SUBREG_PROMOTED_VAR_P (temp) = 1;
4166 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4167 SUBREG_PROMOTED_UNSIGNED_P (target));
4170 temp = convert_modes (GET_MODE (target),
4171 GET_MODE (SUBREG_REG (target)),
4172 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4175 return want_value ? temp : NULL_RTX;
4179 temp = expand_expr (exp, target, GET_MODE (target), 0);
4180 /* Return TARGET if it's a specified hardware register.
4181 If TARGET is a volatile mem ref, either return TARGET
4182 or return a reg copied *from* TARGET; ANSI requires this.
4184 Otherwise, if TEMP is not TARGET, return TEMP
4185 if it is constant (for efficiency),
4186 or if we really want the correct value. */
4187 if (!(target && GET_CODE (target) == REG
4188 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4189 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4190 && ! rtx_equal_p (temp, target)
4191 && (CONSTANT_P (temp) || want_value))
4192 dont_return_target = 1;
4195 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4196 the same as that of TARGET, adjust the constant. This is needed, for
4197 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4199 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4200 && TREE_CODE (exp) != ERROR_MARK
4201 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4202 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4203 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4205 /* If value was not generated in the target, store it there.
4206 Convert the value to TARGET's type first if necessary.
4207 If TEMP and TARGET compare equal according to rtx_equal_p, but
4208 one or both of them are volatile memory refs, we have to distinguish
4210 - expand_expr has used TARGET. In this case, we must not generate
4211 another copy. This can be detected by TARGET being equal according to == .
4213 - expand_expr has not used TARGET - that means that the source just
4214 happens to have the same RTX form. Since temp will have been created
4215 by expand_expr, it will compare unequal according to == .
4216 We must generate a copy in this case, to reach the correct number
4217 of volatile memory references. */
4219 if ((! rtx_equal_p (temp, target)
4220 || (temp != target && (side_effects_p (temp)
4221 || side_effects_p (target))))
4222 && TREE_CODE (exp) != ERROR_MARK
4223 && ! dont_store_target
4224 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4225 but TARGET is not a valid memory reference, TEMP will differ
4226 from TARGET although it is really the same location. */
4227 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4228 || target != DECL_RTL_IF_SET (exp)))
4230 target = protect_from_queue (target, 1);
4231 if (GET_MODE (temp) != GET_MODE (target)
4232 && GET_MODE (temp) != VOIDmode)
4234 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4235 if (dont_return_target)
4237 /* In this case, we will return TEMP,
4238 so make sure it has the proper mode.
4239 But don't forget to store the value into TARGET. */
4240 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4241 emit_move_insn (target, temp);
4244 convert_move (target, temp, unsignedp);
4247 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4249 /* Handle copying a string constant into an array. The string
4250 constant may be shorter than the array. So copy just the string's
4251 actual length, and clear the rest. First get the size of the data
4252 type of the string, which is actually the size of the target. */
4253 rtx size = expr_size (exp);
4255 if (GET_CODE (size) == CONST_INT
4256 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4257 emit_block_move (target, temp, size);
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;
4269 /* Copy that much. */
4270 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4271 emit_block_move (target, temp, copy_size_rtx);
4273 /* Figure out how much is left in TARGET that we have to clear.
4274 Do all calculations in ptr_mode. */
4275 if (GET_CODE (copy_size_rtx) == CONST_INT)
4277 size = plus_constant (size, -INTVAL (copy_size_rtx));
4278 target = adjust_address (target, BLKmode,
4279 INTVAL (copy_size_rtx));
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);
4287 #ifdef POINTERS_EXTEND_UNSIGNED
4288 if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_memory_address (Pmode,
							    copy_size_rtx);
#endif
4293 target = offset_address (target, copy_size_rtx,
4294 highest_pow2_factor (copy_size));
4295 label = gen_label_rtx ();
4296 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4297 GET_MODE (size), 0, label);
	      if (size != const0_rtx)
		clear_storage (target, size);

	      if (label)
		emit_label (label);
	    }
	}
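      /* As an illustration of the code above (example values only, not
	 taken from the surrounding source): for `char a[10] = "abc";'
	 the string constant supplies 4 bytes ("abc" plus the
	 terminating null), so copy_size is MIN (10, 4) == 4;
	 emit_block_move copies those 4 bytes and clear_storage zeroes
	 the remaining 6 bytes of the array.  */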
4307 /* Handle calls that return values in multiple non-contiguous locations.
4308 The Irix 6 ABI has examples of this. */
4309 else if (GET_CODE (target) == PARALLEL)
4310 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4311 else if (GET_MODE (temp) == BLKmode)
4312 emit_block_move (target, temp, expr_size (exp));
4314 emit_move_insn (target, temp);
  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;
  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
	   elt = TREE_CHAIN (elt))
	if (!is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     and since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
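/* As an illustration of the 3/4 heuristic above (example values only):
   for the constructor { 0, 0, 0, 5 } we get elts == 4 and zeros == 3,
   and 4 * 3 >= 3 * 4 holds, so mostly_zeros_p returns 1; for
   { 0, 5, 5, 5 } the test 4 * 1 >= 3 * 4 fails and the result is 0.  */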
4417 /* Helper function for store_constructor.
4418 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4419 TYPE is the type of the CONSTRUCTOR, not the element type.
4420 CLEARED is as for store_constructor.
4421 ALIAS_SET is the alias set to use for any stores.
4423 This provides a recursive shortcut back to store_constructor when it isn't
4424 necessary to go through store_field. This is so that we can pass through
4425 the cleared field to let store_constructor know that we may not have to
4426 clear a substructure if the outer structure has already been cleared. */
static void
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
			 alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
     int alias_set;
{
4439 if (TREE_CODE (exp) == CONSTRUCTOR
4440 && bitpos % BITS_PER_UNIT == 0
4441 /* If we have a non-zero bitpos for a register target, then we just
4442 let store_field do the bitfield handling. This is unlikely to
	 generate unnecessary clear instructions anyway.  */
4444 && (bitpos == 0 || GET_CODE (target) == MEM))
      if (GET_CODE (target) == MEM)
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4455 /* Update the alias set, if required. */
4456 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4457 && MEM_ALIAS_SET (target) != 0)
4459 target = copy_rtx (target);
4460 set_mem_alias_set (target, alias_set);
4463 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		 alias_set);
}
4470 /* Store the value of constructor EXP into the rtx TARGET.
4471 TARGET is either a REG or a MEM; we know it cannot conflict, since
4472 safe_from_p has been called.
4473 CLEARED is true if TARGET is known to have been zero'd.
4474 SIZE is the number of bytes of TARGET we are allowed to modify: this
4475 may not be the same as the size of EXP if we are assigning to a field
4476 which has been packed to exclude padding bits. */
static void
store_constructor (exp, target, cleared, size)
     tree exp;
     rtx target;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
4486 #ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif
4490 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4491 || TREE_CODE (type) == QUAL_UNION_TYPE)
4495 /* We either clear the aggregate or indicate the value is dead. */
4496 if ((TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && ! cleared
	  && ! CONSTRUCTOR_ELTS (exp))
	/* If the constructor is empty, clear the union.  */
	{
	  clear_storage (target, expr_size (exp));
	  cleared = 1;
	}
4506 /* If we are building a static constructor into a register,
4507 set the initial value as zero so we can fold the value into
4508 a constant. But if more than one register is involved,
4509 this probably loses. */
4510 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4511 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4513 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4517 /* If the constructor has fewer fields than the structure
4518 or if we are initializing the structure to mostly zeros,
4519 clear the whole structure first. Don't do this if TARGET is a
4520 register whose mode size isn't equal to SIZE since clear_storage
4521 can't handle this case. */
4522 else if (! cleared && size > 0
4523 && ((list_length (CONSTRUCTOR_ELTS (exp))
4524 != fields_length (type))
4525 || mostly_zeros_p (exp))
4526 && (GET_CODE (target) != REG
		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		       == size)))
	{
	  clear_storage (target, GEN_INT (size));
	  cleared = 1;
	}
      else if (REG_P (target))
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4537 /* Store each element of the constructor into
4538 the corresponding field of TARGET. */
4540 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4542 tree field = TREE_PURPOSE (elt);
4543 tree value = TREE_VALUE (elt);
4544 enum machine_mode mode;
4545 HOST_WIDE_INT bitsize;
4546 HOST_WIDE_INT bitpos = 0;
4549 rtx to_rtx = target;
4551 /* Just ignore missing fields.
4552 We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (value))
	    continue;

	  if (host_integerp (DECL_SIZE (field), 1))
	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;
4565 unsignedp = TREE_UNSIGNED (field);
4566 mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;
4570 offset = DECL_FIELD_OFFSET (field);
4571 if (host_integerp (offset, 0)
4572 && host_integerp (bit_position (field), 0))
	    {
	      bitpos = int_bit_position (field);
	      offset = 0;
	    }
	  else
	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, make_tree (TREE_TYPE (exp), target));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();
4592 #ifdef POINTERS_EXTEND_UNSIGNED
4593 if (GET_MODE (offset_rtx) != Pmode)
		offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
	      if (GET_MODE (offset_rtx) != ptr_mode)
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
4600 to_rtx = offset_address (to_rtx, offset_rtx,
4601 highest_pow2_factor (offset));
	    }

	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }
4612 #ifdef WORD_REGISTER_OPERATIONS
4613 /* If this initializes a field that is smaller than a word, at the
4614 start of a word, try to widen it to a full word.
4615 This special case allows us to output C++ member function
4616 initializations in a form that the optimizers can understand. */
4617 if (GET_CODE (target) == REG
4618 && bitsize < BITS_PER_WORD
4619 && bitpos % BITS_PER_WORD == 0
4620 && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && exp_size >= 0
	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4625 tree type = TREE_TYPE (value);
4627 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4629 type = (*lang_hooks.types.type_for_size)
4630 (BITS_PER_WORD, TREE_UNSIGNED (type));
4631 value = convert (type, value);
	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif
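	  /* As an illustration of the widening above (example values
	     only): an 8-bit INTEGER_CST field at bit position 0 of a
	     register target is widened to a full BITS_PER_WORD store;
	     with BYTES_BIG_ENDIAN and a 32-bit word, the value is first
	     shifted left by 32 - 8 == 24 bits so that it lands in the
	     high-order byte, where the narrow field lives.  */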
4643 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4644 && DECL_NONADDRESSABLE_P (field))
4646 to_rtx = copy_rtx (to_rtx);
4647 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4650 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4651 value, type, cleared,
4652 get_alias_set (TREE_TYPE (field)));
4655 else if (TREE_CODE (type) == ARRAY_TYPE
4656 || TREE_CODE (type) == VECTOR_TYPE)
4661 tree domain = TYPE_DOMAIN (type);
4662 tree elttype = TREE_TYPE (type);
4664 HOST_WIDE_INT minelt = 0;
4665 HOST_WIDE_INT maxelt = 0;
      /* Vectors are like arrays, but the domain is stored via an array
	 type indirectly.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	{
	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
	     the same field as TYPE_DOMAIN, we are not guaranteed that
	     it always will.  */
	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4675 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4678 const_bounds_p = (TYPE_MIN_VALUE (domain)
4679 && TYPE_MAX_VALUE (domain)
4680 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4681 && host_integerp (TYPE_MAX_VALUE (domain), 0));
      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
	{
	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	}
      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = ! const_bounds_p;
4700 /* This loop is a more accurate version of the loop in
4701 mostly_zeros_p (it handles RANGE_EXPR in an index).
4702 It is also needed to check for missing elements. */
4703 for (elt = CONSTRUCTOR_ELTS (exp);
4704 elt != NULL_TREE && ! need_to_clear;
4705 elt = TREE_CHAIN (elt))
4707 tree index = TREE_PURPOSE (elt);
4708 HOST_WIDE_INT this_node_count;
4710 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4712 tree lo_index = TREE_OPERAND (index, 0);
4713 tree hi_index = TREE_OPERAND (index, 1);
		  if (! host_integerp (lo_index, 1)
		      || ! host_integerp (hi_index, 1))
		    {
		      need_to_clear = 1;
		      break;
		    }

		  this_node_count = (tree_low_cst (hi_index, 1)
				     - tree_low_cst (lo_index, 1) + 1);
		}
	      else
		this_node_count = 1;
4728 count += this_node_count;
4729 if (mostly_zeros_p (TREE_VALUE (elt)))
4730 zero_count += this_node_count;
4733 /* Clear the entire array first if there are any missing elements,
4734 or if the incidence of zero elements is >= 75%. */
	  if (! need_to_clear
	      && (count < maxelt - minelt + 1
		  || 4 * zero_count >= 3 * count))
	    need_to_clear = 1;
	}
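      /* As an illustration of the test above (example values only): a
	 constructor supplying 6 of 8 elements gives count == 6 < 8, so
	 the whole array is cleared first; even with all 8 present, 6
	 zero elements would still force a clear, since
	 4 * 6 >= 3 * 8.  */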
      if (need_to_clear && size > 0)
	{
	  if (! cleared)
	    {
	      if (REG_P (target))
		emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	      else
		clear_storage (target, GEN_INT (size));
	    }
	  cleared = 1;
	}
4751 else if (REG_P (target))
4752 /* Inform later passes that the old value is dead. */
4753 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4755 /* Store each element of the constructor into
4756 the corresponding element of TARGET, determined
4757 by counting the elements. */
4758 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4760 elt = TREE_CHAIN (elt), i++)
4762 enum machine_mode mode;
4763 HOST_WIDE_INT bitsize;
4764 HOST_WIDE_INT bitpos;
4766 tree value = TREE_VALUE (elt);
4767 tree index = TREE_PURPOSE (elt);
4768 rtx xtarget = target;
	  if (cleared && is_zeros_p (value))
	    continue;
4773 unsignedp = TREE_UNSIGNED (elttype);
4774 mode = TYPE_MODE (elttype);
4775 if (mode == BLKmode)
4776 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
		       : -1);
	  else
	    bitsize = GET_MODE_BITSIZE (mode);
4782 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4784 tree lo_index = TREE_OPERAND (index, 0);
4785 tree hi_index = TREE_OPERAND (index, 1);
4786 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4787 struct nesting *loop;
4788 HOST_WIDE_INT lo, hi, count;
	      /* If the range is constant and "small", unroll the loop.  */
	      if (const_bounds_p
		  && host_integerp (lo_index, 0)
		  && host_integerp (hi_index, 0)
		  && (lo = tree_low_cst (lo_index, 0),
		      hi = tree_low_cst (hi_index, 0),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (host_integerp (TYPE_SIZE (elttype), 1)
			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
			       <= 40 * 8)))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4809 if (GET_CODE (target) == MEM
4810 && !MEM_KEEP_ALIAS_SET_P (target)
4811 && TREE_CODE (type) == ARRAY_TYPE
4812 && TYPE_NONALIASED_COMPONENT (type))
4814 target = copy_rtx (target);
4815 MEM_KEEP_ALIAS_SET_P (target) = 1;
4818 store_constructor_field
4819 (target, bitsize, bitpos, mode, value, type, cleared,
4820 get_alias_set (elttype));
4825 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4826 loop_top = gen_label_rtx ();
4827 loop_end = gen_label_rtx ();
4829 unsignedp = TREE_UNSIGNED (domain);
		  index = build_decl (VAR_DECL, NULL_TREE, domain);
		  index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));
		  SET_DECL_RTL (index, index_r);
4837 if (TREE_CODE (value) == SAVE_EXPR
4838 && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
4846 loop = expand_start_loop (0);
		  /* Assign value to element index.  */
		  position
		    = convert (ssizetype,
			       fold (build (MINUS_EXPR, TREE_TYPE (index),
					    index, TYPE_MIN_VALUE (domain))));
		  position = size_binop (MULT_EXPR, position,
					 convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));
4857 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4858 xtarget = offset_address (target, pos_rtx,
4859 highest_pow2_factor (position));
4860 xtarget = adjust_address (xtarget, mode, 0);
4861 if (TREE_CODE (value) == CONSTRUCTOR)
4862 store_constructor (value, xtarget, cleared,
4863 bitsize / BITS_PER_UNIT);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);
4878 else if ((index != 0 && ! host_integerp (index, 0))
4879 || ! host_integerp (TYPE_SIZE (elttype), 1))
		{
		  tree position;

		  if (index == 0)
		    index = ssize_int (1);

		  if (minelt)
		    index = convert (ssizetype,
				     fold (build (MINUS_EXPR, index,
						  TYPE_MIN_VALUE (domain))));

		  position = size_binop (MULT_EXPR, index,
					 convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));
4894 xtarget = offset_address (target,
4895 expand_expr (position, 0, VOIDmode, 0),
4896 highest_pow2_factor (position));
4897 xtarget = adjust_address (xtarget, mode, 0);
4898 store_expr (value, xtarget, 0);
4903 bitpos = ((tree_low_cst (index, 0) - minelt)
4904 * tree_low_cst (TYPE_SIZE (elttype), 1));
4906 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4908 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4909 && TREE_CODE (type) == ARRAY_TYPE
4910 && TYPE_NONALIASED_COMPONENT (type))
4912 target = copy_rtx (target);
4913 MEM_KEEP_ALIAS_SET_P (target) = 1;
4916 store_constructor_field (target, bitsize, bitpos, mode, value,
4917 type, cleared, get_alias_set (elttype));
4923 /* Set constructor assignments. */
4924 else if (TREE_CODE (type) == SET_TYPE)
4926 tree elt = CONSTRUCTOR_ELTS (exp);
4927 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4928 tree domain = TYPE_DOMAIN (type);
4929 tree domain_min, domain_max, bitlength;
4931 /* The default implementation strategy is to extract the constant
4932 parts of the constructor, use that to initialize the target,
4933 and then "or" in whatever non-constant ranges we need in addition.
4935 If a large set is all zero or all ones, it is
4936 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset) and
	 then set the bits we want.  */
4941 /* Check for all zeros. */
      if (elt == NULL_TREE && size > 0)
	{
	  clear_storage (target, GEN_INT (size));
	  return;
	}
4949 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4950 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4951 bitlength = size_binop (PLUS_EXPR,
			      size_diffop (domain_max, domain_min),
			      ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);
4957 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4958 are "complicated" (more than one range), initialize (the
4959 constant parts) by copying from a constant. */
4960 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4961 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4963 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4964 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4965 char *bit_buffer = (char *) alloca (nbits);
4966 HOST_WIDE_INT word = 0;
4967 unsigned int bit_pos = 0;
4968 unsigned int ibit = 0;
4969 unsigned int offset = 0; /* In bytes from beginning of set. */
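	  /* As an illustration of the loop below (example values
	     only): for a set containing the elements { 1, 3, 4, 5 }
	     with an 8-bit set_word_size on a little-endian target, the
	     loop accumulates word == 0x3a (bits 1, 3, 4 and 5 set) and
	     stores it with a single emit_move_insn.  */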
	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}

	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;

		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			to_rtx = adjust_address (target, mode, offset);
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }

		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (!cleared)
	/* Don't bother clearing storage if the set is all ones.  */
	if (TREE_CHAIN (elt) != NULL_TREE
	    || (TREE_PURPOSE (elt) == NULL_TREE
		? nbits != 1
		: ( ! host_integerp (TREE_VALUE (elt), 0)
5016 || ! host_integerp (TREE_PURPOSE (elt), 0)
5017 || (tree_low_cst (TREE_VALUE (elt), 0)
5018 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5019 != (HOST_WIDE_INT) nbits))))
5020 clear_storage (target, expr_size (exp));
5022 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5024 /* Start of range of element or NULL. */
5025 tree startbit = TREE_PURPOSE (elt);
5026 /* End of range of element, or element value. */
5027 tree endbit = TREE_VALUE (elt);
5028 #ifdef TARGET_MEM_FUNCTIONS
	  HOST_WIDE_INT startb, endb;
#endif
5031 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5033 bitlength_rtx = expand_expr (bitlength,
5034 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5036 /* Handle non-range tuple element like [ expr ]. */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }

	  startbit = convert (sizetype, startbit);
5044 endbit = convert (sizetype, endbit);
5045 if (! integer_zerop (domain_min))
5047 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5048 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5050 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5051 EXPAND_CONST_ADDRESS);
5052 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5053 EXPAND_CONST_ADDRESS);
	  if (GET_CODE (target) == REG)
	    {
	      targetx
		= assign_temp
		  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
					  (GET_MODE (target), 0),
					  TYPE_QUAL_CONST)),
		   0, 1, 1);
	      emit_move_insn (targetx, target);
	    }

	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();
5071 #ifdef TARGET_MEM_FUNCTIONS
5072 /* Optimization: If startbit and endbit are
5073 constants divisible by BITS_PER_UNIT,
5074 call memset instead. */
5075 if (TREE_CODE (startbit) == INTEGER_CST
5076 && TREE_CODE (endbit) == INTEGER_CST
5077 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, LCT_NORMAL,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5092 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5093 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5094 startbit_rtx, TYPE_MODE (sizetype),
5095 endbit_rtx, TYPE_MODE (sizetype));
	  if (GET_CODE (target) == REG)
	    emit_move_insn (target, targetx);
	}
    }
  else
    abort ();
}
5106 /* Store the value of EXP (an expression tree)
5107 into a subfield of TARGET which has mode MODE and occupies
5108 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5109 If MODE is VOIDmode, it means that we are storing into a bit-field.
5111 If VALUE_MODE is VOIDmode, return nothing in particular.
5112 UNSIGNEDP is not used in this case.
5114 Otherwise, return an rtx for the value stored. This rtx
5115 has mode VALUE_MODE if that is convenient to do.
5116 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5118 TYPE is the type of the underlying object,
5120 ALIAS_SET is the alias set for the destination. This value will
5121 (in general) be different from that for TARGET, since TARGET is a
5122 reference to the containing structure. */
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
	     alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     tree type;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5147 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
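  /* As an illustration (example values only): a bitsize of 5 gives
     width_mask == ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f, the mask of the
     five low-order bits that the store is allowed to touch.  */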
5149 /* If we are storing into an unaligned field of an aligned union that is
5150 in a register, we may have the mode of TARGET being an integer mode but
5151 MODE == BLKmode. In that case, get an aligned object whose size and
5152 alignment are the same as TARGET and store TARGET into it (we can avoid
5153 the store if the field being stored is the entire width of TARGET). Then
5154 call ourselves recursively to store the field into a BLKmode version of
5155 that object. Finally, load from the object into TARGET. This is not
5156 very efficient in general, but should only be slightly more expensive
5157 than the otherwise-required unaligned accesses. Perhaps this can be
5158 cleaned up later. */
  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object
	= assign_temp
	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
	   0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		   alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }
5181 if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
	abort ();
      return store_expr (exp, target, 0);
    }
5190 /* If the structure is in a register or if the component
5191 is a bit field, we cannot use addressing to access it.
5192 Use bit-field techniques or SUBREG to store in it. */
5194 if (mode == VOIDmode
5195 || (mode != BLKmode && ! direct_store[(int) mode]
5196 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5197 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5198 || GET_CODE (target) == REG
5199 || GET_CODE (target) == SUBREG
5200 /* If the field isn't aligned enough to store as an ordinary memref,
5201 store it as a bit field. */
5202 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5203 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5204 || bitpos % GET_MODE_ALIGNMENT (mode)))
5205 /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5210 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5212 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5214 /* If BITSIZE is narrower than the size of the type of EXP
5215 we will be narrowing TEMP. Normally, what's wanted are the
5216 low-order bits. However, if EXP's type is a record and this is
5217 big-endian machine, we want the upper BITSIZE bits. */
5218 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5219 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5220 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);
      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
5228 if (mode != VOIDmode && mode != BLKmode
5229 && mode != TYPE_MODE (TREE_TYPE (exp)))
5230 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5232 /* If the modes of TARGET and TEMP are both BLKmode, both
5233 must be in memory and BITPOS must be aligned on a byte
5234 boundary. If so, we simply do a block copy. */
5235 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5242 emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT));

	  return value_mode == VOIDmode ? const0_rtx : target;
	}
5249 /* Store the value in the bitfield. */
5250 store_bit_field (target, bitsize, bitpos, mode, temp,
5251 int_size_in_bytes (type));
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.
	     If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;

	      if (unsignedp)
		return expand_and (tmode, temp,
				   gen_int_mode (width_mask, tmode),
				   NULL_RTX);
5272 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5273 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, VOIDmode,
				    int_size_in_bytes (type));
	}

      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx = target;
5288 /* If a value is wanted, it must be the lhs;
5289 so make the address stable for multiple use. */
5291 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5292 && ! CONSTANT_ADDRESS_P (addr)
5293 /* A frame-pointer reference is already stable. */
5294 && ! (GET_CODE (addr) == PLUS
5295 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5296 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5297 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5298 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5300 /* Now build a reference to just the desired component. */
5302 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5304 if (to_rtx == target)
5305 to_rtx = copy_rtx (to_rtx);
5307 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5308 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5309 set_mem_alias_set (to_rtx, alias_set);
      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
5315 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5316 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5317 codes and find the ultimate containing object, which we return.
5319 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5320 bit position, and *PUNSIGNEDP to the signedness of the field.
5321 If the position of the field is variable, we store a tree
5322 giving the variable offset (in units) in *POFFSET.
5323 This offset is in addition to the bit position.
5324 If the position is not variable, we store 0 in *POFFSET.
5326 If any of the extraction expressions is volatile,
5327 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5329 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.
5333 If the field describes a variable-sized object, *PMODE is set to
5334 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5335 this case, but the address of the object can be found. */
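/* As an illustration (example values only): for the reference `s.f',
   where F is a non-bit-field `int' member laid out at byte offset 4,
   this returns the tree for S with *PBITSIZE == 32, *PBITPOS == 32,
   *POFFSET == 0 and *PMODE == SImode on a typical 32-bit target.  */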
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree placeholder_ptr = 0;
  tree tem;
5355 /* First get the mode, signedness, and size. We do this from just the
5356 outermost expression. */
5357 if (TREE_CODE (exp) == COMPONENT_REF)
5359 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5360 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5361 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5363 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }
5389 /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
5394 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5395 else if (TREE_CODE (exp) == COMPONENT_REF)
5397 tree field = TREE_OPERAND (exp, 1);
5398 tree this_offset = DECL_FIELD_OFFSET (field);
5400 /* If this field hasn't been filled in yet, don't go
5401 past it. This should only happen when folding expressions
5402 made during type construction. */
	  if (this_offset == 0)
	    break;
	  else if (! TREE_CONSTANT (this_offset)
5406 && contains_placeholder_p (this_offset))
5407 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5409 offset = size_binop (PLUS_EXPR, offset, this_offset);
5410 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5411 DECL_FIELD_BIT_OFFSET (field));
5413 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5416 else if (TREE_CODE (exp) == ARRAY_REF
5417 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5419 tree index = TREE_OPERAND (exp, 1);
5420 tree array = TREE_OPERAND (exp, 0);
5421 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5422 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5423 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5425 /* We assume all arrays have sizes that are a multiple of a byte.
5426 First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of
	     the array element.  */
5429 if (low_bound != 0 && ! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				 index, low_bound));
	  /* If the index has a self-referential type, pass it to a
	     WITH_RECORD_EXPR; if the component size is self-referential,
	     pass our component to one.  */
5436 if (! TREE_CONSTANT (index)
5437 && contains_placeholder_p (index))
5438 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5439 if (! TREE_CONSTANT (unit_size)
5440 && contains_placeholder_p (unit_size))
5441 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5443 offset = size_binop (PLUS_EXPR, offset,
5444 size_binop (MULT_EXPR,
					   convert (sizetype, index),
					   unit_size));
	}
5449 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
	{
	  tree new = find_placeholder (exp, &placeholder_ptr);
5453 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5454 We might have been called from tree optimization where we
	     haven't set up an object yet.  */
	  if (new == 0)
	    break;
	  else
	    exp = new;

	  continue;
	}
5463 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5464 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5465 && ! ((TREE_CODE (exp) == NOP_EXPR
5466 || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
5478 /* If OFFSET is constant, see if we can return the whole thing as a
5479 constant bit position. Otherwise, split it up. */
5480 if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
5519 /* Given an rtx VALUE that may contain additions and multiplications, return
5520 an equivalent value that just refers to a register, memory, or constant.
5521 This is done by generating instructions to perform the arithmetic and
5522 returning a pseudo-register containing the value.
5524 The returned value may be a REG, SUBREG, MEM or constant. */
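/* As an illustration (example values only): given
   VALUE == (plus:SI (reg:SI 60) (const_int 4)), force_operand emits an
   add instruction computing the sum and returns the pseudo register
   holding the result, which is then valid wherever a general operand
   is required.  */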
rtx
force_operand (value, target)
     rtx value, target;
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);
5533 enum rtx_code code = GET_CODE (value);
5535 /* Check for a PIC address load. */
5536 if ((code == PLUS || code == MINUS)
5537 && XEXP (value, 0) == pic_offset_table_rtx
5538 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5539 || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }
  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      convert_move (target, force_operand (XEXP (value, 0), NULL),
		    code == ZERO_EXTEND);
      return target;
    }
5557 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}
5568 /* Check for an addition with OP2 a constant integer and our first
5569 operand a PLUS of a virtual register and something else. In that
5570 case, we want to emit the sum of the virtual register and the
5571 constant first and then add the other value. This allows virtual
5572 register instantiation to simply modify the constant rather than
5573 creating another one around this addition. */
5574 if (code == PLUS && GET_CODE (op2) == CONST_INT
5575 && GET_CODE (XEXP (value, 0)) == PLUS
5576 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5577 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5578 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5580 rtx temp = expand_simple_binop (GET_MODE (value), code,
5581 XEXP (XEXP (value, 0), 0), op2,
5582 subtarget, 0, OPTAB_LIB_WIDEN);
5583 return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1),
						     NULL_RTX),
				      target, 0, OPTAB_LIB_WIDEN);
	}
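	  /* As an illustration of the special case above (example
	     values only): for
	     (plus (plus (reg virtual-stack-vars) (reg 61)) (const_int 8))
	     the constant 8 is first folded into the virtual register
	     sum, which instantiation can later rewrite as a single
	     frame-pointer offset; the remaining register operand is
	     then added separately.  */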
      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
5626 if (GET_RTX_CLASS (code) == '1')
5628 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5629 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5632 #ifdef INSN_SCHEDULING
5633 /* On machines that have insn scheduling, we want all memory reference to be
5634 explicit, so we need to deal with such paradoxical SUBREGs. */
5635 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5636 && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
5650 /* Subroutine of expand_expr: return nonzero iff there is no way that
5651 EXP can reference X, which is being modified. TOP_P is nonzero if this
5652 call is going to be used to determine whether we need a temporary
5653 for EXP, as opposed to a recursive call to this function.
5655 It is always safe for this routine to return zero since it merely
5656 searches for optimization opportunities. */
static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;
  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
5670 have no way of allocating temporaries of variable size
5671 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5672 So we assume here that something at a higher level has prevented a
5673 clash. This is somewhat bogus, but the best we can do. Only
5674 do this when X is BLKmode and when we are at the top level. */
5675 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5676 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5677 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5678 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
		   || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		      != INTEGER_CST)
5681 && GET_MODE (x) == BLKmode)
5682 /* If X is in the outgoing argument area, it is always safe. */
5683 || (GET_CODE (x) == MEM
5684 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5685 || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;
5689 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5690 find the underlying pseudo. */
5691 if (GET_CODE (x) == SUBREG)
5694 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5698 /* A SAVE_EXPR might appear many times in the expression passed to the
5699 top-level safe_from_p call, and if it has a complex subexpression,
5700 examining it multiple times could result in a combinatorial explosion.
5701 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5702 with optimization took about 28 minutes to compile -- even though it was
5703 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5704 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5705 we have processed. Note that the only test of top_p was above. */
  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }
5722 /* Now look at our tree code and possibly recurse. */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
5734 return ((TREE_VALUE (exp) == 0
5735 || safe_from_p (x, TREE_VALUE (exp), 0))
5736 && (TREE_CHAIN (exp) == 0
5737 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5738 else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR?  */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
5753 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5754 the expression. If it is set, we conflict iff we are that rtx or
5755 both are in memory. Otherwise, we check all operands of the
5756 expression recursively. */
      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || GET_CODE (DECL_RTL (exp)) != MEM)
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;
5783 if (GET_CODE (x) == MEM
5784 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5785 get_alias_set (exp)))
5790 /* Assume that the call will clobber all hard registers and
5792 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5793 || GET_CODE (x) == MEM)
5798 /* If a sequence exists, we would have to scan every instruction
5799 in the sequence to see if it was safe. This is probably not
5801 if (RTL_EXPR_SEQUENCE (exp))
5804 exp_rtl = RTL_EXPR_RTL (exp);
5807 case WITH_CLEANUP_EXPR:
5808 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5811 case CLEANUP_POINT_EXPR:
5812 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5815 exp_rtl = SAVE_EXPR_RTL (exp);
	  /* If we've already scanned this, don't do it again.  Otherwise,
	     show we've scanned it and record for clearing the flag if we're
	     going on.  */
	  if (TREE_PRIVATE (exp))
	    return 1;

	  TREE_PRIVATE (exp) = 1;
	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    {
	      TREE_PRIVATE (exp) = 0;
	      return 0;
	    }

	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
	  return 1;
5836 /* The only operand we look at is operand 1. The rest aren't
5837 part of the expression. */
5838 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5840 case METHOD_CALL_EXPR:
5841 /* This takes an rtx argument, but shouldn't appear here. */
5848 /* If we have an rtx, we do not need to scan our operands. */
5852 nops = first_rtl_op (TREE_CODE (exp));
5853 for (i = 0; i < nops; i++)
5854 if (TREE_OPERAND (exp, i) != 0
5855 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5858 /* If this is a language-specific tree code, it may require
5859 special handling. */
5860 if ((unsigned int) TREE_CODE (exp)
5861 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !(*lang_hooks.safe_from_p) (x, exp))
	return 0;
    }
5866 /* If we have an rtl, find any enclosed object. Then see if we conflict
5870 if (GET_CODE (exp_rtl) == SUBREG)
5872 exp_rtl = SUBREG_REG (exp_rtl);
5873 if (GET_CODE (exp_rtl) == REG
5874 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5878 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5879 are memory and they conflict. */
5880 return ! (rtx_equal_p (x, exp_rtl)
5881 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5882 && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
5890 /* Subroutine of expand_expr: return rtx if EXP is a
5891 variable or parameter; else return 0. */
5898 switch (TREE_CODE (exp))
5902 return DECL_RTL (exp);
#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);
5921 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5922 if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;
5926 /* First check the type of the overall operation. We need only look at
5927 unary, binary and relational operations. */
5928 if (TREE_CODE_CLASS (code) == '1'
5929 || TREE_CODE_CLASS (code) == '2'
5930 || TREE_CODE_CLASS (code) == '<')
5932 mode = TYPE_MODE (TREE_TYPE (exp));
5933 if (GET_MODE_CLASS (mode) == MODE_INT
5934 && mode > MAX_INTEGER_COMPUTATION_MODE)
5935 internal_error ("unsupported wide integer operation");
5938 /* Check operand of a unary op. */
5939 if (TREE_CODE_CLASS (code) == '1')
5941 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5942 if (GET_MODE_CLASS (mode) == MODE_INT
5943 && mode > MAX_INTEGER_COMPUTATION_MODE)
5944 internal_error ("unsupported wide integer operation");
5947 /* Check operands of a binary/comparison op. */
5948 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5950 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5951 if (GET_MODE_CLASS (mode) == MODE_INT
5952 && mode > MAX_INTEGER_COMPUTATION_MODE)
5953 internal_error ("unsupported wide integer operation");
5955 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5956 if (GET_MODE_CLASS (mode) == MODE_INT
5957 && mode > MAX_INTEGER_COMPUTATION_MODE)
5958 internal_error ("unsupported wide integer operation");
5963 /* Return the highest power of two that EXP is known to be a multiple of.
5964 This is used in updating alignment of MEMs in array references. */
static HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
  HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
5975 /* We can find the lowest bit that's a one. If the low
5976 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5977 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
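      /* As an illustration (example values only): c0 &= -c0 isolates
	 the lowest set bit, so a constant of 24 (binary 11000) yields
	 8: anything that is a multiple of 24 is known to be at least
	 8-byte aligned.  */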
5993 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5994 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5995 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5996 return MIN (c0, c1);
5999 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6000 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;
6014 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6015 case SAVE_EXPR: case WITH_RECORD_EXPR:
6016 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6019 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6022 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6023 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
6033 /* Similar, except that it is known that the expression must be a multiple
6034 of the alignment of TYPE. */
6036 static HOST_WIDE_INT
6037 highest_pow2_factor_for_type (type, exp)
6041 HOST_WIDE_INT type_align, factor;
6043 factor = highest_pow2_factor (exp);
6044 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6045 return MAX (factor, type_align);
6048 /* Return an object on the placeholder list that matches EXP, a
6049 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6050 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6051 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6052 is a location which initially points to a starting location in the
6053 placeholder list (zero means start of the list) and where a pointer into
6054 the placeholder list at which the object is found is placed. */
tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;
6064 for (placeholder_expr
6065 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6066 placeholder_expr != 0;
6067 placeholder_expr = TREE_CHAIN (placeholder_expr))
6069 tree need_type = TYPE_MAIN_VARIANT (type);
6072 /* Find the outermost reference that is of the type we want. If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6076 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6077 || TREE_CODE (elt) == COND_EXPR)
6078 ? TREE_OPERAND (elt, 1)
6079 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6080 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6081 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6082 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6083 ? TREE_OPERAND (elt, 0) : 0))
6084 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6087 *plist = placeholder_expr;
6091 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6093 = ((TREE_CODE (elt) == COMPOUND_EXPR
6094 || TREE_CODE (elt) == COND_EXPR)
6095 ? TREE_OPERAND (elt, 1)
6096 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6097 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6098 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6099 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6100 ? TREE_OPERAND (elt, 0) : 0))
6101 if (POINTER_TYPE_P (TREE_TYPE (elt))
6102 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6106 *plist = placeholder_expr;
6107 return build1 (INDIRECT_REF, need_type, elt);
6114 /* expand_expr: generate code for computing expression EXP.
6115 An rtx for the computed value is returned. The value is never null.
6116 In the case of a void EXP, const0_rtx is returned.
6118 The value may be stored in TARGET if TARGET is nonzero.
6119 TARGET is just a suggestion; callers must assume that
6120 the rtx returned may not be the same as TARGET.
6122 If TARGET is CONST0_RTX, it means that the value will be ignored.
6124 If TMODE is not VOIDmode, it suggests generating the
6125 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6127 TMODE is just a suggestion; callers must assume that
6128 the rtx returned may not have mode TMODE.
6130 Note that TARGET may have neither TMODE nor MODE. In that case, it
6131 probably will not be used.
6133 If MODIFIER is EXPAND_SUM then when EXP is an addition
6134 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6135 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6136 products as above, or REG or MEM, or constant.
6137 Ordinarily in such cases we would output mul or add instructions
6138 and then return a pseudo reg containing the sum.
6140 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6141 it also marks a label as absolutely required (it can't be dead).
6142 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6143 This is used for outputting expressions used in initializers.
6145 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6146 with a constant address even if that address is not normally legitimate.
6147 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
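/* As an illustration (example values only): under EXPAND_SUM,
   expanding the address of `a[i]' may return the unreduced form
   (plus (mult (reg) (const_int 4)) (symbol_ref "a")) instead of
   emitting the multiply and add, so that the caller can fold the
   whole sum into a single addressing mode.  */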
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
6158 int unsignedp = TREE_UNSIGNED (type);
6159 enum machine_mode mode;
6160 enum tree_code code = TREE_CODE (exp);
6162 rtx subtarget, original_target;
6166 /* Handle ERROR_MARK before anybody tries to access its type. */
6167 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6169 op0 = CONST0_RTX (tmode);
6175 mode = TYPE_MODE (type);
6176 /* Use subtarget as the target for operand 0 of a binary operation. */
6177 subtarget = get_subtarget (target);
6178 original_target = target;
6179 ignore = (target == const0_rtx
6180 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6181 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6182 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6183 && TREE_CODE (type) == VOID_TYPE));
6185 /* If we are going to ignore this result, we need only do something
6186 if there is a side-effect somewhere in the expression. If there
6187 is, short-circuit the most common cases here. Note that we must
6188 not call expand_expr with anything but const0_rtx in case this
6189 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6193 if (! TREE_SIDE_EFFECTS (exp))
6196 /* Ensure we reference a volatile object even if value is ignored, but
6197 don't do this if all we are doing is taking its address. */
6198 if (TREE_THIS_VOLATILE (exp)
6199 && TREE_CODE (exp) != FUNCTION_DECL
6200 && mode != VOIDmode && mode != BLKmode
6201 && modifier != EXPAND_CONST_ADDRESS)
6203 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6204 if (GET_CODE (temp) == MEM)
6205 temp = copy_to_reg (temp);
6209 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6210 || code == INDIRECT_REF || code == BUFFER_REF)
6211 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6214 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6215 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6217 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6218 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6221 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6222 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6223 /* If the second operand has no side effects, just evaluate
6225 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6227 else if (code == BIT_FIELD_REF)
6229 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6230 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6231 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6238 #ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */
6244 && GET_MODE (target) != mode
6245 && TREE_CODE (exp) != INTEGER_CST
6246 && TREE_CODE (exp) != PARM_DECL
6247 && TREE_CODE (exp) != ARRAY_REF
6248 && TREE_CODE (exp) != ARRAY_RANGE_REF
6249 && TREE_CODE (exp) != COMPONENT_REF
6250 && TREE_CODE (exp) != BIT_FIELD_REF
6251 && TREE_CODE (exp) != INDIRECT_REF
6252 && TREE_CODE (exp) != CALL_EXPR
6253 && TREE_CODE (exp) != VAR_DECL
6254 && TREE_CODE (exp) != RTL_EXPR)
6256 enum machine_mode mode = GET_MODE (target);
6258 if (GET_MODE_CLASS (mode) == MODE_INT
6259 && mode > MAX_INTEGER_COMPUTATION_MODE)
6260 internal_error ("unsupported wide integer operation");
6264 && TREE_CODE (exp) != INTEGER_CST
6265 && TREE_CODE (exp) != PARM_DECL
6266 && TREE_CODE (exp) != ARRAY_REF
6267 && TREE_CODE (exp) != ARRAY_RANGE_REF
6268 && TREE_CODE (exp) != COMPONENT_REF
6269 && TREE_CODE (exp) != BIT_FIELD_REF
6270 && TREE_CODE (exp) != INDIRECT_REF
6271 && TREE_CODE (exp) != VAR_DECL
6272 && TREE_CODE (exp) != CALL_EXPR
6273 && TREE_CODE (exp) != RTL_EXPR
6274 && GET_MODE_CLASS (tmode) == MODE_INT
6275 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6276 internal_error ("unsupported wide integer operation");
  check_max_integer_computation_mode (exp);
#endif
6281 /* If will do cse, generate all results into pseudo registers
6282 since 1) that allows cse to find more things
6283 and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.  */
6287 if (! cse_not_expected && mode != BLKmode && target
6288 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6289 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6296 tree function = decl_function_context (exp);
6297 /* Handle using a label in a containing function. */
6298 if (function != current_function_decl
6299 && function != inline_function_decl && function != 0)
6301 struct function *p = find_function_data (function);
6302 p->expr->x_forced_labels
6303 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6304 p->expr->x_forced_labels);
6308 if (modifier == EXPAND_INITIALIZER)
6309 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6314 temp = gen_rtx_MEM (FUNCTION_MODE,
6315 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6316 if (function != current_function_decl
6317 && function != inline_function_decl && function != 0)
6318 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6323 if (DECL_RTL (exp) == 0)
6325 error_with_decl (exp, "prior parameter's size depends on `%s'");
6326 return CONST0_RTX (mode);
6329 /* ... fall through ... */
6332 /* If a static var's type was incomplete when the decl was written,
6333 but the type is complete now, lay out the decl now. */
6334 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6335 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6337 rtx value = DECL_RTL_IF_SET (exp);
6339 layout_decl (exp, 0);
6341 /* If the RTL was already set, update its mode and memory attributes. */
6345 PUT_MODE (value, DECL_MODE (exp));
6346 SET_DECL_RTL (exp, 0);
6347 set_mem_attributes (value, exp, 1);
6348 SET_DECL_RTL (exp, value);
6352 /* ... fall through ... */
6356 if (DECL_RTL (exp) == 0)
6359 /* Ensure the variable is marked as used even if it doesn't go through
6360 a parser. If it hasn't been used yet, write out an external definition. */
6362 if (! TREE_USED (exp))
6364 assemble_external (exp);
6365 TREE_USED (exp) = 1;
6368 /* Show we haven't gotten RTL for this yet. */
6371 /* Handle variables inherited from containing functions. */
6372 context = decl_function_context (exp);
6374 /* We treat inline_function_decl as an alias for the current function
6375 because that is the inline function whose vars, types, etc.
6376 are being merged into the current function.
6377 See expand_inline_function. */
6379 if (context != 0 && context != current_function_decl
6380 && context != inline_function_decl
6381 /* If var is static, we don't need a static chain to access it. */
6382 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6383 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6387 /* Mark as non-local and addressable. */
6388 DECL_NONLOCAL (exp) = 1;
6389 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6391 (*lang_hooks.mark_addressable) (exp);
6392 if (GET_CODE (DECL_RTL (exp)) != MEM)
6394 addr = XEXP (DECL_RTL (exp), 0);
6395 if (GET_CODE (addr) == MEM)
6397 = replace_equiv_address (addr,
6398 fix_lexical_addr (XEXP (addr, 0), exp));
6400 addr = fix_lexical_addr (addr, exp);
6402 temp = replace_equiv_address (DECL_RTL (exp), addr);
6405 /* This is the case of an array whose size is to be determined
6406 from its initializer, while the initializer is still being parsed. See expand_decl. */
6409 else if (GET_CODE (DECL_RTL (exp)) == MEM
6410 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6411 temp = validize_mem (DECL_RTL (exp));
6413 /* If DECL_RTL is memory, we are in the normal case and either
6414 the address is not valid or it is not a register and -fforce-addr
6415 is specified, get the address into a register. */
6417 else if (GET_CODE (DECL_RTL (exp)) == MEM
6418 && modifier != EXPAND_CONST_ADDRESS
6419 && modifier != EXPAND_SUM
6420 && modifier != EXPAND_INITIALIZER
6421 && (! memory_address_p (DECL_MODE (exp),
6422 XEXP (DECL_RTL (exp), 0))
6424 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6425 temp = replace_equiv_address (DECL_RTL (exp),
6426 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6428 /* If we got something, return it. But first, set the alignment
6429 if the address is a register. */
6432 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6433 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6438 /* If the mode of DECL_RTL does not match that of the decl, it
6439 must be a promoted value. We return a SUBREG of the wanted mode,
6440 but mark it so that we know that it was already extended. */
6442 if (GET_CODE (DECL_RTL (exp)) == REG
6443 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6445 /* Get the signedness used for this variable. Ensure we get the
6446 same mode we got when the variable was declared. */
6447 if (GET_MODE (DECL_RTL (exp))
6448 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6449 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6452 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6453 SUBREG_PROMOTED_VAR_P (temp) = 1;
6454 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
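/* Illustrative sketch: on a target whose PROMOTE_MODE widens QImode
   variables to SImode registers, a `char' variable lives in (reg:SI N)
   and what we return here is
     (subreg:QI (reg:SI N) 0)
   (lowpart offset shown for little endian) with SUBREG_PROMOTED_VAR_P
   set, so callers know the upper bits already hold a valid extension
   and need not re-extend.  */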
6458 return DECL_RTL (exp);
6461 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6462 TREE_INT_CST_HIGH (exp), mode);
6464 /* ??? If overflow is set, fold will have done an incomplete job,
6465 which can result in (plus xx (const_int 0)), which can get
6466 simplified by validate_replace_rtx during virtual register
6467 instantiation, which can result in unrecognizable insns.
6468 Avoid this by forcing all overflows into registers. */
6469 if (TREE_CONSTANT_OVERFLOW (exp)
6470 && modifier != EXPAND_INITIALIZER)
6471 temp = force_reg (mode, temp);
6476 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6479 /* If optimized, generate immediate CONST_DOUBLE
6480 which will be turned into memory by reload if necessary.
6482 We used to force a register so that loop.c could see it. But
6483 this does not allow gen_* patterns to perform optimizations with
6484 the constants. It also produces two insns in cases like "x = 1.0;".
6485 On most machines, floating-point constants are not permitted in
6486 many insns, so we'd end up copying it to a register in any case.
6488 Now, we do the copying in expand_binop, if appropriate. */
6489 return immed_real_const (exp);
6493 if (! TREE_CST_RTL (exp))
6494 output_constant_def (exp, 1);
6496 /* TREE_CST_RTL probably contains a constant address.
6497 On RISC machines where a constant address isn't valid,
6498 make some insns to get that address into a register. */
6499 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6500 && modifier != EXPAND_CONST_ADDRESS
6501 && modifier != EXPAND_INITIALIZER
6502 && modifier != EXPAND_SUM
6503 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6505 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6506 return replace_equiv_address (TREE_CST_RTL (exp),
6507 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6508 return TREE_CST_RTL (exp);
6510 case EXPR_WITH_FILE_LOCATION:
6513 const char *saved_input_filename = input_filename;
6514 int saved_lineno = lineno;
6515 input_filename = EXPR_WFL_FILENAME (exp);
6516 lineno = EXPR_WFL_LINENO (exp);
6517 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6518 emit_line_note (input_filename, lineno);
6519 /* Possibly avoid switching back and forth here. */
6520 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6521 input_filename = saved_input_filename;
6522 lineno = saved_lineno;
6527 context = decl_function_context (exp);
6529 /* If this SAVE_EXPR was at global context, assume we are an
6530 initialization function and move it into our context. */
6532 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6534 /* We treat inline_function_decl as an alias for the current function
6535 because that is the inline function whose vars, types, etc.
6536 are being merged into the current function.
6537 See expand_inline_function. */
6538 if (context == current_function_decl || context == inline_function_decl)
6541 /* If this is non-local, handle it. */
6544 /* The following call just exists to abort if the context is
6545 not that of a containing function. */
6546 find_function_data (context);
6548 temp = SAVE_EXPR_RTL (exp);
6549 if (temp && GET_CODE (temp) == REG)
6551 put_var_into_stack (exp);
6552 temp = SAVE_EXPR_RTL (exp);
6554 if (temp == 0 || GET_CODE (temp) != MEM)
6557 replace_equiv_address (temp,
6558 fix_lexical_addr (XEXP (temp, 0), exp));
6560 if (SAVE_EXPR_RTL (exp) == 0)
6562 if (mode == VOIDmode)
6565 temp = assign_temp (build_qualified_type (type,
6567 | TYPE_QUAL_CONST)),
6570 SAVE_EXPR_RTL (exp) = temp;
6571 if (!optimize && GET_CODE (temp) == REG)
6572 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6575 /* If the mode of TEMP does not match that of the expression, it
6576 must be a promoted value. We pass store_expr a SUBREG of the
6577 wanted mode but mark it so that we know that it was already
6578 extended. Note that `unsignedp' was modified above in this case. */
6581 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6583 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6584 SUBREG_PROMOTED_VAR_P (temp) = 1;
6585 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6588 if (temp == const0_rtx)
6589 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6591 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6593 TREE_USED (exp) = 1;
6596 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6597 must be a promoted value. We return a SUBREG of the wanted mode,
6598 but mark it so that we know that it was already extended. */
6600 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6601 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6603 /* Compute the signedness and make the proper SUBREG. */
6604 promote_mode (type, mode, &unsignedp, 0);
6605 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6606 SUBREG_PROMOTED_VAR_P (temp) = 1;
6607 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6611 return SAVE_EXPR_RTL (exp);
6616 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6617 TREE_OPERAND (exp, 0)
6618 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6622 case PLACEHOLDER_EXPR:
6624 tree old_list = placeholder_list;
6625 tree placeholder_expr = 0;
6627 exp = find_placeholder (exp, &placeholder_expr);
6631 placeholder_list = TREE_CHAIN (placeholder_expr);
6632 temp = expand_expr (exp, original_target, tmode, modifier);
6633 placeholder_list = old_list;
6637 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6640 case WITH_RECORD_EXPR:
6641 /* Put the object on the placeholder list, expand our first operand,
6642 and pop the list. */
6643 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6645 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6647 placeholder_list = TREE_CHAIN (placeholder_list);
6651 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6652 expand_goto (TREE_OPERAND (exp, 0));
6654 expand_computed_goto (TREE_OPERAND (exp, 0));
6658 expand_exit_loop_if_false (NULL,
6659 invert_truthvalue (TREE_OPERAND (exp, 0)));
6662 case LABELED_BLOCK_EXPR:
6663 if (LABELED_BLOCK_BODY (exp))
6664 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6665 /* Should perhaps use expand_label, but this is simpler and safer. */
6666 do_pending_stack_adjust ();
6667 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6670 case EXIT_BLOCK_EXPR:
6671 if (EXIT_BLOCK_RETURN (exp))
6672 sorry ("returned value in block_exit_expr");
6673 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6678 expand_start_loop (1);
6679 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6687 tree vars = TREE_OPERAND (exp, 0);
6688 int vars_need_expansion = 0;
6690 /* Need to open a binding contour here because
6691 if there are any cleanups they must be contained here. */
6692 expand_start_bindings (2);
6694 /* Mark the corresponding BLOCK for output in its proper place. */
6695 if (TREE_OPERAND (exp, 2) != 0
6696 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6697 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6699 /* If VARS have not yet been expanded, expand them now. */
6702 if (!DECL_RTL_SET_P (vars))
6704 vars_need_expansion = 1;
6707 expand_decl_init (vars);
6708 vars = TREE_CHAIN (vars);
6711 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6713 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6719 if (RTL_EXPR_SEQUENCE (exp))
6721 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6723 emit_insns (RTL_EXPR_SEQUENCE (exp));
6724 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6726 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6727 free_temps_for_rtl_expr (exp);
6728 return RTL_EXPR_RTL (exp);
6731 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6737 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6738 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6743 /* All elts simple constants => refer to a constant in memory. But
6744 if this is a non-BLKmode mode, let it store a field at a time
6745 since that should make a CONST_INT or CONST_DOUBLE when we
6746 fold. Likewise, if we have a target we can use, it is best to
6747 store directly into the target unless the type is large enough
6748 that memcpy will be used. If we are making an initializer and
6749 all operands are constant, put it in memory as well. */
6750 else if ((TREE_STATIC (exp)
6751 && ((mode == BLKmode
6752 && ! (target != 0 && safe_from_p (target, exp, 1)))
6753 || TREE_ADDRESSABLE (exp)
6754 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6755 && (! MOVE_BY_PIECES_P
6756 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6758 && ! mostly_zeros_p (exp))))
6759 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6761 rtx constructor = output_constant_def (exp, 1);
6763 if (modifier != EXPAND_CONST_ADDRESS
6764 && modifier != EXPAND_INITIALIZER
6765 && modifier != EXPAND_SUM)
6766 constructor = validize_mem (constructor);
6772 /* Handle calls that pass values in multiple non-contiguous
6773 locations. The Irix 6 ABI has examples of this. */
6774 if (target == 0 || ! safe_from_p (target, exp, 1)
6775 || GET_CODE (target) == PARALLEL)
6777 = assign_temp (build_qualified_type (type,
6779 | (TREE_READONLY (exp)
6780 * TYPE_QUAL_CONST))),
6781 0, TREE_ADDRESSABLE (exp), 1);
6783 store_constructor (exp, target, 0,
6784 int_size_in_bytes (TREE_TYPE (exp)));
6790 tree exp1 = TREE_OPERAND (exp, 0);
6792 tree string = string_constant (exp1, &index);
6794 /* Try to optimize reads from const strings. */
6796 && TREE_CODE (string) == STRING_CST
6797 && TREE_CODE (index) == INTEGER_CST
6798 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6799 && GET_MODE_CLASS (mode) == MODE_INT
6800 && GET_MODE_SIZE (mode) == 1
6801 && modifier != EXPAND_WRITE)
6802 return gen_int_mode (TREE_STRING_POINTER (string)
6803 [TREE_INT_CST_LOW (index)], mode);
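/* So a read such as *("abc" + 1), where the index is a known constant
   within the string, folds at expansion time to the character constant
   'b' instead of emitting a load.  */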
6805 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6806 op0 = memory_address (mode, op0);
6807 temp = gen_rtx_MEM (mode, op0);
6808 set_mem_attributes (temp, exp, 0);
6810 /* If we are writing to this object and its type is a record with
6811 readonly fields, we must mark it as readonly so it will
6812 conflict with readonly references to those fields. */
6813 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6814 RTX_UNCHANGING_P (temp) = 1;
6820 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6824 tree array = TREE_OPERAND (exp, 0);
6825 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6826 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6827 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6830 /* Optimize the special-case of a zero lower bound.
6832 We convert the low_bound to sizetype to avoid some problems
6833 with constant folding. (E.g. suppose the lower bound is 1,
6834 and its mode is QI. Without the conversion, (ARRAY
6835 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6836 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6838 if (! integer_zerop (low_bound))
6839 index = size_diffop (index, convert (sizetype, low_bound));
6841 /* Fold an expression like: "foo"[2].
6842 This is not done in fold so it won't happen inside &.
6843 Don't fold if this is for wide characters since it's too
6844 difficult to do correctly and this is a very rare case. */
6846 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6847 && TREE_CODE (array) == STRING_CST
6848 && TREE_CODE (index) == INTEGER_CST
6849 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6850 && GET_MODE_CLASS (mode) == MODE_INT
6851 && GET_MODE_SIZE (mode) == 1)
6852 return gen_int_mode (TREE_STRING_POINTER (array)
6853 [TREE_INT_CST_LOW (index)], mode);
6855 /* If this is a constant index into a constant array,
6856 just get the value from the array. Handle both the cases when
6857 we have an explicit constructor and when our operand is a variable
6858 that was declared const. */
6860 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6861 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6862 && TREE_CODE (index) == INTEGER_CST
6863 && 0 > compare_tree_int (index,
6864 list_length (CONSTRUCTOR_ELTS
6865 (TREE_OPERAND (exp, 0)))))
6869 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6870 i = TREE_INT_CST_LOW (index);
6871 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6875 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6879 else if (optimize >= 1
6880 && modifier != EXPAND_CONST_ADDRESS
6881 && modifier != EXPAND_INITIALIZER
6882 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6883 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6884 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6886 if (TREE_CODE (index) == INTEGER_CST)
6888 tree init = DECL_INITIAL (array);
6890 if (TREE_CODE (init) == CONSTRUCTOR)
6894 for (elem = CONSTRUCTOR_ELTS (init);
6896 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6897 elem = TREE_CHAIN (elem))
6900 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6901 return expand_expr (fold (TREE_VALUE (elem)), target,
6904 else if (TREE_CODE (init) == STRING_CST
6905 && 0 > compare_tree_int (index,
6906 TREE_STRING_LENGTH (init)))
6908 tree type = TREE_TYPE (TREE_TYPE (init));
6909 enum machine_mode mode = TYPE_MODE (type);
6911 if (GET_MODE_CLASS (mode) == MODE_INT
6912 && GET_MODE_SIZE (mode) == 1)
6913 return gen_int_mode (TREE_STRING_POINTER (init)
6914 [TREE_INT_CST_LOW (index)], mode);
6923 case ARRAY_RANGE_REF:
6924 /* If the operand is a CONSTRUCTOR, we can just extract the
6925 appropriate field if it is present. Don't do this if we have
6926 already written the data since we want to refer to that copy
6927 and varasm.c assumes that's what we'll do. */
6928 if (code == COMPONENT_REF
6929 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6930 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6934 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6935 elt = TREE_CHAIN (elt))
6936 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6937 /* We can normally use the value of the field in the
6938 CONSTRUCTOR. However, if this is a bitfield in
6939 an integral mode that we can fit in a HOST_WIDE_INT,
6940 we must mask only the number of bits in the bitfield,
6941 since this is done implicitly by the constructor. If
6942 the bitfield does not meet either of those conditions,
6943 we can't do this optimization. */
6944 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6945 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6947 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6948 <= HOST_BITS_PER_WIDE_INT))))
6950 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6951 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6953 HOST_WIDE_INT bitsize
6954 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6955 enum machine_mode imode
6956 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6958 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6960 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6961 op0 = expand_and (imode, op0, op1, target);
6966 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6969 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6971 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
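/* Worked example of the masking above, assuming a 3-bit field read in
   SImode: an unsigned field is ANDed with (1 << 3) - 1 == 7, while a
   signed field is shifted left by 32 - 3 == 29 bits and then
   arithmetic-shifted right by 29, replicating the sign bit through the
   upper bits.  */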
6981 enum machine_mode mode1;
6982 HOST_WIDE_INT bitsize, bitpos;
6985 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6986 &mode1, &unsignedp, &volatilep);
6989 /* If we got back the original object, something is wrong. Perhaps
6990 we are evaluating an expression too early. In any event, don't
6991 infinitely recurse. */
6995 /* If TEM's type is a union of variable size, pass TARGET to the inner
6996 computation, since it will need a temporary and TARGET is known
6997 to be usable for that purpose. This occurs in unchecked conversion in Ada. */
7001 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7002 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7004 ? target : NULL_RTX),
7006 (modifier == EXPAND_INITIALIZER
7007 || modifier == EXPAND_CONST_ADDRESS)
7008 ? modifier : EXPAND_NORMAL);
7010 /* If this is a constant, put it into a register if it is a
7011 legitimate constant and OFFSET is 0; put it in memory if it isn't. */
7012 if (CONSTANT_P (op0))
7014 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7015 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7017 op0 = force_reg (mode, op0);
7019 op0 = validize_mem (force_const_mem (mode, op0));
7024 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7026 /* If this object is in a register, put it into memory.
7027 This case can't occur in C, but can in Ada if we have
7028 unchecked conversion of an expression from a scalar type to
7029 an array or record type. */
7030 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7031 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7033 /* If the operand is a SAVE_EXPR, we can deal with this by
7034 forcing the SAVE_EXPR into memory. */
7035 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7037 put_var_into_stack (TREE_OPERAND (exp, 0));
7038 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7043 = build_qualified_type (TREE_TYPE (tem),
7044 (TYPE_QUALS (TREE_TYPE (tem))
7045 | TYPE_QUAL_CONST));
7046 rtx memloc = assign_temp (nt, 1, 1, 1);
7048 emit_move_insn (memloc, op0);
7053 if (GET_CODE (op0) != MEM)
7056 #ifdef POINTERS_EXTEND_UNSIGNED
7057 if (GET_MODE (offset_rtx) != Pmode)
7058 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7060 if (GET_MODE (offset_rtx) != ptr_mode)
7061 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7064 /* A constant address in OP0 can have VOIDmode; we must not
7065 call force_reg in that case. */
7066 if (GET_CODE (op0) == MEM
7067 && GET_MODE (op0) == BLKmode
7068 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7070 && (bitpos % bitsize) == 0
7071 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7072 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7074 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7078 op0 = offset_address (op0, offset_rtx,
7079 highest_pow2_factor (offset));
7082 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7083 record its alignment as BIGGEST_ALIGNMENT. */
7084 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7085 && is_aligning_offset (offset, tem))
7086 set_mem_align (op0, BIGGEST_ALIGNMENT);
7088 /* Don't forget about volatility even if this is a bitfield. */
7089 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7091 if (op0 == orig_op0)
7092 op0 = copy_rtx (op0);
7094 MEM_VOLATILE_P (op0) = 1;
7097 /* The following code doesn't handle CONCAT.
7098 Assume only bitpos == 0 can be used for CONCAT, due to
7099 one-element arrays having the same mode as their element. */
7100 if (GET_CODE (op0) == CONCAT)
7102 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7107 /* In cases where an aligned union has an unaligned object
7108 as a field, we might be extracting a BLKmode value from
7109 an integer-mode (e.g., SImode) object. Handle this case
7110 by doing the extract into an object as wide as the field
7111 (which we know to be the width of a basic mode), then
7112 storing into memory, and changing the mode to BLKmode. */
7113 if (mode1 == VOIDmode
7114 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7115 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7116 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7117 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7118 && modifier != EXPAND_CONST_ADDRESS
7119 && modifier != EXPAND_INITIALIZER)
7120 /* If the field isn't aligned enough to fetch as a memref,
7121 fetch it as a bit field. */
7122 || (mode1 != BLKmode
7123 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7124 && ((TYPE_ALIGN (TREE_TYPE (tem))
7125 < GET_MODE_ALIGNMENT (mode))
7126 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7127 /* If the type and the field are a constant size and the
7128 size of the type isn't the same size as the bitfield,
7129 we must use bitfield operations. */
7131 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7133 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7136 enum machine_mode ext_mode = mode;
7138 if (ext_mode == BLKmode
7139 && ! (target != 0 && GET_CODE (op0) == MEM
7140 && GET_CODE (target) == MEM
7141 && bitpos % BITS_PER_UNIT == 0))
7142 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7144 if (ext_mode == BLKmode)
7146 /* In this case, BITPOS must start at a byte boundary and
7147 TARGET, if specified, must be a MEM. */
7148 if (GET_CODE (op0) != MEM
7149 || (target != 0 && GET_CODE (target) != MEM)
7150 || bitpos % BITS_PER_UNIT != 0)
7153 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7155 target = assign_temp (type, 0, 1, 1);
7157 emit_block_move (target, op0,
7158 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7164 op0 = validize_mem (op0);
7166 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7167 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7169 op0 = extract_bit_field (op0, bitsize, bitpos,
7170 unsignedp, target, ext_mode, ext_mode,
7171 int_size_in_bytes (TREE_TYPE (tem)));
7173 /* If the result is a record type and BITSIZE is narrower than
7174 the mode of OP0, an integral mode, and this is a big endian
7175 machine, we must put the field into the high-order bits. */
7176 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7177 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7178 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7179 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7180 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7184 if (mode == BLKmode)
7186 rtx new = assign_temp (build_qualified_type
7187 ((*lang_hooks.types.type_for_mode)
7189 TYPE_QUAL_CONST), 0, 1, 1);
7191 emit_move_insn (new, op0);
7192 op0 = copy_rtx (new);
7193 PUT_MODE (op0, BLKmode);
7194 set_mem_attributes (op0, exp, 1);
7200 /* If the result is BLKmode, use that to access the object now as well. */
7202 if (mode == BLKmode)
7205 /* Get a reference to just this component. */
7206 if (modifier == EXPAND_CONST_ADDRESS
7207 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7208 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7210 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7212 if (op0 == orig_op0)
7213 op0 = copy_rtx (op0);
7215 set_mem_attributes (op0, exp, 0);
7216 if (GET_CODE (XEXP (op0, 0)) == REG)
7217 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7219 MEM_VOLATILE_P (op0) |= volatilep;
7220 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7221 || modifier == EXPAND_CONST_ADDRESS
7222 || modifier == EXPAND_INITIALIZER)
7224 else if (target == 0)
7225 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7227 convert_move (target, op0, unsignedp);
7233 rtx insn, before = get_last_insn (), vtbl_ref;
7235 /* Evaluate the interior expression. */
7236 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7239 /* Get or create an instruction off which to hang a note. */
7240 if (REG_P (subtarget))
7243 insn = get_last_insn ();
7246 if (! INSN_P (insn))
7247 insn = prev_nonnote_insn (insn);
7251 target = gen_reg_rtx (GET_MODE (subtarget));
7252 insn = emit_move_insn (target, subtarget);
7255 /* Collect the data for the note. */
7256 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7257 vtbl_ref = plus_constant (vtbl_ref,
7258 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7259 /* Discard the initial CONST that was added. */
7260 vtbl_ref = XEXP (vtbl_ref, 0);
7263 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7268 /* Intended for a reference to a buffer of a file-object in Pascal.
7269 But it's not certain that a special tree code will really be
7270 necessary for these. INDIRECT_REF might work for them. */
7276 /* Pascal set IN expression.
7279 rlo = set_low - (set_low%bits_per_word);
7280 the_word = set [ (index - rlo)/bits_per_word ];
7281 bit_index = index % bits_per_word;
7282 bitmask = 1 << bit_index;
7283 return !!(the_word & bitmask); */
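/* Working that pseudocode through a small case, assuming
   bits_per_word == 8, set_low == 0 and index == 10: rlo == 0,
   the_word == set[1], bit_index == 2 and bitmask == 0x04, so the
   membership test loads one byte and examines bit 2 of it.  */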
7285 tree set = TREE_OPERAND (exp, 0);
7286 tree index = TREE_OPERAND (exp, 1);
7287 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7288 tree set_type = TREE_TYPE (set);
7289 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7290 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7291 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7292 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7293 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7294 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7295 rtx setaddr = XEXP (setval, 0);
7296 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7298 rtx diff, quo, rem, addr, bit, result;
7300 /* If domain is empty, answer is no. Likewise if index is constant
7301 and out of bounds. */
7302 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7303 && TREE_CODE (set_low_bound) == INTEGER_CST
7304 && tree_int_cst_lt (set_high_bound, set_low_bound))
7305 || (TREE_CODE (index) == INTEGER_CST
7306 && TREE_CODE (set_low_bound) == INTEGER_CST
7307 && tree_int_cst_lt (index, set_low_bound))
7308 || (TREE_CODE (set_high_bound) == INTEGER_CST
7309 && TREE_CODE (index) == INTEGER_CST
7310 && tree_int_cst_lt (set_high_bound, index))))
7314 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7316 /* If we get here, we have to generate the code for both cases
7317 (in range and out of range). */
7319 op0 = gen_label_rtx ();
7320 op1 = gen_label_rtx ();
7322 if (! (GET_CODE (index_val) == CONST_INT
7323 && GET_CODE (lo_r) == CONST_INT))
7324 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7325 GET_MODE (index_val), iunsignedp, op1);
7327 if (! (GET_CODE (index_val) == CONST_INT
7328 && GET_CODE (hi_r) == CONST_INT))
7329 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7330 GET_MODE (index_val), iunsignedp, op1);
7332 /* Calculate the element number of bit zero in the first word of the set. */
7334 if (GET_CODE (lo_r) == CONST_INT)
7335 rlow = GEN_INT (INTVAL (lo_r)
7336 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7338 rlow = expand_binop (index_mode, and_optab, lo_r,
7339 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7340 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7342 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7343 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7345 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7346 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7347 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7348 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7350 addr = memory_address (byte_mode,
7351 expand_binop (index_mode, add_optab, diff,
7352 setaddr, NULL_RTX, iunsignedp,
7355 /* Extract the bit we want to examine. */
7356 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7357 gen_rtx_MEM (byte_mode, addr),
7358 make_tree (TREE_TYPE (index), rem),
7360 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7361 GET_MODE (target) == byte_mode ? target : 0,
7362 1, OPTAB_LIB_WIDEN);
7364 if (result != target)
7365 convert_move (target, result, 1);
7367 /* Output the code to handle the out-of-range case. */
7370 emit_move_insn (target, const0_rtx);
7375 case WITH_CLEANUP_EXPR:
7376 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7378 WITH_CLEANUP_EXPR_RTL (exp)
7379 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7380 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7381 CLEANUP_EH_ONLY (exp));
7383 /* That's it for this cleanup. */
7384 TREE_OPERAND (exp, 1) = 0;
7386 return WITH_CLEANUP_EXPR_RTL (exp);
7388 case CLEANUP_POINT_EXPR:
7390 /* Start a new binding layer that will keep track of all cleanup
7391 actions to be performed. */
7392 expand_start_bindings (2);
7394 target_temp_slot_level = temp_slot_level;
7396 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7397 /* If we're going to use this value, load it up now. */
7399 op0 = force_not_mem (op0);
7400 preserve_temp_slots (op0);
7401 expand_end_bindings (NULL_TREE, 0, 0);
7406 /* Check for a built-in function. */
7407 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7408 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7410 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7412 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7413 == BUILT_IN_FRONTEND)
7414 return (*lang_hooks.expand_expr)
7415 (exp, original_target, tmode, modifier);
7417 return expand_builtin (exp, target, subtarget, tmode, ignore);
7420 return expand_call (exp, target, ignore);
7422 case NON_LVALUE_EXPR:
7425 case REFERENCE_EXPR:
7426 if (TREE_OPERAND (exp, 0) == error_mark_node)
7429 if (TREE_CODE (type) == UNION_TYPE)
7431 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7433 /* If both input and output are BLKmode, this conversion isn't doing
7434 anything except possibly changing memory attributes. */
7435 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7437 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7440 result = copy_rtx (result);
7441 set_mem_attributes (result, exp, 0);
7446 target = assign_temp (type, 0, 1, 1);
7448 if (GET_CODE (target) == MEM)
7449 /* Store data into beginning of memory target. */
7450 store_expr (TREE_OPERAND (exp, 0),
7451 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7453 else if (GET_CODE (target) == REG)
7454 /* Store this field into a union of the proper type. */
7455 store_field (target,
7456 MIN ((int_size_in_bytes (TREE_TYPE
7457 (TREE_OPERAND (exp, 0)))
7459 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7460 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7461 VOIDmode, 0, type, 0);
7465 /* Return the entire union. */
7469 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7471 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7474 /* If the signedness of the conversion differs and OP0 is
7475 a promoted SUBREG, clear that indication since we now
7476 have to do the proper extension. */
7477 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7478 && GET_CODE (op0) == SUBREG)
7479 SUBREG_PROMOTED_VAR_P (op0) = 0;
7484 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7485 if (GET_MODE (op0) == mode)
7488 /* If OP0 is a constant, just convert it into the proper mode. */
7489 if (CONSTANT_P (op0))
7491 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7492 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7494 if (modifier == EXPAND_INITIALIZER)
7495 return simplify_gen_subreg (mode, op0, inner_mode,
7496 subreg_lowpart_offset (mode,
7499 return convert_modes (mode, inner_mode, op0,
7500 TREE_UNSIGNED (inner_type));
7503 if (modifier == EXPAND_INITIALIZER)
7504 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7508 convert_to_mode (mode, op0,
7509 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7511 convert_move (target, op0,
7512 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7515 case VIEW_CONVERT_EXPR:
7516 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7518 /* If the input and output modes are both the same, we are done.
7519 Otherwise, if neither mode is BLKmode and both are within a word, we
7520 can use gen_lowpart. If neither is true, make sure the operand is
7521 in memory and convert the MEM to the new mode. */
7522 if (TYPE_MODE (type) == GET_MODE (op0))
7524 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7525 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7526 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7527 op0 = gen_lowpart (TYPE_MODE (type), op0);
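/* E.g. a VIEW_CONVERT_EXPR reinterpreting a 32-bit float as a 32-bit
   integer (SFmode and SImode both fit in a word) is handled by
   gen_lowpart with no memory round trip; only mismatched or BLKmode
   cases fall through to the spill path below.  */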
7528 else if (GET_CODE (op0) != MEM)
7530 /* If the operand is not a MEM, force it into memory. Since we
7531 are going to be changing the mode of the MEM, don't call
7532 force_const_mem for constants because we don't allow pool
7533 constants to change mode. */
7534 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7536 if (TREE_ADDRESSABLE (exp))
7539 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7541 = assign_stack_temp_for_type
7542 (TYPE_MODE (inner_type),
7543 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7545 emit_move_insn (target, op0);
7549 /* At this point, OP0 is in the correct mode. If the output type is such
7550 that the operand is known to be aligned, indicate that it is.
7551 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7553 if (GET_CODE (op0) == MEM)
7555 op0 = copy_rtx (op0);
7557 if (TYPE_ALIGN_OK (type))
7558 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7559 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7560 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7562 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7563 HOST_WIDE_INT temp_size
7564 = MAX (int_size_in_bytes (inner_type),
7565 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7566 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7567 temp_size, 0, type);
7568 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7570 if (TREE_ADDRESSABLE (exp))
7573 if (GET_MODE (op0) == BLKmode)
7574 emit_block_move (new_with_op0_mode, op0,
7575 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7577 emit_move_insn (new_with_op0_mode, op0);
7582 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7588 /* We come here from MINUS_EXPR when the second operand is a constant. */
7591 this_optab = ! unsignedp && flag_trapv
7592 && (GET_MODE_CLASS (mode) == MODE_INT)
7593 ? addv_optab : add_optab;
7595 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7596 something else, make sure we add the register to the constant and
7597 then to the other thing. This case can occur during strength
7598 reduction and doing it this way will produce better code if the
7599 frame pointer or argument pointer is eliminated.
7601 fold-const.c will ensure that the constant is always in the inner
7602 PLUS_EXPR, so the only case we need to do anything about is if
7603 sp, ap, or fp is our second argument, in which case we must swap
7604 the innermost first argument and our second argument. */
7606 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7607 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7608 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7609 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7610 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7611 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7613 tree t = TREE_OPERAND (exp, 1);
7615 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7616 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7619 /* If the result is to be ptr_mode and we are adding an integer to
7620 something, we might be forming a constant. So try to use
7621 plus_constant. If it produces a sum and we can't accept it,
7622 use force_operand. This allows P = &ARR[const] to generate
7623 efficient code on machines where a SYMBOL_REF is not a valid address.
7626 If this is an EXPAND_SUM call, always return the sum. */
7627 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7628 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7630 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7631 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7632 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7636 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7638 /* Use immed_double_const to ensure that the constant is
7639 truncated according to the mode of OP1, then sign extended
7640 to a HOST_WIDE_INT. Using the constant directly can result
7641 in non-canonical RTL in a 64x32 cross compile. */
7643 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7645 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7646 op1 = plus_constant (op1, INTVAL (constant_part));
7647 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7648 op1 = force_operand (op1, target);
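/* Concretely: on a 64-bit host targeting a 32-bit machine, a tree
   constant whose low word is 0xffffffff must become the SImode
   CONST_INT -1, since CONST_INTs are kept sign-extended to a full
   HOST_WIDE_INT; immed_double_const performs that truncation and
   extension, where using TREE_INT_CST_LOW directly would leave the
   non-canonical value 0xffffffff.  */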
7652 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7653 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7654 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7658 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7659 (modifier == EXPAND_INITIALIZER
7660 ? EXPAND_INITIALIZER : EXPAND_SUM));
7661 if (! CONSTANT_P (op0))
7663 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7664 VOIDmode, modifier);
7665 /* Don't go to both_summands if modifier
7666 says it's not right to return a PLUS. */
7667 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7671 /* Use immed_double_const to ensure that the constant is
7672 truncated according to the mode of OP1, then sign extended
7673 to a HOST_WIDE_INT. Using the constant directly can result
7674 in non-canonical RTL in a 64x32 cross compile. */
7676 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7678 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7679 op0 = plus_constant (op0, INTVAL (constant_part));
7680 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7681 op0 = force_operand (op0, target);
7686 /* No sense saving up arithmetic to be done
7687 if it's all in the wrong mode to form part of an address.
7689 And force_operand won't know whether to sign-extend or zero-extend. */
7690 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7691 || mode != ptr_mode)
7694 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7697 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7698 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7701 /* Make sure any term that's a sum with a constant comes last. */
7702 if (GET_CODE (op0) == PLUS
7703 && CONSTANT_P (XEXP (op0, 1)))
7709 /* If adding to a sum including a constant,
7710 associate it to put the constant outside. */
7711 if (GET_CODE (op1) == PLUS
7712 && CONSTANT_P (XEXP (op1, 1)))
7714 rtx constant_term = const0_rtx;
7716 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7719 /* Ensure that MULT comes first if there is one. */
7720 else if (GET_CODE (op0) == MULT)
7721 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7723 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7725 /* Let's also eliminate constants from op0 if possible. */
7726 op0 = eliminate_constant_term (op0, &constant_term);
7728 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7729 their sum should be a constant. Form it into OP1, since the
7730 result we want will then be OP0 + OP1. */
7732 temp = simplify_binary_operation (PLUS, mode, constant_term,
7737 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7740 /* Put a constant term last and put a multiplication first. */
7741 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7742 temp = op1, op1 = op0, op0 = temp;
7744 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7745 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
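/* The net effect of the reassociation above: a sum such as
     (plus (plus X (const_int 4)) (plus Y (const_int 8)))
   has its variable parts combined first and its constants folded last,
   yielding (plus (plus X Y) (const_int 12)), a shape the addressing
   machinery handles well.  */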
7748 /* For initializers, we are allowed to return a MINUS of two
7749 symbolic constants. Here we handle all cases when both operands are constant. */
7751 /* Handle difference of two symbolic constants,
7752 for the sake of an initializer. */
7753 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7754 && really_constant_p (TREE_OPERAND (exp, 0))
7755 && really_constant_p (TREE_OPERAND (exp, 1)))
7757 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7759 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7762 /* If the last operand is a CONST_INT, use plus_constant of
7763 the negated constant. Else make the MINUS. */
7764 if (GET_CODE (op1) == CONST_INT)
7765 return plus_constant (op0, - INTVAL (op1));
7767 return gen_rtx_MINUS (mode, op0, op1);
7769 /* Convert A - const to A + (-const). */
7770 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7772 tree negated = fold (build1 (NEGATE_EXPR, type,
7773 TREE_OPERAND (exp, 1)));
7775 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7776 /* If we can't negate the constant in TYPE, leave it alone and
7777 expand_binop will negate it for us. We used to try to do it
7778 here in the signed version of TYPE, but that doesn't work
7779 on POINTER_TYPEs. */;
7782 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7786 this_optab = ! unsignedp && flag_trapv
7787 && (GET_MODE_CLASS(mode) == MODE_INT)
7788 ? subv_optab : sub_optab;
7792 /* If first operand is constant, swap them.
7793 Thus the following special case checks need only
7794 check the second operand. */
7795 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7797 tree t1 = TREE_OPERAND (exp, 0);
7798 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7799 TREE_OPERAND (exp, 1) = t1;
7802 /* Attempt to return something suitable for generating an
7803 indexed address, for machines that support that. */
7805 if (modifier == EXPAND_SUM && mode == ptr_mode
7806 && host_integerp (TREE_OPERAND (exp, 1), 0))
7808 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7811 /* If we knew for certain that this is arithmetic for an array
7812 reference, and we knew the bounds of the array, then we could
7813 apply the distributive law across (PLUS X C) for constant C.
7814 Without such knowledge, we risk overflowing the computation
7815 when both X and C are large, but X+C isn't. */
7816 /* ??? Could perhaps special-case EXP being unsigned and C being
7817 positive. In that case we are certain that X+C is no smaller
7818 than X and so the transformed expression will overflow iff the
7819 original would have. */
7821 if (GET_CODE (op0) != REG)
7822 op0 = force_operand (op0, NULL_RTX);
7823 if (GET_CODE (op0) != REG)
7824 op0 = copy_to_mode_reg (mode, op0);
7827 gen_rtx_MULT (mode, op0,
7828 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
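/* For instance, for the `i * 4' of an &arr[i] address computation with
   4-byte elements, this returns (mult (reg i') (const_int 4)), which
   the enclosing PLUS_EXPR expansion can then fold into an indexed
   address.  */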
7831 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7834 /* Check for multiplying things that have been extended
7835 from a narrower type. If this machine supports multiplying
7836 in that narrower type with a result in the desired type,
7837 do it that way, and avoid the explicit type-conversion. */
7838 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7839 && TREE_CODE (type) == INTEGER_TYPE
7840 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7841 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7842 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7843 && int_fits_type_p (TREE_OPERAND (exp, 1),
7844 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7845 /* Don't use a widening multiply if a shift will do. */
7846 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7847 > HOST_BITS_PER_WIDE_INT)
7848 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7850 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7851 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7853 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7854 /* If both operands are extended, they must either both
7855 be zero-extended or both be sign-extended. */
7856 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7858 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7860 enum machine_mode innermode
7861 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7862 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7863 ? smul_widen_optab : umul_widen_optab);
7864 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7865 ? umul_widen_optab : smul_widen_optab);
7866 if (mode == GET_MODE_WIDER_MODE (innermode))
7868 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7870 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7871 NULL_RTX, VOIDmode, 0);
7872 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7873 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7876 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7877 NULL_RTX, VOIDmode, 0);
7880 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7881 && innermode == word_mode)
7884 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7885 NULL_RTX, VOIDmode, 0);
7886 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7887 op1 = convert_modes (innermode, mode,
7888 expand_expr (TREE_OPERAND (exp, 1),
7889 NULL_RTX, VOIDmode, 0),
7892 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7893 NULL_RTX, VOIDmode, 0);
7894 temp = expand_binop (mode, other_optab, op0, op1, target,
7895 unsignedp, OPTAB_LIB_WIDEN);
7896 htem = expand_mult_highpart_adjust (innermode,
7897 gen_highpart (innermode, temp),
7899 gen_highpart (innermode, temp),
7901 emit_move_insn (gen_highpart (innermode, temp), htem);
7906 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7907 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7908 return expand_mult (mode, op0, op1, target, unsignedp);
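/* Example of the widening-multiply path above: given
     short a, b;  int c = (int) a * (int) b;
   a target providing a mulhisi3 pattern (HImode x HImode -> SImode)
   lets us emit one widening multiply instead of two sign extensions
   followed by a full SImode multiply.  */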
7910 case TRUNC_DIV_EXPR:
7911 case FLOOR_DIV_EXPR:
7913 case ROUND_DIV_EXPR:
7914 case EXACT_DIV_EXPR:
7915 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7917 /* Possible optimization: compute the dividend with EXPAND_SUM
7918 then, if the divisor is constant, optimize the case
7919 where some terms of the dividend have coeffs divisible by it. */
7920 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7921 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7922 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7925 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving
7926 an expensive divide. If not, combine will rebuild the original computation. */
7928 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7929 && TREE_CODE (type) == REAL_TYPE
7930 && !real_onep (TREE_OPERAND (exp, 0)))
7931 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7932 build (RDIV_EXPR, type,
7933 build_real (type, dconst1),
7934 TREE_OPERAND (exp, 1))),
7935 target, tmode, unsignedp);
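/* E.g. under -funsafe-math-optimizations, `x / y' and `z / y' in the
   same stretch of code both become multiplications by the common
   subexpression (1/y), so CSE can compute the reciprocal once and no
   second divide is issued.  */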
7936 this_optab = sdiv_optab;
7939 case TRUNC_MOD_EXPR:
7940 case FLOOR_MOD_EXPR:
7942 case ROUND_MOD_EXPR:
7943 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7945 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7946 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7947 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7949 case FIX_ROUND_EXPR:
7950 case FIX_FLOOR_EXPR:
7952 abort (); /* Not used for C. */
7954 case FIX_TRUNC_EXPR:
7955 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7957 target = gen_reg_rtx (mode);
7958 expand_fix (target, op0, unsignedp);
7962 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7964 target = gen_reg_rtx (mode);
7965 /* expand_float can't figure out what to do if FROM has VOIDmode.
7966 So give it the correct mode. With -O, cse will optimize this. */
7967 if (GET_MODE (op0) == VOIDmode)
7968 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7970 expand_float (target, op0,
7971 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7975 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7976 temp = expand_unop (mode,
7977 ! unsignedp && flag_trapv
7978 && (GET_MODE_CLASS(mode) == MODE_INT)
7979 ? negv_optab : neg_optab, op0, target, 0);
7985 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7987 /* Handle complex values specially. */
7988 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7989 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7990 return expand_complex_abs (mode, op0, target, unsignedp);
7992 /* Unsigned abs is simply the operand. Testing here means we don't
7993 risk generating incorrect code below. */
7994 if (TREE_UNSIGNED (type))
7997 return expand_abs (mode, op0, target, unsignedp,
7998 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8002 target = original_target;
8003 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8004 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8005 || GET_MODE (target) != mode
8006 || (GET_CODE (target) == REG
8007 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8008 target = gen_reg_rtx (mode);
8009 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8010 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8012 /* First try to do it with a special MIN or MAX instruction.
8013 If that does not win, use a conditional jump to select the proper value. */
8015 this_optab = (TREE_UNSIGNED (type)
8016 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8017 : (code == MIN_EXPR ? smin_optab : smax_optab));
8019 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8024 /* At this point, a MEM target is no longer useful; we will get better code without it. */
8027 if (GET_CODE (target) == MEM)
8028 target = gen_reg_rtx (mode);
8031 emit_move_insn (target, op0);
8033 op0 = gen_label_rtx ();
8035 /* If this mode is an integer too wide to compare properly,
8036 compare word by word. Rely on cse to optimize constant cases. */
8037 if (GET_MODE_CLASS (mode) == MODE_INT
8038 && ! can_compare_p (GE, mode, ccp_jump))
8040 if (code == MAX_EXPR)
8041 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8042 target, op1, NULL_RTX, op0);
8044 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8045 op1, target, NULL_RTX, op0);
8049 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8050 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8051 unsignedp, mode, NULL_RTX, NULL_RTX,
8054 emit_move_insn (target, op1);
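/* The fallback above amounts to, for MAX_EXPR:
     target = op0;
     if (target >= op1) goto done;
     target = op1;
   done:
   with the comparison done word by word when the mode is too wide to
   compare directly.  */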
8059 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8060 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8066 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8067 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8072 /* ??? Can optimize bitwise operations with one arg constant.
8073 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8074 and (a bitwise1 b) bitwise2 b (etc)
8075 but that is probably not worthwhile. */
8077 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8078 boolean values when we want in all cases to compute both of them. In
8079 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8080 as actual zero-or-1 values and then bitwise anding. In cases where
8081 there cannot be any side effects, better code would be made by
8082 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8083 how to recognize those cases. */
8085 case TRUTH_AND_EXPR:
8087 this_optab = and_optab;
8092 this_optab = ior_optab;
8095 case TRUTH_XOR_EXPR:
8097 this_optab = xor_optab;
8104 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8106 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8107 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8110 /* Could determine the answer when only additive constants differ. Also,
8111 the addition of one can be handled by changing the condition. */
8118 case UNORDERED_EXPR:
8125 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8129 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8130 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8132 && GET_CODE (original_target) == REG
8133 && (GET_MODE (original_target)
8134 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8136 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8139 /* If temp is constant, we can just compute the result. */
8140 if (GET_CODE (temp) == CONST_INT)
8142 if (INTVAL (temp) != 0)
8143 emit_move_insn (target, const1_rtx);
8145 emit_move_insn (target, const0_rtx);
8150 if (temp != original_target)
8152 enum machine_mode mode1 = GET_MODE (temp);
8153 if (mode1 == VOIDmode)
8154 mode1 = tmode != VOIDmode ? tmode : mode;
8156 temp = copy_to_mode_reg (mode1, temp);
8159 op1 = gen_label_rtx ();
8160 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8161 GET_MODE (temp), unsignedp, op1);
8162 emit_move_insn (temp, const1_rtx);
8167 /* If no set-flag instruction, must generate a conditional
8168 store into a temporary variable. Drop through
8169 and handle this like && and ||. */
8171 case TRUTH_ANDIF_EXPR:
8172 case TRUTH_ORIF_EXPR:
8174 && (target == 0 || ! safe_from_p (target, exp, 1)
8175 /* Make sure we don't have a hard reg (such as function's return
8176 value) live across basic blocks, if not optimizing. */
8177 || (!optimize && GET_CODE (target) == REG
8178 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8179 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8182 emit_clr_insn (target);
8184 op1 = gen_label_rtx ();
8185 jumpifnot (exp, op1);
8188 emit_0_to_1_insn (target);
8191 return ignore ? const0_rtx : target;
8193 case TRUTH_NOT_EXPR:
8194 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8195 /* The parser is careful to generate TRUTH_NOT_EXPR
8196 only with operands that are always zero or one. */
8197 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8198 target, 1, OPTAB_LIB_WIDEN);
8204 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8206 return expand_expr (TREE_OPERAND (exp, 1),
8207 (ignore ? const0_rtx : target),
8211 /* If we would have a "singleton" (see below) were it not for a
8212 conversion in each arm, bring that conversion back out. */
8213 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8214 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8215 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8216 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8218 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8219 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8221 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8222 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8223 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8224 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8225 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8226 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8227 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8228 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8229 return expand_expr (build1 (NOP_EXPR, type,
8230 build (COND_EXPR, TREE_TYPE (iftrue),
8231 TREE_OPERAND (exp, 0),
8233 target, tmode, modifier);
8237 /* Note that COND_EXPRs whose type is a structure or union
8238 are required to be constructed to contain assignments of
8239 a temporary variable, so that we can evaluate them here
8240 for side effect only. If type is void, we must do likewise. */
8242 /* If an arm of the branch requires a cleanup,
8243 only that cleanup is performed. */
8246 tree binary_op = 0, unary_op = 0;
8248 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8249 convert it to our mode, if necessary. */
8250 if (integer_onep (TREE_OPERAND (exp, 1))
8251 && integer_zerop (TREE_OPERAND (exp, 2))
8252 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8256 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8261 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8262 if (GET_MODE (op0) == mode)
8266 target = gen_reg_rtx (mode);
8267 convert_move (target, op0, unsignedp);
8271 /* Check for X ? A + B : A. If we have this, we can copy A to the
8272 output and conditionally add B. Similarly for unary operations.
8273 Don't do this if X has side-effects because those side effects
8274 might affect A or B and the "?" operation is a sequence point in
8275 ANSI. (operand_equal_p tests for side effects.) */
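/* E.g. (a sketch, not original code): in the first function below,
   SINGLETON is A and BINARY_OP is A + B; the second is the unary
   case.  */
#if 0
static int cond_add (int x, int a, int b) { return x ? a + b : a; }
static int cond_neg (int x, int a)        { return x ? -a : a; }
#endif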
8277 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8278 && operand_equal_p (TREE_OPERAND (exp, 2),
8279 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8280 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8281 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8282 && operand_equal_p (TREE_OPERAND (exp, 1),
8283 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8284 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8285 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8286 && operand_equal_p (TREE_OPERAND (exp, 2),
8287 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8288 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8289 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8290 && operand_equal_p (TREE_OPERAND (exp, 1),
8291 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8292 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8294 /* If we are not to produce a result, we have no target. Otherwise,
8295 if a target was specified use it; it will not be used as an
intermediate target unless it is safe.  If no target, use a
temporary.  */
8301 else if (original_target
8302 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8303 || (singleton && GET_CODE (original_target) == REG
8304 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8305 && original_target == var_rtx (singleton)))
8306 && GET_MODE (original_target) == mode
8307 #ifdef HAVE_conditional_move
8308 && (! can_conditionally_move_p (mode)
8309 || GET_CODE (original_target) == REG
8310 || TREE_ADDRESSABLE (type))
8312 && (GET_CODE (original_target) != MEM
8313 || TREE_ADDRESSABLE (type)))
8314 temp = original_target;
8315 else if (TREE_ADDRESSABLE (type))
8318 temp = assign_temp (type, 0, 0, 1);
8320 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8321 do the test of X as a store-flag operation, do this as
8322 A + ((X != 0) << log C). Similarly for other simple binary
operators.  Only do this for C == 1 if BRANCH_COST is low.  */
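/* A worked example, not original code: with C == 4 (log C == 2) the
   branch collapses to straight-line arithmetic.  */
#if 0
static int
cond_add_pow2 (int x, int a)
{
  /* x ? a + 4 : a   ==>   a + ((x != 0) << 2)  */
  return a + ((x != 0) << 2);
}
#endif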
8324 if (temp && singleton && binary_op
8325 && (TREE_CODE (binary_op) == PLUS_EXPR
8326 || TREE_CODE (binary_op) == MINUS_EXPR
8327 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8328 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8329 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8330 : integer_onep (TREE_OPERAND (binary_op, 1)))
8331 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8334 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8335 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8336 ? addv_optab : add_optab)
8337 : TREE_CODE (binary_op) == MINUS_EXPR
8338 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8339 ? subv_optab : sub_optab)
8340 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8343 /* If we had X ? A : A + 1, do this as A + (X == 0).
8345 We have to invert the truth value here and then put it
8346 back later if do_store_flag fails. We cannot simply copy
8347 TREE_OPERAND (exp, 0) to another variable and modify that
because invert_truthvalue can modify the tree pointed to
by its argument.  */
8350 if (singleton == TREE_OPERAND (exp, 1))
8351 TREE_OPERAND (exp, 0)
8352 = invert_truthvalue (TREE_OPERAND (exp, 0));
8354 result = do_store_flag (TREE_OPERAND (exp, 0),
8355 (safe_from_p (temp, singleton, 1)
8357 mode, BRANCH_COST <= 1);
8359 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8360 result = expand_shift (LSHIFT_EXPR, mode, result,
8361 build_int_2 (tree_log2
8365 (safe_from_p (temp, singleton, 1)
8366 ? temp : NULL_RTX), 0);
8370 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8371 return expand_binop (mode, boptab, op1, result, temp,
8372 unsignedp, OPTAB_LIB_WIDEN);
8374 else if (singleton == TREE_OPERAND (exp, 1))
8375 TREE_OPERAND (exp, 0)
8376 = invert_truthvalue (TREE_OPERAND (exp, 0));
8379 do_pending_stack_adjust ();
8381 op0 = gen_label_rtx ();
8383 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8387 /* If the target conflicts with the other operand of the
8388 binary op, we can't use it. Also, we can't use the target
8389 if it is a hard register, because evaluating the condition
8390 might clobber it. */
8392 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8393 || (GET_CODE (temp) == REG
8394 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8395 temp = gen_reg_rtx (mode);
8396 store_expr (singleton, temp, 0);
8399 expand_expr (singleton,
8400 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8401 if (singleton == TREE_OPERAND (exp, 1))
8402 jumpif (TREE_OPERAND (exp, 0), op0);
8404 jumpifnot (TREE_OPERAND (exp, 0), op0);
8406 start_cleanup_deferral ();
8407 if (binary_op && temp == 0)
8408 /* Just touch the other operand. */
8409 expand_expr (TREE_OPERAND (binary_op, 1),
8410 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8412 store_expr (build (TREE_CODE (binary_op), type,
8413 make_tree (type, temp),
8414 TREE_OPERAND (binary_op, 1)),
8417 store_expr (build1 (TREE_CODE (unary_op), type,
8418 make_tree (type, temp)),
8422 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8423 comparison operator. If we have one of these cases, set the
8424 output to A, branch on A (cse will merge these two references),
8425 then set the output to FOO. */
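/* The emitted shape, sketched as C for `a > 0 ? a : foo' (not original
   code):  */
#if 0
static int
cond_same_arm (int a, int foo)
{
  int temp = a;			/* set the output to A */
  if (a > 0)			/* branch on A; cse merges the references */
    goto done;
  temp = foo;			/* otherwise set the output to FOO */
 done:
  return temp;
}
#endif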
8427 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8428 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8429 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8430 TREE_OPERAND (exp, 1), 0)
8431 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8432 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8433 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8435 if (GET_CODE (temp) == REG
8436 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8437 temp = gen_reg_rtx (mode);
8438 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8439 jumpif (TREE_OPERAND (exp, 0), op0);
8441 start_cleanup_deferral ();
8442 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8446 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8447 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8448 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8449 TREE_OPERAND (exp, 2), 0)
8450 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8451 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8452 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8454 if (GET_CODE (temp) == REG
8455 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8456 temp = gen_reg_rtx (mode);
8457 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8458 jumpifnot (TREE_OPERAND (exp, 0), op0);
8460 start_cleanup_deferral ();
8461 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8466 op1 = gen_label_rtx ();
8467 jumpifnot (TREE_OPERAND (exp, 0), op0);
8469 start_cleanup_deferral ();
8471 /* One branch of the cond can be void, if it never returns. For
8472 example A ? throw : E */
8474 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8475 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8477 expand_expr (TREE_OPERAND (exp, 1),
8478 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8479 end_cleanup_deferral ();
8481 emit_jump_insn (gen_jump (op1));
8484 start_cleanup_deferral ();
8486 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8487 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8489 expand_expr (TREE_OPERAND (exp, 2),
8490 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8493 end_cleanup_deferral ();
8504 /* Something needs to be initialized, but we didn't know
8505 where that thing was when building the tree. For example,
8506 it could be the return value of a function, or a parameter
to a function which is laid down on the stack, or a temporary
8508 variable which must be passed by reference.
8510 We guarantee that the expression will either be constructed
8511 or copied into our original target. */
8513 tree slot = TREE_OPERAND (exp, 0);
8514 tree cleanups = NULL_TREE;
8517 if (TREE_CODE (slot) != VAR_DECL)
8521 target = original_target;
8523 /* Set this here so that if we get a target that refers to a
8524 register variable that's already been used, put_reg_into_stack
8525 knows that it should fix up those uses. */
8526 TREE_USED (slot) = 1;
8530 if (DECL_RTL_SET_P (slot))
8532 target = DECL_RTL (slot);
/* If we have already expanded the slot, don't do
anything else.  */
8535 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8540 target = assign_temp (type, 2, 0, 1);
8541 /* All temp slots at this level must not conflict. */
8542 preserve_temp_slots (target);
8543 SET_DECL_RTL (slot, target);
8544 if (TREE_ADDRESSABLE (slot))
8545 put_var_into_stack (slot);
8547 /* Since SLOT is not known to the called function
8548 to belong to its stack frame, we must build an explicit
8549 cleanup. This case occurs when we must build up a reference
8550 to pass the reference as an argument. In this case,
it is very likely that such a reference need not be
built here.  */
8554 if (TREE_OPERAND (exp, 2) == 0)
8555 TREE_OPERAND (exp, 2)
8556 = (*lang_hooks.maybe_build_cleanup) (slot);
8557 cleanups = TREE_OPERAND (exp, 2);
8562 /* This case does occur, when expanding a parameter which
8563 needs to be constructed on the stack. The target
8564 is the actual stack address that we want to initialize.
8565 The function we call will perform the cleanup in this case. */
8567 /* If we have already assigned it space, use that space,
8568 not target that we were passed in, as our target
8569 parameter is only a hint. */
8570 if (DECL_RTL_SET_P (slot))
8572 target = DECL_RTL (slot);
/* If we have already expanded the slot, don't do
anything else.  */
8575 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8580 SET_DECL_RTL (slot, target);
8581 /* If we must have an addressable slot, then make sure that
8582 the RTL that we just stored in slot is OK. */
8583 if (TREE_ADDRESSABLE (slot))
8584 put_var_into_stack (slot);
8588 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8589 /* Mark it as expanded. */
8590 TREE_OPERAND (exp, 1) = NULL_TREE;
8592 store_expr (exp1, target, 0);
8594 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8601 tree lhs = TREE_OPERAND (exp, 0);
8602 tree rhs = TREE_OPERAND (exp, 1);
8604 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8610 /* If lhs is complex, expand calls in rhs before computing it.
8611 That's so we don't compute a pointer and save it over a
8612 call. If lhs is simple, compute it first so we can give it
8613 as a target if the rhs is just a call. This avoids an
8614 extra temp and copy and that prevents a partial-subsumption
8615 which makes bad code. Actually we could treat
8616 component_ref's of vars like vars. */
8618 tree lhs = TREE_OPERAND (exp, 0);
8619 tree rhs = TREE_OPERAND (exp, 1);
8623 /* Check for |= or &= of a bitfield of size one into another bitfield
8624 of size 1. In this case, (unless we need the result of the
8625 assignment) we can do this more efficiently with a
8626 test followed by an assignment, if necessary.
8628 ??? At this point, we can't get a BIT_FIELD_REF here. But if
things change so we do, this code should be enhanced to
handle it.  */
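/* Sketch of the transformation, not original code: a test and a
   constant store replace the read-modify-write sequence.  */
#if 0
struct two_bits { unsigned a : 1, b : 1; };
static void ior_bit (struct two_bits *p) { if (p->b) p->a = 1; }   /* p->a |= p->b */
static void and_bit (struct two_bits *p) { if (!p->b) p->a = 0; }  /* p->a &= p->b */
#endif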
8632 && TREE_CODE (lhs) == COMPONENT_REF
8633 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8634 || TREE_CODE (rhs) == BIT_AND_EXPR)
8635 && TREE_OPERAND (rhs, 0) == lhs
8636 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8637 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8638 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8640 rtx label = gen_label_rtx ();
8642 do_jump (TREE_OPERAND (rhs, 1),
8643 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8644 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8645 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8646 (TREE_CODE (rhs) == BIT_IOR_EXPR
8648 : integer_zero_node)),
8650 do_pending_stack_adjust ();
8655 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8661 if (!TREE_OPERAND (exp, 0))
8662 expand_null_return ();
8664 expand_return (TREE_OPERAND (exp, 0));
8667 case PREINCREMENT_EXPR:
8668 case PREDECREMENT_EXPR:
8669 return expand_increment (exp, 0, ignore);
8671 case POSTINCREMENT_EXPR:
8672 case POSTDECREMENT_EXPR:
8673 /* Faster to treat as pre-increment if result is not used. */
8674 return expand_increment (exp, ! ignore, ignore);
8677 /* Are we taking the address of a nested function? */
8678 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8679 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8680 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8681 && ! TREE_STATIC (exp))
8683 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8684 op0 = force_operand (op0, target);
/* If we are taking the address of something erroneous, just
use zero.  */
8688 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8690 /* If we are taking the address of a constant and are at the
8691 top level, we have to use output_constant_def since we can't
8692 call force_const_mem at top level. */
8694 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8695 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8697 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8700 /* We make sure to pass const0_rtx down if we came in with
8701 ignore set, to avoid doing the cleanups twice for something. */
8702 op0 = expand_expr (TREE_OPERAND (exp, 0),
8703 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8704 (modifier == EXPAND_INITIALIZER
8705 ? modifier : EXPAND_CONST_ADDRESS));
8707 /* If we are going to ignore the result, OP0 will have been set
8708 to const0_rtx, so just return it. Don't get confused and
8709 think we are taking the address of the constant. */
8713 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
clever and return a REG when given a MEM.  */
8715 op0 = protect_from_queue (op0, 1);
8717 /* We would like the object in memory. If it is a constant, we can
8718 have it be statically allocated into memory. For a non-constant,
8719 we need to allocate some memory and store the value into it. */
8721 if (CONSTANT_P (op0))
8722 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8724 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8725 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8726 || GET_CODE (op0) == PARALLEL)
8728 /* If the operand is a SAVE_EXPR, we can deal with this by
8729 forcing the SAVE_EXPR into memory. */
8730 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8732 put_var_into_stack (TREE_OPERAND (exp, 0));
8733 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8737 /* If this object is in a register, it can't be BLKmode. */
8738 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8739 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8741 if (GET_CODE (op0) == PARALLEL)
8742 /* Handle calls that pass values in multiple
non-contiguous locations.  The Irix 6 ABI has examples
of this.  */
8745 emit_group_store (memloc, op0,
8746 int_size_in_bytes (inner_type));
8748 emit_move_insn (memloc, op0);
8754 if (GET_CODE (op0) != MEM)
8757 mark_temp_addr_taken (op0);
8758 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8760 op0 = XEXP (op0, 0);
8761 #ifdef POINTERS_EXTEND_UNSIGNED
8762 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8763 && mode == ptr_mode)
8764 op0 = convert_memory_address (ptr_mode, op0);
/* If OP0 is not aligned at least as much as the type requires, we
8770 need to make a temporary, copy OP0 to it, and take the address of
8771 the temporary. We want to use the alignment of the type, not of
8772 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8773 the test for BLKmode means that can't happen. The test for
BLKmode is because we never make mis-aligned MEMs with
non-BLKmode modes.
8777 We don't need to do this at all if the machine doesn't have
8778 strict alignment. */
8779 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8780 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8782 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8784 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8786 = assign_stack_temp_for_type
8787 (TYPE_MODE (inner_type),
8788 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8789 : int_size_in_bytes (inner_type),
8790 1, build_qualified_type (inner_type,
8791 (TYPE_QUALS (inner_type)
8792 | TYPE_QUAL_CONST)));
8794 if (TYPE_ALIGN_OK (inner_type))
8797 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8801 op0 = force_operand (XEXP (op0, 0), target);
8805 && GET_CODE (op0) != REG
8806 && modifier != EXPAND_CONST_ADDRESS
8807 && modifier != EXPAND_INITIALIZER
8808 && modifier != EXPAND_SUM)
8809 op0 = force_reg (Pmode, op0);
8811 if (GET_CODE (op0) == REG
8812 && ! REG_USERVAR_P (op0))
8813 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8815 #ifdef POINTERS_EXTEND_UNSIGNED
8816 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8817 && mode == ptr_mode)
8818 op0 = convert_memory_address (ptr_mode, op0);
8823 case ENTRY_VALUE_EXPR:
8826 /* COMPLEX type for Extended Pascal & Fortran */
8829 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8832 /* Get the rtx code of the operands. */
8833 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8834 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8837 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8841 /* Move the real (op0) and imaginary (op1) parts to their location. */
8842 emit_move_insn (gen_realpart (mode, target), op0);
8843 emit_move_insn (gen_imagpart (mode, target), op1);
8845 insns = get_insns ();
8848 /* Complex construction should appear as a single unit. */
8849 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8850 each with a separate pseudo as destination.
8851 It's not correct for flow to treat them as a unit. */
8852 if (GET_CODE (target) != CONCAT)
8853 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8861 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8862 return gen_realpart (mode, op0);
8865 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8866 return gen_imagpart (mode, op0);
8870 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8874 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8877 target = gen_reg_rtx (mode);
8881 /* Store the realpart and the negated imagpart to target. */
8882 emit_move_insn (gen_realpart (partmode, target),
8883 gen_realpart (partmode, op0));
8885 imag_t = gen_imagpart (partmode, target);
8886 temp = expand_unop (partmode,
8887 ! unsignedp && flag_trapv
8888 && (GET_MODE_CLASS(partmode) == MODE_INT)
8889 ? negv_optab : neg_optab,
8890 gen_imagpart (partmode, op0), imag_t, 0);
8892 emit_move_insn (imag_t, temp);
8894 insns = get_insns ();
/* Conjugate should appear as a single unit.
8898 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8899 each with a separate pseudo as destination.
8900 It's not correct for flow to treat them as a unit. */
8901 if (GET_CODE (target) != CONCAT)
8902 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8909 case TRY_CATCH_EXPR:
8911 tree handler = TREE_OPERAND (exp, 1);
8913 expand_eh_region_start ();
8915 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8917 expand_eh_region_end_cleanup (handler);
8922 case TRY_FINALLY_EXPR:
8924 tree try_block = TREE_OPERAND (exp, 0);
8925 tree finally_block = TREE_OPERAND (exp, 1);
8926 rtx finally_label = gen_label_rtx ();
8927 rtx done_label = gen_label_rtx ();
8928 rtx return_link = gen_reg_rtx (Pmode);
8929 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8930 (tree) finally_label, (tree) return_link);
8931 TREE_SIDE_EFFECTS (cleanup) = 1;
8933 /* Start a new binding layer that will keep track of all cleanup
8934 actions to be performed. */
8935 expand_start_bindings (2);
8937 target_temp_slot_level = temp_slot_level;
8939 expand_decl_cleanup (NULL_TREE, cleanup);
8940 op0 = expand_expr (try_block, target, tmode, modifier);
8942 preserve_temp_slots (op0);
8943 expand_end_bindings (NULL_TREE, 0, 0);
8944 emit_jump (done_label);
8945 emit_label (finally_label);
8946 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8947 emit_indirect_jump (return_link);
8948 emit_label (done_label);
8952 case GOTO_SUBROUTINE_EXPR:
8954 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8955 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8956 rtx return_address = gen_label_rtx ();
8957 emit_move_insn (return_link,
8958 gen_rtx_LABEL_REF (Pmode, return_address));
8960 emit_label (return_address);
8965 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8968 return get_exception_pointer (cfun);
/* Function descriptors are not valid except as
8972 initialization constants, and should not be expanded. */
8976 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
8979 /* Here to do an ordinary binary operator, generating an instruction
8980 from the optab already placed in `this_optab'. */
8982 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8984 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8985 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8987 temp = expand_binop (mode, this_optab, op0, op1, target,
8988 unsignedp, OPTAB_LIB_WIDEN);
8994 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8995 when applied to the address of EXP produces an address known to be
8996 aligned more than BIGGEST_ALIGNMENT. */
8999 is_aligning_offset (offset, exp)
9003 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9004 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9005 || TREE_CODE (offset) == NOP_EXPR
9006 || TREE_CODE (offset) == CONVERT_EXPR
9007 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9008 offset = TREE_OPERAND (offset, 0);
/* We must now have a BIT_AND_EXPR with a constant that is one less than
a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
if (TREE_CODE (offset) != BIT_AND_EXPR
    || !host_integerp (TREE_OPERAND (offset, 1), 1)
    || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
    || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9018 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9019 It must be NEGATE_EXPR. Then strip any more conversions. */
9020 offset = TREE_OPERAND (offset, 0);
9021 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9022 || TREE_CODE (offset) == NOP_EXPR
9023 || TREE_CODE (offset) == CONVERT_EXPR)
9024 offset = TREE_OPERAND (offset, 0);
9026 if (TREE_CODE (offset) != NEGATE_EXPR)
9029 offset = TREE_OPERAND (offset, 0);
9030 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9031 || TREE_CODE (offset) == NOP_EXPR
9032 || TREE_CODE (offset) == CONVERT_EXPR)
9033 offset = TREE_OPERAND (offset, 0);
9035 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9036 whose type is the same as EXP. */
9037 return (TREE_CODE (offset) == ADDR_EXPR
9038 && (TREE_OPERAND (offset, 0) == exp
9039 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9040 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9041 == TREE_TYPE (exp)))));
/* Return the tree node if ARG corresponds to a string constant, or zero
9045 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9046 in bytes within the string that ARG is accessing. The type of the
9047 offset will be `sizetype'. */
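/* Example of the intended use (an assumption for illustration, not
   from this file): for ARG == &"hello"[0] + 2 this returns the
   STRING_CST "hello" and sets *PTR_OFFSET to a `sizetype' constant 2.  */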
9050 string_constant (arg, ptr_offset)
9056 if (TREE_CODE (arg) == ADDR_EXPR
9057 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9059 *ptr_offset = size_zero_node;
9060 return TREE_OPERAND (arg, 0);
9062 else if (TREE_CODE (arg) == PLUS_EXPR)
9064 tree arg0 = TREE_OPERAND (arg, 0);
9065 tree arg1 = TREE_OPERAND (arg, 1);
9070 if (TREE_CODE (arg0) == ADDR_EXPR
9071 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9073 *ptr_offset = convert (sizetype, arg1);
9074 return TREE_OPERAND (arg0, 0);
9076 else if (TREE_CODE (arg1) == ADDR_EXPR
9077 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9079 *ptr_offset = convert (sizetype, arg0);
9080 return TREE_OPERAND (arg1, 0);
9087 /* Expand code for a post- or pre- increment or decrement
9088 and return the RTX for the result.
9089 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9092 expand_increment (exp, post, ignore)
9098 tree incremented = TREE_OPERAND (exp, 0);
9099 optab this_optab = add_optab;
9101 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9102 int op0_is_copy = 0;
9103 int single_insn = 0;
9104 /* 1 means we can't store into OP0 directly,
9105 because it is a subreg narrower than a word,
9106 and we don't dare clobber the rest of the word. */
9109 /* Stabilize any component ref that might need to be
9110 evaluated more than once below. */
9112 || TREE_CODE (incremented) == BIT_FIELD_REF
9113 || (TREE_CODE (incremented) == COMPONENT_REF
9114 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9115 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9116 incremented = stabilize_reference (incremented);
9117 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9118 ones into save exprs so that they don't accidentally get evaluated
9119 more than once by the code below. */
9120 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9121 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9122 incremented = save_expr (incremented);
9124 /* Compute the operands as RTX.
9125 Note whether OP0 is the actual lvalue or a copy of it:
9126 I believe it is a copy iff it is a register or subreg
9127 and insns were generated in computing it. */
9129 temp = get_last_insn ();
9130 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9132 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9133 in place but instead must do sign- or zero-extension during assignment,
9134 so we copy it into a new register and let the code below use it as
Note that we can safely modify this SUBREG since it is known not to be
9138 shared (it was made by the expand_expr call above). */
9140 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9143 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9147 else if (GET_CODE (op0) == SUBREG
9148 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9150 /* We cannot increment this SUBREG in place. If we are
9151 post-incrementing, get a copy of the old value. Otherwise,
9152 just mark that we cannot increment in place. */
9154 op0 = copy_to_reg (op0);
9159 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9160 && temp != get_last_insn ());
9161 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9163 /* Decide whether incrementing or decrementing. */
9164 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9165 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9166 this_optab = sub_optab;
9168 /* Convert decrement by a constant into a negative increment. */
9169 if (this_optab == sub_optab
9170 && GET_CODE (op1) == CONST_INT)
9172 op1 = GEN_INT (-INTVAL (op1));
9173 this_optab = add_optab;
9176 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9177 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9179 /* For a preincrement, see if we can do this with a single instruction. */
9182 icode = (int) this_optab->handlers[(int) mode].insn_code;
9183 if (icode != (int) CODE_FOR_nothing
9184 /* Make sure that OP0 is valid for operands 0 and 1
9185 of the insn we want to queue. */
9186 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9187 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9188 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9192 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9193 then we cannot just increment OP0. We must therefore contrive to
9194 increment the original value. Then, for postincrement, we can return
9195 OP0 since it is a copy of the old value. For preincrement, expand here
9196 unless we can do it with a single insn.
9198 Likewise if storing directly into OP0 would clobber high bits
9199 we need to preserve (bad_subreg). */
9200 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9202 /* This is the easiest way to increment the value wherever it is.
9203 Problems with multiple evaluation of INCREMENTED are prevented
9204 because either (1) it is a component_ref or preincrement,
9205 in which case it was stabilized above, or (2) it is an array_ref
9206 with constant index in an array in a register, which is
9207 safe to reevaluate. */
9208 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9209 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9210 ? MINUS_EXPR : PLUS_EXPR),
9213 TREE_OPERAND (exp, 1));
9215 while (TREE_CODE (incremented) == NOP_EXPR
9216 || TREE_CODE (incremented) == CONVERT_EXPR)
9218 newexp = convert (TREE_TYPE (incremented), newexp);
9219 incremented = TREE_OPERAND (incremented, 0);
9222 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9223 return post ? op0 : temp;
9228 /* We have a true reference to the value in OP0.
9229 If there is an insn to add or subtract in this mode, queue it.
9230 Queueing the increment insn avoids the register shuffling
9231 that often results if we must increment now and first save
9232 the old value for subsequent use. */
9234 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9235 op0 = stabilize (op0);
9238 icode = (int) this_optab->handlers[(int) mode].insn_code;
9239 if (icode != (int) CODE_FOR_nothing
9240 /* Make sure that OP0 is valid for operands 0 and 1
9241 of the insn we want to queue. */
9242 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9243 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9245 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9246 op1 = force_reg (mode, op1);
9248 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9250 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9252 rtx addr = (general_operand (XEXP (op0, 0), mode)
9253 ? force_reg (Pmode, XEXP (op0, 0))
9254 : copy_to_reg (XEXP (op0, 0)));
9257 op0 = replace_equiv_address (op0, addr);
9258 temp = force_reg (GET_MODE (op0), op0);
9259 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9260 op1 = force_reg (mode, op1);
9262 /* The increment queue is LIFO, thus we have to `queue'
9263 the instructions in reverse order. */
9264 enqueue_insn (op0, gen_move_insn (op0, temp));
9265 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9270 /* Preincrement, or we can't increment with one simple insn. */
9272 /* Save a copy of the value before inc or dec, to return it later. */
9273 temp = value = copy_to_reg (op0);
9275 /* Arrange to return the incremented value. */
9276 /* Copy the rtx because expand_binop will protect from the queue,
9277 and the results of that would be invalid for us to return
9278 if our caller does emit_queue before using our result. */
9279 temp = copy_rtx (value = op0);
9281 /* Increment however we can. */
9282 op1 = expand_binop (mode, this_optab, value, op1, op0,
9283 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9285 /* Make sure the value is stored into OP0. */
9287 emit_move_insn (op0, op1);
9292 /* At the start of a function, record that we have no previously-pushed
9293 arguments waiting to be popped. */
9296 init_pending_stack_adjust ()
9298 pending_stack_adjust = 0;
9301 /* When exiting from function, if safe, clear out any pending stack adjust
9302 so the adjustment won't get done.
9304 Note, if the current function calls alloca, then it must have a
9305 frame pointer regardless of the value of flag_omit_frame_pointer. */
9308 clear_pending_stack_adjust ()
9310 #ifdef EXIT_IGNORE_STACK
9312 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9313 && EXIT_IGNORE_STACK
9314 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9315 && ! flag_inline_functions)
9317 stack_pointer_delta -= pending_stack_adjust,
9318 pending_stack_adjust = 0;
9323 /* Pop any previously-pushed arguments that have not been popped yet. */
9326 do_pending_stack_adjust ()
9328 if (inhibit_defer_pop == 0)
9330 if (pending_stack_adjust != 0)
9331 adjust_stack (GEN_INT (pending_stack_adjust));
9332 pending_stack_adjust = 0;
9336 /* Expand conditional expressions. */
9338 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
LABEL is an rtx of code CODE_LABEL, in this function and all the
functions here.  */
9343 jumpifnot (exp, label)
9347 do_jump (exp, label, NULL_RTX);
9350 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9357 do_jump (exp, NULL_RTX, label);
9360 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9361 the result is zero, or IF_TRUE_LABEL if the result is one.
9362 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9363 meaning fall through in that case.
9365 do_jump always does any pending stack adjust except when it does not
9366 actually perform a jump. An example where there is no jump
9367 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9369 This function is responsible for optimizing cases such as
9370 &&, || and comparison operators in EXP. */
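/* For instance (a sketch, not original code), do_jump on `a && b' with
   only IF_FALSE_LABEL set emits the equivalent of

       if (! a) goto if_false_label;
       if (! b) goto if_false_label;

   without ever materializing a 0/1 value for the AND.  */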
9373 do_jump (exp, if_false_label, if_true_label)
9375 rtx if_false_label, if_true_label;
9377 enum tree_code code = TREE_CODE (exp);
9378 /* Some cases need to create a label to jump to
9379 in order to properly fall through.
9380 These cases set DROP_THROUGH_LABEL nonzero. */
9381 rtx drop_through_label = 0;
9385 enum machine_mode mode;
9387 #ifdef MAX_INTEGER_COMPUTATION_MODE
9388 check_max_integer_computation_mode (exp);
9399 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9405 /* This is not true with #pragma weak */
9407 /* The address of something can never be zero. */
9409 emit_jump (if_true_label);
9414 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9415 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9416 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9417 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
/* If we are narrowing the operand, we have to do the compare in the
narrower mode.  */
9422 if ((TYPE_PRECISION (TREE_TYPE (exp))
9423 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9425 case NON_LVALUE_EXPR:
9426 case REFERENCE_EXPR:
9431 /* These cannot change zero->non-zero or vice versa. */
9432 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9435 case WITH_RECORD_EXPR:
9436 /* Put the object on the placeholder list, recurse through our first
9437 operand, and pop the list. */
9438 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9440 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9441 placeholder_list = TREE_CHAIN (placeholder_list);
/* This is never fewer insns than evaluating the PLUS_EXPR followed by
9446 a test and can be longer if the test is eliminated. */
9448 /* Reduce to minus. */
9449 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9450 TREE_OPERAND (exp, 0),
9451 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9452 TREE_OPERAND (exp, 1))));
9453 /* Process as MINUS. */
9457 /* Non-zero iff operands of minus differ. */
9458 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9459 TREE_OPERAND (exp, 0),
9460 TREE_OPERAND (exp, 1)),
9461 NE, NE, if_false_label, if_true_label);
9465 /* If we are AND'ing with a small constant, do this comparison in the
9466 smallest type that fits. If the machine doesn't have comparisons
9467 that small, it will be converted back to the wider comparison.
9468 This helps if we are testing the sign bit of a narrower object.
9469 combine can't do this for us because it can't know whether a
9470 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
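/* Example (a sketch, not original code): a sign-bit test of a byte.  */
#if 0
static int
sign_bit_qi (int x)
{
  /* `(x & 0x80) != 0' depends only on the low byte, so the comparison
     can be done in QImode.  */
  return (unsigned char) x >> 7;
}
#endif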
9472 if (! SLOW_BYTE_ACCESS
9473 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9474 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9475 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9476 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9477 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9478 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9479 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9480 != CODE_FOR_nothing))
9482 do_jump (convert (type, exp), if_false_label, if_true_label);
9487 case TRUTH_NOT_EXPR:
9488 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9491 case TRUTH_ANDIF_EXPR:
9492 if (if_false_label == 0)
9493 if_false_label = drop_through_label = gen_label_rtx ();
9494 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9495 start_cleanup_deferral ();
9496 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9497 end_cleanup_deferral ();
9500 case TRUTH_ORIF_EXPR:
9501 if (if_true_label == 0)
9502 if_true_label = drop_through_label = gen_label_rtx ();
9503 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9504 start_cleanup_deferral ();
9505 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9506 end_cleanup_deferral ();
9511 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9512 preserve_temp_slots (NULL_RTX);
9516 do_pending_stack_adjust ();
9517 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9523 case ARRAY_RANGE_REF:
9525 HOST_WIDE_INT bitsize, bitpos;
9527 enum machine_mode mode;
9532 /* Get description of this reference. We don't actually care
9533 about the underlying object here. */
9534 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9535 &unsignedp, &volatilep);
9537 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9538 if (! SLOW_BYTE_ACCESS
9539 && type != 0 && bitsize >= 0
9540 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9541 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9542 != CODE_FOR_nothing))
9544 do_jump (convert (type, exp), if_false_label, if_true_label);
9551 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9552 if (integer_onep (TREE_OPERAND (exp, 1))
9553 && integer_zerop (TREE_OPERAND (exp, 2)))
9554 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9556 else if (integer_zerop (TREE_OPERAND (exp, 1))
9557 && integer_onep (TREE_OPERAND (exp, 2)))
9558 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9562 rtx label1 = gen_label_rtx ();
9563 drop_through_label = gen_label_rtx ();
9565 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9567 start_cleanup_deferral ();
9568 /* Now the THEN-expression. */
9569 do_jump (TREE_OPERAND (exp, 1),
9570 if_false_label ? if_false_label : drop_through_label,
9571 if_true_label ? if_true_label : drop_through_label);
9572 /* In case the do_jump just above never jumps. */
9573 do_pending_stack_adjust ();
9574 emit_label (label1);
9576 /* Now the ELSE-expression. */
9577 do_jump (TREE_OPERAND (exp, 2),
9578 if_false_label ? if_false_label : drop_through_label,
9579 if_true_label ? if_true_label : drop_through_label);
9580 end_cleanup_deferral ();
9586 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9588 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9589 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9591 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9592 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9595 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9596 fold (build (EQ_EXPR, TREE_TYPE (exp),
9597 fold (build1 (REALPART_EXPR,
9598 TREE_TYPE (inner_type),
9600 fold (build1 (REALPART_EXPR,
9601 TREE_TYPE (inner_type),
9603 fold (build (EQ_EXPR, TREE_TYPE (exp),
9604 fold (build1 (IMAGPART_EXPR,
9605 TREE_TYPE (inner_type),
9607 fold (build1 (IMAGPART_EXPR,
9608 TREE_TYPE (inner_type),
9610 if_false_label, if_true_label);
9613 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9614 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9616 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9617 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9618 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9620 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9626 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9628 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9629 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9631 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9632 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9635 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9636 fold (build (NE_EXPR, TREE_TYPE (exp),
9637 fold (build1 (REALPART_EXPR,
9638 TREE_TYPE (inner_type),
9640 fold (build1 (REALPART_EXPR,
9641 TREE_TYPE (inner_type),
9643 fold (build (NE_EXPR, TREE_TYPE (exp),
9644 fold (build1 (IMAGPART_EXPR,
9645 TREE_TYPE (inner_type),
9647 fold (build1 (IMAGPART_EXPR,
9648 TREE_TYPE (inner_type),
9650 if_false_label, if_true_label);
9653 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9654 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9656 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9657 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9658 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9660 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9665 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9666 if (GET_MODE_CLASS (mode) == MODE_INT
9667 && ! can_compare_p (LT, mode, ccp_jump))
9668 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9670 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9674 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9675 if (GET_MODE_CLASS (mode) == MODE_INT
9676 && ! can_compare_p (LE, mode, ccp_jump))
9677 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9679 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9683 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9684 if (GET_MODE_CLASS (mode) == MODE_INT
9685 && ! can_compare_p (GT, mode, ccp_jump))
9686 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9688 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9692 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9693 if (GET_MODE_CLASS (mode) == MODE_INT
9694 && ! can_compare_p (GE, mode, ccp_jump))
9695 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9697 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9700 case UNORDERED_EXPR:
9703 enum rtx_code cmp, rcmp;
9706 if (code == UNORDERED_EXPR)
9707 cmp = UNORDERED, rcmp = ORDERED;
9709 cmp = ORDERED, rcmp = UNORDERED;
9710 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9713 if (! can_compare_p (cmp, mode, ccp_jump)
9714 && (can_compare_p (rcmp, mode, ccp_jump)
9715 /* If the target doesn't provide either UNORDERED or ORDERED
9716 comparisons, canonicalize on UNORDERED for the library. */
9717 || rcmp == UNORDERED))
9721 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9723 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9728 enum rtx_code rcode1;
9729 enum tree_code tcode2;
9753 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9754 if (can_compare_p (rcode1, mode, ccp_jump))
9755 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9759 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9760 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9763 /* If the target doesn't support combined unordered
9764 compares, decompose into UNORDERED + comparison. */
9765 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9766 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9767 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9768 do_jump (exp, if_false_label, if_true_label);
/* Handle the special forms
   __builtin_expect (<test>, 0) and
   __builtin_expect (<test>, 1).

   We need to do this here, so that <test> is not converted to an SCC
   operation on machines that use condition code registers and COMPARE
   like the PowerPC, and then the jump is done based on whether the SCC
   operation produced a 1 or 0.  */
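/* E.g. (a sketch):  if (__builtin_expect (x > 0, 1)) ...  should still
   emit a conditional jump on `x > 0' directly, not a jump on a 0/1
   value produced by a store-flag (SCC) sequence.  */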
9782 /* Check for a built-in function. */
9783 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9785 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9786 tree arglist = TREE_OPERAND (exp, 1);
9788 if (TREE_CODE (fndecl) == FUNCTION_DECL
9789 && DECL_BUILT_IN (fndecl)
9790 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9791 && arglist != NULL_TREE
9792 && TREE_CHAIN (arglist) != NULL_TREE)
9794 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9797 if (seq != NULL_RTX)
9804 /* fall through and generate the normal code. */
9808 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9810 /* This is not needed any more and causes poor code since it causes
comparisons and tests from non-SI objects to have different code
sequences.  */
9813 /* Copy to register to avoid generating bad insns by cse
9814 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9815 if (!cse_not_expected && GET_CODE (temp) == MEM)
9816 temp = copy_to_reg (temp);
9818 do_pending_stack_adjust ();
9819 /* Do any postincrements in the expression that was tested. */
9822 if (GET_CODE (temp) == CONST_INT
9823 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9824 || GET_CODE (temp) == LABEL_REF)
9826 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9830 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9831 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9832 /* Note swapping the labels gives us not-equal. */
9833 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9834 else if (GET_MODE (temp) != VOIDmode)
9835 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9836 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9837 GET_MODE (temp), NULL_RTX,
9838 if_false_label, if_true_label);
9843 if (drop_through_label)
9845 /* If do_jump produces code that might be jumped around,
9846 do any stack adjusts from that code, before the place
9847 where control merges in. */
9848 do_pending_stack_adjust ();
9849 emit_label (drop_through_label);
9853 /* Given a comparison expression EXP for values too wide to be compared
9854 with one insn, test the comparison and jump to the appropriate label.
9855 The code of EXP is ignored; we always test GT if SWAP is 0,
9856 and LT if SWAP is 1. */
9859 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9862 rtx if_false_label, if_true_label;
9864 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9865 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9866 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9867 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9869 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9872 /* Compare OP0 with OP1, word at a time, in mode MODE.
9873 UNSIGNEDP says to do unsigned comparison.
9874 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9877 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9878 enum machine_mode mode;
9881 rtx if_false_label, if_true_label;
9883 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9884 rtx drop_through_label = 0;
9887 if (! if_true_label || ! if_false_label)
9888 drop_through_label = gen_label_rtx ();
9889 if (! if_true_label)
9890 if_true_label = drop_through_label;
9891 if (! if_false_label)
9892 if_false_label = drop_through_label;
9894 /* Compare a word at a time, high order first. */
9895 for (i = 0; i < nwords; i++)
9897 rtx op0_word, op1_word;
9899 if (WORDS_BIG_ENDIAN)
9901 op0_word = operand_subword_force (op0, i, mode);
9902 op1_word = operand_subword_force (op1, i, mode);
9906 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9907 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
/* All but the high-order word must be compared as unsigned.  */
9911 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9912 (unsignedp || i > 0), word_mode, NULL_RTX,
9913 NULL_RTX, if_true_label);
9915 /* Consider lower words only if these are equal. */
9916 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9917 NULL_RTX, NULL_RTX, if_false_label);
9921 emit_jump (if_false_label);
9922 if (drop_through_label)
9923 emit_label (drop_through_label);
9926 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9927 with one insn, test the comparison and jump to the appropriate label. */
9930 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9932 rtx if_false_label, if_true_label;
9934 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9935 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9936 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9937 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9939 rtx drop_through_label = 0;
9941 if (! if_false_label)
9942 drop_through_label = if_false_label = gen_label_rtx ();
9944 for (i = 0; i < nwords; i++)
9945 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9946 operand_subword_force (op1, i, mode),
9947 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9948 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9951 emit_jump (if_true_label);
9952 if (drop_through_label)
9953 emit_label (drop_through_label);
9956 /* Jump according to whether OP0 is 0.
9957 We assume that OP0 has an integer mode that is too wide
9958 for the available compare insns. */
9961 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9963 rtx if_false_label, if_true_label;
9965 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9968 rtx drop_through_label = 0;
9970 /* The fastest way of doing this comparison on almost any machine is to
9971 "or" all the words and compare the result. If all have to be loaded
9972 from memory and this is a very wide item, it's possible this may
9973 be slower, but that's highly unlikely. */
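/* Sketch, not original code: for a two-word X the emitted test is the
   single comparison below rather than a chain of word compares.  */
#if 0
static int
is_zero_two_words (unsigned long hi, unsigned long lo)
{
  return (hi | lo) == 0;	/* one IOR, one compare against zero */
}
#endif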
9975 part = gen_reg_rtx (word_mode);
9976 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9977 for (i = 1; i < nwords && part != 0; i++)
9978 part = expand_binop (word_mode, ior_optab, part,
9979 operand_subword_force (op0, i, GET_MODE (op0)),
9980 part, 1, OPTAB_WIDEN);
9984 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9985 NULL_RTX, if_false_label, if_true_label);
9990 /* If we couldn't do the "or" simply, do this with a series of compares. */
9991 if (! if_false_label)
9992 drop_through_label = if_false_label = gen_label_rtx ();
9994 for (i = 0; i < nwords; i++)
9995 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9996 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9997 if_false_label, NULL_RTX);
10000 emit_jump (if_true_label);
10002 if (drop_through_label)
10003 emit_label (drop_through_label);
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared), and set (CC0)
   according to the result.  The decision as to signed or unsigned
   comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */
10018 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10020 enum rtx_code code;
10022 enum machine_mode mode;
10027 /* If one operand is constant, make it the second one. Only do this
10028 if the other operand is not constant as well. */
10030 if (swap_commutative_operands_p (op0, op1))
10035 code = swap_condition (code);
10038 if (flag_force_mem)
10040 op0 = force_not_mem (op0);
10041 op1 = force_not_mem (op1);
10044 do_pending_stack_adjust ();
10046 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10047 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10051 /* There's no need to do this now that combine.c can eliminate lots of
sign extensions.  This can be less efficient in certain cases on other
machines.  */
10055 /* If this is a signed equality comparison, we can do it as an
10056 unsigned comparison since zero-extension is cheaper than sign
10057 extension and comparisons with zero are done as unsigned. This is
10058 the case even on machines that can do fast sign extension, since
10059 zero-extension is easier to combine with other operations than
10060 sign-extension is. If we are comparing against a constant, we must
10061 convert it to what it would look like unsigned. */
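/* Example (a sketch): an EQ against -1 in QImode would be done unsigned
   with the constant reduced to the mode's image, i.e. `x == 0xff'.  */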
10062 if ((code == EQ || code == NE) && ! unsignedp
10063 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10065 if (GET_CODE (op1) == CONST_INT
10066 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10067 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10072 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10074 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10077 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10078 The decision as to signed or unsigned comparison must be made by the caller.
If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
compared.  */
10084 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10085 if_false_label, if_true_label)
10087 enum rtx_code code;
10089 enum machine_mode mode;
10091 rtx if_false_label, if_true_label;
10094 int dummy_true_label = 0;
/* Reverse the comparison if that is safe and we want to jump if it is
false.  */
10098 if (! if_true_label && ! FLOAT_MODE_P (mode))
10100 if_true_label = if_false_label;
10101 if_false_label = 0;
10102 code = reverse_condition (code);

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
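
  /* For instance, a signed "x < 1" becomes "x <= 0" here, and an
     unsigned "x >= 1" becomes "x > 0", so the zero-comparison special
     cases further down apply to them as well.  */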

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
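
  /* For example, "(x & 4) != 0" becomes "(x >> 2) & 1", and
     "(x & 4) == 0" becomes "((x >> 2) & 1) ^ 1"; when the bit tested
     is the sign bit, an unsigned shift already leaves 0 or 1 and the
     final AND is omitted (see the TYPE_PRECISION tests below).  */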

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }
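
      /* E.g. "((x >> 3) & 2) != 0" tests bit 1 of "x >> 3", which is
         bit 4 of x, so BITNUM becomes 4 and INNER becomes x.  */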

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
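
/* Editorial note on the fallback above: TARGET is preset to the value
   for "condition true", the conditional branch skips the final move
   when the condition holds, and otherwise the final move overwrites
   TARGET with the "condition false" value -- the set/jump/set sequence
   promised in the function comment.  */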

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
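
/* In other words: when a casesi pattern exists its bounds check comes
   for free, so a dispatch table pays off at 4 case values; without one,
   the explicit range check emitted by do_tablejump raises the
   break-even point to 5.  */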

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
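
      /* Editorial note: for, say, a DImode index on a 32-bit target,
         the subtraction and the range check above are done in DImode;
         once the value is known to lie in [0, RANGE] it fits in SImode,
         so the truncation loses nothing.  */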
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
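
  /* For a switch whose cases span [3, 10], INDEX arrives here as the
     original index minus 3 and RANGE is 7; an original index of 2
     wraps around to a huge unsigned value, so the single GTU test
     above catches both out-of-range directions at once.  */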

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
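
  /* I.e. the dispatch slot for case I lives at
     TABLE_LABEL + I * GET_MODE_SIZE (CASE_VECTOR_MODE); with 4-byte
     table entries, case 2 loads its target from table_label + 8.  */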
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}