/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
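
/* Worked example (illustrative, assuming the non--Os default ratio of 15
   and 4-byte words): a word-aligned 64-byte copy costs 16 SImode moves,
   so MOVE_BY_PIECES_P fails and emit_block_move falls back on a movstr
   pattern or a library call, while a 12-byte copy costs only 3 moves
   and is expanded inline by move_by_pieces.  */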
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Mark the rtl in this function's expr_status so it is not collected
   by the garbage collector.  */

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == 0)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
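
/* An illustrative sketch of the whole protocol (hypothetical caller;
   compare expand_increment later in this file).  Queue V = V + 1, use
   the pre-increment value, then flush the queue:

     rtx q = enqueue_insn (v, gen_add2_insn (v, const1_rtx));
     rtx x = protect_from_queue (q, 0);
     emit_move_insn (target, x);
     emit_queue ();

   X here is safe to put in the move because it is either V itself or,
   once the increment has been emitted, a copy of V's old value.  */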
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
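
/* For example (illustrative): with a QImode pseudo FROM and an SImode
   pseudo TO,

     convert_move (to, from, 1);

   emits a zero-extension, while UNSIGNEDP == 0 would emit a
   sign-extension instead.  */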
void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);
  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
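
/* Example (illustrative): a CONST_INT known to hold a QImode value can
   be widened without emitting any insns:

     rtx x = convert_modes (SImode, QImode, GEN_INT (-1), 1);

   yields (const_int 255), i.e. the QImode bits zero-extended, where a
   bare gen_lowpart would have produced the sign-extended -1.  */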
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
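
/* E.g. (illustrative) with an 8-byte HOST_WIDE_INT and MOVE_MAX_PIECES
   of 8, STORE_MAX_PIECES is MIN (8, 16), i.e. 8 bytes per store.  */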
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
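
/* Illustrative call (hypothetical BLKmode MEMs TO and FROM, 32 bytes,
   32-bit alignment in bits):

     move_by_pieces (to, from, 32, 32);

   On a 32-bit target this becomes eight SImode moves.  */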
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
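
/* E.g. (illustrative, MOVE_MAX == 4, ALIGN == 32 bits): L == 11 counts
   2 SImode + 1 HImode + 1 QImode moves, i.e. 4 insns.  */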
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
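
/* Illustrative use (hypothetical 16-byte BLKmode stack temporaries):

     rtx src = assign_stack_temp (BLKmode, 16, 0);
     rtx dst = assign_stack_temp (BLKmode, 16, 0);
     emit_block_move (dst, src, GEN_INT (16));
*/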
rtx
emit_block_move (x, y, size)
     rtx x, y;
     rtx size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;
      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif
#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      /* If we are initializing a readonly value, show the above call
	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
	 from a loop.  */
      if (RTX_UNCHANGING_P (x))
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
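
/* For instance (illustrative), a target returning a 16-byte structure
   in two DImode registers might describe DST as

     (parallel [(expr_list (reg:DI 4) (const_int 0))
		(expr_list (reg:DI 5) (const_int 8))])

   and emit_group_load (dst, src, 16) would then load bytes 0-7 of SRC
   into register 4 and bytes 8-15 into register 5.  */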
void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if ((bytepos == 0
	       && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	      || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
	    {
	      tmps[i] = XEXP (src, bytepos != 0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     0, 1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */
void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */
rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
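
  /* E.g. (illustrative) a 3-byte structure with 32-bit words gets a
     correction of 32 - 3 * 8 == 8 bits.  */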
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  BITS_PER_WORD),
		       BITS_PER_WORD);
    }

  return tgtblk;
}
2283 /* Add a USE expression for REG to the (possibly empty) list pointed
2284 to by CALL_FUSAGE. REG must denote a hard register. */
2287 use_reg (call_fusage, reg)
2288 rtx *call_fusage, reg;
2290 if (GET_CODE (reg) != REG
2291 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2295 = gen_rtx_EXPR_LIST (VOIDmode,
2296 gen_rtx_USE (VOIDmode, reg), *call_fusage);
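/* Editor's sketch (hypothetical hard register numbers): building the
   CALL_INSN_FUNCTION_USAGE chain for a call that uses hard regs 0-2,
   with use_regs (defined just below) covering the consecutive pair.  */
#if 0
  rtx fusage = NULL_RTX;
  use_reg (&fusage, gen_rtx_REG (SImode, 0));
  use_regs (&fusage, 1, 2);	/* hard regs 1 and 2 */
#endif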
2299 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2300 starting at REGNO. All of these registers must be hard registers. */
2303 use_regs (call_fusage, regno, nregs)
2310 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2313 for (i = 0; i < nregs; i++)
2314 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2317 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2318 PARALLEL REGS. This is for calls that pass values in multiple
2319 non-contiguous locations. The Irix 6 ABI has examples of this. */
2322 use_group_regs (call_fusage, regs)
2328 for (i = 0; i < XVECLEN (regs, 0); i++)
2330 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2332 /* A NULL entry means the parameter goes both on the stack and in
2333 registers. This can also be a MEM for targets that pass values
2334 partially on the stack and partially in registers. */
2335 if (reg != 0 && GET_CODE (reg) == REG)
2336 use_reg (call_fusage, reg);
2341 /* Determine whether the LEN bytes generated by CONSTFUN can be
2342 stored to memory using several move instructions. CONSTFUNDATA is
2343 a pointer which will be passed as argument in every CONSTFUN call.
2344 ALIGN is maximum alignment we can assume. Return nonzero if a
2345 call to store_by_pieces should succeed. */
2348 can_store_by_pieces (len, constfun, constfundata, align)
2349 unsigned HOST_WIDE_INT len;
2350 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2354 unsigned HOST_WIDE_INT max_size, l;
2355 HOST_WIDE_INT offset = 0;
2356 enum machine_mode mode, tmode;
2357 enum insn_code icode;
2361 if (! MOVE_BY_PIECES_P (len, align))
2364 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2365 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2366 align = MOVE_MAX * BITS_PER_UNIT;
2368 /* We would first store what we can in the largest integer mode, then go to
2369 successively smaller modes. */
2372 for (reverse = 0; reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT); reverse++)
2377 max_size = STORE_MAX_PIECES + 1;
2378 while (max_size > 1)
2380 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2381 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2382 if (GET_MODE_SIZE (tmode) < max_size)
2385 if (mode == VOIDmode)
2388 icode = mov_optab->handlers[(int) mode].insn_code;
2389 if (icode != CODE_FOR_nothing
2390 && align >= GET_MODE_ALIGNMENT (mode))
2392 unsigned int size = GET_MODE_SIZE (mode);
2399 cst = (*constfun) (constfundata, offset, mode);
2400 if (!LEGITIMATE_CONSTANT_P (cst))
2410 max_size = GET_MODE_SIZE (mode);
2413 /* The code above should have handled everything. */
2421 /* Generate several move instructions to store LEN bytes generated by
2422 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2423 pointer which will be passed as argument in every CONSTFUN call.
2424 ALIGN is maximum alignment we can assume. */
2427 store_by_pieces (to, len, constfun, constfundata, align)
2429 unsigned HOST_WIDE_INT len;
2430 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2434 struct store_by_pieces data;
2436 if (! MOVE_BY_PIECES_P (len, align))
2438 to = protect_from_queue (to, 1);
2439 data.constfun = constfun;
2440 data.constfundata = constfundata;
2443 store_by_pieces_1 (&data, align);
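/* Editor's sketch: a CONSTFUN in the style of the string-reading callbacks
   in builtins.c.  c_readstr (static to builtins.c) is assumed reachable
   here purely for illustration, and example_read_str/example_store_str
   are hypothetical helpers, not part of this file.  */
#if 0
static rtx
example_read_str (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset;
     enum machine_mode mode;
{
  /* Hand back the piece of the string at OFFSET as a MODE immediate.  */
  return c_readstr ((const char *) data + offset, mode);
}

static void
example_store_str (dest, str, len, align)
     rtx dest;
     const char *str;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  if (can_store_by_pieces (len, example_read_str, (PTR) str, align))
    store_by_pieces (dest, len, example_read_str, (PTR) str, align);
}
#endif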
2446 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2447 rtx with BLKmode). The caller must pass TO through protect_from_queue
2448 before calling. ALIGN is maximum alignment we can assume. */
2451 clear_by_pieces (to, len, align)
2453 unsigned HOST_WIDE_INT len;
2456 struct store_by_pieces data;
2458 data.constfun = clear_by_pieces_1;
2459 data.constfundata = NULL;
2462 store_by_pieces_1 (&data, align);
2465 /* Callback routine for clear_by_pieces.
2466 Return const0_rtx unconditionally. */
2469 clear_by_pieces_1 (data, offset, mode)
2470 PTR data ATTRIBUTE_UNUSED;
2471 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2472 enum machine_mode mode ATTRIBUTE_UNUSED;
2477 /* Subroutine of clear_by_pieces and store_by_pieces.
2478 Generate several move instructions to store LEN bytes of block TO. (A MEM
2479 rtx with BLKmode). The caller must pass TO through protect_from_queue
2480 before calling. ALIGN is maximum alignment we can assume. */
2483 store_by_pieces_1 (data, align)
2484 struct store_by_pieces *data;
2487 rtx to_addr = XEXP (data->to, 0);
2488 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2489 enum machine_mode mode = VOIDmode, tmode;
2490 enum insn_code icode;
2493 data->to_addr = to_addr;
2495 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2496 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2498 data->explicit_inc_to = 0;
2500 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2502 data->offset = data->len;
2504 /* If storing requires more than two move insns,
2505 copy addresses to registers (to make displacements shorter)
2506 and use post-increment if available. */
2507 if (!data->autinc_to
2508 && move_by_pieces_ninsns (data->len, align) > 2)
2510 /* Determine the main mode we'll be using. */
2511 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2512 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2513 if (GET_MODE_SIZE (tmode) < max_size)
2516 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2518 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2519 data->autinc_to = 1;
2520 data->explicit_inc_to = -1;
2523 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2524 && ! data->autinc_to)
2526 data->to_addr = copy_addr_to_reg (to_addr);
2527 data->autinc_to = 1;
2528 data->explicit_inc_to = 1;
2531 if ( !data->autinc_to && CONSTANT_P (to_addr))
2532 data->to_addr = copy_addr_to_reg (to_addr);
2535 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2536 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2537 align = MOVE_MAX * BITS_PER_UNIT;
2539 /* First store what we can in the largest integer mode, then go to
2540 successively smaller modes. */
2542 while (max_size > 1)
2544 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2545 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2546 if (GET_MODE_SIZE (tmode) < max_size)
2549 if (mode == VOIDmode)
2552 icode = mov_optab->handlers[(int) mode].insn_code;
2553 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2554 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2556 max_size = GET_MODE_SIZE (mode);
2559 /* The code above should have handled everything. */
2564 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2565 with move instructions for mode MODE. GENFUN is the gen_... function
2566 to make a move insn for that mode. DATA has all the other info. */
2569 store_by_pieces_2 (genfun, mode, data)
2570 rtx (*genfun) PARAMS ((rtx, ...));
2571 enum machine_mode mode;
2572 struct store_by_pieces *data;
2574 unsigned int size = GET_MODE_SIZE (mode);
2577 while (data->len >= size)
2580 data->offset -= size;
2582 if (data->autinc_to)
2583 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2586 to1 = adjust_address (data->to, mode, data->offset);
2588 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2589 emit_insn (gen_add2_insn (data->to_addr,
2590 GEN_INT (-(HOST_WIDE_INT) size)));
2592 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2593 emit_insn ((*genfun) (to1, cst));
2595 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2596 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2598 if (! data->reverse)
2599 data->offset += size;
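/* Editor's worked example: for LEN == 6 and 32-bit alignment on a target
   with SImode and HImode move patterns, store_by_pieces_1 calls this
   routine twice, emitting one SImode store at offset 0 and one HImode
   store at offset 4; when a pre-decrement address is in use, the reverse
   flag is set and OFFSET counts down from LEN instead.  */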
2605 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2606 its length in bytes. */
2609 clear_storage (object, size)
2613 #ifdef TARGET_MEM_FUNCTIONS
2615 tree call_expr, arg_list;
2618 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2619 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2621 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2622 just move a zero. Otherwise, do this a piece at a time. */
2623 if (GET_MODE (object) != BLKmode
2624 && GET_CODE (size) == CONST_INT
2625 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2626 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2629 object = protect_from_queue (object, 1);
2630 size = protect_from_queue (size, 0);
2632 if (GET_CODE (size) == CONST_INT
2633 && MOVE_BY_PIECES_P (INTVAL (size), align))
2634 clear_by_pieces (object, INTVAL (size), align);
2637 /* Try the most limited insn first, because there's no point
2638 including more than one in the machine description unless
2639 the more limited one has some advantage. */
2641 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2642 enum machine_mode mode;
2644 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2645 mode = GET_MODE_WIDER_MODE (mode))
2647 enum insn_code code = clrstr_optab[(int) mode];
2648 insn_operand_predicate_fn pred;
2650 if (code != CODE_FOR_nothing
2651 /* We don't need MODE to be narrower than
2652 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2653 the mode mask, as it is returned by the macro, it will
2654 definitely be less than the actual mode mask. */
2655 && ((GET_CODE (size) == CONST_INT
2656 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2657 <= (GET_MODE_MASK (mode) >> 1)))
2658 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2659 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2660 || (*pred) (object, BLKmode))
2661 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2662 || (*pred) (opalign, VOIDmode)))
2665 rtx last = get_last_insn ();
2668 op1 = convert_to_mode (mode, size, 1);
2669 pred = insn_data[(int) code].operand[1].predicate;
2670 if (pred != 0 && ! (*pred) (op1, mode))
2671 op1 = copy_to_mode_reg (mode, op1);
2673 pat = GEN_FCN ((int) code) (object, op1, opalign);
2680 delete_insns_since (last);
2684 /* OBJECT or SIZE may have been passed through protect_from_queue.
2686 It is unsafe to save the value generated by protect_from_queue
2687 and reuse it later. Consider what happens if emit_queue is
2688 called before the return value from protect_from_queue is used.
2690 Expansion of the CALL_EXPR below will call emit_queue before
2691 we are finished emitting RTL for argument setup. So if we are
2692 not careful we could get the wrong value for an argument.
2694 To avoid this problem we go ahead and emit code to copy OBJECT
2695 and SIZE into new pseudos. We can then place those new pseudos
2696 into an RTL_EXPR and use them later, even after a call to
2699 Note this is not strictly needed for library calls since they
2700 do not call emit_queue before loading their arguments. However,
2701 we may need to have library calls call emit_queue in the future
2702 since failing to do so could cause problems for targets which
2703 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2704 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2706 #ifdef TARGET_MEM_FUNCTIONS
2707 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2709 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2710 TREE_UNSIGNED (integer_type_node));
2711 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2714 #ifdef TARGET_MEM_FUNCTIONS
2715 /* It is incorrect to use the libcall calling conventions to call
2716 memset in this context.
2718 This could be a user call to memset and the user may wish to
2719 examine the return value from memset.
2721 For targets where libcalls and normal calls have different
2722 conventions for returning pointers, we could end up generating
2723 incorrect code.
2725 So instead of using a libcall sequence we build up a suitable
2726 CALL_EXPR and expand the call in the normal fashion. */
2727 if (fn == NULL_TREE)
2731 /* This was copied from except.c; I don't know whether all of it is
2732 necessary in this context. */
2733 fn = get_identifier ("memset");
2734 fntype = build_pointer_type (void_type_node);
2735 fntype = build_function_type (fntype, NULL_TREE);
2736 fn = build_decl (FUNCTION_DECL, fn, fntype);
2737 ggc_add_tree_root (&fn, 1);
2738 DECL_EXTERNAL (fn) = 1;
2739 TREE_PUBLIC (fn) = 1;
2740 DECL_ARTIFICIAL (fn) = 1;
2741 TREE_NOTHROW (fn) = 1;
2742 make_decl_rtl (fn, NULL);
2743 assemble_external (fn);
2746 /* We need to make an argument list for the function call.
2748 memset has three arguments: the first is a void * address, the
2749 second an integer with the initialization value, and the last a
2750 size_t byte count. */
2752 = build_tree_list (NULL_TREE,
2753 make_tree (build_pointer_type (void_type_node),
2755 TREE_CHAIN (arg_list)
2756 = build_tree_list (NULL_TREE,
2757 make_tree (integer_type_node, const0_rtx));
2758 TREE_CHAIN (TREE_CHAIN (arg_list))
2759 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2760 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2762 /* Now we have to build up the CALL_EXPR itself. */
2763 call_expr = build1 (ADDR_EXPR,
2764 build_pointer_type (TREE_TYPE (fn)), fn);
2765 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2766 call_expr, arg_list, NULL_TREE);
2767 TREE_SIDE_EFFECTS (call_expr) = 1;
2769 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2771 emit_library_call (bzero_libfunc, LCT_NORMAL,
2772 VOIDmode, 2, object, Pmode, size,
2773 TYPE_MODE (integer_type_node));
2776 /* If we are initializing a readonly value, show the above call
2777 clobbered it. Otherwise, a load from it may erroneously be
2778 hoisted from a loop. */
2779 if (RTX_UNCHANGING_P (object))
2780 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
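/* Editor's sketch (not part of the original source): zeroing a 32-byte
   BLKmode stack temporary; for BLKmode objects the size operand is a
   CONST_INT byte count.  */
#if 0
  rtx blk = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (blk, GEN_INT (32));
#endif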
2787 /* Generate code to copy Y into X.
2788 Both Y and X must have the same mode, except that
2789 Y can be a constant with VOIDmode.
2790 This mode cannot be BLKmode; use emit_block_move for that.
2792 Return the last instruction emitted. */
2795 emit_move_insn (x, y)
2798 enum machine_mode mode = GET_MODE (x);
2799 rtx y_cst = NULL_RTX;
2802 x = protect_from_queue (x, 1);
2803 y = protect_from_queue (y, 0);
2805 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2808 /* Never force constant_p_rtx to memory. */
2809 if (GET_CODE (y) == CONSTANT_P_RTX)
2811 else if (CONSTANT_P (y))
2814 && FLOAT_MODE_P (GET_MODE (x))
2815 && (last_insn = compress_float_constant (x, y)))
2818 if (!LEGITIMATE_CONSTANT_P (y))
2821 y = force_const_mem (mode, y);
2825 /* If X or Y are memory references, verify that their addresses are valid
2827 if (GET_CODE (x) == MEM
2828 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2829 && ! push_operand (x, GET_MODE (x)))
2831 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2832 x = validize_mem (x);
2834 if (GET_CODE (y) == MEM
2835 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2837 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2838 y = validize_mem (y);
2840 if (mode == BLKmode)
2843 last_insn = emit_move_insn_1 (x, y);
2845 if (y_cst && GET_CODE (x) == REG)
2846 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
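/* Editor's sketch: the common case of moving a constant into a fresh
   pseudo; if the constant were not LEGITIMATE_CONSTANT_P on the target,
   the code above would first force it into the constant pool.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
#endif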
2851 /* Low level part of emit_move_insn.
2852 Called just like emit_move_insn, but assumes X and Y
2853 are basically valid. */
2856 emit_move_insn_1 (x, y)
2859 enum machine_mode mode = GET_MODE (x);
2860 enum machine_mode submode;
2861 enum mode_class class = GET_MODE_CLASS (mode);
2863 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2866 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2868 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2870 /* Expand complex moves by moving real part and imag part, if possible. */
2871 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2872 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2874 (class == MODE_COMPLEX_INT
2875 ? MODE_INT : MODE_FLOAT),
2877 && (mov_optab->handlers[(int) submode].insn_code
2878 != CODE_FOR_nothing))
2880 /* Don't split destination if it is a stack push. */
2881 int stack = push_operand (x, GET_MODE (x));
2883 #ifdef PUSH_ROUNDING
2884 /* In case we output to the stack, but the size is smaller than the machine
2885 can push exactly, we need to use move instructions. */
2887 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2888 != GET_MODE_SIZE (submode)))
2891 HOST_WIDE_INT offset1, offset2;
2893 /* Do not use anti_adjust_stack, since we don't want to update
2894 stack_pointer_delta. */
2895 temp = expand_binop (Pmode,
2896 #ifdef STACK_GROWS_DOWNWARD
2904 (GET_MODE_SIZE (GET_MODE (x)))),
2905 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2907 if (temp != stack_pointer_rtx)
2908 emit_move_insn (stack_pointer_rtx, temp);
2910 #ifdef STACK_GROWS_DOWNWARD
2912 offset2 = GET_MODE_SIZE (submode);
2914 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2915 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2916 + GET_MODE_SIZE (submode));
2919 emit_move_insn (change_address (x, submode,
2920 gen_rtx_PLUS (Pmode,
2922 GEN_INT (offset1))),
2923 gen_realpart (submode, y));
2924 emit_move_insn (change_address (x, submode,
2925 gen_rtx_PLUS (Pmode,
2927 GEN_INT (offset2))),
2928 gen_imagpart (submode, y));
2932 /* If this is a stack push, push the highpart first, so it
2933 will be in the argument order.
2935 In that case, change_address is used only to convert
2936 the mode, not to change the address. */
2939 /* Note that the real part always precedes the imag part in memory
2940 regardless of machine's endianness. */
2941 #ifdef STACK_GROWS_DOWNWARD
2942 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2943 (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_imagpart (submode, y)));
2945 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2946 (gen_rtx_MEM (submode, XEXP (x, 0)),
2947 gen_realpart (submode, y)));
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (gen_rtx_MEM (submode, XEXP (x, 0)),
2951 gen_realpart (submode, y)));
2952 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2953 (gen_rtx_MEM (submode, XEXP (x, 0)),
2954 gen_imagpart (submode, y)));
2959 rtx realpart_x, realpart_y;
2960 rtx imagpart_x, imagpart_y;
2962 /* If this is a complex value with each part being smaller than a
2963 word, the usual calling sequence will likely pack the pieces into
2964 a single register. Unfortunately, SUBREG of hard registers only
2965 deals in terms of words, so we have a problem converting input
2966 arguments to the CONCAT of two registers that is used elsewhere
2967 for complex values. If this is before reload, we can copy it into
2968 memory and reload. FIXME, we should see about using extract and
2969 insert on integer registers, but complex short and complex char
2970 variables should be rarely used. */
2971 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2972 && (reload_in_progress | reload_completed) == 0)
2975 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2977 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2979 if (packed_dest_p || packed_src_p)
2981 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2982 ? MODE_FLOAT : MODE_INT);
2984 enum machine_mode reg_mode
2985 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2987 if (reg_mode != BLKmode)
2989 rtx mem = assign_stack_temp (reg_mode,
2990 GET_MODE_SIZE (mode), 0);
2991 rtx cmem = adjust_address (mem, mode, 0);
2994 = N_("function using short complex types cannot be inline");
2998 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3000 emit_move_insn_1 (cmem, y);
3001 return emit_move_insn_1 (sreg, mem);
3005 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3007 emit_move_insn_1 (mem, sreg);
3008 return emit_move_insn_1 (x, cmem);
3014 realpart_x = gen_realpart (submode, x);
3015 realpart_y = gen_realpart (submode, y);
3016 imagpart_x = gen_imagpart (submode, x);
3017 imagpart_y = gen_imagpart (submode, y);
3019 /* Show the output dies here. This is necessary for SUBREGs
3020 of pseudos since we cannot track their lifetimes correctly;
3021 hard regs shouldn't appear here except as return values.
3022 We never want to emit such a clobber after reload. */
3024 && ! (reload_in_progress || reload_completed)
3025 && (GET_CODE (realpart_x) == SUBREG
3026 || GET_CODE (imagpart_x) == SUBREG))
3027 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3029 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3030 (realpart_x, realpart_y));
3031 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3032 (imagpart_x, imagpart_y));
3035 return get_last_insn ();
3038 /* This will handle any multi-word mode that lacks a move_insn pattern.
3039 However, you will get better code if you define such patterns,
3040 even if they must turn into multiple assembler instructions. */
3041 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3048 #ifdef PUSH_ROUNDING
3050 /* If X is a push on the stack, do the push now and replace
3051 X with a reference to the stack pointer. */
3052 if (push_operand (x, GET_MODE (x)))
3057 /* Do not use anti_adjust_stack, since we don't want to update
3058 stack_pointer_delta. */
3059 temp = expand_binop (Pmode,
3060 #ifdef STACK_GROWS_DOWNWARD
3068 (GET_MODE_SIZE (GET_MODE (x)))),
3069 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3071 if (temp != stack_pointer_rtx)
3072 emit_move_insn (stack_pointer_rtx, temp);
3074 code = GET_CODE (XEXP (x, 0));
3076 /* Just hope that small offsets off SP are OK. */
3077 if (code == POST_INC)
3078 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3079 GEN_INT (-((HOST_WIDE_INT)
3080 GET_MODE_SIZE (GET_MODE (x)))));
3081 else if (code == POST_DEC)
3082 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3083 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3085 temp = stack_pointer_rtx;
3087 x = change_address (x, VOIDmode, temp);
3091 /* If we are in reload, see if either operand is a MEM whose address
3092 is scheduled for replacement. */
3093 if (reload_in_progress && GET_CODE (x) == MEM
3094 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3095 x = replace_equiv_address_nv (x, inner);
3096 if (reload_in_progress && GET_CODE (y) == MEM
3097 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3098 y = replace_equiv_address_nv (y, inner);
3104 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3107 rtx xpart = operand_subword (x, i, 1, mode);
3108 rtx ypart = operand_subword (y, i, 1, mode);
3110 /* If we can't get a part of Y, put Y into memory if it is a
3111 constant. Otherwise, force it into a register. If we still
3112 can't get a part of Y, abort. */
3113 if (ypart == 0 && CONSTANT_P (y))
3115 y = force_const_mem (mode, y);
3116 ypart = operand_subword (y, i, 1, mode);
3118 else if (ypart == 0)
3119 ypart = operand_subword_force (y, i, mode);
3121 if (xpart == 0 || ypart == 0)
3124 need_clobber |= (GET_CODE (xpart) == SUBREG);
3126 last_insn = emit_move_insn (xpart, ypart);
3129 seq = gen_sequence ();
3132 /* Show the output dies here. This is necessary for SUBREGs
3133 of pseudos since we cannot track their lifetimes correctly;
3134 hard regs shouldn't appear here except as return values.
3135 We never want to emit such a clobber after reload. */
3137 && ! (reload_in_progress || reload_completed)
3138 && need_clobber != 0)
3139 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3149 /* If Y is representable exactly in a narrower mode, and the target can
3150 perform the extension directly from constant or memory, then emit the
3151 move as an extension. */
3154 compress_float_constant (x, y)
3157 enum machine_mode dstmode = GET_MODE (x);
3158 enum machine_mode orig_srcmode = GET_MODE (y);
3159 enum machine_mode srcmode;
3162 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3164 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3165 srcmode != orig_srcmode;
3166 srcmode = GET_MODE_WIDER_MODE (srcmode))
3169 rtx trunc_y, last_insn;
3171 /* Skip if the target can't extend this way. */
3172 ic = can_extend_p (dstmode, srcmode, 0);
3173 if (ic == CODE_FOR_nothing)
3176 /* Skip if the narrowed value isn't exact. */
3177 if (! exact_real_truncate (srcmode, &r))
3180 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3182 if (LEGITIMATE_CONSTANT_P (trunc_y))
3184 /* Skip if the target needs extra instructions to perform
3185 the extension. */
3186 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3189 else if (float_extend_from_mem[dstmode][srcmode])
3190 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3194 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3195 last_insn = get_last_insn ();
3197 if (GET_CODE (x) == REG)
3198 REG_NOTES (last_insn)
3199 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
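/* Editor's example of the transformation above: on a target with an
   extendsfdf2 pattern, a DFmode move of the constant 1.0 can be emitted
   as an SFmode constant load plus a float extension, since 1.0 truncates
   to SFmode exactly; a constant like 0.1 fails exact_real_truncate and
   is moved at full DFmode precision instead.  */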
3207 /* Pushing data onto the stack. */
3209 /* Push a block of length SIZE (perhaps variable)
3210 and return an rtx to address the beginning of the block.
3211 Note that it is not possible for the value returned to be a QUEUED.
3212 The value may be virtual_outgoing_args_rtx.
3214 EXTRA is the number of bytes of padding to push in addition to SIZE.
3215 BELOW nonzero means this padding comes at low addresses;
3216 otherwise, the padding comes at high addresses. */
3219 push_block (size, extra, below)
3225 size = convert_modes (Pmode, ptr_mode, size, 1);
3226 if (CONSTANT_P (size))
3227 anti_adjust_stack (plus_constant (size, extra));
3228 else if (GET_CODE (size) == REG && extra == 0)
3229 anti_adjust_stack (size);
3232 temp = copy_to_mode_reg (Pmode, size);
3234 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3235 temp, 0, OPTAB_LIB_WIDEN);
3236 anti_adjust_stack (temp);
3239 #ifndef STACK_GROWS_DOWNWARD
3245 temp = virtual_outgoing_args_rtx;
3246 if (extra != 0 && below)
3247 temp = plus_constant (temp, extra);
3251 if (GET_CODE (size) == CONST_INT)
3252 temp = plus_constant (virtual_outgoing_args_rtx,
3253 -INTVAL (size) - (below ? 0 : extra));
3254 else if (extra != 0 && !below)
3255 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3256 negate_rtx (Pmode, plus_constant (size, extra)));
3258 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3259 negate_rtx (Pmode, size));
3262 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
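/* Editor's sketch: reserving a 16-byte block with 4 bytes of padding at
   the low-address end; the returned rtx addresses the start of the
   16-byte block itself.  */
#if 0
  rtx addr = push_block (GEN_INT (16), 4, 1);
#endif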
3265 #ifdef PUSH_ROUNDING
3267 /* Emit single push insn. */
3270 emit_single_push_insn (mode, x, type)
3272 enum machine_mode mode;
3276 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3278 enum insn_code icode;
3279 insn_operand_predicate_fn pred;
3281 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3282 /* If there is a push pattern, use it. Otherwise try the old way of
3283 throwing a MEM representing the push operation at the move expander. */
3284 icode = push_optab->handlers[(int) mode].insn_code;
3285 if (icode != CODE_FOR_nothing)
3287 if (((pred = insn_data[(int) icode].operand[0].predicate)
3288 && !((*pred) (x, mode))))
3289 x = force_reg (mode, x);
3290 emit_insn (GEN_FCN (icode) (x));
3293 if (GET_MODE_SIZE (mode) == rounded_size)
3294 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3297 #ifdef STACK_GROWS_DOWNWARD
3298 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3299 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3301 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3302 GEN_INT (rounded_size));
3304 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3307 dest = gen_rtx_MEM (mode, dest_addr);
3311 set_mem_attributes (dest, type, 1);
3313 if (flag_optimize_sibling_calls)
3314 /* Function incoming arguments may overlap with sibling call
3315 outgoing arguments and we cannot allow reordering of reads
3316 from function arguments with stores to outgoing arguments
3317 of sibling calls. */
3318 set_mem_alias_set (dest, 0);
3320 emit_move_insn (dest, x);
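/* Editor's note: when the mode's size equals its PUSH_ROUNDING, the
   address built above is a plain {PRE,POST}_{DEC,INC} of the stack
   pointer, so on a typical STACK_GROWS_DOWNWARD target the insn emitted
   is just
     (set (mem:SI (pre_dec:SI (reg sp))) x)
   whereas odd-sized pushes go through the PRE_MODIFY form.  */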
3324 /* Generate code to push X onto the stack, assuming it has mode MODE and
3325 type TYPE.
3326 MODE is redundant except when X is a CONST_INT (since they don't
3327 carry mode info).
3328 SIZE is an rtx for the size of data to be copied (in bytes),
3329 needed only if X is BLKmode.
3331 ALIGN (in bits) is maximum alignment we can assume.
3333 If PARTIAL and REG are both nonzero, then copy that many of the first
3334 words of X into registers starting with REG, and push the rest of X.
3335 The amount of space pushed is decreased by PARTIAL words,
3336 rounded *down* to a multiple of PARM_BOUNDARY.
3337 REG must be a hard register in this case.
3338 If REG is zero but PARTIAL is not, take all other actions for an
3339 argument partially in registers, but do not actually load any
3340 registers.
3342 EXTRA is the amount in bytes of extra space to leave next to this arg.
3343 This is ignored if an argument block has already been allocated.
3345 On a machine that lacks real push insns, ARGS_ADDR is the address of
3346 the bottom of the argument block for this call. We use indexing off there
3347 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3348 argument block has not been preallocated.
3350 ARGS_SO_FAR is the size of args previously pushed for this call.
3352 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3353 for arguments passed in registers. If nonzero, it will be the number
3354 of bytes required. */
3357 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3358 args_addr, args_so_far, reg_parm_stack_space,
3361 enum machine_mode mode;
3370 int reg_parm_stack_space;
3374 enum direction stack_direction
3375 #ifdef STACK_GROWS_DOWNWARD
3381 /* Decide where to pad the argument: `downward' for below,
3382 `upward' for above, or `none' for don't pad it.
3383 Default is below for small data on big-endian machines; else above. */
3384 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3386 /* Invert direction if stack is post-decrement. */
3388 if (STACK_PUSH_CODE == POST_DEC)
3389 if (where_pad != none)
3390 where_pad = (where_pad == downward ? upward : downward);
3392 xinner = x = protect_from_queue (x, 0);
3394 if (mode == BLKmode)
3396 /* Copy a block into the stack, entirely or partially. */
3399 int used = partial * UNITS_PER_WORD;
3400 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3408 /* USED is now the # of bytes we need not copy to the stack
3409 because registers will take care of them. */
3412 xinner = adjust_address (xinner, BLKmode, used);
3414 /* If the partial register-part of the arg counts in its stack size,
3415 skip the part of stack space corresponding to the registers.
3416 Otherwise, start copying to the beginning of the stack space,
3417 by setting SKIP to 0. */
3418 skip = (reg_parm_stack_space == 0) ? 0 : used;
3420 #ifdef PUSH_ROUNDING
3421 /* Do it with several push insns if that doesn't take lots of insns
3422 and if there is no difficulty with push insns that skip bytes
3423 on the stack for alignment purposes. */
3426 && GET_CODE (size) == CONST_INT
3428 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3429 /* Here we avoid the case of a structure whose weak alignment
3430 forces many pushes of a small amount of data,
3431 and such small pushes do rounding that causes trouble. */
3432 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3433 || align >= BIGGEST_ALIGNMENT
3434 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3435 == (align / BITS_PER_UNIT)))
3436 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3438 /* Push padding now if padding above and stack grows down,
3439 or if padding below and stack grows up.
3440 But if space already allocated, this has already been done. */
3441 if (extra && args_addr == 0
3442 && where_pad != none && where_pad != stack_direction)
3443 anti_adjust_stack (GEN_INT (extra));
3445 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3448 #endif /* PUSH_ROUNDING */
3452 /* Otherwise make space on the stack and copy the data
3453 to the address of that space. */
3455 /* Deduct words put into registers from the size we must copy. */
3458 if (GET_CODE (size) == CONST_INT)
3459 size = GEN_INT (INTVAL (size) - used);
3461 size = expand_binop (GET_MODE (size), sub_optab, size,
3462 GEN_INT (used), NULL_RTX, 0,
3466 /* Get the address of the stack space.
3467 In this case, we do not deal with EXTRA separately.
3468 A single stack adjust will do. */
3471 temp = push_block (size, extra, where_pad == downward);
3474 else if (GET_CODE (args_so_far) == CONST_INT)
3475 temp = memory_address (BLKmode,
3476 plus_constant (args_addr,
3477 skip + INTVAL (args_so_far)));
3479 temp = memory_address (BLKmode,
3480 plus_constant (gen_rtx_PLUS (Pmode,
3484 target = gen_rtx_MEM (BLKmode, temp);
3488 set_mem_attributes (target, type, 1);
3489 /* Function incoming arguments may overlap with sibling call
3490 outgoing arguments and we cannot allow reordering of reads
3491 from function arguments with stores to outgoing arguments
3492 of sibling calls. */
3493 set_mem_alias_set (target, 0);
3496 set_mem_align (target, align);
3498 /* TEMP is the address of the block. Copy the data there. */
3499 if (GET_CODE (size) == CONST_INT
3500 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3502 move_by_pieces (target, xinner, INTVAL (size), align);
3507 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3508 enum machine_mode mode;
3510 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3512 mode = GET_MODE_WIDER_MODE (mode))
3514 enum insn_code code = movstr_optab[(int) mode];
3515 insn_operand_predicate_fn pred;
3517 if (code != CODE_FOR_nothing
3518 && ((GET_CODE (size) == CONST_INT
3519 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3520 <= (GET_MODE_MASK (mode) >> 1)))
3521 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3522 && (!(pred = insn_data[(int) code].operand[0].predicate)
3523 || ((*pred) (target, BLKmode)))
3524 && (!(pred = insn_data[(int) code].operand[1].predicate)
3525 || ((*pred) (xinner, BLKmode)))
3526 && (!(pred = insn_data[(int) code].operand[3].predicate)
3527 || ((*pred) (opalign, VOIDmode))))
3529 rtx op2 = convert_to_mode (mode, size, 1);
3530 rtx last = get_last_insn ();
3533 pred = insn_data[(int) code].operand[2].predicate;
3534 if (pred != 0 && ! (*pred) (op2, mode))
3535 op2 = copy_to_mode_reg (mode, op2);
3537 pat = GEN_FCN ((int) code) (target, xinner,
3545 delete_insns_since (last);
3550 if (!ACCUMULATE_OUTGOING_ARGS)
3552 /* If the source is referenced relative to the stack pointer,
3553 copy it to another register to stabilize it. We do not need
3554 to do this if we know that we won't be changing sp. */
3556 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3557 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3558 temp = copy_to_reg (temp);
3561 /* Make inhibit_defer_pop nonzero around the library call
3562 to force it to pop the bcopy-arguments right away. */
3564 #ifdef TARGET_MEM_FUNCTIONS
3565 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3566 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3567 convert_to_mode (TYPE_MODE (sizetype),
3568 size, TREE_UNSIGNED (sizetype)),
3569 TYPE_MODE (sizetype));
3571 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3572 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3573 convert_to_mode (TYPE_MODE (integer_type_node),
3575 TREE_UNSIGNED (integer_type_node)),
3576 TYPE_MODE (integer_type_node));
3581 else if (partial > 0)
3583 /* Scalar partly in registers. */
3585 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3588 /* # words of start of argument
3589 that we must make space for but need not store. */
3590 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3591 int args_offset = INTVAL (args_so_far);
3594 /* Push padding now if padding above and stack grows down,
3595 or if padding below and stack grows up.
3596 But if space already allocated, this has already been done. */
3597 if (extra && args_addr == 0
3598 && where_pad != none && where_pad != stack_direction)
3599 anti_adjust_stack (GEN_INT (extra));
3601 /* If we make space by pushing it, we might as well push
3602 the real data. Otherwise, we can leave OFFSET nonzero
3603 and leave the space uninitialized. */
3607 /* Now NOT_STACK gets the number of words that we don't need to
3608 allocate on the stack. */
3609 not_stack = partial - offset;
3611 /* If the partial register-part of the arg counts in its stack size,
3612 skip the part of stack space corresponding to the registers.
3613 Otherwise, start copying to the beginning of the stack space,
3614 by setting SKIP to 0. */
3615 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3617 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3618 x = validize_mem (force_const_mem (mode, x));
3620 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3621 SUBREGs of such registers are not allowed. */
3622 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3623 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3624 x = copy_to_reg (x);
3626 /* Loop over all the words allocated on the stack for this arg. */
3627 /* We can do it by words, because any scalar bigger than a word
3628 has a size a multiple of a word. */
3629 #ifndef PUSH_ARGS_REVERSED
3630 for (i = not_stack; i < size; i++)
3632 for (i = size - 1; i >= not_stack; i--)
3634 if (i >= not_stack + offset)
3635 emit_push_insn (operand_subword_force (x, i, mode),
3636 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3638 GEN_INT (args_offset + ((i - not_stack + skip)
3640 reg_parm_stack_space, alignment_pad);
3645 rtx target = NULL_RTX;
3648 /* Push padding now if padding above and stack grows down,
3649 or if padding below and stack grows up.
3650 But if space already allocated, this has already been done. */
3651 if (extra && args_addr == 0
3652 && where_pad != none && where_pad != stack_direction)
3653 anti_adjust_stack (GEN_INT (extra));
3655 #ifdef PUSH_ROUNDING
3656 if (args_addr == 0 && PUSH_ARGS)
3657 emit_single_push_insn (mode, x, type);
3661 if (GET_CODE (args_so_far) == CONST_INT)
3663 = memory_address (mode,
3664 plus_constant (args_addr,
3665 INTVAL (args_so_far)));
3667 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3670 dest = gen_rtx_MEM (mode, addr);
3673 set_mem_attributes (dest, type, 1);
3674 /* Function incoming arguments may overlap with sibling call
3675 outgoing arguments and we cannot allow reordering of reads
3676 from function arguments with stores to outgoing arguments
3677 of sibling calls. */
3678 set_mem_alias_set (dest, 0);
3681 emit_move_insn (dest, x);
3687 /* If part should go in registers, copy that part
3688 into the appropriate registers. Do this now, at the end,
3689 since mem-to-mem copies above may do function calls. */
3690 if (partial > 0 && reg != 0)
3692 /* Handle calls that pass values in multiple non-contiguous locations.
3693 The Irix 6 ABI has examples of this. */
3694 if (GET_CODE (reg) == PARALLEL)
3695 emit_group_load (reg, x, -1); /* ??? size? */
3697 move_block_to_reg (REGNO (reg), x, partial, mode);
3700 if (extra && args_addr == 0 && where_pad == stack_direction)
3701 anti_adjust_stack (GEN_INT (extra));
3703 if (alignment_pad && args_addr == 0)
3704 anti_adjust_stack (alignment_pad);
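/* Editor's sketch (VAL is hypothetical; arguments follow the order
   documented above): pushing a single word with no partial-register
   part, no preallocated argument block and no alignment padding.  */
#if 0
  emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
		  0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);
#endif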
3707 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3708 operations. */
3715 /* Only registers can be subtargets. */
3716 || GET_CODE (x) != REG
3717 /* If the register is readonly, it can't be set more than once. */
3718 || RTX_UNCHANGING_P (x)
3719 /* Don't use hard regs to avoid extending their life. */
3720 || REGNO (x) < FIRST_PSEUDO_REGISTER
3721 /* Avoid subtargets inside loops,
3722 since they hide some invariant expressions. */
3723 || preserve_subexpressions_p ())
3727 /* Expand an assignment that stores the value of FROM into TO.
3728 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3729 (This may contain a QUEUED rtx;
3730 if the value is constant, this rtx is a constant.)
3731 Otherwise, the returned value is NULL_RTX.
3733 SUGGEST_REG is no longer actually used.
3734 It used to mean, copy the value through a register
3735 and return that register, if that is possible.
3736 We now use WANT_VALUE to decide whether to do this. */
3739 expand_assignment (to, from, want_value, suggest_reg)
3742 int suggest_reg ATTRIBUTE_UNUSED;
3747 /* Don't crash if the lhs of the assignment was erroneous. */
3749 if (TREE_CODE (to) == ERROR_MARK)
3751 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3752 return want_value ? result : NULL_RTX;
3755 /* Assignment of a structure component needs special treatment
3756 if the structure component's rtx is not simply a MEM.
3757 Assignment of an array element at a constant index, and assignment of
3758 an array element in an unaligned packed structure field, have the same
3759 problem. */
3761 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3762 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3764 enum machine_mode mode1;
3765 HOST_WIDE_INT bitsize, bitpos;
3773 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3774 &unsignedp, &volatilep);
3776 /* If we are going to use store_bit_field and extract_bit_field,
3777 make sure to_rtx will be safe for multiple use. */
3779 if (mode1 == VOIDmode && want_value)
3780 tem = stabilize_reference (tem);
3782 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3786 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3788 if (GET_CODE (to_rtx) != MEM)
3791 #ifdef POINTERS_EXTEND_UNSIGNED
3792 if (GET_MODE (offset_rtx) != Pmode)
3793 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3795 if (GET_MODE (offset_rtx) != ptr_mode)
3796 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3799 /* A constant address in TO_RTX can have VOIDmode; we must not
3800 call force_reg in that case. */
3801 if (GET_CODE (to_rtx) == MEM
3802 && GET_MODE (to_rtx) == BLKmode
3803 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3805 && (bitpos % bitsize) == 0
3806 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3807 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3809 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3813 to_rtx = offset_address (to_rtx, offset_rtx,
3814 highest_pow2_factor_for_type (TREE_TYPE (to),
3818 if (GET_CODE (to_rtx) == MEM)
3820 tree old_expr = MEM_EXPR (to_rtx);
3822 /* If the field is at offset zero, we could have been given the
3823 DECL_RTX of the parent struct. Don't munge it. */
3824 to_rtx = shallow_copy_rtx (to_rtx);
3826 set_mem_attributes (to_rtx, to, 0);
3828 /* If we changed MEM_EXPR, that means we're now referencing
3829 the COMPONENT_REF, which means that MEM_OFFSET must be
3830 relative to that field. But we've not yet reflected BITPOS
3831 in TO_RTX. This will be done in store_field. Adjust for
3832 that by biasing MEM_OFFSET by -bitpos. */
3833 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3834 && (bitpos / BITS_PER_UNIT) != 0)
3835 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3836 - (bitpos / BITS_PER_UNIT)));
3839 /* Deal with volatile and readonly fields. The former is only done
3840 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3841 if (volatilep && GET_CODE (to_rtx) == MEM)
3843 if (to_rtx == orig_to_rtx)
3844 to_rtx = copy_rtx (to_rtx);
3845 MEM_VOLATILE_P (to_rtx) = 1;
3848 if (TREE_CODE (to) == COMPONENT_REF
3849 && TREE_READONLY (TREE_OPERAND (to, 1)))
3851 if (to_rtx == orig_to_rtx)
3852 to_rtx = copy_rtx (to_rtx);
3853 RTX_UNCHANGING_P (to_rtx) = 1;
3856 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3858 if (to_rtx == orig_to_rtx)
3859 to_rtx = copy_rtx (to_rtx);
3860 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3863 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3865 /* Spurious cast for HPUX compiler. */
3866 ? ((enum machine_mode)
3867 TYPE_MODE (TREE_TYPE (to)))
3869 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3871 preserve_temp_slots (result);
3875 /* If the value is meaningful, convert RESULT to the proper mode.
3876 Otherwise, return nothing. */
3877 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3878 TYPE_MODE (TREE_TYPE (from)),
3880 TREE_UNSIGNED (TREE_TYPE (to)))
3884 /* If the rhs is a function call and its value is not an aggregate,
3885 call the function before we start to compute the lhs.
3886 This is needed for correct code for cases such as
3887 val = setjmp (buf) on machines where reference to val
3888 requires loading up part of an address in a separate insn.
3890 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3891 since it might be a promoted variable where the zero- or sign- extension
3892 needs to be done. Handling this in the normal way is safe because no
3893 computation is done before the call. */
3894 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3895 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3896 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3897 && GET_CODE (DECL_RTL (to)) == REG))
3902 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3904 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3906 /* Handle calls that return values in multiple non-contiguous locations.
3907 The Irix 6 ABI has examples of this. */
3908 if (GET_CODE (to_rtx) == PARALLEL)
3909 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3910 else if (GET_MODE (to_rtx) == BLKmode)
3911 emit_block_move (to_rtx, value, expr_size (from));
3914 #ifdef POINTERS_EXTEND_UNSIGNED
3915 if (POINTER_TYPE_P (TREE_TYPE (to))
3916 && GET_MODE (to_rtx) != GET_MODE (value))
3917 value = convert_memory_address (GET_MODE (to_rtx), value);
3919 emit_move_insn (to_rtx, value);
3921 preserve_temp_slots (to_rtx);
3924 return want_value ? to_rtx : NULL_RTX;
3927 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3928 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3931 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3933 /* Don't move directly into a return register. */
3934 if (TREE_CODE (to) == RESULT_DECL
3935 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3940 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3942 if (GET_CODE (to_rtx) == PARALLEL)
3943 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3945 emit_move_insn (to_rtx, temp);
3947 preserve_temp_slots (to_rtx);
3950 return want_value ? to_rtx : NULL_RTX;
3953 /* In case we are returning the contents of an object which overlaps
3954 the place the value is being stored, use a safe function when copying
3955 a value through a pointer into a structure value return block. */
3956 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3957 && current_function_returns_struct
3958 && !current_function_returns_pcc_struct)
3963 size = expr_size (from);
3964 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3966 #ifdef TARGET_MEM_FUNCTIONS
3967 emit_library_call (memmove_libfunc, LCT_NORMAL,
3968 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3969 XEXP (from_rtx, 0), Pmode,
3970 convert_to_mode (TYPE_MODE (sizetype),
3971 size, TREE_UNSIGNED (sizetype)),
3972 TYPE_MODE (sizetype));
3974 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3975 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3976 XEXP (to_rtx, 0), Pmode,
3977 convert_to_mode (TYPE_MODE (integer_type_node),
3978 size, TREE_UNSIGNED (integer_type_node)),
3979 TYPE_MODE (integer_type_node));
3982 preserve_temp_slots (to_rtx);
3985 return want_value ? to_rtx : NULL_RTX;
3988 /* Compute FROM and store the value in the rtx we got. */
3991 result = store_expr (from, to_rtx, want_value);
3992 preserve_temp_slots (result);
3995 return want_value ? result : NULL_RTX;
3998 /* Generate code for computing expression EXP,
3999 and storing the value into TARGET.
4000 TARGET may contain a QUEUED rtx.
4002 If WANT_VALUE is nonzero, return a copy of the value
4003 not in TARGET, so that we can be sure to use the proper
4004 value in a containing expression even if TARGET has something
4005 else stored in it. If possible, we copy the value through a pseudo
4006 and return that pseudo. Or, if the value is constant, we try to
4007 return the constant. In some cases, we return a pseudo
4008 copied *from* TARGET.
4010 If the mode is BLKmode then we may return TARGET itself.
4011 It turns out that in BLKmode it doesn't cause a problem,
4012 because C has no operators that could combine two different
4013 assignments into the same BLKmode object with different values
4014 with no sequence point. Will other languages need this to
4015 be more careful?
4017 If WANT_VALUE is 0, we return NULL, to make sure
4018 to catch quickly any cases where the caller uses the value
4019 and fails to set WANT_VALUE. */
4022 store_expr (exp, target, want_value)
4028 int dont_return_target = 0;
4029 int dont_store_target = 0;
4031 if (TREE_CODE (exp) == COMPOUND_EXPR)
4033 /* Perform first part of compound expression, then assign from second
4034 part. */
4035 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4037 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4039 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4041 /* For conditional expression, get safe form of the target. Then
4042 test the condition, doing the appropriate assignment on either
4043 side. This avoids the creation of unnecessary temporaries.
4044 For non-BLKmode, it is more efficient not to do this. */
4046 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4049 target = protect_from_queue (target, 1);
4051 do_pending_stack_adjust ();
4053 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4054 start_cleanup_deferral ();
4055 store_expr (TREE_OPERAND (exp, 1), target, 0);
4056 end_cleanup_deferral ();
4058 emit_jump_insn (gen_jump (lab2));
4061 start_cleanup_deferral ();
4062 store_expr (TREE_OPERAND (exp, 2), target, 0);
4063 end_cleanup_deferral ();
4068 return want_value ? target : NULL_RTX;
4070 else if (queued_subexp_p (target))
4071 /* If target contains a postincrement, let's not risk
4072 using it as the place to generate the rhs. */
4074 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4076 /* Expand EXP into a new pseudo. */
4077 temp = gen_reg_rtx (GET_MODE (target));
4078 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4081 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4083 /* If target is volatile, ANSI requires accessing the value
4084 *from* the target, if it is accessed. So make that happen.
4085 In no case return the target itself. */
4086 if (! MEM_VOLATILE_P (target) && want_value)
4087 dont_return_target = 1;
4089 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4090 && GET_MODE (target) != BLKmode)
4091 /* If target is in memory and caller wants value in a register instead,
4092 arrange that. Pass TARGET as target for expand_expr so that,
4093 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4094 We know expand_expr will not use the target in that case.
4095 Don't do this if TARGET is volatile because we are supposed
4096 to write it and then read it. */
4098 temp = expand_expr (exp, target, GET_MODE (target), 0);
4099 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4101 /* If TEMP is already in the desired TARGET, only copy it from
4102 memory and don't store it there again. */
4104 || (rtx_equal_p (temp, target)
4105 && ! side_effects_p (temp) && ! side_effects_p (target)))
4106 dont_store_target = 1;
4107 temp = copy_to_reg (temp);
4109 dont_return_target = 1;
4111 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4112 /* If this is a scalar in a register that is stored in a wider mode
4113 than the declared mode, compute the result into its declared mode
4114 and then convert to the wider mode. Our value is the computed
4115 expression. */
4117 rtx inner_target = 0;
4119 /* If we don't want a value, we can do the conversion inside EXP,
4120 which will often result in some optimizations. Do the conversion
4121 in two steps: first change the signedness, if needed, then
4122 the extend. But don't do this if the type of EXP is a subtype
4123 of something else since then the conversion might involve
4124 more than just converting modes. */
4125 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4126 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4128 if (TREE_UNSIGNED (TREE_TYPE (exp))
4129 != SUBREG_PROMOTED_UNSIGNED_P (target))
4131 ((*lang_hooks.types.signed_or_unsigned_type)
4132 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4134 exp = convert ((*lang_hooks.types.type_for_mode)
4135 (GET_MODE (SUBREG_REG (target)),
4136 SUBREG_PROMOTED_UNSIGNED_P (target)),
4139 inner_target = SUBREG_REG (target);
4142 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4144 /* If TEMP is a volatile MEM and we want a result value, make
4145 the access now so it gets done only once. Likewise if
4146 it contains TARGET. */
4147 if (GET_CODE (temp) == MEM && want_value
4148 && (MEM_VOLATILE_P (temp)
4149 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4150 temp = copy_to_reg (temp);
4152 /* If TEMP is a VOIDmode constant, use convert_modes to make
4153 sure that we properly convert it. */
4154 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4156 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4157 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4158 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4159 GET_MODE (target), temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
4163 convert_move (SUBREG_REG (target), temp,
4164 SUBREG_PROMOTED_UNSIGNED_P (target));
4166 /* If we promoted a constant, change the mode back down to match
4167 target. Otherwise, the caller might get confused by a result whose
4168 mode is larger than expected. */
4170 if (want_value && GET_MODE (temp) != GET_MODE (target))
4172 if (GET_MODE (temp) != VOIDmode)
4174 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4175 SUBREG_PROMOTED_VAR_P (temp) = 1;
4176 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4177 SUBREG_PROMOTED_UNSIGNED_P (target));
4180 temp = convert_modes (GET_MODE (target),
4181 GET_MODE (SUBREG_REG (target)),
4182 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4185 return want_value ? temp : NULL_RTX;
4189 temp = expand_expr (exp, target, GET_MODE (target), 0);
4190 /* Return TARGET if it's a specified hardware register.
4191 If TARGET is a volatile mem ref, either return TARGET
4192 or return a reg copied *from* TARGET; ANSI requires this.
4194 Otherwise, if TEMP is not TARGET, return TEMP
4195 if it is constant (for efficiency),
4196 or if we really want the correct value. */
4197 if (!(target && GET_CODE (target) == REG
4198 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4199 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4200 && ! rtx_equal_p (temp, target)
4201 && (CONSTANT_P (temp) || want_value))
4202 dont_return_target = 1;
4205 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4206 the same as that of TARGET, adjust the constant. This is needed, for
4207 example, in case it is a CONST_DOUBLE and we want only a word-sized
4208 value. */
4209 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4210 && TREE_CODE (exp) != ERROR_MARK
4211 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4212 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4213 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4215 /* If value was not generated in the target, store it there.
4216 Convert the value to TARGET's type first if necessary.
4217 If TEMP and TARGET compare equal according to rtx_equal_p, but
4218 one or both of them are volatile memory refs, we have to distinguish
4220 - expand_expr has used TARGET. In this case, we must not generate
4221 another copy. This can be detected by TARGET being equal according
4222 to ==.
4223 - expand_expr has not used TARGET - that means that the source just
4224 happens to have the same RTX form. Since temp will have been created
4225 by expand_expr, it will compare unequal according to == .
4226 We must generate a copy in this case, to reach the correct number
4227 of volatile memory references. */
4229 if ((! rtx_equal_p (temp, target)
4230 || (temp != target && (side_effects_p (temp)
4231 || side_effects_p (target))))
4232 && TREE_CODE (exp) != ERROR_MARK
4233 && ! dont_store_target
4234 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4235 but TARGET is not a valid memory reference, TEMP will differ
4236 from TARGET although it is really the same location. */
4237 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4238 || target != DECL_RTL_IF_SET (exp)))
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
4248 so make sure it has the proper mode.
4249 But don't forget to store the value into TARGET. */
4250 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
4259 /* Handle copying a string constant into an array. The string
4260 constant may be shorter than the array. So copy just the string's
4261 actual length, and clear the rest. First get the size of the data
4262 type of the string, which is actually the size of the target. */
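	  /* Editor's example (illustrative only): for

	       char buf[8] = "hi";

	     the string constant supplies 3 bytes (two characters plus
	     the terminating NUL) while the target occupies 8, so 3
	     bytes are block-copied and the remaining 5 are cleared.  */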
4263 rtx size = expr_size (exp);
4265 if (GET_CODE (size) == CONST_INT
4266 && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
4273 make_tree (sizetype, size),
4274 size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;
4279 /* Copy that much. */
4280 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4281 emit_block_move (target, temp, copy_size_rtx);
4283 /* Figure out how much is left in TARGET that we have to clear.
4284 Do all calculations in ptr_mode. */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);
4297 #ifdef POINTERS_EXTEND_UNSIGNED
4298 if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_memory_address (Pmode,
							    copy_size_rtx);
#endif

4303 target = offset_address (target, copy_size_rtx,
4304 highest_pow2_factor (copy_size));
4305 label = gen_label_rtx ();
4306 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	  if (size != const0_rtx)
	    clear_storage (target, size);

	  if (label)
	    emit_label (label);
	}
4317 /* Handle calls that return values in multiple non-contiguous locations.
4318 The Irix 6 ABI has examples of this. */
4319 else if (GET_CODE (target) == PARALLEL)
4320 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4321 else if (GET_MODE (temp) == BLKmode)
4322 emit_block_move (target, temp, expr_size (exp));
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;
4331 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4332 ??? The latter test doesn't seem to make sense. */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;
4336 /* Return TARGET itself if it is a hard register. */
4337 else if (want_value && GET_MODE (target) != BLKmode
4338 && ! (GET_CODE (target) == REG
4339 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
4346 /* Return 1 if EXP just contains zeros. */
static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
4358 case NON_LVALUE_EXPR:
4359 case VIEW_CONVERT_EXPR:
4360 return is_zeros_p (TREE_OPERAND (exp, 0));
    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4374 elt = TREE_CHAIN (elt))
	if (!is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    case CONSTRUCTOR:
4381 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4382 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4383 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
4394 /* Return 1 if EXP contains mostly (3/4) zeros. */
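/* Editor's example (not in the original sources): the initializer
   { 0, 0, 0, 5 } has three zero elements out of four, so the test
   4 * zeros >= 3 * elts below holds (12 >= 12) and the function
   returns 1; for { 5, 6, 0, 0 } it fails (8 < 12) and returns 0.  */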
static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
4402 int elts = 0, zeros = 0;
4403 tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}

      for (; elt; elt = TREE_CHAIN (elt))
	{
4411 /* We do not handle the case where the index is a RANGE_EXPR,
4412 so the statistic will be somewhat inaccurate.
4413 We do make a more accurate count in store_constructor itself,
	     and since this function is only used for nested array elements,
4415 this should be close enough. */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
4427 /* Helper function for store_constructor.
4428 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4429 TYPE is the type of the CONSTRUCTOR, not the element type.
4430 CLEARED is as for store_constructor.
4431 ALIAS_SET is the alias set to use for any stores.
4433 This provides a recursive shortcut back to store_constructor when it isn't
4434 necessary to go through store_field. This is so that we can pass through
4435 the cleared field to let store_constructor know that we may not have to
4436 clear a substructure if the outer structure has already been cleared. */
static void
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
			 alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
     int alias_set;
{
4449 if (TREE_CODE (exp) == CONSTRUCTOR
4450 && bitpos % BITS_PER_UNIT == 0
4451 /* If we have a non-zero bitpos for a register target, then we just
4452 let store_field do the bitfield handling. This is unlikely to
	 generate unnecessary clear instructions anyway.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (GET_CODE (target) == MEM)
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4462 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4465 /* Update the alias set, if required. */
4466 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		 alias_set);
}
4480 /* Store the value of constructor EXP into the rtx TARGET.
4481 TARGET is either a REG or a MEM; we know it cannot conflict, since
4482 safe_from_p has been called.
4483 CLEARED is true if TARGET is known to have been zero'd.
4484 SIZE is the number of bytes of TARGET we are allowed to modify: this
4485 may not be the same as the size of EXP if we are assigning to a field
4486 which has been packed to exclude padding bits. */
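/* Editor's illustration (not part of the original sources): for

     struct s { int a, b, c; } x = { 1 };

   the constructor has fewer elements than the structure has fields,
   so the code below clears all of X first and then stores only the
   explicitly given value 1 into the first field.  */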
static void
store_constructor (exp, target, cleared, size)
     tree exp;
     rtx target;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif
4500 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

4505 /* We either clear the aggregate or indicate the value is dead. */
4506 if ((TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && ! cleared
4509 && ! CONSTRUCTOR_ELTS (exp))
4510 /* If the constructor is empty, clear the union. */
	{
	  clear_storage (target, expr_size (exp));
	  cleared = 1;
	}
4516 /* If we are building a static constructor into a register,
4517 set the initial value as zero so we can fold the value into
4518 a constant. But if more than one register is involved,
4519 this probably loses. */
4520 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4521 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}
4527 /* If the constructor has fewer fields than the structure
4528 or if we are initializing the structure to mostly zeros,
4529 clear the whole structure first. Don't do this if TARGET is a
4530 register whose mode size isn't equal to SIZE since clear_storage
4531 can't handle this case. */
4532 else if (! cleared && size > 0
4533 && ((list_length (CONSTRUCTOR_ELTS (exp))
4534 != fields_length (type))
4535 || mostly_zeros_p (exp))
4536 && (GET_CODE (target) != REG
		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		       == size)))
	{
	  clear_storage (target, GEN_INT (size));
	  cleared = 1;
	}
      if (! cleared)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4547 /* Store each element of the constructor into
4548 the corresponding field of TARGET. */
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
4552 tree field = TREE_PURPOSE (elt);
4553 tree value = TREE_VALUE (elt);
4554 enum machine_mode mode;
4555 HOST_WIDE_INT bitsize;
4556 HOST_WIDE_INT bitpos = 0;
	  int unsignedp;
	  tree offset;
	  rtx to_rtx = target;
4561 /* Just ignore missing fields.
4562 We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (value))
	    continue;

4570 if (host_integerp (DECL_SIZE (field), 1))
	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;

4575 unsignedp = TREE_UNSIGNED (field);
4576 mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

4580 offset = DECL_FIELD_OFFSET (field);
4581 if (host_integerp (offset, 0)
4582 && host_integerp (bit_position (field), 0))
	    {
	      bitpos = int_bit_position (field);
	      offset = 0;
	    }
	  else
4588 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
4595 offset = build (WITH_RECORD_EXPR, sizetype,
4596 offset, make_tree (TREE_TYPE (exp), target));
4598 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

4602 #ifdef POINTERS_EXTEND_UNSIGNED
4603 if (GET_MODE (offset_rtx) != Pmode)
		offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
4606 if (GET_MODE (offset_rtx) != ptr_mode)
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

4610 to_rtx = offset_address (to_rtx, offset_rtx,
				       highest_pow2_factor (offset));
	    }

	  if (TREE_READONLY (field))
	    {
4616 if (GET_CODE (to_rtx) == MEM)
4617 to_rtx = copy_rtx (to_rtx);
	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

4622 #ifdef WORD_REGISTER_OPERATIONS
4623 /* If this initializes a field that is smaller than a word, at the
4624 start of a word, try to widen it to a full word.
4625 This special case allows us to output C++ member function
4626 initializations in a form that the optimizers can understand. */
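	  /* Editor's sketch of the effect (illustrative only): a
	     constant stored into a 16-bit field at bit 0 of a
	     word-sized register target is rewritten below as a
	     full-word constant store (left-shifted on big-endian
	     machines), so later passes see a plain word move rather
	     than a bit-field insertion.  */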
4627 if (GET_CODE (target) == REG
4628 && bitsize < BITS_PER_WORD
4629 && bitpos % BITS_PER_WORD == 0
4630 && GET_MODE_CLASS (mode) == MODE_INT
4631 && TREE_CODE (value) == INTEGER_CST
	      && exp_size >= 0
	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	    {
	      tree type = TREE_TYPE (value);

	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
4639 type = (*lang_hooks.types.type_for_size)
4640 (BITS_PER_WORD, TREE_UNSIGNED (type));
		  value = convert (type, value);
		}

	      if (BYTES_BIG_ENDIAN)
		value
4646 = fold (build (LSHIFT_EXPR, type, value,
4647 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif

	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
	      && DECL_NONADDRESSABLE_P (field))
	    {
4656 to_rtx = copy_rtx (to_rtx);
	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

4660 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4661 value, type, cleared,
				   get_alias_set (TREE_TYPE (field)));
	}
    }
4665 else if (TREE_CODE (type) == ARRAY_TYPE
	   || TREE_CODE (type) == VECTOR_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      tree elttype = TREE_TYPE (type);
      int const_bounds_p;
4674 HOST_WIDE_INT minelt = 0;
4675 HOST_WIDE_INT maxelt = 0;
      /* Vectors are like arrays, but the domain is stored via an array
	 type indirectly.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	{
4681 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
	     the same field as TYPE_DOMAIN, we are not guaranteed that
	     it always will.  */
	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
	}

4688 const_bounds_p = (TYPE_MIN_VALUE (domain)
4689 && TYPE_MAX_VALUE (domain)
4690 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4691 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4693 /* If we have constant bounds for the range of the type, get them. */
      if (const_bounds_p)
	{
	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	}

4700 /* If the constructor has fewer elements than the array,
4701 clear the whole array first. Similarly if this is
	 a static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
4707 HOST_WIDE_INT count = 0, zero_count = 0;
4708 need_to_clear = ! const_bounds_p;
4710 /* This loop is a more accurate version of the loop in
4711 mostly_zeros_p (it handles RANGE_EXPR in an index).
4712 It is also needed to check for missing elements. */
4713 for (elt = CONSTRUCTOR_ELTS (exp);
4714 elt != NULL_TREE && ! need_to_clear;
	       elt = TREE_CHAIN (elt))
	    {
4717 tree index = TREE_PURPOSE (elt);
4718 HOST_WIDE_INT this_node_count;
	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
4722 tree lo_index = TREE_OPERAND (index, 0);
4723 tree hi_index = TREE_OPERAND (index, 1);
4725 if (! host_integerp (lo_index, 1)
		      || ! host_integerp (hi_index, 1))
		    {
		      need_to_clear = 1;
		      break;
		    }

4732 this_node_count = (tree_low_cst (hi_index, 1)
				     - tree_low_cst (lo_index, 1) + 1);
		}
	      else
		this_node_count = 1;

4738 count += this_node_count;
4739 if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }

4743 /* Clear the entire array first if there are any missing elements,
4744 or if the incidence of zero elements is >= 75%. */
	  if (! need_to_clear
	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
	    need_to_clear = 1;
	}

      if (need_to_clear && size > 0)
	{
	  if (! cleared)
	    {
	      if (REG_P (target))
		emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	      else
		clear_storage (target, GEN_INT (size));
	    }

	  cleared = 1;
	}
4761 else if (REG_P (target))
4762 /* Inform later passes that the old value is dead. */
4763 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4765 /* Store each element of the constructor into
4766 the corresponding element of TARGET, determined
4767 by counting the elements. */
4768 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
4772 enum machine_mode mode;
4773 HOST_WIDE_INT bitsize;
4774 HOST_WIDE_INT bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
4777 tree index = TREE_PURPOSE (elt);
4778 rtx xtarget = target;
	  if (cleared && is_zeros_p (value))
	    continue;

4783 unsignedp = TREE_UNSIGNED (elttype);
4784 mode = TYPE_MODE (elttype);
4785 if (mode == BLKmode)
4786 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
		       : -1);
	  else
4790 bitsize = GET_MODE_BITSIZE (mode);
4792 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4794 tree lo_index = TREE_OPERAND (index, 0);
4795 tree hi_index = TREE_OPERAND (index, 1);
4796 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4797 struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      /* If the range is constant and "small", unroll the loop.  */
	      if (const_bounds_p
		  && host_integerp (lo_index, 0)
4804 && host_integerp (hi_index, 0)
4805 && (lo = tree_low_cst (lo_index, 0),
4806 hi = tree_low_cst (hi_index, 0),
4807 count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (host_integerp (TYPE_SIZE (elttype), 1)
			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
			       <= 40 * 8)))))
		{
4814 lo -= minelt; hi -= minelt;
4815 for (; lo <= hi; lo++)
		    {
		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

4819 if (GET_CODE (target) == MEM
4820 && !MEM_KEEP_ALIAS_SET_P (target)
4821 && TREE_CODE (type) == ARRAY_TYPE
4822 && TYPE_NONALIASED_COMPONENT (type))
			{
			  target = copy_rtx (target);
			  MEM_KEEP_ALIAS_SET_P (target) = 1;
			}

4828 store_constructor_field
4829 (target, bitsize, bitpos, mode, value, type, cleared,
			 get_alias_set (elttype));
		    }
		}
	      else
		{
4835 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4836 loop_top = gen_label_rtx ();
4837 loop_end = gen_label_rtx ();
4839 unsignedp = TREE_UNSIGNED (domain);
		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));
4846 SET_DECL_RTL (index, index_r);
4847 if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
4855 store_expr (lo_index, index_r, 0);
4856 loop = expand_start_loop (0);
		  /* Assign value to element index.  */
		  position
		    = convert (ssizetype,
4861 fold (build (MINUS_EXPR, TREE_TYPE (index),
4862 index, TYPE_MIN_VALUE (domain))));
		  position = size_binop (MULT_EXPR, position,
					 convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));

4867 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4868 xtarget = offset_address (target, pos_rtx,
4869 highest_pow2_factor (position));
4870 xtarget = adjust_address (xtarget, mode, 0);
4871 if (TREE_CODE (value) == CONSTRUCTOR)
4872 store_constructor (value, xtarget, cleared,
4873 bitsize / BITS_PER_UNIT);
		  else
		    store_expr (value, xtarget, 0);

4877 expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);
		}
4888 else if ((index != 0 && ! host_integerp (index, 0))
4889 || ! host_integerp (TYPE_SIZE (elttype), 1))
	    {
	      tree position;

	      if (index == 0)
		index = ssize_int (1);

	      if (minelt)
		index = convert (ssizetype,
4898 fold (build (MINUS_EXPR, index,
4899 TYPE_MIN_VALUE (domain))));
4901 position = size_binop (MULT_EXPR, index,
				     convert (ssizetype,
					      TYPE_SIZE_UNIT (elttype)));
4904 xtarget = offset_address (target,
4905 expand_expr (position, 0, VOIDmode, 0),
4906 highest_pow2_factor (position));
4907 xtarget = adjust_address (xtarget, mode, 0);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
4913 bitpos = ((tree_low_cst (index, 0) - minelt)
4914 * tree_low_cst (TYPE_SIZE (elttype), 1));
	      else
		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

4918 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4919 && TREE_CODE (type) == ARRAY_TYPE
4920 && TYPE_NONALIASED_COMPONENT (type))
		{
		  target = copy_rtx (target);
		  MEM_KEEP_ALIAS_SET_P (target) = 1;
		}

4926 store_constructor_field (target, bitsize, bitpos, mode, value,
				       type, cleared, get_alias_set (elttype));
	    }
	}
    }

4933 /* Set constructor assignments. */
  else if (TREE_CODE (type) == SET_TYPE)
    {
4936 tree elt = CONSTRUCTOR_ELTS (exp);
4937 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4938 tree domain = TYPE_DOMAIN (type);
4939 tree domain_min, domain_max, bitlength;
4941 /* The default implementation strategy is to extract the constant
4942 parts of the constructor, use that to initialize the target,
4943 and then "or" in whatever non-constant ranges we need in addition.
4945 If a large set is all zero or all ones, it is
4946 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset) and
	 then set the bits we want.  */
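      /* Editor's example (Pascal-style sets, illustrative only): a
	 constructor such as [0..7, 10] is entirely constant, so its
	 bits (0xff | 1 << 10) can be copied in as literal words by the
	 code below, whereas a run-time range such as [lo..hi] must be
	 OR'd in afterwards via the __setbits library call.  */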
4951 /* Check for all zeros. */
4952 if (elt == NULL_TREE && size > 0)
	{
	  if (!cleared)
	    clear_storage (target, GEN_INT (size));
	  return;
	}

4959 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4960 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4961 bitlength = size_binop (PLUS_EXPR,
			      size_diffop (domain_max, domain_min),
			      ssize_int (1));

4965 nbits = tree_low_cst (bitlength, 1);
4967 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4968 are "complicated" (more than one range), initialize (the
4969 constant parts) by copying from a constant. */
4970 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
4973 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4974 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4975 char *bit_buffer = (char *) alloca (nbits);
4976 HOST_WIDE_INT word = 0;
4977 unsigned int bit_pos = 0;
4978 unsigned int ibit = 0;
4979 unsigned int offset = 0; /* In bytes from beginning of set. */
	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
4986 if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}

	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;

5000 /* The assumption here is that it is safe to use
5001 XEXP if the set is multi-word, but not if
5002 it's single-word. */
5003 if (GET_CODE (target) == MEM)
5004 to_rtx = adjust_address (target, mode, offset);
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }

		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (!cleared)
5021 /* Don't bother clearing storage if the set is all ones. */
5022 if (TREE_CHAIN (elt) != NULL_TREE
5023 || (TREE_PURPOSE (elt) == NULL_TREE
		? nbits != 1
		: ( ! host_integerp (TREE_VALUE (elt), 0)
5026 || ! host_integerp (TREE_PURPOSE (elt), 0)
5027 || (tree_low_cst (TREE_VALUE (elt), 0)
5028 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5029 != (HOST_WIDE_INT) nbits))))
5030 clear_storage (target, expr_size (exp));
      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
5034 /* Start of range of element or NULL. */
5035 tree startbit = TREE_PURPOSE (elt);
5036 /* End of range of element, or element value. */
5037 tree endbit = TREE_VALUE (elt);
5038 #ifdef TARGET_MEM_FUNCTIONS
	  HOST_WIDE_INT startb, endb;
#endif
5041 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5043 bitlength_rtx = expand_expr (bitlength,
5044 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5046 /* Handle non-range tuple element like [ expr ]. */
5047 if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }

	  startbit = convert (sizetype, startbit);
5054 endbit = convert (sizetype, endbit);
5055 if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
5060 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5061 EXPAND_CONST_ADDRESS);
5062 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx
		= assign_temp
		  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
					  (GET_MODE (target), 0),
					  TYPE_QUAL_CONST)),
		   0, 1, 1);
	      emit_move_insn (targetx, target);
	    }

	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

5081 #ifdef TARGET_MEM_FUNCTIONS
5082 /* Optimization: If startbit and endbit are
5083 constants divisible by BITS_PER_UNIT,
5084 call memset instead. */
5085 if (TREE_CODE (startbit) == INTEGER_CST
5086 && TREE_CODE (endbit) == INTEGER_CST
5087 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, LCT_NORMAL,
				 VOIDmode, 3,
5092 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
5095 constm1_rtx, TYPE_MODE (integer_type_node),
5096 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
5101 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5102 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5103 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5104 startbit_rtx, TYPE_MODE (sizetype),
5105 endbit_rtx, TYPE_MODE (sizetype));
	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
5116 /* Store the value of EXP (an expression tree)
5117 into a subfield of TARGET which has mode MODE and occupies
5118 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5119 If MODE is VOIDmode, it means that we are storing into a bit-field.
5121 If VALUE_MODE is VOIDmode, return nothing in particular.
5122 UNSIGNEDP is not used in this case.
5124 Otherwise, return an rtx for the value stored. This rtx
5125 has mode VALUE_MODE if that is convenient to do.
5126 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5128 TYPE is the type of the underlying object,
5130 ALIAS_SET is the alias set for the destination. This value will
5131 (in general) be different from that for TARGET, since TARGET is a
5132 reference to the containing structure. */
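/* Editor's illustration (not part of the original sources): for

     struct s { int x : 3; } *p;
     p->x = 5;

   this function is reached with BITSIZE == 3, BITPOS == 0 and MODE ==
   VOIDmode, so the store goes through store_bit_field; a naturally
   aligned, addressable field would instead take the block- or
   plain-move path near the end of the function.  */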
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
	     alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     tree type;
     int alias_set;
{
5147 HOST_WIDE_INT width_mask = 0;
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5159 /* If we are storing into an unaligned field of an aligned union that is
5160 in a register, we may have the mode of TARGET being an integer mode but
5161 MODE == BLKmode. In that case, get an aligned object whose size and
5162 alignment are the same as TARGET and store TARGET into it (we can avoid
5163 the store if the field being stored is the entire width of TARGET). Then
5164 call ourselves recursively to store the field into a BLKmode version of
5165 that object. Finally, load from the object into TARGET. This is not
5166 very efficient in general, but should only be slightly more expensive
5167 than the otherwise-required unaligned accesses. Perhaps this can be
5168 cleaned up later. */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object
	= assign_temp
	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
	   0, 1, 1);
5177 rtx blk_object = adjust_address (object, BLKmode, 0);
5179 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5180 emit_move_insn (object, target);
      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		   alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
	abort ();
      return store_expr (exp, target, 0);
    }
5200 /* If the structure is in a register or if the component
5201 is a bit field, we cannot use addressing to access it.
5202 Use bit-field techniques or SUBREG to store in it. */
5204 if (mode == VOIDmode
5205 || (mode != BLKmode && ! direct_store[(int) mode]
5206 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5207 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5208 || GET_CODE (target) == REG
5209 || GET_CODE (target) == SUBREG
5210 /* If the field isn't aligned enough to store as an ordinary memref,
5211 store it as a bit field. */
5212 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5213 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5214 || bitpos % GET_MODE_ALIGNMENT (mode)))
5215 /* If the RHS and field are a constant size and the size of the
	    RHS isn't the same size as the bitfield, we must use bitfield
	    operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5220 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

5224 /* If BITSIZE is narrower than the size of the type of EXP
5225 we will be narrowing TEMP. Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
5228 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5229 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5230 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5231 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
5238 if (mode != VOIDmode && mode != BLKmode
5239 && mode != TYPE_MODE (TREE_TYPE (exp)))
5240 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5242 /* If the modes of TARGET and TEMP are both BLKmode, both
5243 must be in memory and BITPOS must be aligned on a byte
5244 boundary. If so, we simply do a block copy. */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
5247 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

5251 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5252 emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT));

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

5259 /* Store the value in the bitfield. */
5260 store_bit_field (target, bitsize, bitpos, mode, temp,
5261 int_size_in_bytes (type));
      if (value_mode != VOIDmode)
	{
5265 /* The caller wants an rtx for the value.
5266 If possible, avoid refetching from the bitfield itself. */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

5273 tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;

	      if (unsignedp)
		return expand_and (tmode, temp,
				   gen_int_mode (width_mask, tmode),
				   NULL_RTX);

5282 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5283 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

5287 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5288 NULL_RTX, value_mode, VOIDmode,
				    int_size_in_bytes (type));
	}

      return const0_rtx;
    }
  else
    {
5295 rtx addr = XEXP (target, 0);
5296 rtx to_rtx = target;
5298 /* If a value is wanted, it must be the lhs;
5299 so make the address stable for multiple use. */
5301 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5302 && ! CONSTANT_ADDRESS_P (addr)
5303 /* A frame-pointer reference is already stable. */
5304 && ! (GET_CODE (addr) == PLUS
5305 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5306 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5307 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5308 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5310 /* Now build a reference to just the desired component. */
5312 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5314 if (to_rtx == target)
5315 to_rtx = copy_rtx (to_rtx);
5317 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5318 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5319 set_mem_alias_set (to_rtx, alias_set);
      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
5325 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5326 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5327 codes and find the ultimate containing object, which we return.
5329 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5330 bit position, and *PUNSIGNEDP to the signedness of the field.
5331 If the position of the field is variable, we store a tree
5332 giving the variable offset (in units) in *POFFSET.
5333 This offset is in addition to the bit position.
5334 If the position is not variable, we store 0 in *POFFSET.
5336 If any of the extraction expressions is volatile,
5337 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5339 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

5343 If the field describes a variable-sized object, *PMODE is set to
5344 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5345 this case, but the address of the object can be found. */
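/* Editor's example (not part of the original sources): for the
   reference A.B[I].C, successive iterations of the loop below peel the
   COMPONENT_REF for C, then the ARRAY_REF for [I] (adding I times the
   element size to the offset), then the COMPONENT_REF for B, and
   finally return the innermost object A.  */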
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
5359 enum machine_mode mode = VOIDmode;
5360 tree offset = size_zero_node;
5361 tree bit_offset = bitsize_zero_node;
  tree placeholder_ptr = 0;
  tree tem;

5365 /* First get the mode, signedness, and size. We do this from just the
5366 outermost expression. */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
5369 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5370 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5371 mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
5382 mode = TYPE_MODE (TREE_TYPE (exp));
5383 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5385 if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

5399 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5400 and find the ultimate containing object. */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
5404 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
	{
5407 tree field = TREE_OPERAND (exp, 1);
5408 tree this_offset = DECL_FIELD_OFFSET (field);
5410 /* If this field hasn't been filled in yet, don't go
5411 past it. This should only happen when folding expressions
5412 made during type construction. */
	  if (this_offset == 0)
	    break;
5415 else if (! TREE_CONSTANT (this_offset)
5416 && contains_placeholder_p (this_offset))
5417 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5419 offset = size_binop (PLUS_EXPR, offset, this_offset);
5420 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5421 DECL_FIELD_BIT_OFFSET (field));
	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	}

      else if (TREE_CODE (exp) == ARRAY_REF
	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
	{
5429 tree index = TREE_OPERAND (exp, 1);
5430 tree array = TREE_OPERAND (exp, 0);
5431 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5432 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5433 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5435 /* We assume all arrays have sizes that are a multiple of a byte.
5436 First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     array element.  */
5439 if (low_bound != 0 && ! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				 index, low_bound));

5443 /* If the index has a self-referential type, pass it to a
5444 WITH_RECORD_EXPR; if the component size is, pass our
5445 component to one. */
5446 if (! TREE_CONSTANT (index)
5447 && contains_placeholder_p (index))
5448 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5449 if (! TREE_CONSTANT (unit_size)
5450 && contains_placeholder_p (unit_size))
5451 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5453 offset = size_binop (PLUS_EXPR, offset,
5454 size_binop (MULT_EXPR,
					   convert (sizetype, index),
					   unit_size));
	}

      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
	{
5461 tree new = find_placeholder (exp, &placeholder_ptr);
5463 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5464 We might have been called from tree optimization where we
	     haven't set up an object yet.  */
	  if (new == 0)
	    break;
	  else
	    exp = new;

	  continue;
	}
5473 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5474 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5475 && ! ((TREE_CODE (exp) == NOP_EXPR
5476 || TREE_CODE (exp) == CONVERT_EXPR)
5477 && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

5481 /* If any reference in the chain is volatile, the effect is volatile. */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }

5488 /* If OFFSET is constant, see if we can return the whole thing as a
5489 constant bit position. Otherwise, split it up. */
5490 if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
5493 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5494 && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
5503 /* Return 1 if T is an expression that get_inner_reference handles. */
int
handled_component_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
5514 case ARRAY_RANGE_REF:
5515 case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case NOP_EXPR:
    case CONVERT_EXPR:
5521 return (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
5529 /* Given an rtx VALUE that may contain additions and multiplications, return
5530 an equivalent value that just refers to a register, memory, or constant.
5531 This is done by generating instructions to perform the arithmetic and
5532 returning a pseudo-register containing the value.
5534 The returned value may be a REG, SUBREG, MEM or constant. */
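/* Editor's illustration (not part of the original sources): given
   VALUE = (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (const_int 8)),
   force_operand emits a multiply and an add and returns the pseudo
   register holding the final sum.  */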
rtx
force_operand (value, target)
     rtx value, target;
{
  rtx op1, op2;
5541 /* Use subtarget as the target for operand 0 of a binary operation. */
5542 rtx subtarget = get_subtarget (target);
5543 enum rtx_code code = GET_CODE (value);
5545 /* Check for a PIC address load. */
5546 if ((code == PLUS || code == MINUS)
5547 && XEXP (value, 0) == pic_offset_table_rtx
5548 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5549 || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
5561 target = gen_reg_rtx (GET_MODE (value));
5562 convert_move (target, force_operand (XEXP (value, 0), NULL),
		    code == ZERO_EXTEND);
      return target;
    }

  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

5578 /* Check for an addition with OP2 a constant integer and our first
5579 operand a PLUS of a virtual register and something else. In that
5580 case, we want to emit the sum of the virtual register and the
5581 constant first and then add the other value. This allows virtual
5582 register instantiation to simply modify the constant rather than
5583 creating another one around this addition. */
5584 if (code == PLUS && GET_CODE (op2) == CONST_INT
5585 && GET_CODE (XEXP (value, 0)) == PLUS
5586 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5587 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
5590 rtx temp = expand_simple_binop (GET_MODE (value), code,
5591 XEXP (XEXP (value, 0), 0), op2,
5592 subtarget, 0, OPTAB_LIB_WIDEN);
5593 return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1)),
				      target, 0, OPTAB_LIB_WIDEN);
	}

5599 op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
5604 return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);

	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
5636 if (GET_RTX_CLASS (code) == '1')
    {
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
    }

5642 #ifdef INSN_SCHEDULING
5643 /* On machines that have insn scheduling, we want all memory reference to be
5644 explicit, so we need to deal with such paradoxical SUBREGs. */
5645 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5646 && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
5660 /* Subroutine of expand_expr: return nonzero iff there is no way that
5661 EXP can reference X, which is being modified. TOP_P is nonzero if this
5662 call is going to be used to determine whether we need a temporary
5663 for EXP, as opposed to a recursive call to this function.
5665 It is always safe for this routine to return zero since it merely
5666 searches for optimization opportunities. */
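/* Editor's illustration (not part of the original sources): when
   expanding X = FOO (X), safe_from_p reports whether FOO's result can
   be computed directly into X's rtx; since EXP mentions X here, it
   returns 0 and the caller uses a temporary instead.  */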
static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
5680 have no way of allocating temporaries of variable size
5681 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5682 So we assume here that something at a higher level has prevented a
5683 clash. This is somewhat bogus, but the best we can do. Only
5684 do this when X is BLKmode and when we are at the top level. */
5685 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5686 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5687 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5688 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
5691 && GET_MODE (x) == BLKmode)
5692 /* If X is in the outgoing argument area, it is always safe. */
5693 || (GET_CODE (x) == MEM
5694 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5695 || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

5699 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5700 find the underlying pseudo. */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

5708 /* A SAVE_EXPR might appear many times in the expression passed to the
5709 top-level safe_from_p call, and if it has a complex subexpression,
5710 examining it multiple times could result in a combinatorial explosion.
5711 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5712 with optimization took about 28 minutes to compile -- even though it was
5713 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5714 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5715 we have processed. Note that the only test of top_p was above. */
  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

5726 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }

5732 /* Now look at our tree code and possibly recurse. */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case 'c':
      return 1;

    case 'x':
5743 if (TREE_CODE (exp) == TREE_LIST)
5744 return ((TREE_VALUE (exp) == 0
5745 || safe_from_p (x, TREE_VALUE (exp), 0))
5746 && (TREE_CHAIN (exp) == 0
5747 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5748 else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
5763 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5764 the expression. If it is set, we conflict iff we are that rtx or
5765 both are in memory. Otherwise, we check all operands of the
5766 expression recursively. */
      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
5771 /* If the operand is static or we are static, we can't conflict.
5772 Likewise if we don't conflict with the operand at all. */
5773 if (staticp (TREE_OPERAND (exp, 0))
5774 || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL and that address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
5784 if (!DECL_RTL_SET_P (exp)
		  || GET_CODE (DECL_RTL (exp)) != MEM)
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case INDIRECT_REF:
5793 if (GET_CODE (x) == MEM
5794 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
				    get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || GET_CODE (x) == MEM)
	    return 0;
	  break;

	case RTL_EXPR:
5808 /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

5817 case WITH_CLEANUP_EXPR:
	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
	  break;

5821 case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

5829 /* If we've already scanned this, don't do it again. Otherwise,
	     show we've scanned it and record for clearing the flag if we're
	     going on.  */
	  if (TREE_PRIVATE (exp))
	    return 1;

5835 TREE_PRIVATE (exp) = 1;
	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    {
	      TREE_PRIVATE (exp) = 0;
	      return 0;
	    }

	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
	  return 1;

	case BIND_EXPR:
5846 /* The only operand we look at is operand 1. The rest aren't
5847 part of the expression. */
5848 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5850 case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

5862 nops = first_rtl_op (TREE_CODE (exp));
5863 for (i = 0; i < nops; i++)
5864 if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

5868 /* If this is a language-specific tree code, it may require
5869 special handling. */
5870 if ((unsigned int) TREE_CODE (exp)
5871 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !(*lang_hooks.safe_from_p) (x, exp))
	return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
5882 exp_rtl = SUBREG_REG (exp_rtl);
5883 if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

5888 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5889 are memory and they conflict. */
5890 return ! (rtx_equal_p (x, exp_rtl)
5891 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5892 && true_dependence (exp_rtl, VOIDmode, x,
			      rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
5918 #ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
5924 enum tree_code code;
5925 enum machine_mode mode;
5927 /* Strip any NOPs that don't change the mode. */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

5931 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5932 if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

5936 /* First check the type of the overall operation. We need only look at
5937 unary, binary and relational operations. */
5938 if (TREE_CODE_CLASS (code) == '1'
5939 || TREE_CODE_CLASS (code) == '2'
5940 || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
5943 if (GET_MODE_CLASS (mode) == MODE_INT
5944 && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

5948 /* Check operand of a unary op. */
5949 if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5952 if (GET_MODE_CLASS (mode) == MODE_INT
5953 && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

5957 /* Check operands of a binary/comparison op. */
5958 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5961 if (GET_MODE_CLASS (mode) == MODE_INT
5962 && mode > MAX_INTEGER_COMPUTATION_MODE)
5963 internal_error ("unsupported wide integer operation");
5965 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5966 if (GET_MODE_CLASS (mode) == MODE_INT
5967 && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
5973 /* Return the highest power of two that EXP is known to be a multiple of.
5974 This is used in updating alignment of MEMs in array references. */
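/* Editor's example (not part of the original sources): for
   EXP = I * 12, the variable operand contributes a factor of 1 (the
   default case below) and the INTEGER_CST 12 contributes its lowest
   set bit, 4, so the product is known to be a multiple of 4.  */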
5976 static HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
5980 HOST_WIDE_INT c0, c1;
  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
5985 /* We can find the lowest bit that's a one. If the low
5986 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5987 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
5991 if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
5995 /* Note: tree_low_cst is intentionally not used here,
5996 we don't care about the upper bits. */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

6003 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6004 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6005 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
6009 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

6013 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
6018 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6019 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

6024 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6025 case SAVE_EXPR: case WITH_RECORD_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
6032 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6033 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
6043 /* Similar, except that it is known that the expression must be a multiple
6044 of the alignment of TYPE. */
6046 static HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type;
     tree exp;
{
6051 HOST_WIDE_INT type_align, factor;
6053 factor = highest_pow2_factor (exp);
6054 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
6058 /* Return an object on the placeholder list that matches EXP, a
6059 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6060 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6061 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6062 is a location which initially points to a starting location in the
6063 placeholder list (zero means start of the list) and where a pointer into
6064 the placeholder list at which the object is found is placed. */
tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

6074 for (placeholder_expr
6075 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6076 placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

6082 /* Find the outermost reference that is of the type we want. If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
6085 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6086 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6087 || TREE_CODE (elt) == COND_EXPR)
6088 ? TREE_OPERAND (elt, 1)
6089 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6090 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6091 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6092 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6093 ? TREE_OPERAND (elt, 0) : 0))
6094 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
6104 || TREE_CODE (elt) == COND_EXPR)
6105 ? TREE_OPERAND (elt, 1)
6106 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6107 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6108 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6109 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6110 ? TREE_OPERAND (elt, 0) : 0))
6111 if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
6124 /* expand_expr: generate code for computing expression EXP.
6125 An rtx for the computed value is returned. The value is never null.
6126 In the case of a void EXP, const0_rtx is returned.
6128 The value may be stored in TARGET if TARGET is nonzero.
6129 TARGET is just a suggestion; callers must assume that
6130 the rtx returned may not be the same as TARGET.
6132 If TARGET is CONST0_RTX, it means that the value will be ignored.
6134 If TMODE is not VOIDmode, it suggests generating the
6135 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6137 TMODE is just a suggestion; callers must assume that
6138 the rtx returned may not have mode TMODE.
6140 Note that TARGET may have neither TMODE nor MODE. In that case, it
6141 probably will not be used.
6143 If MODIFIER is EXPAND_SUM then when EXP is an addition
6144 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6145 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6146 products as above, or REG or MEM, or constant.
6147 Ordinarily in such cases we would output mul or add instructions
6148 and then return a pseudo reg containing the sum.
6150 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6151 it also marks a label as absolutely required (it can't be dead).
6152 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6153 This is used for outputting expressions used in initializers.
6155 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6156 with a constant address even if that address is not normally legitimate.
6157 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
6167 tree type = TREE_TYPE (exp);
6168 int unsignedp = TREE_UNSIGNED (type);
6169 enum machine_mode mode;
6170 enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

6176 /* Handle ERROR_MARK before anybody tries to access its type. */
6177 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      else
	return const0_rtx;
    }

6185 mode = TYPE_MODE (type);
6186 /* Use subtarget as the target for operand 0 of a binary operation. */
6187 subtarget = get_subtarget (target);
6188 original_target = target;
6189 ignore = (target == const0_rtx
6190 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6191 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6192 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6193 && TREE_CODE (type) == VOID_TYPE));
6195 /* If we are going to ignore this result, we need only do something
6196 if there is a side-effect somewhere in the expression. If there
6197 is, short-circuit the most common cases here. Note that we must
6198 not call expand_expr with anything but const0_rtx in case this
6199 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

6206 /* Ensure we reference a volatile object even if value is ignored, but
6207 don't do this if all we are doing is taking its address. */
6208 if (TREE_THIS_VOLATILE (exp)
6209 && TREE_CODE (exp) != FUNCTION_DECL
6210 && mode != VOIDmode && mode != BLKmode
6211 && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

6219 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6220 || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

6224 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
6227 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
6231 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6232 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
6239 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6240 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }

6248 #ifdef MAX_INTEGER_COMPUTATION_MODE
6249 /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
6251 will handle it. Do we really need to check this stuff at all? */
  if (target
      && GET_MODE (target) != mode
6255 && TREE_CODE (exp) != INTEGER_CST
6256 && TREE_CODE (exp) != PARM_DECL
6257 && TREE_CODE (exp) != ARRAY_REF
6258 && TREE_CODE (exp) != ARRAY_RANGE_REF
6259 && TREE_CODE (exp) != COMPONENT_REF
6260 && TREE_CODE (exp) != BIT_FIELD_REF
6261 && TREE_CODE (exp) != INDIRECT_REF
6262 && TREE_CODE (exp) != CALL_EXPR
6263 && TREE_CODE (exp) != VAR_DECL
6264 && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

6268 if (GET_MODE_CLASS (mode) == MODE_INT
6269 && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
6274 && TREE_CODE (exp) != INTEGER_CST
6275 && TREE_CODE (exp) != PARM_DECL
6276 && TREE_CODE (exp) != ARRAY_REF
6277 && TREE_CODE (exp) != ARRAY_RANGE_REF
6278 && TREE_CODE (exp) != COMPONENT_REF
6279 && TREE_CODE (exp) != BIT_FIELD_REF
6280 && TREE_CODE (exp) != INDIRECT_REF
6281 && TREE_CODE (exp) != VAR_DECL
6282 && TREE_CODE (exp) != CALL_EXPR
6283 && TREE_CODE (exp) != RTL_EXPR
6284 && GET_MODE_CLASS (tmode) == MODE_INT
6285 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6286 internal_error ("unsupported wide integer operation");
6288 check_max_integer_computation_mode (exp);
6291 /* If we are going to do CSE, generate all results into pseudo registers
6292 since 1) that allows CSE to find more things
6293 and 2) otherwise CSE could produce an insn the machine
6294 cannot support.  An exception is a CONSTRUCTOR into a multi-word
6295 MEM: that's much more likely to be most efficient into the MEM.  */
6297 if (! cse_not_expected && mode != BLKmode && target
6298 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6299 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6306 tree function = decl_function_context (exp);
6307 /* Handle using a label in a containing function. */
6308 if (function != current_function_decl
6309 && function != inline_function_decl && function != 0)
6311 struct function *p = find_function_data (function);
6312 p->expr->x_forced_labels
6313 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6314 p->expr->x_forced_labels);
6318 if (modifier == EXPAND_INITIALIZER)
6319 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6324 temp = gen_rtx_MEM (FUNCTION_MODE,
6325 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6326 if (function != current_function_decl
6327 && function != inline_function_decl && function != 0)
6328 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6333 if (DECL_RTL (exp) == 0)
6335 error_with_decl (exp, "prior parameter's size depends on `%s'");
6336 return CONST0_RTX (mode);
6339 /* ... fall through ... */
6342 /* If a static var's type was incomplete when the decl was written,
6343 but the type is complete now, lay out the decl now. */
6344 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6345 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6347 rtx value = DECL_RTL_IF_SET (exp);
6349 layout_decl (exp, 0);
6351 /* If the RTL was already set, update its mode and memory attributes.  */
6355 PUT_MODE (value, DECL_MODE (exp));
6356 SET_DECL_RTL (exp, 0);
6357 set_mem_attributes (value, exp, 1);
6358 SET_DECL_RTL (exp, value);
6362 /* ... fall through ... */
6366 if (DECL_RTL (exp) == 0)
6369 /* Ensure the variable is marked as used even if it doesn't go through
6370 a parser.  If it hasn't been used yet, write out an external definition.  */
6372 if (! TREE_USED (exp))
6374 assemble_external (exp);
6375 TREE_USED (exp) = 1;
6378 /* Show we haven't gotten RTL for this yet. */
6381 /* Handle variables inherited from containing functions. */
6382 context = decl_function_context (exp);
6384 /* We treat inline_function_decl as an alias for the current function
6385 because that is the inline function whose vars, types, etc.
6386 are being merged into the current function.
6387 See expand_inline_function. */
6389 if (context != 0 && context != current_function_decl
6390 && context != inline_function_decl
6391 /* If var is static, we don't need a static chain to access it. */
6392 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6393 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6397 /* Mark as non-local and addressable. */
6398 DECL_NONLOCAL (exp) = 1;
6399 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6401 (*lang_hooks.mark_addressable) (exp);
6402 if (GET_CODE (DECL_RTL (exp)) != MEM)
6404 addr = XEXP (DECL_RTL (exp), 0);
6405 if (GET_CODE (addr) == MEM)
6407 = replace_equiv_address (addr,
6408 fix_lexical_addr (XEXP (addr, 0), exp));
6410 addr = fix_lexical_addr (addr, exp);
6412 temp = replace_equiv_address (DECL_RTL (exp), addr);
6415 /* This is the case of an array whose size is to be determined
6416 from its initializer, while the initializer is still being parsed.
6419 else if (GET_CODE (DECL_RTL (exp)) == MEM
6420 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6421 temp = validize_mem (DECL_RTL (exp));
6423 /* If DECL_RTL is memory, we are in the normal case and either
6424 the address is not valid or it is not a register and -fforce-addr
6425 is specified, get the address into a register. */
6427 else if (GET_CODE (DECL_RTL (exp)) == MEM
6428 && modifier != EXPAND_CONST_ADDRESS
6429 && modifier != EXPAND_SUM
6430 && modifier != EXPAND_INITIALIZER
6431 && (! memory_address_p (DECL_MODE (exp),
6432 XEXP (DECL_RTL (exp), 0))
6434 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6435 temp = replace_equiv_address (DECL_RTL (exp),
6436 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6438 /* If we got something, return it. But first, set the alignment
6439 if the address is a register. */
6442 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6443 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6448 /* If the mode of DECL_RTL does not match that of the decl, it
6449 must be a promoted value. We return a SUBREG of the wanted mode,
6450 but mark it so that we know that it was already extended. */
6452 if (GET_CODE (DECL_RTL (exp)) == REG
6453 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6455 /* Get the signedness used for this variable. Ensure we get the
6456 same mode we got when the variable was declared. */
6457 if (GET_MODE (DECL_RTL (exp))
6458 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6459 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6462 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6463 SUBREG_PROMOTED_VAR_P (temp) = 1;
6464 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6468 return DECL_RTL (exp);
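/* For illustration, assuming a 64-bit target that promotes SImode
   variables to DImode registers: an `int' variable is kept in a
   DImode register, and the promoted-value code above returns

       (subreg:SI (reg:DI n) 0)

   with SUBREG_PROMOTED_VAR_P set, so later expansions know the upper
   bits already hold a valid extension and need not re-extend.  */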
6471 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6472 TREE_INT_CST_HIGH (exp), mode);
6474 /* ??? If overflow is set, fold will have done an incomplete job,
6475 which can result in (plus xx (const_int 0)), which can get
6476 simplified by validate_replace_rtx during virtual register
6477 instantiation, which can result in unrecognizable insns.
6478 Avoid this by forcing all overflows into registers. */
6479 if (TREE_CONSTANT_OVERFLOW (exp)
6480 && modifier != EXPAND_INITIALIZER)
6481 temp = force_reg (mode, temp);
6486 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6489 /* If optimized, generate immediate CONST_DOUBLE
6490 which will be turned into memory by reload if necessary.
6492 We used to force a register so that loop.c could see it. But
6493 this does not allow gen_* patterns to perform optimizations with
6494 the constants. It also produces two insns in cases like "x = 1.0;".
6495 On most machines, floating-point constants are not permitted in
6496 many insns, so we'd end up copying it to a register in any case.
6498 Now, we do the copying in expand_binop, if appropriate. */
6499 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6500 TYPE_MODE (TREE_TYPE (exp)));
6504 if (! TREE_CST_RTL (exp))
6505 output_constant_def (exp, 1);
6507 /* TREE_CST_RTL probably contains a constant address.
6508 On RISC machines where a constant address isn't valid,
6509 make some insns to get that address into a register. */
6510 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6511 && modifier != EXPAND_CONST_ADDRESS
6512 && modifier != EXPAND_INITIALIZER
6513 && modifier != EXPAND_SUM
6514 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6516 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6517 return replace_equiv_address (TREE_CST_RTL (exp),
6518 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6519 return TREE_CST_RTL (exp);
6521 case EXPR_WITH_FILE_LOCATION:
6524 const char *saved_input_filename = input_filename;
6525 int saved_lineno = lineno;
6526 input_filename = EXPR_WFL_FILENAME (exp);
6527 lineno = EXPR_WFL_LINENO (exp);
6528 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6529 emit_line_note (input_filename, lineno);
6530 /* Possibly avoid switching back and forth here. */
6531 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6532 input_filename = saved_input_filename;
6533 lineno = saved_lineno;
6538 context = decl_function_context (exp);
6540 /* If this SAVE_EXPR was at global context, assume we are an
6541 initialization function and move it into our context. */
6543 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6545 /* We treat inline_function_decl as an alias for the current function
6546 because that is the inline function whose vars, types, etc.
6547 are being merged into the current function.
6548 See expand_inline_function. */
6549 if (context == current_function_decl || context == inline_function_decl)
6552 /* If this is non-local, handle it. */
6555 /* The following call just exists to abort if the context is
6556 not of a containing function. */
6557 find_function_data (context);
6559 temp = SAVE_EXPR_RTL (exp);
6560 if (temp && GET_CODE (temp) == REG)
6562 put_var_into_stack (exp);
6563 temp = SAVE_EXPR_RTL (exp);
6565 if (temp == 0 || GET_CODE (temp) != MEM)
6568 replace_equiv_address (temp,
6569 fix_lexical_addr (XEXP (temp, 0), exp));
6571 if (SAVE_EXPR_RTL (exp) == 0)
6573 if (mode == VOIDmode)
6576 temp = assign_temp (build_qualified_type (type,
6578 | TYPE_QUAL_CONST)),
6581 SAVE_EXPR_RTL (exp) = temp;
6582 if (!optimize && GET_CODE (temp) == REG)
6583 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6586 /* If the mode of TEMP does not match that of the expression, it
6587 must be a promoted value. We pass store_expr a SUBREG of the
6588 wanted mode but mark it so that we know that it was already
6589 extended.  Note that `unsignedp' was modified above in this case.  */
6592 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6594 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6595 SUBREG_PROMOTED_VAR_P (temp) = 1;
6596 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6599 if (temp == const0_rtx)
6600 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6602 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6604 TREE_USED (exp) = 1;
6607 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6608 must be a promoted value. We return a SUBREG of the wanted mode,
6609 but mark it so that we know that it was already extended. */
6611 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6612 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6614 /* Compute the signedness and make the proper SUBREG. */
6615 promote_mode (type, mode, &unsignedp, 0);
6616 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6617 SUBREG_PROMOTED_VAR_P (temp) = 1;
6618 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6622 return SAVE_EXPR_RTL (exp);
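/* For illustration: a SAVE_EXPR evaluates its operand exactly once.
   If a front end rewrites `x*y + x*y' so that both addends share a
   single SAVE_EXPR, the first expansion computes the product and
   records it in SAVE_EXPR_RTL; the second expansion simply returns
   that recorded rtx instead of recomputing.  */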
6627 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6628 TREE_OPERAND (exp, 0)
6629 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6633 case PLACEHOLDER_EXPR:
6635 tree old_list = placeholder_list;
6636 tree placeholder_expr = 0;
6638 exp = find_placeholder (exp, &placeholder_expr);
6642 placeholder_list = TREE_CHAIN (placeholder_expr);
6643 temp = expand_expr (exp, original_target, tmode, modifier);
6644 placeholder_list = old_list;
6648 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6651 case WITH_RECORD_EXPR:
6652 /* Put the object on the placeholder list, expand our first operand,
6653 and pop the list. */
6654 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6656 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6658 placeholder_list = TREE_CHAIN (placeholder_list);
6662 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6663 expand_goto (TREE_OPERAND (exp, 0));
6665 expand_computed_goto (TREE_OPERAND (exp, 0));
6669 expand_exit_loop_if_false (NULL,
6670 invert_truthvalue (TREE_OPERAND (exp, 0)));
6673 case LABELED_BLOCK_EXPR:
6674 if (LABELED_BLOCK_BODY (exp))
6675 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6676 /* Should perhaps use expand_label, but this is simpler and safer. */
6677 do_pending_stack_adjust ();
6678 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6681 case EXIT_BLOCK_EXPR:
6682 if (EXIT_BLOCK_RETURN (exp))
6683 sorry ("returned value in block_exit_expr");
6684 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6689 expand_start_loop (1);
6690 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6698 tree vars = TREE_OPERAND (exp, 0);
6699 int vars_need_expansion = 0;
6701 /* Need to open a binding contour here because
6702 if there are any cleanups they must be contained here. */
6703 expand_start_bindings (2);
6705 /* Mark the corresponding BLOCK for output in its proper place. */
6706 if (TREE_OPERAND (exp, 2) != 0
6707 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6708 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6710 /* If VARS have not yet been expanded, expand them now. */
6713 if (!DECL_RTL_SET_P (vars))
6715 vars_need_expansion = 1;
6718 expand_decl_init (vars);
6719 vars = TREE_CHAIN (vars);
6722 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6724 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6730 if (RTL_EXPR_SEQUENCE (exp))
6732 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6734 emit_insns (RTL_EXPR_SEQUENCE (exp));
6735 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6737 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6738 free_temps_for_rtl_expr (exp);
6739 return RTL_EXPR_RTL (exp);
6742 /* If we don't need the result, just ensure we evaluate any subexpressions.  */
6748 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6749 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6754 /* All elts simple constants => refer to a constant in memory. But
6755 if this is a non-BLKmode mode, let it store a field at a time
6756 since that should make a CONST_INT or CONST_DOUBLE when we
6757 fold. Likewise, if we have a target we can use, it is best to
6758 store directly into the target unless the type is large enough
6759 that memcpy will be used. If we are making an initializer and
6760 all operands are constant, put it in memory as well. */
6761 else if ((TREE_STATIC (exp)
6762 && ((mode == BLKmode
6763 && ! (target != 0 && safe_from_p (target, exp, 1)))
6764 || TREE_ADDRESSABLE (exp)
6765 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6766 && (! MOVE_BY_PIECES_P
6767 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6769 && ! mostly_zeros_p (exp))))
6770 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6772 rtx constructor = output_constant_def (exp, 1);
6774 if (modifier != EXPAND_CONST_ADDRESS
6775 && modifier != EXPAND_INITIALIZER
6776 && modifier != EXPAND_SUM)
6777 constructor = validize_mem (constructor);
6783 /* Handle calls that pass values in multiple non-contiguous
6784 locations. The Irix 6 ABI has examples of this. */
6785 if (target == 0 || ! safe_from_p (target, exp, 1)
6786 || GET_CODE (target) == PARALLEL)
6788 = assign_temp (build_qualified_type (type,
6790 | (TREE_READONLY (exp)
6791 * TYPE_QUAL_CONST))),
6792 0, TREE_ADDRESSABLE (exp), 1);
6794 store_constructor (exp, target, 0,
6795 int_size_in_bytes (TREE_TYPE (exp)));
6801 tree exp1 = TREE_OPERAND (exp, 0);
6803 tree string = string_constant (exp1, &index);
6805 /* Try to optimize reads from const strings. */
6807 && TREE_CODE (string) == STRING_CST
6808 && TREE_CODE (index) == INTEGER_CST
6809 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6810 && GET_MODE_CLASS (mode) == MODE_INT
6811 && GET_MODE_SIZE (mode) == 1
6812 && modifier != EXPAND_WRITE)
6813 return gen_int_mode (TREE_STRING_POINTER (string)
6814 [TREE_INT_CST_LOW (index)], mode);
6816 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6817 op0 = memory_address (mode, op0);
6818 temp = gen_rtx_MEM (mode, op0);
6819 set_mem_attributes (temp, exp, 0);
6821 /* If we are writing to this object and its type is a record with
6822 readonly fields, we must mark it as readonly so it will
6823 conflict with readonly references to those fields. */
6824 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6825 RTX_UNCHANGING_P (temp) = 1;
6831 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6835 tree array = TREE_OPERAND (exp, 0);
6836 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6837 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6838 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6841 /* Optimize the special case of a zero lower bound.
6843 We convert the low_bound to sizetype to avoid some problems
6844 with constant folding. (E.g. suppose the lower bound is 1,
6845 and its mode is QI. Without the conversion, (ARRAY
6846 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6847 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6849 if (! integer_zerop (low_bound))
6850 index = size_diffop (index, convert (sizetype, low_bound));
6852 /* Fold an expression like: "foo"[2].
6853 This is not done in fold so it won't happen inside &.
6854 Don't fold if this is for wide characters since it's too
6855 difficult to do correctly and this is a very rare case. */
6857 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6858 && TREE_CODE (array) == STRING_CST
6859 && TREE_CODE (index) == INTEGER_CST
6860 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6861 && GET_MODE_CLASS (mode) == MODE_INT
6862 && GET_MODE_SIZE (mode) == 1)
6863 return gen_int_mode (TREE_STRING_POINTER (array)
6864 [TREE_INT_CST_LOW (index)], mode);
6866 /* If this is a constant index into a constant array,
6867 just get the value from the array. Handle both the cases when
6868 we have an explicit constructor and when our operand is a variable
6869 that was declared const. */
6871 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6872 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6873 && TREE_CODE (index) == INTEGER_CST
6874 && 0 > compare_tree_int (index,
6875 list_length (CONSTRUCTOR_ELTS
6876 (TREE_OPERAND (exp, 0)))))
6880 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6881 i = TREE_INT_CST_LOW (index);
6882 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6886 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6890 else if (optimize >= 1
6891 && modifier != EXPAND_CONST_ADDRESS
6892 && modifier != EXPAND_INITIALIZER
6893 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6894 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6895 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6897 if (TREE_CODE (index) == INTEGER_CST)
6899 tree init = DECL_INITIAL (array);
6901 if (TREE_CODE (init) == CONSTRUCTOR)
6905 for (elem = CONSTRUCTOR_ELTS (init);
6907 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6908 elem = TREE_CHAIN (elem))
6911 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6912 return expand_expr (fold (TREE_VALUE (elem)), target,
6915 else if (TREE_CODE (init) == STRING_CST
6916 && 0 > compare_tree_int (index,
6917 TREE_STRING_LENGTH (init)))
6919 tree type = TREE_TYPE (TREE_TYPE (init));
6920 enum machine_mode mode = TYPE_MODE (type);
6922 if (GET_MODE_CLASS (mode) == MODE_INT
6923 && GET_MODE_SIZE (mode) == 1)
6924 return gen_int_mode (TREE_STRING_POINTER (init)
6925 [TREE_INT_CST_LOW (index)], mode);
6934 case ARRAY_RANGE_REF:
6935 /* If the operand is a CONSTRUCTOR, we can just extract the
6936 appropriate field if it is present. Don't do this if we have
6937 already written the data since we want to refer to that copy
6938 and varasm.c assumes that's what we'll do. */
6939 if (code == COMPONENT_REF
6940 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6941 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6945 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6946 elt = TREE_CHAIN (elt))
6947 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6948 /* We can normally use the value of the field in the
6949 CONSTRUCTOR. However, if this is a bitfield in
6950 an integral mode that we can fit in a HOST_WIDE_INT,
6951 we must mask only the number of bits in the bitfield,
6952 since this is done implicitly by the constructor. If
6953 the bitfield does not meet either of those conditions,
6954 we can't do this optimization. */
6955 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6956 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6958 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6959 <= HOST_BITS_PER_WIDE_INT))))
6961 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6962 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6964 HOST_WIDE_INT bitsize
6965 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6966 enum machine_mode imode
6967 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6969 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6971 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6972 op0 = expand_and (imode, op0, op1, target);
6977 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6980 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6982 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
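/* Worked example (editorial), assuming 32-bit SImode and a 3-bit
   field whose bits are 101:

       unsigned field:  op0 & ((1 << 3) - 1)            =  5
       signed field:    (op0 << 29) >> 29  (arithmetic) = -3

   The shift count 29 is GET_MODE_BITSIZE (imode) - bitsize, which is
   what COUNT holds in the code above.  */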
6992 enum machine_mode mode1;
6993 HOST_WIDE_INT bitsize, bitpos;
6996 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6997 &mode1, &unsignedp, &volatilep);
7000 /* If we got back the original object, something is wrong. Perhaps
7001 we are evaluating an expression too early. In any event, don't
7002 infinitely recurse. */
7006 /* If TEM's type is a union of variable size, pass TARGET to the inner
7007 computation, since it will need a temporary and TARGET is known
7008 to be safe to use.  This occurs in unchecked conversion in Ada.  */
7012 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7013 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7015 ? target : NULL_RTX),
7017 (modifier == EXPAND_INITIALIZER
7018 || modifier == EXPAND_CONST_ADDRESS)
7019 ? modifier : EXPAND_NORMAL);
7021 /* If this is a constant, put it into a register if it is a
7022 legitimate constant and OFFSET is 0; otherwise put it into memory.  */
7023 if (CONSTANT_P (op0))
7025 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7026 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7028 op0 = force_reg (mode, op0);
7030 op0 = validize_mem (force_const_mem (mode, op0));
7035 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7037 /* If this object is in a register, put it into memory.
7038 This case can't occur in C, but can in Ada if we have
7039 unchecked conversion of an expression from a scalar type to
7040 an array or record type. */
7041 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7042 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7044 /* If the operand is a SAVE_EXPR, we can deal with this by
7045 forcing the SAVE_EXPR into memory. */
7046 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7048 put_var_into_stack (TREE_OPERAND (exp, 0));
7049 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7054 = build_qualified_type (TREE_TYPE (tem),
7055 (TYPE_QUALS (TREE_TYPE (tem))
7056 | TYPE_QUAL_CONST));
7057 rtx memloc = assign_temp (nt, 1, 1, 1);
7059 emit_move_insn (memloc, op0);
7064 if (GET_CODE (op0) != MEM)
7067 #ifdef POINTERS_EXTEND_UNSIGNED
7068 if (GET_MODE (offset_rtx) != Pmode)
7069 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7071 if (GET_MODE (offset_rtx) != ptr_mode)
7072 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7075 /* A constant address in OP0 can have VOIDmode; we must not try
7076 to call force_reg for that case, so avoid it.  */
7077 if (GET_CODE (op0) == MEM
7078 && GET_MODE (op0) == BLKmode
7079 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7081 && (bitpos % bitsize) == 0
7082 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7083 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7085 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7089 op0 = offset_address (op0, offset_rtx,
7090 highest_pow2_factor (offset));
7093 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7094 record its alignment as BIGGEST_ALIGNMENT. */
7095 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7096 && is_aligning_offset (offset, tem))
7097 set_mem_align (op0, BIGGEST_ALIGNMENT);
7099 /* Don't forget about volatility even if this is a bitfield. */
7100 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7102 if (op0 == orig_op0)
7103 op0 = copy_rtx (op0);
7105 MEM_VOLATILE_P (op0) = 1;
7108 /* The following code doesn't handle CONCAT.
7109 Assume only bitpos == 0 can be used for CONCAT, due to
7110 one-element arrays having the same mode as their element.  */
7111 if (GET_CODE (op0) == CONCAT)
7113 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7118 /* In cases where an aligned union has an unaligned object
7119 as a field, we might be extracting a BLKmode value from
7120 an integer-mode (e.g., SImode) object. Handle this case
7121 by doing the extract into an object as wide as the field
7122 (which we know to be the width of a basic mode), then
7123 storing into memory, and changing the mode to BLKmode. */
7124 if (mode1 == VOIDmode
7125 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7126 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7127 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7128 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7129 && modifier != EXPAND_CONST_ADDRESS
7130 && modifier != EXPAND_INITIALIZER)
7131 /* If the field isn't aligned enough to fetch as a memref,
7132 fetch it as a bit field. */
7133 || (mode1 != BLKmode
7134 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7135 && ((TYPE_ALIGN (TREE_TYPE (tem))
7136 < GET_MODE_ALIGNMENT (mode))
7137 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7138 /* If the type and the field are a constant size and the
7139 size of the type isn't the same size as the bitfield,
7140 we must use bitfield operations. */
7142 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7144 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7147 enum machine_mode ext_mode = mode;
7149 if (ext_mode == BLKmode
7150 && ! (target != 0 && GET_CODE (op0) == MEM
7151 && GET_CODE (target) == MEM
7152 && bitpos % BITS_PER_UNIT == 0))
7153 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7155 if (ext_mode == BLKmode)
7157 /* In this case, BITPOS must start at a byte boundary and
7158 TARGET, if specified, must be a MEM. */
7159 if (GET_CODE (op0) != MEM
7160 || (target != 0 && GET_CODE (target) != MEM)
7161 || bitpos % BITS_PER_UNIT != 0)
7164 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7166 target = assign_temp (type, 0, 1, 1);
7168 emit_block_move (target, op0,
7169 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7175 op0 = validize_mem (op0);
7177 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7178 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7180 op0 = extract_bit_field (op0, bitsize, bitpos,
7181 unsignedp, target, ext_mode, ext_mode,
7182 int_size_in_bytes (TREE_TYPE (tem)));
7184 /* If the result is a record type and BITSIZE is narrower than
7185 the mode of OP0, an integral mode, and this is a big endian
7186 machine, we must put the field into the high-order bits. */
7187 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7188 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7189 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7190 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7191 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7195 if (mode == BLKmode)
7197 rtx new = assign_temp (build_qualified_type
7198 ((*lang_hooks.types.type_for_mode)
7200 TYPE_QUAL_CONST), 0, 1, 1);
7202 emit_move_insn (new, op0);
7203 op0 = copy_rtx (new);
7204 PUT_MODE (op0, BLKmode);
7205 set_mem_attributes (op0, exp, 1);
7211 /* If the result is BLKmode, use that mode to access the object as well.  */
7213 if (mode == BLKmode)
7216 /* Get a reference to just this component. */
7217 if (modifier == EXPAND_CONST_ADDRESS
7218 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7219 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7221 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7223 if (op0 == orig_op0)
7224 op0 = copy_rtx (op0);
7226 set_mem_attributes (op0, exp, 0);
7227 if (GET_CODE (XEXP (op0, 0)) == REG)
7228 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7230 MEM_VOLATILE_P (op0) |= volatilep;
7231 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7232 || modifier == EXPAND_CONST_ADDRESS
7233 || modifier == EXPAND_INITIALIZER)
7235 else if (target == 0)
7236 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7238 convert_move (target, op0, unsignedp);
7244 rtx insn, before = get_last_insn (), vtbl_ref;
7246 /* Evaluate the interior expression. */
7247 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7250 /* Get or create an instruction off which to hang a note. */
7251 if (REG_P (subtarget))
7254 insn = get_last_insn ();
7257 if (! INSN_P (insn))
7258 insn = prev_nonnote_insn (insn);
7262 target = gen_reg_rtx (GET_MODE (subtarget));
7263 insn = emit_move_insn (target, subtarget);
7266 /* Collect the data for the note. */
7267 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7268 vtbl_ref = plus_constant (vtbl_ref,
7269 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7270 /* Discard the initial CONST that was added. */
7271 vtbl_ref = XEXP (vtbl_ref, 0);
7274 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7279 /* Intended for a reference to a buffer of a file-object in Pascal.
7280 But it's not certain that a special tree code will really be
7281 necessary for these. INDIRECT_REF might work for them. */
7287 /* Pascal set IN expression.
7290 rlo = set_low - (set_low%bits_per_word);
7291 the_word = set [ (index - rlo)/bits_per_word ];
7292 bit_index = index % bits_per_word;
7293 bitmask = 1 << bit_index;
7294 return !!(the_word & bitmask); */
7296 tree set = TREE_OPERAND (exp, 0);
7297 tree index = TREE_OPERAND (exp, 1);
7298 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7299 tree set_type = TREE_TYPE (set);
7300 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7301 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7302 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7303 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7304 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7305 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7306 rtx setaddr = XEXP (setval, 0);
7307 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7309 rtx diff, quo, rem, addr, bit, result;
7311 /* If the domain is empty, the answer is no.  Likewise if the index is constant
7312 and out of bounds. */
7313 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7314 && TREE_CODE (set_low_bound) == INTEGER_CST
7315 && tree_int_cst_lt (set_high_bound, set_low_bound))
7316 || (TREE_CODE (index) == INTEGER_CST
7317 && TREE_CODE (set_low_bound) == INTEGER_CST
7318 && tree_int_cst_lt (index, set_low_bound))
7319 || (TREE_CODE (set_high_bound) == INTEGER_CST
7320 && TREE_CODE (index) == INTEGER_CST
7321 && tree_int_cst_lt (set_high_bound, index))))
7325 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7327 /* If we get here, we have to generate the code for both cases
7328 (in range and out of range). */
7330 op0 = gen_label_rtx ();
7331 op1 = gen_label_rtx ();
7333 if (! (GET_CODE (index_val) == CONST_INT
7334 && GET_CODE (lo_r) == CONST_INT))
7335 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7336 GET_MODE (index_val), iunsignedp, op1);
7338 if (! (GET_CODE (index_val) == CONST_INT
7339 && GET_CODE (hi_r) == CONST_INT))
7340 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7341 GET_MODE (index_val), iunsignedp, op1);
7343 /* Calculate the element number of bit zero in the first word of the set.  */
7345 if (GET_CODE (lo_r) == CONST_INT)
7346 rlow = GEN_INT (INTVAL (lo_r)
7347 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7349 rlow = expand_binop (index_mode, and_optab, lo_r,
7350 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7351 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7353 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7354 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7356 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7357 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7358 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7359 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7361 addr = memory_address (byte_mode,
7362 expand_binop (index_mode, add_optab, diff,
7363 setaddr, NULL_RTX, iunsignedp,
7366 /* Extract the bit we want to examine. */
7367 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7368 gen_rtx_MEM (byte_mode, addr),
7369 make_tree (TREE_TYPE (index), rem),
7371 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7372 GET_MODE (target) == byte_mode ? target : 0,
7373 1, OPTAB_LIB_WIDEN);
7375 if (result != target)
7376 convert_move (target, result, 1);
7378 /* Output the code to handle the out-of-range case. */
7381 emit_move_insn (target, const0_rtx);
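/* For illustration, the in-range path above computes roughly this C
   fragment, assuming BITS_PER_UNIT == 8 and `set' (editorial name)
   addressing the bitmap's storage:

       byte = set[(index - rlo) / 8];
       return (byte >> (index % 8)) & 1;

   where rlo is the set's low bound rounded down, per the pseudocode
   comment at the top of this case; the two emit_cmp_and_jump_insns
   calls branch to the out-of-range label when INDEX lies outside
   [SET_LOW, SET_HIGH].  */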
7386 case WITH_CLEANUP_EXPR:
7387 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7389 WITH_CLEANUP_EXPR_RTL (exp)
7390 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7391 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7392 CLEANUP_EH_ONLY (exp));
7394 /* That's it for this cleanup. */
7395 TREE_OPERAND (exp, 1) = 0;
7397 return WITH_CLEANUP_EXPR_RTL (exp);
7399 case CLEANUP_POINT_EXPR:
7401 /* Start a new binding layer that will keep track of all cleanup
7402 actions to be performed. */
7403 expand_start_bindings (2);
7405 target_temp_slot_level = temp_slot_level;
7407 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7408 /* If we're going to use this value, load it up now. */
7410 op0 = force_not_mem (op0);
7411 preserve_temp_slots (op0);
7412 expand_end_bindings (NULL_TREE, 0, 0);
7417 /* Check for a built-in function. */
7418 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7419 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7421 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7423 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7424 == BUILT_IN_FRONTEND)
7425 return (*lang_hooks.expand_expr)
7426 (exp, original_target, tmode, modifier);
7428 return expand_builtin (exp, target, subtarget, tmode, ignore);
7431 return expand_call (exp, target, ignore);
7433 case NON_LVALUE_EXPR:
7436 case REFERENCE_EXPR:
7437 if (TREE_OPERAND (exp, 0) == error_mark_node)
7440 if (TREE_CODE (type) == UNION_TYPE)
7442 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7444 /* If both input and output are BLKmode, this conversion isn't doing
7445 anything except possibly changing memory attributes.  */
7446 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7448 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7451 result = copy_rtx (result);
7452 set_mem_attributes (result, exp, 0);
7457 target = assign_temp (type, 0, 1, 1);
7459 if (GET_CODE (target) == MEM)
7460 /* Store data into beginning of memory target. */
7461 store_expr (TREE_OPERAND (exp, 0),
7462 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7464 else if (GET_CODE (target) == REG)
7465 /* Store this field into a union of the proper type. */
7466 store_field (target,
7467 MIN ((int_size_in_bytes (TREE_TYPE
7468 (TREE_OPERAND (exp, 0)))
7470 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7471 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7472 VOIDmode, 0, type, 0);
7476 /* Return the entire union. */
7480 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7482 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7485 /* If the signedness of the conversion differs and OP0 is
7486 a promoted SUBREG, clear that indication since we now
7487 have to do the proper extension. */
7488 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7489 && GET_CODE (op0) == SUBREG)
7490 SUBREG_PROMOTED_VAR_P (op0) = 0;
7495 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7496 if (GET_MODE (op0) == mode)
7499 /* If OP0 is a constant, just convert it into the proper mode. */
7500 if (CONSTANT_P (op0))
7502 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7503 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7505 if (modifier == EXPAND_INITIALIZER)
7506 return simplify_gen_subreg (mode, op0, inner_mode,
7507 subreg_lowpart_offset (mode,
7510 return convert_modes (mode, inner_mode, op0,
7511 TREE_UNSIGNED (inner_type));
7514 if (modifier == EXPAND_INITIALIZER)
7515 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7519 convert_to_mode (mode, op0,
7520 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7522 convert_move (target, op0,
7523 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7526 case VIEW_CONVERT_EXPR:
7527 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7529 /* If the input and output modes are both the same, we are done.
7530 Otherwise, if neither mode is BLKmode and both are within a word, we
7531 can use gen_lowpart. If neither is true, make sure the operand is
7532 in memory and convert the MEM to the new mode. */
7533 if (TYPE_MODE (type) == GET_MODE (op0))
7535 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7536 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7537 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7538 op0 = gen_lowpart (TYPE_MODE (type), op0);
7539 else if (GET_CODE (op0) != MEM)
7541 /* If the operand is not a MEM, force it into memory. Since we
7542 are going to be changing the mode of the MEM, don't call
7543 force_const_mem for constants because we don't allow pool
7544 constants to change mode. */
7545 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7547 if (TREE_ADDRESSABLE (exp))
7550 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7552 = assign_stack_temp_for_type
7553 (TYPE_MODE (inner_type),
7554 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7556 emit_move_insn (target, op0);
7560 /* At this point, OP0 is in the correct mode. If the output type is such
7561 that the operand is known to be aligned, indicate that it is.
7562 Otherwise, we need only be concerned about alignment for non-BLKmode results.  */
7564 if (GET_CODE (op0) == MEM)
7566 op0 = copy_rtx (op0);
7568 if (TYPE_ALIGN_OK (type))
7569 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7570 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7571 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7573 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7574 HOST_WIDE_INT temp_size
7575 = MAX (int_size_in_bytes (inner_type),
7576 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7577 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7578 temp_size, 0, type);
7579 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7581 if (TREE_ADDRESSABLE (exp))
7584 if (GET_MODE (op0) == BLKmode)
7585 emit_block_move (new_with_op0_mode, op0,
7586 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7588 emit_move_insn (new_with_op0_mode, op0);
7593 op0 = adjust_address (op0, TYPE_MODE (type), 0);
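/* For illustration: a VIEW_CONVERT_EXPR reinterprets bits without any
   conversion, like type punning through a C union:

       union { float f; int i; } u;
       u.f = 1.0f;     (then read u.i)

   When both modes fit in a word, gen_lowpart above does the
   reinterpretation in place; otherwise the value is spilled to memory
   and re-read in the new mode.  */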
7599 /* We come here from MINUS_EXPR when the second operand is a constant.  */
7602 this_optab = ! unsignedp && flag_trapv
7603 && (GET_MODE_CLASS (mode) == MODE_INT)
7604 ? addv_optab : add_optab;
7606 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7607 something else, make sure we add the register to the constant and
7608 then to the other thing. This case can occur during strength
7609 reduction and doing it this way will produce better code if the
7610 frame pointer or argument pointer is eliminated.
7612 fold-const.c will ensure that the constant is always in the inner
7613 PLUS_EXPR, so the only case we need to do anything about is if
7614 sp, ap, or fp is our second argument, in which case we must swap
7615 the innermost first argument and our second argument. */
7617 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7618 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7619 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7620 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7621 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7622 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7624 tree t = TREE_OPERAND (exp, 1);
7626 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7627 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7630 /* If the result is to be ptr_mode and we are adding an integer to
7631 something, we might be forming a constant. So try to use
7632 plus_constant. If it produces a sum and we can't accept it,
7633 use force_operand. This allows P = &ARR[const] to generate
7634 efficient code on machines where a SYMBOL_REF is not a valid address.
7637 If this is an EXPAND_SUM call, always return the sum.  */
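/* For illustration, assuming 4-byte ints: given `int arr[10];', the
   address &arr[5] reaches here as a SYMBOL_REF plus the constant 20,
   and plus_constant folds it into the single rtx

       (const (plus (symbol_ref ("arr")) (const_int 20)))

   instead of emitting an addition insn.  */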
7638 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7639 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7641 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7642 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7643 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7647 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7649 /* Use immed_double_const to ensure that the constant is
7650 truncated according to the mode of OP1, then sign extended
7651 to a HOST_WIDE_INT. Using the constant directly can result
7652 in non-canonical RTL in a 64x32 cross compile. */
7654 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7656 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7657 op1 = plus_constant (op1, INTVAL (constant_part));
7658 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7659 op1 = force_operand (op1, target);
7663 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7664 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7665 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7669 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7670 (modifier == EXPAND_INITIALIZER
7671 ? EXPAND_INITIALIZER : EXPAND_SUM));
7672 if (! CONSTANT_P (op0))
7674 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7675 VOIDmode, modifier);
7676 /* Don't go to both_summands if modifier
7677 says it's not right to return a PLUS. */
7678 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7682 /* Use immed_double_const to ensure that the constant is
7683 truncated according to the mode of OP1, then sign extended
7684 to a HOST_WIDE_INT. Using the constant directly can result
7685 in non-canonical RTL in a 64x32 cross compile. */
7687 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7689 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7690 op0 = plus_constant (op0, INTVAL (constant_part));
7691 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7692 op0 = force_operand (op0, target);
7697 /* No sense saving up arithmetic to be done
7698 if it's all in the wrong mode to form part of an address.
7699 And force_operand won't know whether to sign-extend or zero-extend.  */
7701 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7702 || mode != ptr_mode)
7705 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7708 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7709 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7712 /* Make sure any term that's a sum with a constant comes last. */
7713 if (GET_CODE (op0) == PLUS
7714 && CONSTANT_P (XEXP (op0, 1)))
7720 /* If adding to a sum including a constant,
7721 associate it to put the constant outside. */
7722 if (GET_CODE (op1) == PLUS
7723 && CONSTANT_P (XEXP (op1, 1)))
7725 rtx constant_term = const0_rtx;
7727 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7730 /* Ensure that MULT comes first if there is one. */
7731 else if (GET_CODE (op0) == MULT)
7732 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7734 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7736 /* Let's also eliminate constants from op0 if possible. */
7737 op0 = eliminate_constant_term (op0, &constant_term);
7739 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7740 their sum should be a constant. Form it into OP1, since the
7741 result we want will then be OP0 + OP1. */
7743 temp = simplify_binary_operation (PLUS, mode, constant_term,
7748 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7751 /* Put a constant term last and put a multiplication first. */
7752 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7753 temp = op1, op1 = op0, op0 = temp;
7755 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7756 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7759 /* For initializers, we are allowed to return a MINUS of two
7760 symbolic constants.  Here we handle the case when both operands are constant.  */
7764 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7765 && really_constant_p (TREE_OPERAND (exp, 0))
7766 && really_constant_p (TREE_OPERAND (exp, 1)))
7768 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7770 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7773 /* If the last operand is a CONST_INT, use plus_constant of
7774 the negated constant. Else make the MINUS. */
7775 if (GET_CODE (op1) == CONST_INT)
7776 return plus_constant (op0, - INTVAL (op1));
7778 return gen_rtx_MINUS (mode, op0, op1);
7780 /* Convert A - const to A + (-const). */
7781 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7783 tree negated = fold (build1 (NEGATE_EXPR, type,
7784 TREE_OPERAND (exp, 1)));
7786 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7787 /* If we can't negate the constant in TYPE, leave it alone and
7788 expand_binop will negate it for us. We used to try to do it
7789 here in the signed version of TYPE, but that doesn't work
7790 on POINTER_TYPEs. */;
7793 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
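/* For illustration: `a - 5' is rewritten here as `a + (-5)', letting
   the PLUS_EXPR code above associate the constant with other constant
   terms or fold it into an address; unsigned and overflowing cases
   are left for expand_binop, as the comment above explains.  */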
7797 this_optab = ! unsignedp && flag_trapv
7798 && (GET_MODE_CLASS(mode) == MODE_INT)
7799 ? subv_optab : sub_optab;
7803 /* If first operand is constant, swap them.
7804 Thus the following special case checks need only
7805 check the second operand. */
7806 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7808 tree t1 = TREE_OPERAND (exp, 0);
7809 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7810 TREE_OPERAND (exp, 1) = t1;
7813 /* Attempt to return something suitable for generating an
7814 indexed address, for machines that support that. */
7816 if (modifier == EXPAND_SUM && mode == ptr_mode
7817 && host_integerp (TREE_OPERAND (exp, 1), 0))
7819 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7822 /* If we knew for certain that this is arithmetic for an array
7823 reference, and we knew the bounds of the array, then we could
7824 apply the distributive law across (PLUS X C) for constant C.
7825 Without such knowledge, we risk overflowing the computation
7826 when both X and C are large, but X+C isn't. */
7827 /* ??? Could perhaps special-case EXP being unsigned and C being
7828 positive. In that case we are certain that X+C is no smaller
7829 than X and so the transformed expression will overflow iff the
7830 original would have. */
7832 if (GET_CODE (op0) != REG)
7833 op0 = force_operand (op0, NULL_RTX);
7834 if (GET_CODE (op0) != REG)
7835 op0 = copy_to_mode_reg (mode, op0);
7838 gen_rtx_MULT (mode, op0,
7839 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7842 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7845 /* Check for multiplying things that have been extended
7846 from a narrower type. If this machine supports multiplying
7847 in that narrower type with a result in the desired type,
7848 do it that way, and avoid the explicit type-conversion. */
7849 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7850 && TREE_CODE (type) == INTEGER_TYPE
7851 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7852 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7853 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7854 && int_fits_type_p (TREE_OPERAND (exp, 1),
7855 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7856 /* Don't use a widening multiply if a shift will do. */
7857 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7858 > HOST_BITS_PER_WIDE_INT)
7859 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7861 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7862 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7864 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7865 /* If both operands are extended, they must either both
7866 be zero-extended or both be sign-extended. */
7867 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7869 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7871 enum machine_mode innermode
7872 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7873 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7874 ? smul_widen_optab : umul_widen_optab);
7875 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7876 ? umul_widen_optab : smul_widen_optab);
7877 if (mode == GET_MODE_WIDER_MODE (innermode))
7879 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7881 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7882 NULL_RTX, VOIDmode, 0);
7883 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7884 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7887 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7888 NULL_RTX, VOIDmode, 0);
7891 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7892 && innermode == word_mode)
7895 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7896 NULL_RTX, VOIDmode, 0);
7897 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7898 op1 = convert_modes (innermode, mode,
7899 expand_expr (TREE_OPERAND (exp, 1),
7900 NULL_RTX, VOIDmode, 0),
7903 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7904 NULL_RTX, VOIDmode, 0);
7905 temp = expand_binop (mode, other_optab, op0, op1, target,
7906 unsignedp, OPTAB_LIB_WIDEN);
7907 htem = expand_mult_highpart_adjust (innermode,
7908 gen_highpart (innermode, temp),
7910 gen_highpart (innermode, temp),
7912 emit_move_insn (gen_highpart (innermode, temp), htem);
7917 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7918 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7919 return expand_mult (mode, op0, op1, target, unsignedp);
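/* For illustration: on a machine with a 16x16->32 widening multiply
   pattern (e.g. mulhisi3), the C expression

       (int) s1 * (int) s2        (s1, s2 of type short)

   is caught by the narrower-type check above and expanded as one
   widening multiply insn instead of two sign extensions followed by a
   full SImode multiply.  */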
7921 case TRUNC_DIV_EXPR:
7922 case FLOOR_DIV_EXPR:
7924 case ROUND_DIV_EXPR:
7925 case EXACT_DIV_EXPR:
7926 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7928 /* Possible optimization: compute the dividend with EXPAND_SUM;
7929 then, if the divisor is constant, we can optimize the case
7930 where some terms of the dividend have coefficients divisible by it.  */
7931 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7932 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7933 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7936 /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
7937 saving an expensive divide.  If not, combine will rebuild the original computation.  */
7939 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7940 && TREE_CODE (type) == REAL_TYPE
7941 && !real_onep (TREE_OPERAND (exp, 0)))
7942 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7943 build (RDIV_EXPR, type,
7944 build_real (type, dconst1),
7945 TREE_OPERAND (exp, 1))),
7946 target, tmode, unsignedp);
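/* For illustration: with -funsafe-math-optimizations, a loop such as

       for (i = 0; i < n; i++)
         out[i] = in[i] / scale;

   benefits because 1.0/scale is loop-invariant and can be CSEd or
   hoisted, trading n divides for one divide and n multiplies.  The
   transformation is unsafe because the two forms may round
   differently.  */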
7947 this_optab = sdiv_optab;
7950 case TRUNC_MOD_EXPR:
7951 case FLOOR_MOD_EXPR:
7953 case ROUND_MOD_EXPR:
7954 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7956 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7957 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7958 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7960 case FIX_ROUND_EXPR:
7961 case FIX_FLOOR_EXPR:
7963 abort (); /* Not used for C. */
7965 case FIX_TRUNC_EXPR:
7966 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7968 target = gen_reg_rtx (mode);
7969 expand_fix (target, op0, unsignedp);
7973 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7975 target = gen_reg_rtx (mode);
7976 /* expand_float can't figure out what to do if FROM has VOIDmode.
7977 So give it the correct mode. With -O, cse will optimize this. */
7978 if (GET_MODE (op0) == VOIDmode)
7979 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7981 expand_float (target, op0,
7982 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7986 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7987 temp = expand_unop (mode,
7988 ! unsignedp && flag_trapv
7989 && (GET_MODE_CLASS(mode) == MODE_INT)
7990 ? negv_optab : neg_optab, op0, target, 0);
7996 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7998 /* Handle complex values specially. */
7999 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8000 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8001 return expand_complex_abs (mode, op0, target, unsignedp);
8003 /* Unsigned abs is simply the operand. Testing here means we don't
8004 risk generating incorrect code below. */
8005 if (TREE_UNSIGNED (type))
8008 return expand_abs (mode, op0, target, unsignedp,
8009 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8013 target = original_target;
8014 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8015 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8016 || GET_MODE (target) != mode
8017 || (GET_CODE (target) == REG
8018 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8019 target = gen_reg_rtx (mode);
8020 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8021 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8023 /* First try to do it with a special MIN or MAX instruction.
8024 If that does not win, use a conditional jump to select the proper value.  */
8026 this_optab = (TREE_UNSIGNED (type)
8027 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8028 : (code == MIN_EXPR ? smin_optab : smax_optab));
8030 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8035 /* At this point, a MEM target is no longer useful; we will get better code without it.  */
8038 if (GET_CODE (target) == MEM)
8039 target = gen_reg_rtx (mode);
8042 emit_move_insn (target, op0);
8044 op0 = gen_label_rtx ();
8046 /* If this mode is an integer too wide to compare properly,
8047 compare word by word. Rely on cse to optimize constant cases. */
8048 if (GET_MODE_CLASS (mode) == MODE_INT
8049 && ! can_compare_p (GE, mode, ccp_jump))
8051 if (code == MAX_EXPR)
8052 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8053 target, op1, NULL_RTX, op0);
8055 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8056 op1, target, NULL_RTX, op0);
8060 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8061 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8062 unsignedp, mode, NULL_RTX, NULL_RTX,
8065 emit_move_insn (target, op1);
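/* For illustration, the fallback above computes MAX (a, b) with a
   conditional jump, roughly:

       target = a;
       if (target >= b) goto op0;    (signed or unsigned compare)
       target = b;
     op0:

   MIN_EXPR uses LE in place of GE.  */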
8070 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8071 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8077 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8078 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8083 /* ??? Can optimize bitwise operations with one arg constant.
8084 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8085 and (a bitwise1 b) bitwise2 b (etc)
8086 but that is probably not worthwhile.  */
8088 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8089 boolean values when we want in all cases to compute both of them. In
8090 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8091 as actual zero-or-1 values and then bitwise anding. In cases where
8092 there cannot be any side effects, better code would be made by
8093 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8094 how to recognize those cases. */
8096 case TRUTH_AND_EXPR:
8098 this_optab = and_optab;
8103 this_optab = ior_optab;
8106 case TRUTH_XOR_EXPR:
8108 this_optab = xor_optab;
8115 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8117 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8118 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8121 /* Could determine the answer when only additive constants differ. Also,
8122 the addition of one can be handled by changing the condition. */
8129 case UNORDERED_EXPR:
8136 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8140 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8141 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8143 && GET_CODE (original_target) == REG
8144 && (GET_MODE (original_target)
8145 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8147 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8150 /* If temp is constant, we can just compute the result. */
8151 if (GET_CODE (temp) == CONST_INT)
8153 if (INTVAL (temp) != 0)
8154 emit_move_insn (target, const1_rtx);
8156 emit_move_insn (target, const0_rtx);
8161 if (temp != original_target)
8163 enum machine_mode mode1 = GET_MODE (temp);
8164 if (mode1 == VOIDmode)
8165 mode1 = tmode != VOIDmode ? tmode : mode;
8167 temp = copy_to_mode_reg (mode1, temp);
8170 op1 = gen_label_rtx ();
8171 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8172 GET_MODE (temp), unsignedp, op1);
8173 emit_move_insn (temp, const1_rtx);
8178 /* If no set-flag instruction, must generate a conditional
8179 store into a temporary variable. Drop through
8180 and handle this like && and ||. */
8182 case TRUTH_ANDIF_EXPR:
8183 case TRUTH_ORIF_EXPR:
8185 && (target == 0 || ! safe_from_p (target, exp, 1)
8186 /* Make sure we don't have a hard reg (such as function's return
8187 value) live across basic blocks, if not optimizing. */
8188 || (!optimize && GET_CODE (target) == REG
8189 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8190 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8193 emit_clr_insn (target);
8195 op1 = gen_label_rtx ();
8196 jumpifnot (exp, op1);
8199 emit_0_to_1_insn (target);
8202 return ignore ? const0_rtx : target;
8204 case TRUTH_NOT_EXPR:
8205 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8206 /* The parser is careful to generate TRUTH_NOT_EXPR
8207 only with operands that are always zero or one. */
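/* Hence (illustrative example): since the operand is known to be 0 or
   1, !X is computed as X ^ 1, e.g. !(a < b) becomes (a < b) ^ 1, with
   no branch needed.  */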
8208 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8209 target, 1, OPTAB_LIB_WIDEN);
8215 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8217 return expand_expr (TREE_OPERAND (exp, 1),
8218 (ignore ? const0_rtx : target),
8222 /* If we would have a "singleton" (see below) were it not for a
8223 conversion in each arm, bring that conversion back out. */
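/* For example (sketch only): a tree for

	c ? (long) (a + b) : (long) a

   is rewritten here as

	(long) (c ? a + b : a)

   so that the singleton code below can see the operand A common to
   both arms.  */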
8224 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8225 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8226 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8227 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8229 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8230 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8232 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8233 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8234 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8235 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8236 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8237 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8238 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8239 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8240 return expand_expr (build1 (NOP_EXPR, type,
8241 build (COND_EXPR, TREE_TYPE (iftrue),
8242 TREE_OPERAND (exp, 0),
8244 target, tmode, modifier);
8248 /* Note that COND_EXPRs whose type is a structure or union
8249 are required to be constructed to contain assignments of
8250 a temporary variable, so that we can evaluate them here
8251 for side effect only. If type is void, we must do likewise. */
8253 /* If an arm of the branch requires a cleanup,
8254 only that cleanup is performed. */
8257 tree binary_op = 0, unary_op = 0;
8259 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8260 convert it to our mode, if necessary. */
8261 if (integer_onep (TREE_OPERAND (exp, 1))
8262 && integer_zerop (TREE_OPERAND (exp, 2))
8263 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8267 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8272 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8273 if (GET_MODE (op0) == mode)
8277 target = gen_reg_rtx (mode);
8278 convert_move (target, op0, unsignedp);
8282 /* Check for X ? A + B : A. If we have this, we can copy A to the
8283 output and conditionally add B. Similarly for unary operations.
8284 Don't do this if X has side-effects because those side effects
8285 might affect A or B and the "?" operation is a sequence point in
8286 ANSI. (operand_equal_p tests for side effects.) */
8288 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8289 && operand_equal_p (TREE_OPERAND (exp, 2),
8290 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8291 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8292 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8293 && operand_equal_p (TREE_OPERAND (exp, 1),
8294 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8295 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8296 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8297 && operand_equal_p (TREE_OPERAND (exp, 2),
8298 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8299 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8300 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8301 && operand_equal_p (TREE_OPERAND (exp, 1),
8302 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8303 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8305 /* If we are not to produce a result, we have no target. Otherwise,
8306 if a target was specified use it; it will not be used as an
8307 intermediate target unless it is safe.  If no target, use a temporary.  */
8312 else if (original_target
8313 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8314 || (singleton && GET_CODE (original_target) == REG
8315 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8316 && original_target == var_rtx (singleton)))
8317 && GET_MODE (original_target) == mode
8318 #ifdef HAVE_conditional_move
8319 && (! can_conditionally_move_p (mode)
8320 || GET_CODE (original_target) == REG
8321 || TREE_ADDRESSABLE (type))
8323 && (GET_CODE (original_target) != MEM
8324 || TREE_ADDRESSABLE (type)))
8325 temp = original_target;
8326 else if (TREE_ADDRESSABLE (type))
8329 temp = assign_temp (type, 0, 0, 1);
8331 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8332 do the test of X as a store-flag operation, do this as
8333 A + ((X != 0) << log C). Similarly for other simple binary
8334 operators. Only do for C == 1 if BRANCH_COST is low. */
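/* Illustrative example (not compiler code): with C == 4 this turns

	x ? a + 4 : a

   into the branch-free sequence

	a + ((x != 0) << 2)

   since the store-flag result (x != 0) is exactly 0 or 1.  */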
8335 if (temp && singleton && binary_op
8336 && (TREE_CODE (binary_op) == PLUS_EXPR
8337 || TREE_CODE (binary_op) == MINUS_EXPR
8338 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8339 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8340 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8341 : integer_onep (TREE_OPERAND (binary_op, 1)))
8342 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8345 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8346 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8347 ? addv_optab : add_optab)
8348 : TREE_CODE (binary_op) == MINUS_EXPR
8349 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8350 ? subv_optab : sub_optab)
8351 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8354 /* If we had X ? A : A + 1, do this as A + (X == 0).
8356 We have to invert the truth value here and then put it
8357 back later if do_store_flag fails. We cannot simply copy
8358 TREE_OPERAND (exp, 0) to another variable and modify that
8359 because invert_truthvalue can modify the tree pointed to by its argument.  */
8361 if (singleton == TREE_OPERAND (exp, 1))
8362 TREE_OPERAND (exp, 0)
8363 = invert_truthvalue (TREE_OPERAND (exp, 0));
8365 result = do_store_flag (TREE_OPERAND (exp, 0),
8366 (safe_from_p (temp, singleton, 1)
8368 mode, BRANCH_COST <= 1);
8370 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8371 result = expand_shift (LSHIFT_EXPR, mode, result,
8372 build_int_2 (tree_log2
8376 (safe_from_p (temp, singleton, 1)
8377 ? temp : NULL_RTX), 0);
8381 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8382 return expand_binop (mode, boptab, op1, result, temp,
8383 unsignedp, OPTAB_LIB_WIDEN);
8385 else if (singleton == TREE_OPERAND (exp, 1))
8386 TREE_OPERAND (exp, 0)
8387 = invert_truthvalue (TREE_OPERAND (exp, 0));
8390 do_pending_stack_adjust ();
8392 op0 = gen_label_rtx ();
8394 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8398 /* If the target conflicts with the other operand of the
8399 binary op, we can't use it. Also, we can't use the target
8400 if it is a hard register, because evaluating the condition
8401 might clobber it. */
8403 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8404 || (GET_CODE (temp) == REG
8405 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8406 temp = gen_reg_rtx (mode);
8407 store_expr (singleton, temp, 0);
8410 expand_expr (singleton,
8411 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8412 if (singleton == TREE_OPERAND (exp, 1))
8413 jumpif (TREE_OPERAND (exp, 0), op0);
8415 jumpifnot (TREE_OPERAND (exp, 0), op0);
8417 start_cleanup_deferral ();
8418 if (binary_op && temp == 0)
8419 /* Just touch the other operand. */
8420 expand_expr (TREE_OPERAND (binary_op, 1),
8421 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8423 store_expr (build (TREE_CODE (binary_op), type,
8424 make_tree (type, temp),
8425 TREE_OPERAND (binary_op, 1)),
8428 store_expr (build1 (TREE_CODE (unary_op), type,
8429 make_tree (type, temp)),
8433 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8434 comparison operator. If we have one of these cases, set the
8435 output to A, branch on A (cse will merge these two references),
8436 then set the output to FOO. */
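/* Schematically (an illustrative sketch), for a source expression
   such as (a != 0 ? a : foo) the code emitted below is

	temp = a;
	if (a != 0) goto done;
	temp = foo;
      done:

   where the two references to A are the ones cse is expected to
   merge.  */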
8438 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8439 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8440 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8441 TREE_OPERAND (exp, 1), 0)
8442 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8443 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8444 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8446 if (GET_CODE (temp) == REG
8447 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8448 temp = gen_reg_rtx (mode);
8449 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8450 jumpif (TREE_OPERAND (exp, 0), op0);
8452 start_cleanup_deferral ();
8453 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8457 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8458 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8459 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8460 TREE_OPERAND (exp, 2), 0)
8461 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8462 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8463 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8465 if (GET_CODE (temp) == REG
8466 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8467 temp = gen_reg_rtx (mode);
8468 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8469 jumpifnot (TREE_OPERAND (exp, 0), op0);
8471 start_cleanup_deferral ();
8472 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8477 op1 = gen_label_rtx ();
8478 jumpifnot (TREE_OPERAND (exp, 0), op0);
8480 start_cleanup_deferral ();
8482 /* One branch of the cond can be void, if it never returns. For
8483 example A ? throw : E */
8485 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8486 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8488 expand_expr (TREE_OPERAND (exp, 1),
8489 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8490 end_cleanup_deferral ();
8492 emit_jump_insn (gen_jump (op1));
8495 start_cleanup_deferral ();
8497 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8498 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8500 expand_expr (TREE_OPERAND (exp, 2),
8501 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8504 end_cleanup_deferral ();
8515 /* Something needs to be initialized, but we didn't know
8516 where that thing was when building the tree. For example,
8517 it could be the return value of a function, or a parameter
8518 to a function which is laid out on the stack, or a temporary
8519 variable which must be passed by reference.
8521 We guarantee that the expression will either be constructed
8522 or copied into our original target. */
8524 tree slot = TREE_OPERAND (exp, 0);
8525 tree cleanups = NULL_TREE;
8528 if (TREE_CODE (slot) != VAR_DECL)
8532 target = original_target;
8534 /* Set this here so that if we get a target that refers to a
8535 register variable that's already been used, put_reg_into_stack
8536 knows that it should fix up those uses. */
8537 TREE_USED (slot) = 1;
8541 if (DECL_RTL_SET_P (slot))
8543 target = DECL_RTL (slot);
8544 /* We have already expanded the slot, so don't do anything else.  */
8546 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8551 target = assign_temp (type, 2, 0, 1);
8552 /* All temp slots at this level must not conflict. */
8553 preserve_temp_slots (target);
8554 SET_DECL_RTL (slot, target);
8555 if (TREE_ADDRESSABLE (slot))
8556 put_var_into_stack (slot);
8558 /* Since SLOT is not known to the called function
8559 to belong to its stack frame, we must build an explicit
8560 cleanup. This case occurs when we must build up a reference
8561 to pass the reference as an argument. In this case,
8562 it is very likely that such a reference need not be built here.  */
8565 if (TREE_OPERAND (exp, 2) == 0)
8566 TREE_OPERAND (exp, 2)
8567 = (*lang_hooks.maybe_build_cleanup) (slot);
8568 cleanups = TREE_OPERAND (exp, 2);
8573 /* This case does occur when expanding a parameter which
8574 needs to be constructed on the stack. The target
8575 is the actual stack address that we want to initialize.
8576 The function we call will perform the cleanup in this case. */
8578 /* If we have already assigned it space, use that space,
8579 not the target we were passed in, as our target
8580 parameter is only a hint. */
8581 if (DECL_RTL_SET_P (slot))
8583 target = DECL_RTL (slot);
8584 /* We have already expanded the slot, so don't do anything else.  */
8586 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8591 SET_DECL_RTL (slot, target);
8592 /* If we must have an addressable slot, then make sure that
8593 the RTL that we just stored in slot is OK. */
8594 if (TREE_ADDRESSABLE (slot))
8595 put_var_into_stack (slot);
8599 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8600 /* Mark it as expanded. */
8601 TREE_OPERAND (exp, 1) = NULL_TREE;
8603 store_expr (exp1, target, 0);
8605 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8612 tree lhs = TREE_OPERAND (exp, 0);
8613 tree rhs = TREE_OPERAND (exp, 1);
8615 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8621 /* If lhs is complex, expand calls in rhs before computing it.
8622 That's so we don't compute a pointer and save it over a
8623 call. If lhs is simple, compute it first so we can give it
8624 as a target if the rhs is just a call. This avoids an
8625 extra temp and copy and that prevents a partial-subsumption
8626 which makes bad code. Actually we could treat
8627 component_ref's of vars like vars. */
8629 tree lhs = TREE_OPERAND (exp, 0);
8630 tree rhs = TREE_OPERAND (exp, 1);
8634 /* Check for |= or &= of a bitfield of size one into another bitfield
8635 of size 1. In this case, (unless we need the result of the
8636 assignment) we can do this more efficiently with a
8637 test followed by an assignment, if necessary.
8639 ??? At this point, we can't get a BIT_FIELD_REF here.  But if
8640 things change so we do, this code should be enhanced to support it.  */
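/* Illustrative example (not compiler code): for one-bit fields

	struct s { unsigned a : 1, b : 1; } x;

   the statement x.a |= x.b; is expanded below as

	if (x.b)
	  x.a = 1;

   avoiding a read-modify-write of the destination bitfield.  */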
8643 && TREE_CODE (lhs) == COMPONENT_REF
8644 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8645 || TREE_CODE (rhs) == BIT_AND_EXPR)
8646 && TREE_OPERAND (rhs, 0) == lhs
8647 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8648 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8649 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8651 rtx label = gen_label_rtx ();
8653 do_jump (TREE_OPERAND (rhs, 1),
8654 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8655 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8656 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8657 (TREE_CODE (rhs) == BIT_IOR_EXPR
8659 : integer_zero_node)),
8661 do_pending_stack_adjust ();
8666 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8672 if (!TREE_OPERAND (exp, 0))
8673 expand_null_return ();
8675 expand_return (TREE_OPERAND (exp, 0));
8678 case PREINCREMENT_EXPR:
8679 case PREDECREMENT_EXPR:
8680 return expand_increment (exp, 0, ignore);
8682 case POSTINCREMENT_EXPR:
8683 case POSTDECREMENT_EXPR:
8684 /* Faster to treat as pre-increment if result is not used. */
8685 return expand_increment (exp, ! ignore, ignore);
8688 /* Are we taking the address of a nested function? */
8689 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8690 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8691 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8692 && ! TREE_STATIC (exp))
8694 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8695 op0 = force_operand (op0, target);
8697 /* If we are taking the address of something erroneous, just use zero.  */
8699 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8701 /* If we are taking the address of a constant and are at the
8702 top level, we have to use output_constant_def since we can't
8703 call force_const_mem at top level. */
8705 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8706 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8708 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8711 /* We make sure to pass const0_rtx down if we came in with
8712 ignore set, to avoid doing the cleanups twice for something. */
8713 op0 = expand_expr (TREE_OPERAND (exp, 0),
8714 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8715 (modifier == EXPAND_INITIALIZER
8716 ? modifier : EXPAND_CONST_ADDRESS));
8718 /* If we are going to ignore the result, OP0 will have been set
8719 to const0_rtx, so just return it. Don't get confused and
8720 think we are taking the address of the constant. */
8724 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8725 clever and return a REG when given a MEM.  */
8726 op0 = protect_from_queue (op0, 1);
8728 /* We would like the object in memory. If it is a constant, we can
8729 have it be statically allocated into memory. For a non-constant,
8730 we need to allocate some memory and store the value into it. */
8732 if (CONSTANT_P (op0))
8733 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8735 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8736 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8737 || GET_CODE (op0) == PARALLEL)
8739 /* If the operand is a SAVE_EXPR, we can deal with this by
8740 forcing the SAVE_EXPR into memory. */
8741 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8743 put_var_into_stack (TREE_OPERAND (exp, 0));
8744 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8748 /* If this object is in a register, it can't be BLKmode. */
8749 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8750 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8752 if (GET_CODE (op0) == PARALLEL)
8753 /* Handle calls that pass values in multiple
8754 non-contiguous locations.  The Irix 6 ABI has examples of this.  */
8756 emit_group_store (memloc, op0,
8757 int_size_in_bytes (inner_type));
8759 emit_move_insn (memloc, op0);
8765 if (GET_CODE (op0) != MEM)
8768 mark_temp_addr_taken (op0);
8769 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8771 op0 = XEXP (op0, 0);
8772 #ifdef POINTERS_EXTEND_UNSIGNED
8773 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8774 && mode == ptr_mode)
8775 op0 = convert_memory_address (ptr_mode, op0);
8780 /* If OP0 is not aligned at least as much as the type requires, we
8781 need to make a temporary, copy OP0 to it, and take the address of
8782 the temporary. We want to use the alignment of the type, not of
8783 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8784 the test for BLKmode means that can't happen. The test for
8785 BLKmode is because we never make mis-aligned MEMs with non-BLKmode.
8788 We don't need to do this at all if the machine doesn't have
8789 strict alignment. */
8790 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8791 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8793 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8795 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8797 = assign_stack_temp_for_type
8798 (TYPE_MODE (inner_type),
8799 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8800 : int_size_in_bytes (inner_type),
8801 1, build_qualified_type (inner_type,
8802 (TYPE_QUALS (inner_type)
8803 | TYPE_QUAL_CONST)));
8805 if (TYPE_ALIGN_OK (inner_type))
8808 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8812 op0 = force_operand (XEXP (op0, 0), target);
8816 && GET_CODE (op0) != REG
8817 && modifier != EXPAND_CONST_ADDRESS
8818 && modifier != EXPAND_INITIALIZER
8819 && modifier != EXPAND_SUM)
8820 op0 = force_reg (Pmode, op0);
8822 if (GET_CODE (op0) == REG
8823 && ! REG_USERVAR_P (op0))
8824 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8826 #ifdef POINTERS_EXTEND_UNSIGNED
8827 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8828 && mode == ptr_mode)
8829 op0 = convert_memory_address (ptr_mode, op0);
8834 case ENTRY_VALUE_EXPR:
8837 /* COMPLEX type for Extended Pascal & Fortran */
8840 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8843 /* Get the rtx code of the operands. */
8844 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8845 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8848 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8852 /* Move the real (op0) and imaginary (op1) parts to their location. */
8853 emit_move_insn (gen_realpart (mode, target), op0);
8854 emit_move_insn (gen_imagpart (mode, target), op1);
8856 insns = get_insns ();
8859 /* Complex construction should appear as a single unit. */
8860 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8861 each with a separate pseudo as destination.
8862 It's not correct for flow to treat them as a unit. */
8863 if (GET_CODE (target) != CONCAT)
8864 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8872 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8873 return gen_realpart (mode, op0);
8876 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8877 return gen_imagpart (mode, op0);
8881 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8885 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8888 target = gen_reg_rtx (mode);
8892 /* Store the realpart and the negated imagpart to target. */
8893 emit_move_insn (gen_realpart (partmode, target),
8894 gen_realpart (partmode, op0));
8896 imag_t = gen_imagpart (partmode, target);
8897 temp = expand_unop (partmode,
8898 ! unsignedp && flag_trapv
8899 && (GET_MODE_CLASS(partmode) == MODE_INT)
8900 ? negv_optab : neg_optab,
8901 gen_imagpart (partmode, op0), imag_t, 0);
8903 emit_move_insn (imag_t, temp);
8905 insns = get_insns ();
8908 /* Conjugate should appear as a single unit.
8909 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8910 each with a separate pseudo as destination.
8911 It's not correct for flow to treat them as a unit. */
8912 if (GET_CODE (target) != CONCAT)
8913 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8920 case TRY_CATCH_EXPR:
8922 tree handler = TREE_OPERAND (exp, 1);
8924 expand_eh_region_start ();
8926 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8928 expand_eh_region_end_cleanup (handler);
8933 case TRY_FINALLY_EXPR:
8935 tree try_block = TREE_OPERAND (exp, 0);
8936 tree finally_block = TREE_OPERAND (exp, 1);
8937 rtx finally_label = gen_label_rtx ();
8938 rtx done_label = gen_label_rtx ();
8939 rtx return_link = gen_reg_rtx (Pmode);
8940 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8941 (tree) finally_label, (tree) return_link);
8942 TREE_SIDE_EFFECTS (cleanup) = 1;
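/* Schematically (an illustrative sketch, using GNU C computed gotos),
   the layout emitted for TRY_FINALLY_EXPR is

	<try_block>
	return_link = &&resume; goto finally;
      resume:
	goto done;
      finally:
	<finally_block>
	goto *return_link;
      done:

   so the finally block is shared by the normal path and any cleanup
   path, each of which sets RETURN_LINK before jumping to it.  */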
8944 /* Start a new binding layer that will keep track of all cleanup
8945 actions to be performed. */
8946 expand_start_bindings (2);
8948 target_temp_slot_level = temp_slot_level;
8950 expand_decl_cleanup (NULL_TREE, cleanup);
8951 op0 = expand_expr (try_block, target, tmode, modifier);
8953 preserve_temp_slots (op0);
8954 expand_end_bindings (NULL_TREE, 0, 0);
8955 emit_jump (done_label);
8956 emit_label (finally_label);
8957 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8958 emit_indirect_jump (return_link);
8959 emit_label (done_label);
8963 case GOTO_SUBROUTINE_EXPR:
8965 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8966 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8967 rtx return_address = gen_label_rtx ();
8968 emit_move_insn (return_link,
8969 gen_rtx_LABEL_REF (Pmode, return_address));
8971 emit_label (return_address);
8976 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8979 return get_exception_pointer (cfun);
8982 /* Function descriptors are not valid except as
8983 initialization constants, and should not be expanded. */
8987 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
8990 /* Here to do an ordinary binary operator, generating an instruction
8991 from the optab already placed in `this_optab'. */
8993 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8995 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8996 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8998 temp = expand_binop (mode, this_optab, op0, op1, target,
8999 unsignedp, OPTAB_LIB_WIDEN);
9005 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9006 when applied to the address of EXP produces an address known to be
9007 aligned more than BIGGEST_ALIGNMENT. */
9010 is_aligning_offset (offset, exp)
9014 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9015 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9016 || TREE_CODE (offset) == NOP_EXPR
9017 || TREE_CODE (offset) == CONVERT_EXPR
9018 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9019 offset = TREE_OPERAND (offset, 0);
9021 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9022 a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
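/* For example (an illustrative sketch): the offset expression in

	p + ((- (long) p) & (N - 1))

   which rounds P up to an N-byte boundary, has exactly this shape:
   a BIT_AND_EXPR whose second operand N - 1 is one less than a power
   of 2 (and larger than BIGGEST_ALIGNMENT), applied to a negated copy
   of the address.  */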
9023 if (TREE_CODE (offset) != BIT_AND_EXPR
9024 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9025 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9026 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9029 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9030 It must be NEGATE_EXPR. Then strip any more conversions. */
9031 offset = TREE_OPERAND (offset, 0);
9032 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9033 || TREE_CODE (offset) == NOP_EXPR
9034 || TREE_CODE (offset) == CONVERT_EXPR)
9035 offset = TREE_OPERAND (offset, 0);
9037 if (TREE_CODE (offset) != NEGATE_EXPR)
9040 offset = TREE_OPERAND (offset, 0);
9041 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9042 || TREE_CODE (offset) == NOP_EXPR
9043 || TREE_CODE (offset) == CONVERT_EXPR)
9044 offset = TREE_OPERAND (offset, 0);
9046 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9047 whose type is the same as EXP. */
9048 return (TREE_CODE (offset) == ADDR_EXPR
9049 && (TREE_OPERAND (offset, 0) == exp
9050 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9051 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9052 == TREE_TYPE (exp)))));
9055 /* Return the tree node if ARG corresponds to a string constant or zero
9056 if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
9057 in bytes within the string that ARG is accessing. The type of the
9058 offset will be `sizetype'. */
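/* For example (illustrative): given the argument tree for

	"hello" + 2

   this returns the STRING_CST for "hello" and sets *PTR_OFFSET to a
   sizetype constant 2.  */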
9061 string_constant (arg, ptr_offset)
9067 if (TREE_CODE (arg) == ADDR_EXPR
9068 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9070 *ptr_offset = size_zero_node;
9071 return TREE_OPERAND (arg, 0);
9073 else if (TREE_CODE (arg) == PLUS_EXPR)
9075 tree arg0 = TREE_OPERAND (arg, 0);
9076 tree arg1 = TREE_OPERAND (arg, 1);
9081 if (TREE_CODE (arg0) == ADDR_EXPR
9082 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9084 *ptr_offset = convert (sizetype, arg1);
9085 return TREE_OPERAND (arg0, 0);
9087 else if (TREE_CODE (arg1) == ADDR_EXPR
9088 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9090 *ptr_offset = convert (sizetype, arg0);
9091 return TREE_OPERAND (arg1, 0);
9098 /* Expand code for a post- or pre- increment or decrement
9099 and return the RTX for the result.
9100 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
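/* For example (illustrative): for "y = x++" POST is 1 and the rtx for
   the old value of X is returned, while for "y = ++x" POST is 0 and
   the incremented value is returned; if the result is unused (IGNORE
   nonzero), a post-increment is treated as the cheaper
   pre-increment.  */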
9103 expand_increment (exp, post, ignore)
9109 tree incremented = TREE_OPERAND (exp, 0);
9110 optab this_optab = add_optab;
9112 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9113 int op0_is_copy = 0;
9114 int single_insn = 0;
9115 /* 1 means we can't store into OP0 directly,
9116 because it is a subreg narrower than a word,
9117 and we don't dare clobber the rest of the word. */
9120 /* Stabilize any component ref that might need to be
9121 evaluated more than once below. */
9123 || TREE_CODE (incremented) == BIT_FIELD_REF
9124 || (TREE_CODE (incremented) == COMPONENT_REF
9125 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9126 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9127 incremented = stabilize_reference (incremented);
9128 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9129 ones into save exprs so that they don't accidentally get evaluated
9130 more than once by the code below. */
9131 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9132 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9133 incremented = save_expr (incremented);
9135 /* Compute the operands as RTX.
9136 Note whether OP0 is the actual lvalue or a copy of it:
9137 I believe it is a copy iff it is a register or subreg
9138 and insns were generated in computing it. */
9140 temp = get_last_insn ();
9141 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9143 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9144 in place but instead must do sign- or zero-extension during assignment,
9145 so we copy it into a new register and let the code below use it as a copy.
9148 Note that we can safely modify this SUBREG since it is known not to be
9149 shared (it was made by the expand_expr call above). */
9151 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9154 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9158 else if (GET_CODE (op0) == SUBREG
9159 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9161 /* We cannot increment this SUBREG in place. If we are
9162 post-incrementing, get a copy of the old value. Otherwise,
9163 just mark that we cannot increment in place. */
9165 op0 = copy_to_reg (op0);
9170 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9171 && temp != get_last_insn ());
9172 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9174 /* Decide whether incrementing or decrementing. */
9175 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9176 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9177 this_optab = sub_optab;
9179 /* Convert decrement by a constant into a negative increment. */
9180 if (this_optab == sub_optab
9181 && GET_CODE (op1) == CONST_INT)
9183 op1 = GEN_INT (-INTVAL (op1));
9184 this_optab = add_optab;
9187 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9188 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9190 /* For a preincrement, see if we can do this with a single instruction. */
9193 icode = (int) this_optab->handlers[(int) mode].insn_code;
9194 if (icode != (int) CODE_FOR_nothing
9195 /* Make sure that OP0 is valid for operands 0 and 1
9196 of the insn we want to queue. */
9197 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9198 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9199 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9203 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9204 then we cannot just increment OP0. We must therefore contrive to
9205 increment the original value. Then, for postincrement, we can return
9206 OP0 since it is a copy of the old value. For preincrement, expand here
9207 unless we can do it with a single insn.
9209 Likewise if storing directly into OP0 would clobber high bits
9210 we need to preserve (bad_subreg). */
9211 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9213 /* This is the easiest way to increment the value wherever it is.
9214 Problems with multiple evaluation of INCREMENTED are prevented
9215 because either (1) it is a component_ref or preincrement,
9216 in which case it was stabilized above, or (2) it is an array_ref
9217 with constant index in an array in a register, which is
9218 safe to reevaluate. */
9219 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9220 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9221 ? MINUS_EXPR : PLUS_EXPR),
9224 TREE_OPERAND (exp, 1));
9226 while (TREE_CODE (incremented) == NOP_EXPR
9227 || TREE_CODE (incremented) == CONVERT_EXPR)
9229 newexp = convert (TREE_TYPE (incremented), newexp);
9230 incremented = TREE_OPERAND (incremented, 0);
9233 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9234 return post ? op0 : temp;
9239 /* We have a true reference to the value in OP0.
9240 If there is an insn to add or subtract in this mode, queue it.
9241 Queueing the increment insn avoids the register shuffling
9242 that often results if we must increment now and first save
9243 the old value for subsequent use. */
9245 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9246 op0 = stabilize (op0);
9249 icode = (int) this_optab->handlers[(int) mode].insn_code;
9250 if (icode != (int) CODE_FOR_nothing
9251 /* Make sure that OP0 is valid for operands 0 and 1
9252 of the insn we want to queue. */
9253 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9254 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9256 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9257 op1 = force_reg (mode, op1);
9259 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9261 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9263 rtx addr = (general_operand (XEXP (op0, 0), mode)
9264 ? force_reg (Pmode, XEXP (op0, 0))
9265 : copy_to_reg (XEXP (op0, 0)));
9268 op0 = replace_equiv_address (op0, addr);
9269 temp = force_reg (GET_MODE (op0), op0);
9270 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9271 op1 = force_reg (mode, op1);
9273 /* The increment queue is LIFO, thus we have to `queue'
9274 the instructions in reverse order. */
9275 enqueue_insn (op0, gen_move_insn (op0, temp));
9276 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9281 /* Preincrement, or we can't increment with one simple insn. */
9283 /* Save a copy of the value before inc or dec, to return it later. */
9284 temp = value = copy_to_reg (op0);
9286 /* Arrange to return the incremented value. */
9287 /* Copy the rtx because expand_binop will protect from the queue,
9288 and the results of that would be invalid for us to return
9289 if our caller does emit_queue before using our result. */
9290 temp = copy_rtx (value = op0);
9292 /* Increment however we can. */
9293 op1 = expand_binop (mode, this_optab, value, op1, op0,
9294 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9296 /* Make sure the value is stored into OP0. */
9298 emit_move_insn (op0, op1);
9303 /* At the start of a function, record that we have no previously-pushed
9304 arguments waiting to be popped. */
9307 init_pending_stack_adjust ()
9309 pending_stack_adjust = 0;
9312 /* When exiting from a function, if safe, clear out any pending stack adjust
9313 so the adjustment won't get done.
9315 Note, if the current function calls alloca, then it must have a
9316 frame pointer regardless of the value of flag_omit_frame_pointer. */
9319 clear_pending_stack_adjust ()
9321 #ifdef EXIT_IGNORE_STACK
9323 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9324 && EXIT_IGNORE_STACK
9325 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9326 && ! flag_inline_functions)
9328 stack_pointer_delta -= pending_stack_adjust,
9329 pending_stack_adjust = 0;
9334 /* Pop any previously-pushed arguments that have not been popped yet. */
9337 do_pending_stack_adjust ()
9339 if (inhibit_defer_pop == 0)
9341 if (pending_stack_adjust != 0)
9342 adjust_stack (GEN_INT (pending_stack_adjust));
9343 pending_stack_adjust = 0;
9347 /* Expand conditional expressions. */
9349 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9350 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here.  */
9354 jumpifnot (exp, label)
9358 do_jump (exp, label, NULL_RTX);
9361 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9368 do_jump (exp, NULL_RTX, label);
9371 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9372 the result is zero, or IF_TRUE_LABEL if the result is one.
9373 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9374 meaning fall through in that case.
9376 do_jump always does any pending stack adjust except when it does not
9377 actually perform a jump. An example where there is no jump
9378 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9380 This function is responsible for optimizing cases such as
9381 &&, || and comparison operators in EXP. */
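/* For example (sketch only): for

	if (a && b)
	  f ();

   do_jump on the TRUTH_ANDIF_EXPR emits two conditional branches,

	if (a == 0) goto false_label;
	if (b == 0) goto false_label;
	f ();
      false_label:

   rather than materializing the 0/1 value of the conjunction.  */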
9384 do_jump (exp, if_false_label, if_true_label)
9386 rtx if_false_label, if_true_label;
9388 enum tree_code code = TREE_CODE (exp);
9389 /* Some cases need to create a label to jump to
9390 in order to properly fall through.
9391 These cases set DROP_THROUGH_LABEL nonzero. */
9392 rtx drop_through_label = 0;
9396 enum machine_mode mode;
9398 #ifdef MAX_INTEGER_COMPUTATION_MODE
9399 check_max_integer_computation_mode (exp);
9410 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9416 /* This is not true with #pragma weak */
9418 /* The address of something can never be zero. */
9420 emit_jump (if_true_label);
9425 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9426 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9427 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9428 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9431 /* If we are narrowing the operand, we have to do the compare in the narrower mode.  */
9433 if ((TYPE_PRECISION (TREE_TYPE (exp))
9434 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9436 case NON_LVALUE_EXPR:
9437 case REFERENCE_EXPR:
9442 /* These cannot change zero->non-zero or vice versa. */
9443 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9446 case WITH_RECORD_EXPR:
9447 /* Put the object on the placeholder list, recurse through our first
9448 operand, and pop the list. */
9449 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9451 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9452 placeholder_list = TREE_CHAIN (placeholder_list);
9456 /* This is never less insns than evaluating the PLUS_EXPR followed by
9457 a test and can be longer if the test is eliminated. */
9459 /* Reduce to minus. */
9460 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9461 TREE_OPERAND (exp, 0),
9462 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9463 TREE_OPERAND (exp, 1))));
9464 /* Process as MINUS. */
9468 /* Non-zero iff operands of minus differ. */
9469 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9470 TREE_OPERAND (exp, 0),
9471 TREE_OPERAND (exp, 1)),
9472 NE, NE, if_false_label, if_true_label);
9476 /* If we are AND'ing with a small constant, do this comparison in the
9477 smallest type that fits. If the machine doesn't have comparisons
9478 that small, it will be converted back to the wider comparison.
9479 This helps if we are testing the sign bit of a narrower object.
9480 combine can't do this for us because it can't know whether a
9481 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
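/* Illustrative example: the test

	if (x & 0x80)

   with X a 32-bit int can be done here as a QImode comparison of the
   low byte, since tree_floor_log2 (0x80) == 7 fits in an 8-bit
   mode.  */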
9483 if (! SLOW_BYTE_ACCESS
9484 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9485 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9486 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9487 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9488 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9489 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9490 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9491 != CODE_FOR_nothing))
9493 do_jump (convert (type, exp), if_false_label, if_true_label);
9498 case TRUTH_NOT_EXPR:
9499 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9502 case TRUTH_ANDIF_EXPR:
9503 if (if_false_label == 0)
9504 if_false_label = drop_through_label = gen_label_rtx ();
9505 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9506 start_cleanup_deferral ();
9507 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9508 end_cleanup_deferral ();
9511 case TRUTH_ORIF_EXPR:
9512 if (if_true_label == 0)
9513 if_true_label = drop_through_label = gen_label_rtx ();
9514 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9515 start_cleanup_deferral ();
9516 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9517 end_cleanup_deferral ();
9522 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9523 preserve_temp_slots (NULL_RTX);
9527 do_pending_stack_adjust ();
9528 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9534 case ARRAY_RANGE_REF:
9536 HOST_WIDE_INT bitsize, bitpos;
9538 enum machine_mode mode;
9543 /* Get description of this reference. We don't actually care
9544 about the underlying object here. */
9545 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9546 &unsignedp, &volatilep);
9548 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9549 if (! SLOW_BYTE_ACCESS
9550 && type != 0 && bitsize >= 0
9551 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9552 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9553 != CODE_FOR_nothing))
9555 do_jump (convert (type, exp), if_false_label, if_true_label);
9562 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9563 if (integer_onep (TREE_OPERAND (exp, 1))
9564 && integer_zerop (TREE_OPERAND (exp, 2)))
9565 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9567 else if (integer_zerop (TREE_OPERAND (exp, 1))
9568 && integer_onep (TREE_OPERAND (exp, 2)))
9569 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9573 rtx label1 = gen_label_rtx ();
9574 drop_through_label = gen_label_rtx ();
9576 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9578 start_cleanup_deferral ();
9579 /* Now the THEN-expression. */
9580 do_jump (TREE_OPERAND (exp, 1),
9581 if_false_label ? if_false_label : drop_through_label,
9582 if_true_label ? if_true_label : drop_through_label);
9583 /* In case the do_jump just above never jumps. */
9584 do_pending_stack_adjust ();
9585 emit_label (label1);
9587 /* Now the ELSE-expression. */
9588 do_jump (TREE_OPERAND (exp, 2),
9589 if_false_label ? if_false_label : drop_through_label,
9590 if_true_label ? if_true_label : drop_through_label);
9591 end_cleanup_deferral ();
9597 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9599 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9600 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9602 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9603 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9606 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9607 fold (build (EQ_EXPR, TREE_TYPE (exp),
9608 fold (build1 (REALPART_EXPR,
9609 TREE_TYPE (inner_type),
9611 fold (build1 (REALPART_EXPR,
9612 TREE_TYPE (inner_type),
9614 fold (build (EQ_EXPR, TREE_TYPE (exp),
9615 fold (build1 (IMAGPART_EXPR,
9616 TREE_TYPE (inner_type),
9618 fold (build1 (IMAGPART_EXPR,
9619 TREE_TYPE (inner_type),
9621 if_false_label, if_true_label);
9624 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9625 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9627 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9628 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9629 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9631 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9637 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9639 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9640 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9642 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9643 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9646 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9647 fold (build (NE_EXPR, TREE_TYPE (exp),
9648 fold (build1 (REALPART_EXPR,
9649 TREE_TYPE (inner_type),
9651 fold (build1 (REALPART_EXPR,
9652 TREE_TYPE (inner_type),
9654 fold (build (NE_EXPR, TREE_TYPE (exp),
9655 fold (build1 (IMAGPART_EXPR,
9656 TREE_TYPE (inner_type),
9658 fold (build1 (IMAGPART_EXPR,
9659 TREE_TYPE (inner_type),
9661 if_false_label, if_true_label);
9664 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9665 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9667 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9668 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9669 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9671 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9676 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9677 if (GET_MODE_CLASS (mode) == MODE_INT
9678 && ! can_compare_p (LT, mode, ccp_jump))
9679 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9681 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9685 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9686 if (GET_MODE_CLASS (mode) == MODE_INT
9687 && ! can_compare_p (LE, mode, ccp_jump))
9688 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9690 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9694 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9695 if (GET_MODE_CLASS (mode) == MODE_INT
9696 && ! can_compare_p (GT, mode, ccp_jump))
9697 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9699 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9703 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9704 if (GET_MODE_CLASS (mode) == MODE_INT
9705 && ! can_compare_p (GE, mode, ccp_jump))
9706 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9708 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9711 case UNORDERED_EXPR:
9714 enum rtx_code cmp, rcmp;
9717 if (code == UNORDERED_EXPR)
9718 cmp = UNORDERED, rcmp = ORDERED;
9720 cmp = ORDERED, rcmp = UNORDERED;
9721 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9724 if (! can_compare_p (cmp, mode, ccp_jump)
9725 && (can_compare_p (rcmp, mode, ccp_jump)
9726 /* If the target doesn't provide either UNORDERED or ORDERED
9727 comparisons, canonicalize on UNORDERED for the library. */
9728 || rcmp == UNORDERED))
9732 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9734 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9739 enum rtx_code rcode1;
9740 enum tree_code tcode2;
9764 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9765 if (can_compare_p (rcode1, mode, ccp_jump))
9766 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9770 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9771 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9774 /* If the target doesn't support combined unordered
9775 compares, decompose into UNORDERED + comparison. */
9776 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9777 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9778 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9779 do_jump (exp, if_false_label, if_true_label);
9785 __builtin_expect (<test>, 0) and
9786 __builtin_expect (<test>, 1)
9788 We need to do this here, so that <test> is not converted to a SCC
9789 operation on machines that use condition code registers and COMPARE
9790 like the PowerPC, and then the jump is done based on whether the SCC
9791 operation produced a 1 or 0. */
9793 /* Check for a built-in function. */
9794 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9796 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9797 tree arglist = TREE_OPERAND (exp, 1);
9799 if (TREE_CODE (fndecl) == FUNCTION_DECL
9800 && DECL_BUILT_IN (fndecl)
9801 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9802 && arglist != NULL_TREE
9803 && TREE_CHAIN (arglist) != NULL_TREE)
9805 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9808 if (seq != NULL_RTX)
9815 /* fall through and generate the normal code. */
9819 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9821 /* This is not needed any more and causes poor code since it causes
9822 comparisons and tests from non-SI objects to have different code paths.  */
9824 /* Copy to register to avoid generating bad insns by cse
9825 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9826 if (!cse_not_expected && GET_CODE (temp) == MEM)
9827 temp = copy_to_reg (temp);
9829 do_pending_stack_adjust ();
9830 /* Do any postincrements in the expression that was tested. */
9833 if (GET_CODE (temp) == CONST_INT
9834 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9835 || GET_CODE (temp) == LABEL_REF)
9837 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9841 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9842 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9843 /* Note swapping the labels gives us not-equal. */
9844 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9845 else if (GET_MODE (temp) != VOIDmode)
9846 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9847 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9848 GET_MODE (temp), NULL_RTX,
9849 if_false_label, if_true_label);
9854 if (drop_through_label)
9856 /* If do_jump produces code that might be jumped around,
9857 do any stack adjusts from that code, before the place
9858 where control merges in. */
9859 do_pending_stack_adjust ();
9860 emit_label (drop_through_label);
9864 /* Given a comparison expression EXP for values too wide to be compared
9865 with one insn, test the comparison and jump to the appropriate label.
9866 The code of EXP is ignored; we always test GT if SWAP is 0,
9867 and LT if SWAP is 1. */
9870 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9873 rtx if_false_label, if_true_label;
9875 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9876 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9877 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9878 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9880 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9883 /* Compare OP0 with OP1, word at a time, in mode MODE.
9884 UNSIGNEDP says to do unsigned comparison.
9885 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
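/* For instance (an illustrative sketch): comparing two double-word
   values OP0 and OP1 proceeds as

	if (hi (op0) > hi (op1)) goto if_true_label;
	if (hi (op0) != hi (op1)) goto if_false_label;
	if (lo (op0) > lo (op1)) goto if_true_label;
	goto if_false_label;

   where HI and LO are hypothetical names for the high- and low-order
   words; only the high-order comparison uses the signedness of the
   original operands, the rest are unsigned.  */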
9888 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9889 enum machine_mode mode;
9892 rtx if_false_label, if_true_label;
9894 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9895 rtx drop_through_label = 0;
9898 if (! if_true_label || ! if_false_label)
9899 drop_through_label = gen_label_rtx ();
9900 if (! if_true_label)
9901 if_true_label = drop_through_label;
9902 if (! if_false_label)
9903 if_false_label = drop_through_label;
9905 /* Compare a word at a time, high order first. */
9906 for (i = 0; i < nwords; i++)
9908 rtx op0_word, op1_word;
9910 if (WORDS_BIG_ENDIAN)
9912 op0_word = operand_subword_force (op0, i, mode);
9913 op1_word = operand_subword_force (op1, i, mode);
9917 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9918 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9921 /* All but high-order word must be compared as unsigned. */
9922 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9923 (unsignedp || i > 0), word_mode, NULL_RTX,
9924 NULL_RTX, if_true_label);
9926 /* Consider lower words only if these are equal. */
9927 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9928 NULL_RTX, NULL_RTX, if_false_label);
9932 emit_jump (if_false_label);
9933 if (drop_through_label)
9934 emit_label (drop_through_label);
9937 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9938 with one insn, test the comparison and jump to the appropriate label. */
9941 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9943 rtx if_false_label, if_true_label;
9945 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9946 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9947 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9948 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9950 rtx drop_through_label = 0;
9952 if (! if_false_label)
9953 drop_through_label = if_false_label = gen_label_rtx ();
9955 for (i = 0; i < nwords; i++)
9956 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9957 operand_subword_force (op1, i, mode),
9958 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9959 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9962 emit_jump (if_true_label);
9963 if (drop_through_label)
9964 emit_label (drop_through_label);
9967 /* Jump according to whether OP0 is 0.
9968 We assume that OP0 has an integer mode that is too wide
9969 for the available compare insns. */
9972 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9974 rtx if_false_label, if_true_label;
9976 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9979 rtx drop_through_label = 0;
9981 /* The fastest way of doing this comparison on almost any machine is to
9982 "or" all the words and compare the result. If all have to be loaded
9983 from memory and this is a very wide item, it's possible this may
9984 be slower, but that's highly unlikely. */
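/* Illustrative example: for a four-word OP0 the code below computes

	part = w0 | w1 | w2 | w3;
	if (part == 0) goto if_true_label;
	goto if_false_label;

   with a single comparison, instead of the four separate compares of
   the fallback further down.  */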
9986 part = gen_reg_rtx (word_mode);
9987 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9988 for (i = 1; i < nwords && part != 0; i++)
9989 part = expand_binop (word_mode, ior_optab, part,
9990 operand_subword_force (op0, i, GET_MODE (op0)),
9991 part, 1, OPTAB_WIDEN);
9995 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9996 NULL_RTX, if_false_label, if_true_label);
10001 /* If we couldn't do the "or" simply, do this with a series of compares. */
10002 if (! if_false_label)
10003 drop_through_label = if_false_label = gen_label_rtx ();
10005 for (i = 0; i < nwords; i++)
10006 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10007 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10008 if_false_label, NULL_RTX);
10011 emit_jump (if_true_label);
10013 if (drop_through_label)
10014 emit_label (drop_through_label);
10017 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10018 (including code to compute the values to be compared)
10019 and set (CC0) according to the result.
10020 The decision as to signed or unsigned comparison must be made by the caller.
10022 We force a stack adjustment unless there are currently
10023 things pushed on the stack that aren't yet used.
10025 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.  */
10029 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10031 enum rtx_code code;
10033 enum machine_mode mode;
10038 /* If one operand is constant, make it the second one. Only do this
10039 if the other operand is not constant as well. */
10041 if (swap_commutative_operands_p (op0, op1))
10046 code = swap_condition (code);
10049 if (flag_force_mem)
10051 op0 = force_not_mem (op0);
10052 op1 = force_not_mem (op1);
10055 do_pending_stack_adjust ();
10057 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10058 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10062 /* There's no need to do this now that combine.c can eliminate lots of
10063 sign extensions.  This can be less efficient in certain cases on other machines.  */
10066 /* If this is a signed equality comparison, we can do it as an
10067 unsigned comparison since zero-extension is cheaper than sign
10068 extension and comparisons with zero are done as unsigned. This is
10069 the case even on machines that can do fast sign extension, since
10070 zero-extension is easier to combine with other operations than
10071 sign-extension is. If we are comparing against a constant, we must
10072 convert it to what it would look like unsigned. */
10073 if ((code == EQ || code == NE) && ! unsignedp
10074 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10076 if (GET_CODE (op1) == CONST_INT
10077 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10078 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10083 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10085 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10088 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10089 The decision as to signed or unsigned comparison must be made by the caller.
10091 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.  */
10095 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10096 if_false_label, if_true_label)
10098 enum rtx_code code;
10100 enum machine_mode mode;
10102 rtx if_false_label, if_true_label;
10105 int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }
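
  /* An added note (not original code): with only a false label, the
     branch is emitted on the reversed condition, e.g. "jump if
     !(a < b)" becomes "if (a >= b) goto if_false_label;", instead of a
     conditional branch over an unconditional jump.  Floating-point
     modes are excluded because with NaNs !(a < b) is not equivalent to
     (a >= b).  */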

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  int unsignedp;
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by a promoted constant, in which
         case the type of the second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }

  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }
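
  /* An added note (not original code) on the constant adjustments
     above: for signed x,

         x < 1    is equivalent to    x <= 0
         x <= -1  is equivalent to    x < 0

     so comparisons against 1 and -1 become comparisons with zero,
     which the special cases further down can recognize.  */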

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }
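
      /* An added note (not original code): the adjustment just made
         folds the shift into the bit number, since

             ((x >> k) & (1 << n)) != 0  ==  (x & (1 << (n + k))) != 0

         provided n + k stays within the type's precision.  */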

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
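
  /* An added note (not original code): at the C level the single-bit
     path above computes

         (x & (1 << n)) != 0   as   (x >> n) & 1
         (x & (1 << n)) == 0   as   ((x >> n) ^ 1) & 1

     with the AND last (and omitted when bit N is the sign bit), so no
     scc instruction is needed.  */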

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
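
/* An added illustration (not part of the original source): the
   set/jump/set fallback emitted above, shown as equivalent C for an
   LT comparison.  TARGET is preset to the "true" value and the branch
   skips the store of the "false" value; the names are hypothetical.  */
#if 0
static int
store_flag_fallback (int op0, int op1)
{
  int target = 1;       /* emit_move_insn (target, const1_rtx) */
  if (op0 < op1)        /* branch emitted via bcc_gen_fctn[code] */
    goto done;
  target = 0;           /* emit_move_insn (target, const0_rtx) */
 done:
  return target;
}
#endif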

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
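
/* An added illustration (not part of the original source): the single
   unsigned bounds check used above.  Once the table minimum has been
   subtracted, one GTU comparison rejects both index < min and
   index > max, because a negative difference wraps to a large unsigned
   value.  The helper name is hypothetical.  */
#if 0
static int
in_table_range (long index, long minval, unsigned long range)
{
  unsigned long i = (unsigned long) (index - minval);

  return i <= range;    /* false for index < minval as well */
}
#endif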

int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
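
/* An added illustration (not part of the original source): the dispatch
   shape do_tablejump produces, at the C level: bounds-check the rebased
   index, then an indexed load of the label address and an indirect
   jump.  The names are hypothetical.  */
#if 0
static void
dispatch (unsigned int index, unsigned int range,
          void (*const table[]) (void), void (*deflt) (void))
{
  if (index > range)    /* the GTU check, to DEFAULT_LABEL */
    deflt ();
  else
    table[index] ();    /* load table entry, jump indirect */
}
#endif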