/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
                                PARAMS ((unsigned HOST_WIDE_INT,
                                         unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
                                      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
                                      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
                                       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
                                       enum machine_mode,
                                       struct store_by_pieces *));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                             HOST_WIDE_INT, enum machine_mode,
                                             tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
                                HOST_WIDE_INT, enum machine_mode,
                                tree, enum machine_mode, int, tree,
                                int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
                                         rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
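
/* Illustration only, hence not compiled: a minimal sketch of how the
   queue is meant to be used when expanding a post-increment.  The
   pseudo VAR and the increment body are hypothetical, not taken from
   any caller in this file.  */
#if 0
static void
example_post_increment (var)
     rtx var;
{
  /* Queue "VAR = VAR + 1" and get back a QUEUED rtx that still
     denotes the pre-increment value of VAR.  */
  rtx queued = enqueue_insn (var, gen_rtx_SET (VOIDmode, var,
                                               plus_constant (var, 1)));

  /* Any insn emitted here must pass QUEUED through
     protect_from_queue first; the increment itself is emitted by
     emit_queue.  */
  emit_queue ();
}
#endif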
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
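
/* Illustration only, hence not compiled: the intended calling pattern.
   OP0 and OP1 are hypothetical operands about to be put into an insn;
   note that both are protected immediately before use and never cached
   across a call to emit_queue.  */
#if 0
static void
example_protect (op0, op1)
     rtx op0, op1;
{
  op0 = protect_from_queue (op0, 0);	/* Read access.  */
  op1 = protect_from_queue (op1, 1);	/* Will be written.  */
  emit_move_insn (op1, op0);
}
#endif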
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
        {
        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
        case CODE_LABEL:
        case BARRIER:
        case NOTE:
          QUEUED_INSN (p) = body;
          emit_insn (body);
          break;

#ifdef ENABLE_CHECKING
        case SEQUENCE:
          abort ();
          break;
#endif

        default:
          QUEUED_INSN (p) = emit_insn (body);
          break;
        }

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif
      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_zero_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
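
/* Illustration only, hence not compiled: a typical convert_move
   request, widening a QImode pseudo into an SImode pseudo.  Both
   registers are hypothetical.  */
#if 0
static void
example_convert_move ()
{
  rtx from = gen_reg_rtx (QImode);
  rtx to = gen_reg_rtx (SImode);

  /* UNSIGNEDP == 1 requests zero-extension; 0 would sign-extend.  */
  convert_move (to, from, 1);
}
#endif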
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
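
/* Illustration only, hence not compiled: convert_modes applied to a
   CONST_INT, which has VOIDmode.  OLDMODE tells the routine how to
   interpret the constant; the modes used here are hypothetical.  */
#if 0
static rtx
example_convert_modes ()
{
  /* (const_int -1) viewed as an unsigned QImode value yields
     (const_int 255) when widened to SImode.  */
  return convert_modes (SImode, QImode, constm1_rtx, 1);
}
#endif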
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
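
/* Illustration only, hence not compiled: how a caller typically
   reaches move_by_pieces.  X and Y are hypothetical BLKmode MEMs;
   the 8-byte size and word alignment are made up for the example.  */
#if 0
static void
example_move_by_pieces (x, y)
     rtx x, y;
{
  /* MOVE_BY_PIECES_P decides whether an open-coded copy beats a
     movstr pattern or a library call.  */
  if (MOVE_BY_PIECES_P (8, BITS_PER_WORD))
    move_by_pieces (x, y, 8, BITS_PER_WORD);
}
#endif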
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

static GTY(()) tree block_move_fn;
rtx
emit_block_move (x, y, size)
     rtx x, y;
     rtx size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];
          insn_operand_predicate_fn pred;

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                  || (*pred) (x, BLKmode))
              && ((pred = insn_data[(int) code].operand[1].predicate) == 0
                  || (*pred) (y, BLKmode))
              && ((pred = insn_data[(int) code].operand[3].predicate) == 0
                  || (*pred) (opalign, VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  volatile_ok = 0;
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

      volatile_ok = 0;
      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to
         emit_queue.

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif
#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (block_move_fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          block_move_fn = get_identifier ("memcpy");
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          block_move_fn = build_decl (FUNCTION_DECL, block_move_fn, fntype);
          DECL_EXTERNAL (block_move_fn) = 1;
          TREE_PUBLIC (block_move_fn) = 1;
          DECL_ARTIFICIAL (block_move_fn) = 1;
          TREE_NOTHROW (block_move_fn) = 1;
          make_decl_rtl (block_move_fn, NULL);
          assemble_external (block_move_fn);
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR,
                          build_pointer_type (TREE_TYPE (block_move_fn)),
                          block_move_fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_move_fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif

      /* If we are initializing a readonly value, show the above call
         clobbered it.  Otherwise, a load from it may erroneously be hoisted
         from a loop.  */
      if (RTX_UNCHANGING_P (x))
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
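
/* Illustration only, hence not compiled: copying one BLKmode MEM to
   another.  DST and SRC are hypothetical and must already be BLKmode
   MEMs, as checked above.  */
#if 0
static void
example_emit_block_move (dst, src)
     rtx dst, src;
{
  /* A constant 32-byte copy; emit_block_move chooses between
     move_by_pieces, a movstr insn, and a memcpy/bcopy call.  */
  emit_block_move (dst, src, GEN_INT (32));
}
#endif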
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          if (bytelen <= 0)
            abort ();
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          if ((bytepos == 0
               && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
              || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
                  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
            {
              tmps[i] = XEXP (src, bytepos != 0);
              if (! CONSTANT_P (tmps[i])
                  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             0, 1, NULL_RTX, mode, mode, ssize);
            }
          else if (bytepos == 0)
            {
              rtx mem = assign_stack_temp (GET_MODE (src),
                                           GET_MODE_SIZE (GET_MODE (src)), 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
          else
            abort ();
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
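
/* Illustration only, hence not compiled: a PARALLEL return-value
   layout of the kind emit_group_load consumes.  The register numbers
   and the 16-byte structure size are hypothetical.  */
#if 0
static void
example_emit_group_load (src)
     rtx src;	/* A 16-byte BLKmode MEM.  */
{
  /* Two DImode pieces at byte offsets 0 and 8, as a target's
     FUNCTION_VALUE might describe a two-register aggregate.  */
  rtx dst = gen_rtx_PARALLEL
    (BLKmode,
     gen_rtvec (2,
                gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 3),
                                   const0_rtx),
                gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 4),
                                   GEN_INT (8))));
  emit_group_load (dst, src, 16);
}
#endif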
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
2216 /* Generate code to copy a BLKmode object of TYPE out of a
2217 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2218 is null, a stack temporary is created. TGTBLK is returned.
2220 The primary purpose of this routine is to handle functions
2221 that return BLKmode structures in registers. Some machines
2222 (the PA for example) want to return all small structures
2223 in registers regardless of the structure's alignment. */
2226 copy_blkmode_from_reg (tgtblk, srcreg, type)
2231 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2232 rtx src = NULL, dst = NULL;
2233 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2234 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2238 tgtblk = assign_temp (build_qualified_type (type,
2240 | TYPE_QUAL_CONST)),
2242 preserve_temp_slots (tgtblk);
2245 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2246 into a new pseudo which is a full word.
2248 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2249 the wrong part of the register gets copied so we fake a type conversion
2251 if (GET_MODE (srcreg) != BLKmode
2252 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2254 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2255 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2257 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2260 /* Structures whose size is not a multiple of a word are aligned
2261 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2262 machine, this means we must skip the empty high order bytes when
2263 calculating the bit offset. */
2264 if (BYTES_BIG_ENDIAN
2265 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2266 && bytes % UNITS_PER_WORD)
2267 big_endian_correction
2268 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2270 /* Copy the structure BITSIZE bites at a time.
2272 We could probably emit more efficient code for machines which do not use
2273 strict alignment, but it doesn't seem worth the effort at the current
2275 for (bitpos = 0, xbitpos = big_endian_correction;
2276 bitpos < bytes * BITS_PER_UNIT;
2277 bitpos += bitsize, xbitpos += bitsize)
2279 /* We need a new source operand each time xbitpos is on a
2280 word boundary and when xbitpos == big_endian_correction
2281 (the first time through). */
2282 if (xbitpos % BITS_PER_WORD == 0
2283 || xbitpos == big_endian_correction)
2284 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2287 /* We need a new destination operand each time bitpos is on
2288 a word boundary.  */
2289 if (bitpos % BITS_PER_WORD == 0)
2290 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2292 /* Use xbitpos for the source extraction (right justified) and
2293 bitpos for the destination store (left justified).  */
2294 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2295 extract_bit_field (src, bitsize,
2296 xbitpos % BITS_PER_WORD, 1,
2297 NULL_RTX, word_mode, word_mode,
2298 BITS_PER_WORD),
2299 BITS_PER_WORD);
2302 return tgtblk;
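/* Illustrative use (editor's sketch, not in the original source): an
   expander handling a call whose BLKmode value comes back in a hard
   register might write

     result = copy_blkmode_from_reg (NULL_RTX, valreg, type);

   where the null TGTBLK asks this routine to allocate the stack
   temporary itself; VALREG and TYPE are placeholders for the hard
   return register and the call's result type.  */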
2305 /* Add a USE expression for REG to the (possibly empty) list pointed
2306 to by CALL_FUSAGE. REG must denote a hard register. */
2309 use_reg (call_fusage, reg)
2310 rtx *call_fusage, reg;
2312 if (GET_CODE (reg) != REG
2313 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2314 abort ();
2316 *call_fusage
2317 = gen_rtx_EXPR_LIST (VOIDmode,
2318 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2321 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2322 starting at REGNO. All of these registers must be hard registers. */
2325 use_regs (call_fusage, regno, nregs)
2332 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2333 abort ();
2335 for (i = 0; i < nregs; i++)
2336 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2339 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2340 PARALLEL REGS. This is for calls that pass values in multiple
2341 non-contiguous locations. The Irix 6 ABI has examples of this. */
2344 use_group_regs (call_fusage, regs)
2350 for (i = 0; i < XVECLEN (regs, 0); i++)
2352 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2354 /* A NULL entry means the parameter goes both on the stack and in
2355 registers. This can also be a MEM for targets that pass values
2356 partially on the stack and partially in registers. */
2357 if (reg != 0 && GET_CODE (reg) == REG)
2358 use_reg (call_fusage, reg);
2363 /* Determine whether the LEN bytes generated by CONSTFUN can be
2364 stored to memory using several move instructions. CONSTFUNDATA is
2365 a pointer which will be passed as argument in every CONSTFUN call.
2366 ALIGN is maximum alignment we can assume. Return nonzero if a
2367 call to store_by_pieces should succeed. */
2370 can_store_by_pieces (len, constfun, constfundata, align)
2371 unsigned HOST_WIDE_INT len;
2372 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2376 unsigned HOST_WIDE_INT max_size, l;
2377 HOST_WIDE_INT offset = 0;
2378 enum machine_mode mode, tmode;
2379 enum insn_code icode;
2383 if (! MOVE_BY_PIECES_P (len, align))
2384 return 0;
2386 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2387 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2388 align = MOVE_MAX * BITS_PER_UNIT;
2390 /* We would first store what we can in the largest integer mode, then go to
2391 successively smaller modes. */
2393 for (reverse = 0;
2394 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2395 reverse++)
2397 l = len;
2398 mode = VOIDmode;
2399 max_size = STORE_MAX_PIECES + 1;
2400 while (max_size > 1)
2402 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2403 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2404 if (GET_MODE_SIZE (tmode) < max_size)
2405 mode = tmode;
2407 if (mode == VOIDmode)
2408 break;
2410 icode = mov_optab->handlers[(int) mode].insn_code;
2411 if (icode != CODE_FOR_nothing
2412 && align >= GET_MODE_ALIGNMENT (mode))
2414 unsigned int size = GET_MODE_SIZE (mode);
2416 while (l >= size)
2418 if (reverse)
2419 offset -= size;
2421 cst = (*constfun) (constfundata, offset, mode);
2422 if (!LEGITIMATE_CONSTANT_P (cst))
2423 return 0;
2425 if (!reverse)
2426 offset += size;
2428 l -= size;
2432 max_size = GET_MODE_SIZE (mode);
2435 /* The code above should have handled everything.  */
2436 if (l != 0)
2437 abort ();
2440 return 1;
2443 /* Generate several move instructions to store LEN bytes generated by
2444 CONSTFUN to block TO (a MEM rtx with BLKmode).  CONSTFUNDATA is a
2445 pointer which will be passed as argument in every CONSTFUN call.
2446 ALIGN is maximum alignment we can assume. */
2449 store_by_pieces (to, len, constfun, constfundata, align)
2451 unsigned HOST_WIDE_INT len;
2452 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2456 struct store_by_pieces data;
2458 if (! MOVE_BY_PIECES_P (len, align))
2459 abort ();
2460 to = protect_from_queue (to, 1);
2461 data.constfun = constfun;
2462 data.constfundata = constfundata;
2463 data.len = len;
2464 data.to = to;
2465 store_by_pieces_1 (&data, align);
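/* Illustrative calling protocol (editor's sketch, not in the original
   source): a caller is expected to probe with can_store_by_pieces
   before committing, passing the same CONSTFUN/CONSTFUNDATA pair to
   both routines, e.g.

     if (can_store_by_pieces (len, read_str_fn, str_data, align))
       store_by_pieces (dest_mem, len, read_str_fn, str_data, align);

   READ_STR_FN and STR_DATA here are placeholders for a callback that
   materializes the constant bytes at a given offset in a given mode,
   and for its opaque data pointer.  */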
2468 /* Generate several move instructions to clear LEN bytes of block TO (a MEM
2469 rtx with BLKmode).  The caller must pass TO through protect_from_queue
2470 before calling. ALIGN is maximum alignment we can assume. */
2473 clear_by_pieces (to, len, align)
2475 unsigned HOST_WIDE_INT len;
2478 struct store_by_pieces data;
2480 data.constfun = clear_by_pieces_1;
2481 data.constfundata = NULL;
2482 data.len = len;
2483 data.to = to;
2484 store_by_pieces_1 (&data, align);
2487 /* Callback routine for clear_by_pieces.
2488 Return const0_rtx unconditionally. */
2491 clear_by_pieces_1 (data, offset, mode)
2492 PTR data ATTRIBUTE_UNUSED;
2493 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2494 enum machine_mode mode ATTRIBUTE_UNUSED;
2496 return const0_rtx;
2499 /* Subroutine of clear_by_pieces and store_by_pieces.
2500 Generate several move instructions to store LEN bytes of block TO (a MEM
2501 rtx with BLKmode).  The caller must pass TO through protect_from_queue
2502 before calling. ALIGN is maximum alignment we can assume. */
2505 store_by_pieces_1 (data, align)
2506 struct store_by_pieces *data;
2509 rtx to_addr = XEXP (data->to, 0);
2510 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2511 enum machine_mode mode = VOIDmode, tmode;
2512 enum insn_code icode;
2515 data->to_addr = to_addr;
2516 data->autinc_to
2517 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2518 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2520 data->explicit_inc_to = 0;
2521 data->reverse
2522 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2523 if (data->reverse)
2524 data->offset = data->len;
2526 /* If storing requires more than two move insns,
2527 copy addresses to registers (to make displacements shorter)
2528 and use post-increment if available. */
2529 if (!data->autinc_to
2530 && move_by_pieces_ninsns (data->len, align) > 2)
2532 /* Determine the main mode we'll be using. */
2533 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2534 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2535 if (GET_MODE_SIZE (tmode) < max_size)
2536 mode = tmode;
2538 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2540 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2541 data->autinc_to = 1;
2542 data->explicit_inc_to = -1;
2545 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2546 && ! data->autinc_to)
2548 data->to_addr = copy_addr_to_reg (to_addr);
2549 data->autinc_to = 1;
2550 data->explicit_inc_to = 1;
2553 if (!data->autinc_to && CONSTANT_P (to_addr))
2554 data->to_addr = copy_addr_to_reg (to_addr);
2557 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2558 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2559 align = MOVE_MAX * BITS_PER_UNIT;
2561 /* First store what we can in the largest integer mode, then go to
2562 successively smaller modes. */
2564 while (max_size > 1)
2566 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2567 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2568 if (GET_MODE_SIZE (tmode) < max_size)
2569 mode = tmode;
2571 if (mode == VOIDmode)
2572 break;
2574 icode = mov_optab->handlers[(int) mode].insn_code;
2575 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2576 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2578 max_size = GET_MODE_SIZE (mode);
2581 /* The code above should have handled everything.  */
2582 if (data->len != 0)
2583 abort ();
2586 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2587 with move instructions for mode MODE. GENFUN is the gen_... function
2588 to make a move insn for that mode. DATA has all the other info. */
2591 store_by_pieces_2 (genfun, mode, data)
2592 rtx (*genfun) PARAMS ((rtx, ...));
2593 enum machine_mode mode;
2594 struct store_by_pieces *data;
2596 unsigned int size = GET_MODE_SIZE (mode);
2599 while (data->len >= size)
2601 if (data->reverse)
2602 data->offset -= size;
2604 if (data->autinc_to)
2605 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2606 data->offset);
2607 else
2608 to1 = adjust_address (data->to, mode, data->offset);
2610 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2611 emit_insn (gen_add2_insn (data->to_addr,
2612 GEN_INT (-(HOST_WIDE_INT) size)));
2614 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2615 emit_insn ((*genfun) (to1, cst));
2617 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2618 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2620 if (! data->reverse)
2621 data->offset += size;
2623 data->len -= size;
2627 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2628 its length in bytes. */
2630 static GTY(()) tree block_clear_fn;
2632 clear_storage (object, size)
2636 #ifdef TARGET_MEM_FUNCTIONS
2637 tree call_expr, arg_list;
2638 #endif
2639 rtx retval = 0;
2640 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2641 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2643 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2644 just move a zero. Otherwise, do this a piece at a time. */
2645 if (GET_MODE (object) != BLKmode
2646 && GET_CODE (size) == CONST_INT
2647 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2648 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2649 else
2651 object = protect_from_queue (object, 1);
2652 size = protect_from_queue (size, 0);
2654 if (GET_CODE (size) == CONST_INT
2655 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2656 clear_by_pieces (object, INTVAL (size), align);
2657 else
2659 /* Try the most limited insn first, because there's no point
2660 including more than one in the machine description unless
2661 the more limited one has some advantage. */
2663 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2664 enum machine_mode mode;
2666 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2667 mode = GET_MODE_WIDER_MODE (mode))
2669 enum insn_code code = clrstr_optab[(int) mode];
2670 insn_operand_predicate_fn pred;
2672 if (code != CODE_FOR_nothing
2673 /* We don't need MODE to be narrower than
2674 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2675 the mode mask, as it is returned by the macro, it will
2676 definitely be less than the actual mode mask. */
2677 && ((GET_CODE (size) == CONST_INT
2678 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2679 <= (GET_MODE_MASK (mode) >> 1)))
2680 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2681 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2682 || (*pred) (object, BLKmode))
2683 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2684 || (*pred) (opalign, VOIDmode)))
2687 rtx last = get_last_insn ();
2690 op1 = convert_to_mode (mode, size, 1);
2691 pred = insn_data[(int) code].operand[1].predicate;
2692 if (pred != 0 && ! (*pred) (op1, mode))
2693 op1 = copy_to_mode_reg (mode, op1);
2695 pat = GEN_FCN ((int) code) (object, op1, opalign);
2702 delete_insns_since (last);
2706 /* OBJECT or SIZE may have been passed through protect_from_queue.
2708 It is unsafe to save the value generated by protect_from_queue
2709 and reuse it later. Consider what happens if emit_queue is
2710 called before the return value from protect_from_queue is used.
2712 Expansion of the CALL_EXPR below will call emit_queue before
2713 we are finished emitting RTL for argument setup. So if we are
2714 not careful we could get the wrong value for an argument.
2716 To avoid this problem we go ahead and emit code to copy OBJECT
2717 and SIZE into new pseudos. We can then place those new pseudos
2718 into an RTL_EXPR and use them later, even after a call to
2721 Note this is not strictly needed for library calls since they
2722 do not call emit_queue before loading their arguments. However,
2723 we may need to have library calls call emit_queue in the future
2724 since failing to do so could cause problems for targets which
2725 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2726 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2728 #ifdef TARGET_MEM_FUNCTIONS
2729 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2730 #else
2731 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2732 TREE_UNSIGNED (integer_type_node));
2733 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2734 #endif
2736 #ifdef TARGET_MEM_FUNCTIONS
2737 /* It is incorrect to use the libcall calling conventions to call
2738 memset in this context.
2740 This could be a user call to memset and the user may wish to
2741 examine the return value from memset.
2743 For targets where libcalls and normal calls have different
2744 conventions for returning pointers, we could end up generating
2747 So instead of using a libcall sequence we build up a suitable
2748 CALL_EXPR and expand the call in the normal fashion. */
2749 if (block_clear_fn == NULL_TREE)
2751 tree fntype;
2753 /* This was copied from except.c; I don't know whether all of it is
2754 necessary in this context.  */
2755 block_clear_fn = get_identifier ("memset");
2756 fntype = build_pointer_type (void_type_node);
2757 fntype = build_function_type (fntype, NULL_TREE);
2758 block_clear_fn = build_decl (FUNCTION_DECL, block_clear_fn,
2759 fntype);
2760 DECL_EXTERNAL (block_clear_fn) = 1;
2761 TREE_PUBLIC (block_clear_fn) = 1;
2762 DECL_ARTIFICIAL (block_clear_fn) = 1;
2763 TREE_NOTHROW (block_clear_fn) = 1;
2764 make_decl_rtl (block_clear_fn, NULL);
2765 assemble_external (block_clear_fn);
2768 /* We need to make an argument list for the function call.
2770 memset has three arguments: the first is a void * address, the
2771 second an integer with the initialization value, and the last a
2772 size_t byte count for the copy.  */
2773 arg_list
2774 = build_tree_list (NULL_TREE,
2775 make_tree (build_pointer_type (void_type_node),
2776 object));
2777 TREE_CHAIN (arg_list)
2778 = build_tree_list (NULL_TREE,
2779 make_tree (integer_type_node, const0_rtx));
2780 TREE_CHAIN (TREE_CHAIN (arg_list))
2781 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2782 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2784 /* Now we have to build up the CALL_EXPR itself. */
2785 call_expr = build1 (ADDR_EXPR,
2786 build_pointer_type (TREE_TYPE (block_clear_fn)),
2787 block_clear_fn);
2788 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_clear_fn)),
2789 call_expr, arg_list, NULL_TREE);
2790 TREE_SIDE_EFFECTS (call_expr) = 1;
2792 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2793 #else
2794 emit_library_call (bzero_libfunc, LCT_NORMAL,
2795 VOIDmode, 2, object, Pmode, size,
2796 TYPE_MODE (integer_type_node));
2797 #endif
2799 /* If we are initializing a readonly value, show the above call
2800 clobbered it. Otherwise, a load from it may erroneously be
2801 hoisted from a loop. */
2802 if (RTX_UNCHANGING_P (object))
2803 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2806 return retval;
2810 /* Generate code to copy Y into X.
2811 Both Y and X must have the same mode, except that
2812 Y can be a constant with VOIDmode.
2813 This mode cannot be BLKmode; use emit_block_move for that.
2815 Return the last instruction emitted. */
2818 emit_move_insn (x, y)
2821 enum machine_mode mode = GET_MODE (x);
2822 rtx y_cst = NULL_RTX;
2825 x = protect_from_queue (x, 1);
2826 y = protect_from_queue (y, 0);
2828 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2829 abort ();
2831 /* Never force constant_p_rtx to memory. */
2832 if (GET_CODE (y) == CONSTANT_P_RTX)
2833 ;
2834 else if (CONSTANT_P (y))
2836 if (optimize
2837 && FLOAT_MODE_P (GET_MODE (x))
2838 && (last_insn = compress_float_constant (x, y)))
2839 return last_insn;
2841 if (!LEGITIMATE_CONSTANT_P (y))
2843 y_cst = y;
2844 y = force_const_mem (mode, y);
2848 /* If X or Y are memory references, verify that their addresses are valid
2849 for the machine.  */
2850 if (GET_CODE (x) == MEM
2851 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2852 && ! push_operand (x, GET_MODE (x)))
2853 || (flag_force_addr
2854 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2855 x = validize_mem (x);
2857 if (GET_CODE (y) == MEM
2858 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2859 || (flag_force_addr
2860 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2861 y = validize_mem (y);
2863 if (mode == BLKmode)
2864 abort ();
2866 last_insn = emit_move_insn_1 (x, y);
2868 if (y_cst && GET_CODE (x) == REG)
2869 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2871 return last_insn;
2874 /* Low level part of emit_move_insn.
2875 Called just like emit_move_insn, but assumes X and Y
2876 are basically valid. */
2879 emit_move_insn_1 (x, y)
2882 enum machine_mode mode = GET_MODE (x);
2883 enum machine_mode submode;
2884 enum mode_class class = GET_MODE_CLASS (mode);
2886 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2887 abort ();
2889 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2890 return
2891 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2893 /* Expand complex moves by moving real part and imag part, if possible. */
2894 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2895 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2896 * BITS_PER_UNIT),
2897 (class == MODE_COMPLEX_INT
2898 ? MODE_INT : MODE_FLOAT),
2899 0))
2900 && (mov_optab->handlers[(int) submode].insn_code
2901 != CODE_FOR_nothing))
2903 /* Don't split destination if it is a stack push. */
2904 int stack = push_operand (x, GET_MODE (x));
2906 #ifdef PUSH_ROUNDING
2907 /* In case we output to the stack, but the size is smaller than the machine
2908 can push exactly, we need to use move instructions.  */
2909 if (stack
2910 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2911 != GET_MODE_SIZE (submode)))
2914 HOST_WIDE_INT offset1, offset2;
2916 /* Do not use anti_adjust_stack, since we don't want to update
2917 stack_pointer_delta. */
2918 temp = expand_binop (Pmode,
2919 #ifdef STACK_GROWS_DOWNWARD
2920 sub_optab,
2921 #else
2922 add_optab,
2923 #endif
2924 stack_pointer_rtx,
2925 GEN_INT
2926 (PUSH_ROUNDING
2927 (GET_MODE_SIZE (GET_MODE (x)))),
2928 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2930 if (temp != stack_pointer_rtx)
2931 emit_move_insn (stack_pointer_rtx, temp);
2933 #ifdef STACK_GROWS_DOWNWARD
2934 offset1 = 0;
2935 offset2 = GET_MODE_SIZE (submode);
2936 #else
2937 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2938 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2939 + GET_MODE_SIZE (submode));
2940 #endif
2942 emit_move_insn (change_address (x, submode,
2943 gen_rtx_PLUS (Pmode,
2944 XEXP (x, 0),
2945 GEN_INT (offset1))),
2946 gen_realpart (submode, y));
2947 emit_move_insn (change_address (x, submode,
2948 gen_rtx_PLUS (Pmode,
2949 XEXP (x, 0),
2950 GEN_INT (offset2))),
2951 gen_imagpart (submode, y));
2955 /* If this is a stack, push the highpart first, so it
2956 will be in the argument order.
2958 In that case, change_address is used only to convert
2959 the mode, not to change the address.  */
2960 if (stack)
2962 /* Note that the real part always precedes the imag part in memory
2963 regardless of machine's endianness. */
2964 #ifdef STACK_GROWS_DOWNWARD
2965 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2966 (gen_rtx_MEM (submode, XEXP (x, 0)),
2967 gen_imagpart (submode, y)));
2968 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2969 (gen_rtx_MEM (submode, XEXP (x, 0)),
2970 gen_realpart (submode, y)));
2971 #else
2972 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2973 (gen_rtx_MEM (submode, XEXP (x, 0)),
2974 gen_realpart (submode, y)));
2975 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2976 (gen_rtx_MEM (submode, XEXP (x, 0)),
2977 gen_imagpart (submode, y)));
2978 #endif
2980 else
2982 rtx realpart_x, realpart_y;
2983 rtx imagpart_x, imagpart_y;
2985 /* If this is a complex value with each part being smaller than a
2986 word, the usual calling sequence will likely pack the pieces into
2987 a single register. Unfortunately, SUBREG of hard registers only
2988 deals in terms of words, so we have a problem converting input
2989 arguments to the CONCAT of two registers that is used elsewhere
2990 for complex values. If this is before reload, we can copy it into
2991 memory and reload. FIXME, we should see about using extract and
2992 insert on integer registers, but complex short and complex char
2993 variables should be rarely used. */
2994 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2995 && (reload_in_progress | reload_completed) == 0)
2997 int packed_dest_p
2998 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2999 int packed_src_p
3000 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3002 if (packed_dest_p || packed_src_p)
3004 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3005 ? MODE_FLOAT : MODE_INT);
3007 enum machine_mode reg_mode
3008 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3010 if (reg_mode != BLKmode)
3012 rtx mem = assign_stack_temp (reg_mode,
3013 GET_MODE_SIZE (mode), 0);
3014 rtx cmem = adjust_address (mem, mode, 0);
3016 cfun->cannot_inline
3017 = N_("function using short complex types cannot be inline");
3019 if (packed_dest_p)
3021 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3023 emit_move_insn_1 (cmem, y);
3024 return emit_move_insn_1 (sreg, mem);
3026 else
3028 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3030 emit_move_insn_1 (mem, sreg);
3031 return emit_move_insn_1 (x, cmem);
3037 realpart_x = gen_realpart (submode, x);
3038 realpart_y = gen_realpart (submode, y);
3039 imagpart_x = gen_imagpart (submode, x);
3040 imagpart_y = gen_imagpart (submode, y);
3042 /* Show the output dies here. This is necessary for SUBREGs
3043 of pseudos since we cannot track their lifetimes correctly;
3044 hard regs shouldn't appear here except as return values.
3045 We never want to emit such a clobber after reload. */
3046 if (x != y
3047 && ! (reload_in_progress || reload_completed)
3048 && (GET_CODE (realpart_x) == SUBREG
3049 || GET_CODE (imagpart_x) == SUBREG))
3050 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3052 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3053 (realpart_x, realpart_y));
3054 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3055 (imagpart_x, imagpart_y));
3058 return get_last_insn ();
3061 /* This will handle any multi-word or full-word mode that lacks a move_insn
3062 pattern. However, you will get better code if you define such patterns,
3063 even if they must turn into multiple assembler instructions. */
3064 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3071 #ifdef PUSH_ROUNDING
3073 /* If X is a push on the stack, do the push now and replace
3074 X with a reference to the stack pointer. */
3075 if (push_operand (x, GET_MODE (x)))
3080 /* Do not use anti_adjust_stack, since we don't want to update
3081 stack_pointer_delta. */
3082 temp = expand_binop (Pmode,
3083 #ifdef STACK_GROWS_DOWNWARD
3091 (GET_MODE_SIZE (GET_MODE (x)))),
3092 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3094 if (temp != stack_pointer_rtx)
3095 emit_move_insn (stack_pointer_rtx, temp);
3097 code = GET_CODE (XEXP (x, 0));
3099 /* Just hope that small offsets off SP are OK. */
3100 if (code == POST_INC)
3101 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3102 GEN_INT (-((HOST_WIDE_INT)
3103 GET_MODE_SIZE (GET_MODE (x)))));
3104 else if (code == POST_DEC)
3105 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3106 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3108 temp = stack_pointer_rtx;
3110 x = change_address (x, VOIDmode, temp);
3112 #endif
3114 /* If we are in reload, see if either operand is a MEM whose address
3115 is scheduled for replacement. */
3116 if (reload_in_progress && GET_CODE (x) == MEM
3117 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3118 x = replace_equiv_address_nv (x, inner);
3119 if (reload_in_progress && GET_CODE (y) == MEM
3120 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3121 y = replace_equiv_address_nv (y, inner);
3123 start_sequence ();
3125 need_clobber = 0;
3126 for (i = 0;
3127 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3128 i++)
3130 rtx xpart = operand_subword (x, i, 1, mode);
3131 rtx ypart = operand_subword (y, i, 1, mode);
3133 /* If we can't get a part of Y, put Y into memory if it is a
3134 constant. Otherwise, force it into a register. If we still
3135 can't get a part of Y, abort. */
3136 if (ypart == 0 && CONSTANT_P (y))
3138 y = force_const_mem (mode, y);
3139 ypart = operand_subword (y, i, 1, mode);
3141 else if (ypart == 0)
3142 ypart = operand_subword_force (y, i, mode);
3144 if (xpart == 0 || ypart == 0)
3145 abort ();
3147 need_clobber |= (GET_CODE (xpart) == SUBREG);
3149 last_insn = emit_move_insn (xpart, ypart);
3152 seq = get_insns ();
3153 end_sequence ();
3155 /* Show the output dies here. This is necessary for SUBREGs
3156 of pseudos since we cannot track their lifetimes correctly;
3157 hard regs shouldn't appear here except as return values.
3158 We never want to emit such a clobber after reload. */
3159 if (x != y
3160 && ! (reload_in_progress || reload_completed)
3161 && need_clobber != 0)
3162 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3164 emit_insn (seq);
3166 return last_insn;
3172 /* If Y is representable exactly in a narrower mode, and the target can
3173 perform the extension directly from constant or memory, then emit the
3174 move as an extension. */
3177 compress_float_constant (x, y)
3180 enum machine_mode dstmode = GET_MODE (x);
3181 enum machine_mode orig_srcmode = GET_MODE (y);
3182 enum machine_mode srcmode;
3184 REAL_VALUE_TYPE r;
3185 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3187 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3188 srcmode != orig_srcmode;
3189 srcmode = GET_MODE_WIDER_MODE (srcmode))
3191 enum insn_code ic;
3192 rtx trunc_y, last_insn;
3194 /* Skip if the target can't extend this way. */
3195 ic = can_extend_p (dstmode, srcmode, 0);
3196 if (ic == CODE_FOR_nothing)
3197 continue;
3199 /* Skip if the narrowed value isn't exact. */
3200 if (! exact_real_truncate (srcmode, &r))
3201 continue;
3203 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3205 if (LEGITIMATE_CONSTANT_P (trunc_y))
3207 /* Skip if the target needs extra instructions to perform
3208 the extension.  */
3209 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3210 continue;
3212 else if (float_extend_from_mem[dstmode][srcmode])
3213 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3214 else
3215 continue;
3217 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3218 last_insn = get_last_insn ();
3220 if (GET_CODE (x) == REG)
3221 REG_NOTES (last_insn)
3222 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3224 return last_insn;
3228 return NULL_RTX;
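/* Worked example (editor's note, not in the original source): moving
   the DFmode constant 1.5 into a DFmode register can be compressed,
   because 1.5 truncates to SFmode exactly; on a target providing
   extendsfdf2 the move becomes a float_extend of the smaller SFmode
   constant.  A constant such as 0.1, whose SFmode truncation is
   inexact, fails exact_real_truncate and is moved at full width.  */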
3230 /* Pushing data onto the stack. */
3232 /* Push a block of length SIZE (perhaps variable)
3233 and return an rtx to address the beginning of the block.
3234 Note that it is not possible for the value returned to be a QUEUED.
3235 The value may be virtual_outgoing_args_rtx.
3237 EXTRA is the number of bytes of padding to push in addition to SIZE.
3238 BELOW nonzero means this padding comes at low addresses;
3239 otherwise, the padding comes at high addresses. */
3242 push_block (size, extra, below)
3248 size = convert_modes (Pmode, ptr_mode, size, 1);
3249 if (CONSTANT_P (size))
3250 anti_adjust_stack (plus_constant (size, extra));
3251 else if (GET_CODE (size) == REG && extra == 0)
3252 anti_adjust_stack (size);
3253 else
3255 temp = copy_to_mode_reg (Pmode, size);
3256 if (extra != 0)
3257 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3258 temp, 0, OPTAB_LIB_WIDEN);
3259 anti_adjust_stack (temp);
3262 #ifndef STACK_GROWS_DOWNWARD
3263 if (0)
3264 #else
3265 if (1)
3266 #endif
3268 temp = virtual_outgoing_args_rtx;
3269 if (extra != 0 && below)
3270 temp = plus_constant (temp, extra);
3272 else
3274 if (GET_CODE (size) == CONST_INT)
3275 temp = plus_constant (virtual_outgoing_args_rtx,
3276 -INTVAL (size) - (below ? 0 : extra));
3277 else if (extra != 0 && !below)
3278 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3279 negate_rtx (Pmode, plus_constant (size, extra)));
3280 else
3281 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3282 negate_rtx (Pmode, size));
3285 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
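/* Worked example (editor's note, not in the original source): with
   SIZE == 16, EXTRA == 4 and BELOW == 0, the CONST_INT branch above
   computes plus_constant (virtual_outgoing_args_rtx, -20), i.e. the
   returned address sits 16 + 4 bytes back, so the EXTRA padding lies
   above the 16-byte block that was requested.  */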
3288 #ifdef PUSH_ROUNDING
3290 /* Emit single push insn. */
3293 emit_single_push_insn (mode, x, type)
3295 enum machine_mode mode;
3299 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3301 enum insn_code icode;
3302 insn_operand_predicate_fn pred;
3304 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3305 /* If there is a push pattern, use it.  Otherwise try the old way of
3306 throwing a MEM representing the push operation to the move expander.  */
3307 icode = push_optab->handlers[(int) mode].insn_code;
3308 if (icode != CODE_FOR_nothing)
3310 if (((pred = insn_data[(int) icode].operand[0].predicate)
3311 && !((*pred) (x, mode))))
3312 x = force_reg (mode, x);
3313 emit_insn (GEN_FCN (icode) (x));
3314 return;
3316 if (GET_MODE_SIZE (mode) == rounded_size)
3317 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3318 else
3320 #ifdef STACK_GROWS_DOWNWARD
3321 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3322 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3323 #else
3324 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3325 GEN_INT (rounded_size));
3326 #endif
3327 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3330 dest = gen_rtx_MEM (mode, dest_addr);
3332 if (type != 0)
3334 set_mem_attributes (dest, type, 1);
3336 if (flag_optimize_sibling_calls)
3337 /* Function incoming arguments may overlap with sibling call
3338 outgoing arguments and we cannot allow reordering of reads
3339 from function arguments with stores to outgoing arguments
3340 of sibling calls. */
3341 set_mem_alias_set (dest, 0);
3343 emit_move_insn (dest, x);
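/* Editor's illustration (not in the original source): for an SImode X
   on a STACK_GROWS_DOWNWARD target whose push rounds to exactly
   GET_MODE_SIZE (SImode), the move above amounts to

     (set (mem:SI (pre_dec (reg sp))) X)

   i.e. STACK_PUSH_CODE supplies the PRE_DEC address form.  */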
3347 /* Generate code to push X onto the stack, assuming it has mode MODE and
3348 type TYPE.
3349 MODE is redundant except when X is a CONST_INT (since they don't
3350 carry mode info).
3351 SIZE is an rtx for the size of data to be copied (in bytes),
3352 needed only if X is BLKmode.
3354 ALIGN (in bits) is maximum alignment we can assume.
3356 If PARTIAL and REG are both nonzero, then copy that many of the first
3357 words of X into registers starting with REG, and push the rest of X.
3358 The amount of space pushed is decreased by PARTIAL words,
3359 rounded *down* to a multiple of PARM_BOUNDARY.
3360 REG must be a hard register in this case.
3361 If REG is zero but PARTIAL is not, take all other actions for an
3362 argument partially in registers, but do not actually load any
3363 registers.
3365 EXTRA is the amount in bytes of extra space to leave next to this arg.
3366 This is ignored if an argument block has already been allocated.
3368 On a machine that lacks real push insns, ARGS_ADDR is the address of
3369 the bottom of the argument block for this call. We use indexing off there
3370 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3371 argument block has not been preallocated.
3373 ARGS_SO_FAR is the size of args previously pushed for this call.
3375 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3376 for arguments passed in registers. If nonzero, it will be the number
3377 of bytes required. */
3380 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3381 args_addr, args_so_far, reg_parm_stack_space,
3382 alignment_pad)
3383 rtx x;
3384 enum machine_mode mode;
3393 int reg_parm_stack_space;
3397 enum direction stack_direction
3398 #ifdef STACK_GROWS_DOWNWARD
3399 = downward;
3400 #else
3401 = upward;
3402 #endif
3404 /* Decide where to pad the argument: `downward' for below,
3405 `upward' for above, or `none' for don't pad it.
3406 Default is below for small data on big-endian machines; else above. */
3407 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3409 /* Invert direction if stack is post-decrement.
3411 if (STACK_PUSH_CODE == POST_DEC)
3412 if (where_pad != none)
3413 where_pad = (where_pad == downward ? upward : downward);
3415 xinner = x = protect_from_queue (x, 0);
3417 if (mode == BLKmode)
3419 /* Copy a block into the stack, entirely or partially. */
3422 int used = partial * UNITS_PER_WORD;
3423 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3431 /* USED is now the # of bytes we need not copy to the stack
3432 because registers will take care of them. */
3434 if (partial != 0)
3435 xinner = adjust_address (xinner, BLKmode, used);
3437 /* If the partial register-part of the arg counts in its stack size,
3438 skip the part of stack space corresponding to the registers.
3439 Otherwise, start copying to the beginning of the stack space,
3440 by setting SKIP to 0. */
3441 skip = (reg_parm_stack_space == 0) ? 0 : used;
3443 #ifdef PUSH_ROUNDING
3444 /* Do it with several push insns if that doesn't take lots of insns
3445 and if there is no difficulty with push insns that skip bytes
3446 on the stack for alignment purposes. */
3447 if (args_addr == 0
3448 && PUSH_ARGS
3449 && GET_CODE (size) == CONST_INT
3450 && skip == 0
3451 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3452 /* Here we avoid the case of a structure whose weak alignment
3453 forces many pushes of a small amount of data,
3454 and such small pushes do rounding that causes trouble. */
3455 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3456 || align >= BIGGEST_ALIGNMENT
3457 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3458 == (align / BITS_PER_UNIT)))
3459 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3461 /* Push padding now if padding above and stack grows down,
3462 or if padding below and stack grows up.
3463 But if space already allocated, this has already been done. */
3464 if (extra && args_addr == 0
3465 && where_pad != none && where_pad != stack_direction)
3466 anti_adjust_stack (GEN_INT (extra));
3468 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3471 #endif /* PUSH_ROUNDING */
3475 /* Otherwise make space on the stack and copy the data
3476 to the address of that space. */
3478 /* Deduct words put into registers from the size we must copy. */
3481 if (GET_CODE (size) == CONST_INT)
3482 size = GEN_INT (INTVAL (size) - used);
3483 else
3484 size = expand_binop (GET_MODE (size), sub_optab, size,
3485 GEN_INT (used), NULL_RTX, 0,
3489 /* Get the address of the stack space.
3490 In this case, we do not deal with EXTRA separately.
3491 A single stack adjust will do. */
3494 temp = push_block (size, extra, where_pad == downward);
3497 else if (GET_CODE (args_so_far) == CONST_INT)
3498 temp = memory_address (BLKmode,
3499 plus_constant (args_addr,
3500 skip + INTVAL (args_so_far)));
3501 else
3502 temp = memory_address (BLKmode,
3503 plus_constant (gen_rtx_PLUS (Pmode,
3504 args_addr,
3505 args_so_far),
3506 skip));
3507 target = gen_rtx_MEM (BLKmode, temp);
3509 if (type != 0)
3511 set_mem_attributes (target, type, 1);
3512 /* Function incoming arguments may overlap with sibling call
3513 outgoing arguments and we cannot allow reordering of reads
3514 from function arguments with stores to outgoing arguments
3515 of sibling calls. */
3516 set_mem_alias_set (target, 0);
3518 else
3519 set_mem_align (target, align);
3521 /* TEMP is the address of the block. Copy the data there. */
3522 if (GET_CODE (size) == CONST_INT
3523 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3525 move_by_pieces (target, xinner, INTVAL (size), align);
3530 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3531 enum machine_mode mode;
3533 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3534 mode != VOIDmode;
3535 mode = GET_MODE_WIDER_MODE (mode))
3537 enum insn_code code = movstr_optab[(int) mode];
3538 insn_operand_predicate_fn pred;
3540 if (code != CODE_FOR_nothing
3541 && ((GET_CODE (size) == CONST_INT
3542 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3543 <= (GET_MODE_MASK (mode) >> 1)))
3544 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3545 && (!(pred = insn_data[(int) code].operand[0].predicate)
3546 || ((*pred) (target, BLKmode)))
3547 && (!(pred = insn_data[(int) code].operand[1].predicate)
3548 || ((*pred) (xinner, BLKmode)))
3549 && (!(pred = insn_data[(int) code].operand[3].predicate)
3550 || ((*pred) (opalign, VOIDmode))))
3552 rtx op2 = convert_to_mode (mode, size, 1);
3553 rtx last = get_last_insn ();
3556 pred = insn_data[(int) code].operand[2].predicate;
3557 if (pred != 0 && ! (*pred) (op2, mode))
3558 op2 = copy_to_mode_reg (mode, op2);
3560 pat = GEN_FCN ((int) code) (target, xinner,
3568 delete_insns_since (last);
3573 if (!ACCUMULATE_OUTGOING_ARGS)
3575 /* If the source is referenced relative to the stack pointer,
3576 copy it to another register to stabilize it. We do not need
3577 to do this if we know that we won't be changing sp. */
3579 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3580 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3581 temp = copy_to_reg (temp);
3584 /* Make inhibit_defer_pop nonzero around the library call
3585 to force it to pop the bcopy-arguments right away. */
3586 NO_DEFER_POP;
3587 #ifdef TARGET_MEM_FUNCTIONS
3588 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3589 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3590 convert_to_mode (TYPE_MODE (sizetype),
3591 size, TREE_UNSIGNED (sizetype)),
3592 TYPE_MODE (sizetype));
3593 #else
3594 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3595 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3596 convert_to_mode (TYPE_MODE (integer_type_node),
3598 TREE_UNSIGNED (integer_type_node)),
3599 TYPE_MODE (integer_type_node));
3600 #endif
3601 OK_DEFER_POP;
3604 else if (partial > 0)
3606 /* Scalar partly in registers. */
3608 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3611 /* # words of start of argument
3612 that we must make space for but need not store. */
3613 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3614 int args_offset = INTVAL (args_so_far);
3617 /* Push padding now if padding above and stack grows down,
3618 or if padding below and stack grows up.
3619 But if space already allocated, this has already been done. */
3620 if (extra && args_addr == 0
3621 && where_pad != none && where_pad != stack_direction)
3622 anti_adjust_stack (GEN_INT (extra));
3624 /* If we make space by pushing it, we might as well push
3625 the real data. Otherwise, we can leave OFFSET nonzero
3626 and leave the space uninitialized.  */
3627 if (args_addr == 0)
3628 offset = 0;
3630 /* Now NOT_STACK gets the number of words that we don't need to
3631 allocate on the stack. */
3632 not_stack = partial - offset;
3634 /* If the partial register-part of the arg counts in its stack size,
3635 skip the part of stack space corresponding to the registers.
3636 Otherwise, start copying to the beginning of the stack space,
3637 by setting SKIP to 0. */
3638 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3640 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3641 x = validize_mem (force_const_mem (mode, x));
3643 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3644 SUBREGs of such registers are not allowed. */
3645 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3646 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3647 x = copy_to_reg (x);
3649 /* Loop over all the words allocated on the stack for this arg. */
3650 /* We can do it by words, because any scalar bigger than a word
3651 has a size a multiple of a word. */
3652 #ifndef PUSH_ARGS_REVERSED
3653 for (i = not_stack; i < size; i++)
3654 #else
3655 for (i = size - 1; i >= not_stack; i--)
3656 #endif
3657 if (i >= not_stack + offset)
3658 emit_push_insn (operand_subword_force (x, i, mode),
3659 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3660 0, args_addr,
3661 GEN_INT (args_offset + ((i - not_stack + skip)
3662 * UNITS_PER_WORD)),
3663 reg_parm_stack_space, alignment_pad);
3668 rtx target = NULL_RTX;
3671 /* Push padding now if padding above and stack grows down,
3672 or if padding below and stack grows up.
3673 But if space already allocated, this has already been done. */
3674 if (extra && args_addr == 0
3675 && where_pad != none && where_pad != stack_direction)
3676 anti_adjust_stack (GEN_INT (extra));
3678 #ifdef PUSH_ROUNDING
3679 if (args_addr == 0 && PUSH_ARGS)
3680 emit_single_push_insn (mode, x, type);
3684 if (GET_CODE (args_so_far) == CONST_INT)
3685 addr
3686 = memory_address (mode,
3687 plus_constant (args_addr,
3688 INTVAL (args_so_far)));
3689 else
3690 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3691 args_so_far));
3693 dest = gen_rtx_MEM (mode, addr);
3694 if (type != 0)
3696 set_mem_attributes (dest, type, 1);
3697 /* Function incoming arguments may overlap with sibling call
3698 outgoing arguments and we cannot allow reordering of reads
3699 from function arguments with stores to outgoing arguments
3700 of sibling calls. */
3701 set_mem_alias_set (dest, 0);
3704 emit_move_insn (dest, x);
3710 /* If part should go in registers, copy that part
3711 into the appropriate registers. Do this now, at the end,
3712 since mem-to-mem copies above may do function calls. */
3713 if (partial > 0 && reg != 0)
3715 /* Handle calls that pass values in multiple non-contiguous locations.
3716 The Irix 6 ABI has examples of this. */
3717 if (GET_CODE (reg) == PARALLEL)
3718 emit_group_load (reg, x, -1); /* ??? size? */
3720 move_block_to_reg (REGNO (reg), x, partial, mode);
3723 if (extra && args_addr == 0 && where_pad == stack_direction)
3724 anti_adjust_stack (GEN_INT (extra));
3726 if (alignment_pad && args_addr == 0)
3727 anti_adjust_stack (alignment_pad);
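/* Editor's sketch (not in the original source): a minimal use for a
   plain SImode scalar with no partial-register part and no
   preallocated argument block might look like

     emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                     PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
                     const0_rtx, 0, NULL_RTX);

   VAL is a placeholder; the zeros select "no partial registers" and
   "no reg_parm_stack_space", and the null ARGS_ADDR requests real
   push insns where available.  */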
3730 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3731 operations.  */
3733 static rtx
3734 get_subtarget (x)
3735 rtx x;
3737 return ((x == 0
3738 /* Only registers can be subtargets.  */
3739 || GET_CODE (x) != REG
3740 /* If the register is readonly, it can't be set more than once. */
3741 || RTX_UNCHANGING_P (x)
3742 /* Don't use hard regs to avoid extending their life. */
3743 || REGNO (x) < FIRST_PSEUDO_REGISTER
3744 /* Avoid subtargets inside loops,
3745 since they hide some invariant expressions. */
3746 || preserve_subexpressions_p ())
3747 ? 0 : x);
3750 /* Expand an assignment that stores the value of FROM into TO.
3751 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3752 (This may contain a QUEUED rtx;
3753 if the value is constant, this rtx is a constant.)
3754 Otherwise, the returned value is NULL_RTX.
3756 SUGGEST_REG is no longer actually used.
3757 It used to mean, copy the value through a register
3758 and return that register, if that is possible.
3759 We now use WANT_VALUE to decide whether to do this. */
3762 expand_assignment (to, from, want_value, suggest_reg)
3765 int suggest_reg ATTRIBUTE_UNUSED;
3770 /* Don't crash if the lhs of the assignment was erroneous. */
3772 if (TREE_CODE (to) == ERROR_MARK)
3774 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3775 return want_value ? result : NULL_RTX;
3778 /* Assignment of a structure component needs special treatment
3779 if the structure component's rtx is not simply a MEM.
3780 Assignment of an array element at a constant index, and assignment of
3781 an array element in an unaligned packed structure field, has the same
3782 problem.  */
3784 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3785 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3787 enum machine_mode mode1;
3788 HOST_WIDE_INT bitsize, bitpos;
3796 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3797 &unsignedp, &volatilep);
3799 /* If we are going to use store_bit_field and extract_bit_field,
3800 make sure to_rtx will be safe for multiple use. */
3802 if (mode1 == VOIDmode && want_value)
3803 tem = stabilize_reference (tem);
3805 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3807 if (offset != 0)
3809 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3811 if (GET_CODE (to_rtx) != MEM)
3812 abort ();
3814 #ifdef POINTERS_EXTEND_UNSIGNED
3815 if (GET_MODE (offset_rtx) != Pmode)
3816 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3817 #else
3818 if (GET_MODE (offset_rtx) != ptr_mode)
3819 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3820 #endif
3822 /* A constant address in TO_RTX can have VOIDmode, we must not try
3823 to call force_reg for that case. Avoid that case. */
3824 if (GET_CODE (to_rtx) == MEM
3825 && GET_MODE (to_rtx) == BLKmode
3826 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3827 && bitsize > 0
3828 && (bitpos % bitsize) == 0
3829 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3830 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3832 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3833 bitpos = 0;
3836 to_rtx = offset_address (to_rtx, offset_rtx,
3837 highest_pow2_factor_for_type (TREE_TYPE (to),
3838 offset));
3841 if (GET_CODE (to_rtx) == MEM)
3843 /* If the field is at offset zero, we could have been given the
3844 DECL_RTX of the parent struct. Don't munge it. */
3845 to_rtx = shallow_copy_rtx (to_rtx);
3847 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3850 /* Deal with volatile and readonly fields. The former is only done
3851 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3852 if (volatilep && GET_CODE (to_rtx) == MEM)
3854 if (to_rtx == orig_to_rtx)
3855 to_rtx = copy_rtx (to_rtx);
3856 MEM_VOLATILE_P (to_rtx) = 1;
3859 if (TREE_CODE (to) == COMPONENT_REF
3860 && TREE_READONLY (TREE_OPERAND (to, 1)))
3862 if (to_rtx == orig_to_rtx)
3863 to_rtx = copy_rtx (to_rtx);
3864 RTX_UNCHANGING_P (to_rtx) = 1;
3867 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3869 if (to_rtx == orig_to_rtx)
3870 to_rtx = copy_rtx (to_rtx);
3871 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3874 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3875 (want_value
3876 /* Spurious cast for HPUX compiler.  */
3877 ? ((enum machine_mode)
3878 TYPE_MODE (TREE_TYPE (to)))
3879 : VOIDmode),
3880 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3882 preserve_temp_slots (result);
3886 /* If the value is meaningful, convert RESULT to the proper mode.
3887 Otherwise, return nothing. */
3888 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3889 TYPE_MODE (TREE_TYPE (from)),
3890 result,
3891 TREE_UNSIGNED (TREE_TYPE (to)))
3892 : NULL_RTX);
3895 /* If the rhs is a function call and its value is not an aggregate,
3896 call the function before we start to compute the lhs.
3897 This is needed for correct code for cases such as
3898 val = setjmp (buf) on machines where reference to val
3899 requires loading up part of an address in a separate insn.
3901 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3902 since it might be a promoted variable where the zero- or sign- extension
3903 needs to be done. Handling this in the normal way is safe because no
3904 computation is done before the call. */
3905 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3906 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3907 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3908 && GET_CODE (DECL_RTL (to)) == REG))
3913 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3915 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3917 /* Handle calls that return values in multiple non-contiguous locations.
3918 The Irix 6 ABI has examples of this. */
3919 if (GET_CODE (to_rtx) == PARALLEL)
3920 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3921 else if (GET_MODE (to_rtx) == BLKmode)
3922 emit_block_move (to_rtx, value, expr_size (from));
3925 #ifdef POINTERS_EXTEND_UNSIGNED
3926 if (POINTER_TYPE_P (TREE_TYPE (to))
3927 && GET_MODE (to_rtx) != GET_MODE (value))
3928 value = convert_memory_address (GET_MODE (to_rtx), value);
3929 #endif
3930 emit_move_insn (to_rtx, value);
3932 preserve_temp_slots (to_rtx);
3935 return want_value ? to_rtx : NULL_RTX;
3938 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3939 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3942 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3944 /* Don't move directly into a return register. */
3945 if (TREE_CODE (to) == RESULT_DECL
3946 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3951 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3953 if (GET_CODE (to_rtx) == PARALLEL)
3954 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3956 emit_move_insn (to_rtx, temp);
3958 preserve_temp_slots (to_rtx);
3961 return want_value ? to_rtx : NULL_RTX;
3964 /* In case we are returning the contents of an object which overlaps
3965 the place the value is being stored, use a safe function when copying
3966 a value through a pointer into a structure value return block. */
3967 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3968 && current_function_returns_struct
3969 && !current_function_returns_pcc_struct)
3974 size = expr_size (from);
3975 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3977 #ifdef TARGET_MEM_FUNCTIONS
3978 emit_library_call (memmove_libfunc, LCT_NORMAL,
3979 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3980 XEXP (from_rtx, 0), Pmode,
3981 convert_to_mode (TYPE_MODE (sizetype),
3982 size, TREE_UNSIGNED (sizetype)),
3983 TYPE_MODE (sizetype));
3984 #else
3985 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3986 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3987 XEXP (to_rtx, 0), Pmode,
3988 convert_to_mode (TYPE_MODE (integer_type_node),
3989 size, TREE_UNSIGNED (integer_type_node)),
3990 TYPE_MODE (integer_type_node));
3991 #endif
3993 preserve_temp_slots (to_rtx);
3996 return want_value ? to_rtx : NULL_RTX;
3999 /* Compute FROM and store the value in the rtx we got. */
4002 result = store_expr (from, to_rtx, want_value);
4003 preserve_temp_slots (result);
4006 return want_value ? result : NULL_RTX;
4009 /* Generate code for computing expression EXP,
4010 and storing the value into TARGET.
4011 TARGET may contain a QUEUED rtx.
4013 If WANT_VALUE is nonzero, return a copy of the value
4014 not in TARGET, so that we can be sure to use the proper
4015 value in a containing expression even if TARGET has something
4016 else stored in it. If possible, we copy the value through a pseudo
4017 and return that pseudo. Or, if the value is constant, we try to
4018 return the constant. In some cases, we return a pseudo
4019 copied *from* TARGET.
4021 If the mode is BLKmode then we may return TARGET itself.
4022 It turns out that in BLKmode it doesn't cause a problem,
4023 because C has no operators that could combine two different
4024 assignments into the same BLKmode object with different values
4025 with no sequence point.  Will other languages need this to
4026 be more thorough?
4028 If WANT_VALUE is 0, we return NULL, to make sure
4029 to catch quickly any cases where the caller uses the value
4030 and fails to set WANT_VALUE. */
4033 store_expr (exp, target, want_value)
4039 int dont_return_target = 0;
4040 int dont_store_target = 0;
4042 if (TREE_CODE (exp) == COMPOUND_EXPR)
4044 /* Perform first part of compound expression, then assign from second
4045 part.  */
4046 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4048 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4050 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4052 /* For conditional expression, get safe form of the target. Then
4053 test the condition, doing the appropriate assignment on either
4054 side. This avoids the creation of unnecessary temporaries.
4055 For non-BLKmode, it is more efficient not to do this. */
4057 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4060 target = protect_from_queue (target, 1);
4062 do_pending_stack_adjust ();
4064 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4065 start_cleanup_deferral ();
4066 store_expr (TREE_OPERAND (exp, 1), target, 0);
4067 end_cleanup_deferral ();
4068 emit_queue ();
4069 emit_jump_insn (gen_jump (lab2));
4070 emit_label (lab1);
4072 start_cleanup_deferral ();
4073 store_expr (TREE_OPERAND (exp, 2), target, 0);
4074 end_cleanup_deferral ();
4075 emit_queue ();
4076 emit_label (lab2);
4077 OK_DEFER_POP;
4079 return want_value ? target : NULL_RTX;
4081 else if (queued_subexp_p (target))
4082 /* If target contains a postincrement, let's not risk
4083 using it as the place to generate the rhs. */
4085 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4087 /* Expand EXP into a new pseudo. */
4088 temp = gen_reg_rtx (GET_MODE (target));
4089 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4092 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4094 /* If target is volatile, ANSI requires accessing the value
4095 *from* the target, if it is accessed. So make that happen.
4096 In no case return the target itself. */
4097 if (! MEM_VOLATILE_P (target) && want_value)
4098 dont_return_target = 1;
4100 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4101 && GET_MODE (target) != BLKmode)
4102 /* If target is in memory and caller wants value in a register instead,
4103 arrange that. Pass TARGET as target for expand_expr so that,
4104 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4105 We know expand_expr will not use the target in that case.
4106 Don't do this if TARGET is volatile because we are supposed
4107 to write it and then read it. */
4109 temp = expand_expr (exp, target, GET_MODE (target), 0);
4110 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4112 /* If TEMP is already in the desired TARGET, only copy it from
4113 memory and don't store it there again. */
4114 if (temp == target
4115 || (rtx_equal_p (temp, target)
4116 && ! side_effects_p (temp) && ! side_effects_p (target)))
4117 dont_store_target = 1;
4118 temp = copy_to_reg (temp);
4120 dont_return_target = 1;
4122 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4123 /* If this is a scalar in a register that is stored in a wider mode
4124 than the declared mode, compute the result into its declared mode
4125 and then convert to the wider mode. Our value is the computed
4128 rtx inner_target = 0;
4130 /* If we don't want a value, we can do the conversion inside EXP,
4131 which will often result in some optimizations. Do the conversion
4132 in two steps: first change the signedness, if needed, then
4133 the extend. But don't do this if the type of EXP is a subtype
4134 of something else since then the conversion might involve
4135 more than just converting modes. */
4136 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4137 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4139 if (TREE_UNSIGNED (TREE_TYPE (exp))
4140 != SUBREG_PROMOTED_UNSIGNED_P (target))
4141 exp = convert
4142 ((*lang_hooks.types.signed_or_unsigned_type)
4143 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4145 exp = convert ((*lang_hooks.types.type_for_mode)
4146 (GET_MODE (SUBREG_REG (target)),
4147 SUBREG_PROMOTED_UNSIGNED_P (target)),
4148 exp);
4150 inner_target = SUBREG_REG (target);
4153 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4155 /* If TEMP is a volatile MEM and we want a result value, make
4156 the access now so it gets done only once. Likewise if
4157 it contains TARGET. */
4158 if (GET_CODE (temp) == MEM && want_value
4159 && (MEM_VOLATILE_P (temp)
4160 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4161 temp = copy_to_reg (temp);
4163 /* If TEMP is a VOIDmode constant, use convert_modes to make
4164 sure that we properly convert it. */
4165 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4167 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4168 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4169 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4170 GET_MODE (target), temp,
4171 SUBREG_PROMOTED_UNSIGNED_P (target));
4174 convert_move (SUBREG_REG (target), temp,
4175 SUBREG_PROMOTED_UNSIGNED_P (target));
4177 /* If we promoted a constant, change the mode back down to match
4178 target. Otherwise, the caller might get confused by a result whose
4179 mode is larger than expected. */
4181 if (want_value && GET_MODE (temp) != GET_MODE (target))
4183 if (GET_MODE (temp) != VOIDmode)
4185 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4186 SUBREG_PROMOTED_VAR_P (temp) = 1;
4187 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4188 SUBREG_PROMOTED_UNSIGNED_P (target));
4190 else
4191 temp = convert_modes (GET_MODE (target),
4192 GET_MODE (SUBREG_REG (target)),
4193 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4196 return want_value ? temp : NULL_RTX;
4199 else
4200 temp = expand_expr (exp, target, GET_MODE (target), 0);
4201 /* Return TARGET if it's a specified hardware register.
4202 If TARGET is a volatile mem ref, either return TARGET
4203 or return a reg copied *from* TARGET; ANSI requires this.
4205 Otherwise, if TEMP is not TARGET, return TEMP
4206 if it is constant (for efficiency),
4207 or if we really want the correct value. */
4208 if (!(target && GET_CODE (target) == REG
4209 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4210 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4211 && ! rtx_equal_p (temp, target)
4212 && (CONSTANT_P (temp) || want_value))
4213 dont_return_target = 1;
4216 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4217 the same as that of TARGET, adjust the constant. This is needed, for
4218 example, in case it is a CONST_DOUBLE and we want only a word-sized
4220 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4221 && TREE_CODE (exp) != ERROR_MARK
4222 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4223 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4224 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4226 /* If value was not generated in the target, store it there.
4227 Convert the value to TARGET's type first if necessary.
4228 If TEMP and TARGET compare equal according to rtx_equal_p, but
4229 one or both of them are volatile memory refs, we have to distinguish
4231 - expand_expr has used TARGET. In this case, we must not generate
4232 another copy.  This can be detected by TARGET being equal according
4233 to == .
4234 - expand_expr has not used TARGET - that means that the source just
4235 happens to have the same RTX form. Since temp will have been created
4236 by expand_expr, it will compare unequal according to == .
4237 We must generate a copy in this case, to reach the correct number
4238 of volatile memory references. */
4240 if ((! rtx_equal_p (temp, target)
4241 || (temp != target && (side_effects_p (temp)
4242 || side_effects_p (target))))
4243 && TREE_CODE (exp) != ERROR_MARK
4244 && ! dont_store_target
4245 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4246 but TARGET is not valid memory reference, TEMP will differ
4247 from TARGET although it is really the same location. */
4248 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4249 || target != DECL_RTL_IF_SET (exp)))
4251 target = protect_from_queue (target, 1);
4252 if (GET_MODE (temp) != GET_MODE (target)
4253 && GET_MODE (temp) != VOIDmode)
4255 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4256 if (dont_return_target)
4258 /* In this case, we will return TEMP,
4259 so make sure it has the proper mode.
4260 But don't forget to store the value into TARGET. */
4261 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4262 emit_move_insn (target, temp);
4265 convert_move (target, temp, unsignedp);
4268 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4270 /* Handle copying a string constant into an array. The string
4271 constant may be shorter than the array. So copy just the string's
4272 actual length, and clear the rest. First get the size of the data
4273 type of the string, which is actually the size of the target. */
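/* Illustrative sketch (hypothetical source, not from this file): given

     char buf[8] = "ab";

   TREE_STRING_LENGTH is 3 ("ab" plus the terminating NUL) while the
   target size is 8, so the code below copies three bytes and clears
   the remaining five.  */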
4274 rtx size = expr_size (exp);
4276 if (GET_CODE (size) == CONST_INT
4277 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4278 emit_block_move (target, temp, size);
4281 /* Compute the size of the data to copy from the string. */
4283 = size_binop (MIN_EXPR,
4284 make_tree (sizetype, size),
4285 size_int (TREE_STRING_LENGTH (exp)));
4286 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4290 /* Copy that much. */
4291 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4292 emit_block_move (target, temp, copy_size_rtx);
4294 /* Figure out how much is left in TARGET that we have to clear.
4295 Do all calculations in ptr_mode. */
4296 if (GET_CODE (copy_size_rtx) == CONST_INT)
4298 size = plus_constant (size, -INTVAL (copy_size_rtx));
4299 target = adjust_address (target, BLKmode,
4300 INTVAL (copy_size_rtx));
4304 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4305 copy_size_rtx, NULL_RTX, 0,
4308 #ifdef POINTERS_EXTEND_UNSIGNED
4309 if (GET_MODE (copy_size_rtx) != Pmode)
4310 copy_size_rtx = convert_memory_address (Pmode,
4314 target = offset_address (target, copy_size_rtx,
4315 highest_pow2_factor (copy_size));
4316 label = gen_label_rtx ();
4317 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4318 GET_MODE (size), 0, label);
4321 if (size != const0_rtx)
4322 clear_storage (target, size);
4328 /* Handle calls that return values in multiple non-contiguous locations.
4329 The Irix 6 ABI has examples of this. */
4330 else if (GET_CODE (target) == PARALLEL)
4331 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4332 else if (GET_MODE (temp) == BLKmode)
4333 emit_block_move (target, temp, expr_size (exp));
4335 emit_move_insn (target, temp);
4338 /* If we don't want a value, return NULL_RTX. */
4342 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4343 ??? The latter test doesn't seem to make sense. */
4344 else if (dont_return_target && GET_CODE (temp) != MEM)
4347 /* Return TARGET itself if it is a hard register. */
4348 else if (want_value && GET_MODE (target) != BLKmode
4349 && ! (GET_CODE (target) == REG
4350 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4351 return copy_to_reg (target);
4357 /* Return 1 if EXP just contains zeros. */
4365 switch (TREE_CODE (exp))
4369 case NON_LVALUE_EXPR:
4370 case VIEW_CONVERT_EXPR:
4371 return is_zeros_p (TREE_OPERAND (exp, 0));
4374 return integer_zerop (exp);
4378 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4381 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4384 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4385 elt = TREE_CHAIN (elt))
4386 if (!is_zeros_p (TREE_VALUE (elt)))
4392 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4393 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4394 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4395 if (! is_zeros_p (TREE_VALUE (elt)))
4405 /* Return 1 if EXP contains mostly (3/4) zeros. */
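/* Worked example of the 3/4 threshold used below: a constructor with
   8 elements of which 6 are zero satisfies 4 * 6 >= 3 * 8 (24 >= 24)
   and counts as mostly zeros; with only 5 zeros, 20 < 24 and it does
   not.  */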
4408 mostly_zeros_p (exp)
4411 if (TREE_CODE (exp) == CONSTRUCTOR)
4413 int elts = 0, zeros = 0;
4414 tree elt = CONSTRUCTOR_ELTS (exp);
4415 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4417 /* If there are no ranges of true bits, it is all zero. */
4418 return elt == NULL_TREE;
4420 for (; elt; elt = TREE_CHAIN (elt))
4422 /* We do not handle the case where the index is a RANGE_EXPR,
4423 so the statistic will be somewhat inaccurate.
4424 We do make a more accurate count in store_constructor itself,
4425 and since this function is only used for nested array elements,
4426 this should be close enough. */
4427 if (mostly_zeros_p (TREE_VALUE (elt)))
4432 return 4 * zeros >= 3 * elts;
4435 return is_zeros_p (exp);
4438 /* Helper function for store_constructor.
4439 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4440 TYPE is the type of the CONSTRUCTOR, not the element type.
4441 CLEARED is as for store_constructor.
4442 ALIAS_SET is the alias set to use for any stores.
4444 This provides a recursive shortcut back to store_constructor when it isn't
4445 necessary to go through store_field. This is so that we can pass through
4446 the cleared field to let store_constructor know that we may not have to
4447 clear a substructure if the outer structure has already been cleared. */
4450 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4453 unsigned HOST_WIDE_INT bitsize;
4454 HOST_WIDE_INT bitpos;
4455 enum machine_mode mode;
4460 if (TREE_CODE (exp) == CONSTRUCTOR
4461 && bitpos % BITS_PER_UNIT == 0
4462 /* If we have a non-zero bitpos for a register target, then we just
4463 let store_field do the bitfield handling. This is unlikely to
4464 generate unnecessary clear instructions anyway. */
4465 && (bitpos == 0 || GET_CODE (target) == MEM))
4467 if (GET_CODE (target) == MEM)
4469 = adjust_address (target,
4470 GET_MODE (target) == BLKmode
4472 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4473 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4476 /* Update the alias set, if required. */
4477 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4478 && MEM_ALIAS_SET (target) != 0)
4480 target = copy_rtx (target);
4481 set_mem_alias_set (target, alias_set);
4484 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4487 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4491 /* Store the value of constructor EXP into the rtx TARGET.
4492 TARGET is either a REG or a MEM; we know it cannot conflict, since
4493 safe_from_p has been called.
4494 CLEARED is true if TARGET is known to have been zero'd.
4495 SIZE is the number of bytes of TARGET we are allowed to modify: this
4496 may not be the same as the size of EXP if we are assigning to a field
4497 which has been packed to exclude padding bits. */
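/* Illustrative sketch (hypothetical source): the initializer

     struct s { int a, b, c; } x = { 1 };

   arrives here with a single CONSTRUCTOR element for `a'; because the
   constructor has fewer elements than the type has fields, the code
   below clears all of TARGET first and then stores the 1.  */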
4500 store_constructor (exp, target, cleared, size)
4506 tree type = TREE_TYPE (exp);
4507 #ifdef WORD_REGISTER_OPERATIONS
4508 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4511 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4512 || TREE_CODE (type) == QUAL_UNION_TYPE)
4516 /* We either clear the aggregate or indicate the value is dead. */
4517 if ((TREE_CODE (type) == UNION_TYPE
4518 || TREE_CODE (type) == QUAL_UNION_TYPE)
4520 && ! CONSTRUCTOR_ELTS (exp))
4521 /* If the constructor is empty, clear the union. */
4523 clear_storage (target, expr_size (exp));
4527 /* If we are building a static constructor into a register,
4528 set the initial value as zero so we can fold the value into
4529 a constant. But if more than one register is involved,
4530 this probably loses. */
4531 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4532 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4534 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4538 /* If the constructor has fewer fields than the structure
4539 or if we are initializing the structure to mostly zeros,
4540 clear the whole structure first. Don't do this if TARGET is a
4541 register whose mode size isn't equal to SIZE since clear_storage
4542 can't handle this case. */
4543 else if (! cleared && size > 0
4544 && ((list_length (CONSTRUCTOR_ELTS (exp))
4545 != fields_length (type))
4546 || mostly_zeros_p (exp))
4547 && (GET_CODE (target) != REG
4548 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4551 clear_storage (target, GEN_INT (size));
4556 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4558 /* Store each element of the constructor into
4559 the corresponding field of TARGET. */
4561 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4563 tree field = TREE_PURPOSE (elt);
4564 tree value = TREE_VALUE (elt);
4565 enum machine_mode mode;
4566 HOST_WIDE_INT bitsize;
4567 HOST_WIDE_INT bitpos = 0;
4570 rtx to_rtx = target;
4572 /* Just ignore missing fields.
4573 We cleared the whole structure, above,
4574 if any fields are missing. */
4578 if (cleared && is_zeros_p (value))
4581 if (host_integerp (DECL_SIZE (field), 1))
4582 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4586 unsignedp = TREE_UNSIGNED (field);
4587 mode = DECL_MODE (field);
4588 if (DECL_BIT_FIELD (field))
4591 offset = DECL_FIELD_OFFSET (field);
4592 if (host_integerp (offset, 0)
4593 && host_integerp (bit_position (field), 0))
4595 bitpos = int_bit_position (field);
4599 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4605 if (contains_placeholder_p (offset))
4606 offset = build (WITH_RECORD_EXPR, sizetype,
4607 offset, make_tree (TREE_TYPE (exp), target));
4609 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4610 if (GET_CODE (to_rtx) != MEM)
4613 #ifdef POINTERS_EXTEND_UNSIGNED
4614 if (GET_MODE (offset_rtx) != Pmode)
4615 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4617 if (GET_MODE (offset_rtx) != ptr_mode)
4618 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4621 to_rtx = offset_address (to_rtx, offset_rtx,
4622 highest_pow2_factor (offset));
4625 if (TREE_READONLY (field))
4627 if (GET_CODE (to_rtx) == MEM)
4628 to_rtx = copy_rtx (to_rtx);
4630 RTX_UNCHANGING_P (to_rtx) = 1;
4633 #ifdef WORD_REGISTER_OPERATIONS
4634 /* If this initializes a field that is smaller than a word, at the
4635 start of a word, try to widen it to a full word.
4636 This special case allows us to output C++ member function
4637 initializations in a form that the optimizers can understand. */
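/* Hypothetical example, assuming a 32-bit word: a `short' member at
   bit position 0 of a struct held in a register.  Instead of a 16-bit
   bit-field insertion, the constant is widened to BITS_PER_WORD (and
   pre-shifted left by 32 - 16 == 16 bits on big-endian targets) so
   that a plain full-word store suffices.  */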
4638 if (GET_CODE (target) == REG
4639 && bitsize < BITS_PER_WORD
4640 && bitpos % BITS_PER_WORD == 0
4641 && GET_MODE_CLASS (mode) == MODE_INT
4642 && TREE_CODE (value) == INTEGER_CST
4644 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4646 tree type = TREE_TYPE (value);
4648 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4650 type = (*lang_hooks.types.type_for_size)
4651 (BITS_PER_WORD, TREE_UNSIGNED (type));
4652 value = convert (type, value);
4655 if (BYTES_BIG_ENDIAN)
4657 = fold (build (LSHIFT_EXPR, type, value,
4658 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4659 bitsize = BITS_PER_WORD;
4664 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4665 && DECL_NONADDRESSABLE_P (field))
4667 to_rtx = copy_rtx (to_rtx);
4668 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4671 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4672 value, type, cleared,
4673 get_alias_set (TREE_TYPE (field)));
4676 else if (TREE_CODE (type) == ARRAY_TYPE
4677 || TREE_CODE (type) == VECTOR_TYPE)
4682 tree domain = TYPE_DOMAIN (type);
4683 tree elttype = TREE_TYPE (type);
4685 HOST_WIDE_INT minelt = 0;
4686 HOST_WIDE_INT maxelt = 0;
4688 /* Vectors are like arrays, but the domain is stored via an array type indirectly. */
4690 if (TREE_CODE (type) == VECTOR_TYPE)
4692 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4693 the same field as TYPE_DOMAIN, we are not guaranteed that it always will. */
4695 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4696 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4699 const_bounds_p = (TYPE_MIN_VALUE (domain)
4700 && TYPE_MAX_VALUE (domain)
4701 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4702 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4704 /* If we have constant bounds for the range of the type, get them. */
4707 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4708 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4711 /* If the constructor has fewer elements than the array,
4712 clear the whole array first. Similarly if this is
4713 a static constructor of a non-BLKmode object. */
4714 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4718 HOST_WIDE_INT count = 0, zero_count = 0;
4719 need_to_clear = ! const_bounds_p;
4721 /* This loop is a more accurate version of the loop in
4722 mostly_zeros_p (it handles RANGE_EXPR in an index).
4723 It is also needed to check for missing elements. */
4724 for (elt = CONSTRUCTOR_ELTS (exp);
4725 elt != NULL_TREE && ! need_to_clear;
4726 elt = TREE_CHAIN (elt))
4728 tree index = TREE_PURPOSE (elt);
4729 HOST_WIDE_INT this_node_count;
4731 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4733 tree lo_index = TREE_OPERAND (index, 0);
4734 tree hi_index = TREE_OPERAND (index, 1);
4736 if (! host_integerp (lo_index, 1)
4737 || ! host_integerp (hi_index, 1))
4743 this_node_count = (tree_low_cst (hi_index, 1)
4744 - tree_low_cst (lo_index, 1) + 1);
4747 this_node_count = 1;
4749 count += this_node_count;
4750 if (mostly_zeros_p (TREE_VALUE (elt)))
4751 zero_count += this_node_count;
4754 /* Clear the entire array first if there are any missing elements,
4755 or if the incidence of zero elements is >= 75%. */
4757 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4761 if (need_to_clear && size > 0)
4766 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4768 clear_storage (target, GEN_INT (size));
4772 else if (REG_P (target))
4773 /* Inform later passes that the old value is dead. */
4774 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4776 /* Store each element of the constructor into
4777 the corresponding element of TARGET, determined
4778 by counting the elements. */
4779 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4781 elt = TREE_CHAIN (elt), i++)
4783 enum machine_mode mode;
4784 HOST_WIDE_INT bitsize;
4785 HOST_WIDE_INT bitpos;
4787 tree value = TREE_VALUE (elt);
4788 tree index = TREE_PURPOSE (elt);
4789 rtx xtarget = target;
4791 if (cleared && is_zeros_p (value))
4794 unsignedp = TREE_UNSIGNED (elttype);
4795 mode = TYPE_MODE (elttype);
4796 if (mode == BLKmode)
4797 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4798 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4801 bitsize = GET_MODE_BITSIZE (mode);
4803 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4805 tree lo_index = TREE_OPERAND (index, 0);
4806 tree hi_index = TREE_OPERAND (index, 1);
4807 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4808 struct nesting *loop;
4809 HOST_WIDE_INT lo, hi, count;
4812 /* If the range is constant and "small", unroll the loop. */
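/* Hypothetical GNU C source that yields a RANGE_EXPR index:

     int a[6] = { [1 ... 3] = 7 };

   Here lo_index is 1 and hi_index is 3; for a small constant range
   like this the three element stores are emitted inline rather than
   wrapped in a runtime loop.  */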
4814 && host_integerp (lo_index, 0)
4815 && host_integerp (hi_index, 0)
4816 && (lo = tree_low_cst (lo_index, 0),
4817 hi = tree_low_cst (hi_index, 0),
4818 count = hi - lo + 1,
4819 (GET_CODE (target) != MEM
4821 || (host_integerp (TYPE_SIZE (elttype), 1)
4822 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4825 lo -= minelt; hi -= minelt;
4826 for (; lo <= hi; lo++)
4828 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4830 if (GET_CODE (target) == MEM
4831 && !MEM_KEEP_ALIAS_SET_P (target)
4832 && TREE_CODE (type) == ARRAY_TYPE
4833 && TYPE_NONALIASED_COMPONENT (type))
4835 target = copy_rtx (target);
4836 MEM_KEEP_ALIAS_SET_P (target) = 1;
4839 store_constructor_field
4840 (target, bitsize, bitpos, mode, value, type, cleared,
4841 get_alias_set (elttype));
4846 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4847 loop_top = gen_label_rtx ();
4848 loop_end = gen_label_rtx ();
4850 unsignedp = TREE_UNSIGNED (domain);
4852 index = build_decl (VAR_DECL, NULL_TREE, domain);
4855 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4857 SET_DECL_RTL (index, index_r);
4858 if (TREE_CODE (value) == SAVE_EXPR
4859 && SAVE_EXPR_RTL (value) == 0)
4861 /* Make sure value gets expanded once before the loop. */
4863 expand_expr (value, const0_rtx, VOIDmode, 0);
4866 store_expr (lo_index, index_r, 0);
4867 loop = expand_start_loop (0);
4869 /* Assign value to element index. */
4871 = convert (ssizetype,
4872 fold (build (MINUS_EXPR, TREE_TYPE (index),
4873 index, TYPE_MIN_VALUE (domain))));
4874 position = size_binop (MULT_EXPR, position,
4876 TYPE_SIZE_UNIT (elttype)));
4878 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4879 xtarget = offset_address (target, pos_rtx,
4880 highest_pow2_factor (position));
4881 xtarget = adjust_address (xtarget, mode, 0);
4882 if (TREE_CODE (value) == CONSTRUCTOR)
4883 store_constructor (value, xtarget, cleared,
4884 bitsize / BITS_PER_UNIT);
4886 store_expr (value, xtarget, 0);
4888 expand_exit_loop_if_false (loop,
4889 build (LT_EXPR, integer_type_node,
4892 expand_increment (build (PREINCREMENT_EXPR,
4894 index, integer_one_node), 0, 0);
4896 emit_label (loop_end);
4899 else if ((index != 0 && ! host_integerp (index, 0))
4900 || ! host_integerp (TYPE_SIZE (elttype), 1))
4905 index = ssize_int (1);
4908 index = convert (ssizetype,
4909 fold (build (MINUS_EXPR, index,
4910 TYPE_MIN_VALUE (domain))));
4912 position = size_binop (MULT_EXPR, index,
4914 TYPE_SIZE_UNIT (elttype)));
4915 xtarget = offset_address (target,
4916 expand_expr (position, 0, VOIDmode, 0),
4917 highest_pow2_factor (position));
4918 xtarget = adjust_address (xtarget, mode, 0);
4919 store_expr (value, xtarget, 0);
4924 bitpos = ((tree_low_cst (index, 0) - minelt)
4925 * tree_low_cst (TYPE_SIZE (elttype), 1));
4927 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4929 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4930 && TREE_CODE (type) == ARRAY_TYPE
4931 && TYPE_NONALIASED_COMPONENT (type))
4933 target = copy_rtx (target);
4934 MEM_KEEP_ALIAS_SET_P (target) = 1;
4937 store_constructor_field (target, bitsize, bitpos, mode, value,
4938 type, cleared, get_alias_set (elttype));
4944 /* Set constructor assignments. */
4945 else if (TREE_CODE (type) == SET_TYPE)
4947 tree elt = CONSTRUCTOR_ELTS (exp);
4948 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4949 tree domain = TYPE_DOMAIN (type);
4950 tree domain_min, domain_max, bitlength;
4952 /* The default implementation strategy is to extract the constant
4953 parts of the constructor, use that to initialize the target,
4954 and then "or" in whatever non-constant ranges we need in addition.
4956 If a large set is all zero or all ones, it is
4957 probably better to set it using memset (if available) or bzero.
4958 Also, if a large set has just a single range, it may also be
4959 better to first clear the set (using
4960 bzero/memset) and then set the bits we want. */
4962 /* Check for all zeros. */
4963 if (elt == NULL_TREE && size > 0)
4966 clear_storage (target, GEN_INT (size));
4970 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4971 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4972 bitlength = size_binop (PLUS_EXPR,
4973 size_diffop (domain_max, domain_min),
4976 nbits = tree_low_cst (bitlength, 1);
4978 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4979 are "complicated" (more than one range), initialize (the
4980 constant parts) by copying from a constant. */
4981 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4982 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4984 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4985 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4986 char *bit_buffer = (char *) alloca (nbits);
4987 HOST_WIDE_INT word = 0;
4988 unsigned int bit_pos = 0;
4989 unsigned int ibit = 0;
4990 unsigned int offset = 0; /* In bytes from beginning of set. */
4992 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4995 if (bit_buffer[ibit])
4997 if (BYTES_BIG_ENDIAN)
4998 word |= (1 << (set_word_size - 1 - bit_pos));
5000 word |= 1 << bit_pos;
5004 if (bit_pos >= set_word_size || ibit == nbits)
5006 if (word != 0 || ! cleared)
5008 rtx datum = GEN_INT (word);
5011 /* The assumption here is that it is safe to use
5012 XEXP if the set is multi-word, but not if
5013 it's single-word. */
5014 if (GET_CODE (target) == MEM)
5015 to_rtx = adjust_address (target, mode, offset);
5016 else if (offset == 0)
5020 emit_move_insn (to_rtx, datum);
5027 offset += set_word_size / BITS_PER_UNIT;
5032 /* Don't bother clearing storage if the set is all ones. */
5033 if (TREE_CHAIN (elt) != NULL_TREE
5034 || (TREE_PURPOSE (elt) == NULL_TREE
5036 : ( ! host_integerp (TREE_VALUE (elt), 0)
5037 || ! host_integerp (TREE_PURPOSE (elt), 0)
5038 || (tree_low_cst (TREE_VALUE (elt), 0)
5039 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5040 != (HOST_WIDE_INT) nbits))))
5041 clear_storage (target, expr_size (exp));
5043 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5045 /* Start of range of element or NULL. */
5046 tree startbit = TREE_PURPOSE (elt);
5047 /* End of range of element, or element value. */
5048 tree endbit = TREE_VALUE (elt);
5049 #ifdef TARGET_MEM_FUNCTIONS
5050 HOST_WIDE_INT startb, endb;
5052 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5054 bitlength_rtx = expand_expr (bitlength,
5055 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5057 /* Handle non-range tuple element like [ expr ]. */
5058 if (startbit == NULL_TREE)
5060 startbit = save_expr (endbit);
5064 startbit = convert (sizetype, startbit);
5065 endbit = convert (sizetype, endbit);
5066 if (! integer_zerop (domain_min))
5068 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5069 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5071 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5072 EXPAND_CONST_ADDRESS);
5073 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5074 EXPAND_CONST_ADDRESS);
5080 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5081 (GET_MODE (target), 0),
5084 emit_move_insn (targetx, target);
5087 else if (GET_CODE (target) == MEM)
5092 #ifdef TARGET_MEM_FUNCTIONS
5093 /* Optimization: If startbit and endbit are
5094 constants divisible by BITS_PER_UNIT,
5095 call memset instead. */
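/* Worked example, assuming BITS_PER_UNIT == 8: the range [16, 31]
   gives startb == 16 and endb == 32, both multiples of 8, so the two
   bytes starting at offset 2 are filled by a single memset of
   (32 - 16) / 8 == 2 bytes.  */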
5096 if (TREE_CODE (startbit) == INTEGER_CST
5097 && TREE_CODE (endbit) == INTEGER_CST
5098 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5099 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5101 emit_library_call (memset_libfunc, LCT_NORMAL,
5103 plus_constant (XEXP (targetx, 0),
5104 startb / BITS_PER_UNIT),
5106 constm1_rtx, TYPE_MODE (integer_type_node),
5107 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5108 TYPE_MODE (sizetype));
5112 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5113 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5114 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5115 startbit_rtx, TYPE_MODE (sizetype),
5116 endbit_rtx, TYPE_MODE (sizetype));
5119 emit_move_insn (target, targetx);
5127 /* Store the value of EXP (an expression tree)
5128 into a subfield of TARGET which has mode MODE and occupies
5129 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5130 If MODE is VOIDmode, it means that we are storing into a bit-field.
5132 If VALUE_MODE is VOIDmode, return nothing in particular.
5133 UNSIGNEDP is not used in this case.
5135 Otherwise, return an rtx for the value stored. This rtx
5136 has mode VALUE_MODE if that is convenient to do.
5137 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5139 TYPE is the type of the underlying object,
5141 ALIAS_SET is the alias set for the destination. This value will
5142 (in general) be different from that for TARGET, since TARGET is a
5143 reference to the containing structure. */
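/* Illustrative sketch (hypothetical source): assigning to `x.f' in

     struct { unsigned int f : 3; } x;

   reaches store_field with BITSIZE == 3, BITPOS == 0 and MODE ==
   VOIDmode, which forces the bit-field path below.  */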
5146 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5149 HOST_WIDE_INT bitsize;
5150 HOST_WIDE_INT bitpos;
5151 enum machine_mode mode;
5153 enum machine_mode value_mode;
5158 HOST_WIDE_INT width_mask = 0;
5160 if (TREE_CODE (exp) == ERROR_MARK)
5163 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5166 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5167 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5168 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
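/* Worked example: for a 5-bit field, WIDTH_MASK becomes
   ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f, a mask of the field's
   low-order bits, used when refetching the stored value below.  */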
5170 /* If we are storing into an unaligned field of an aligned union that is
5171 in a register, we may have the mode of TARGET being an integer mode but
5172 MODE == BLKmode. In that case, get an aligned object whose size and
5173 alignment are the same as TARGET and store TARGET into it (we can avoid
5174 the store if the field being stored is the entire width of TARGET). Then
5175 call ourselves recursively to store the field into a BLKmode version of
5176 that object. Finally, load from the object into TARGET. This is not
5177 very efficient in general, but should only be slightly more expensive
5178 than the otherwise-required unaligned accesses. Perhaps this can be
5179 cleaned up later. */
5182 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5186 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5188 rtx blk_object = adjust_address (object, BLKmode, 0);
5190 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5191 emit_move_insn (object, target);
5193 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5196 emit_move_insn (target, object);
5198 /* We want to return the BLKmode version of the data. */
5202 if (GET_CODE (target) == CONCAT)
5204 /* We're storing into a struct containing a single __complex. */
5208 return store_expr (exp, target, 0);
5211 /* If the structure is in a register or if the component
5212 is a bit field, we cannot use addressing to access it.
5213 Use bit-field techniques or SUBREG to store in it. */
5215 if (mode == VOIDmode
5216 || (mode != BLKmode && ! direct_store[(int) mode]
5217 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5218 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5219 || GET_CODE (target) == REG
5220 || GET_CODE (target) == SUBREG
5221 /* If the field isn't aligned enough to store as an ordinary memref,
5222 store it as a bit field. */
5223 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5224 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5225 || bitpos % GET_MODE_ALIGNMENT (mode)))
5226 /* If the RHS and field are a constant size and the size of the
5227 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5230 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5231 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5233 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5235 /* If BITSIZE is narrower than the size of the type of EXP
5236 we will be narrowing TEMP. Normally, what's wanted are the
5237 low-order bits. However, if EXP's type is a record and this is a
5238 big-endian machine, we want the upper BITSIZE bits. */
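/* Worked example: storing a 32-bit TEMP into a 12-bit record field on
   a big-endian target shifts TEMP right by 32 - 12 == 20 bits, so the
   upper twelve bits of the value end up in the field.  */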
5239 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5240 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5241 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5242 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5243 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5247 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5249 if (mode != VOIDmode && mode != BLKmode
5250 && mode != TYPE_MODE (TREE_TYPE (exp)))
5251 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5253 /* If the modes of TARGET and TEMP are both BLKmode, both
5254 must be in memory and BITPOS must be aligned on a byte
5255 boundary. If so, we simply do a block copy. */
5256 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5258 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5259 || bitpos % BITS_PER_UNIT != 0)
5262 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5263 emit_block_move (target, temp,
5264 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5267 return value_mode == VOIDmode ? const0_rtx : target;
5270 /* Store the value in the bitfield. */
5271 store_bit_field (target, bitsize, bitpos, mode, temp,
5272 int_size_in_bytes (type));
5274 if (value_mode != VOIDmode)
5276 /* The caller wants an rtx for the value.
5277 If possible, avoid refetching from the bitfield itself. */
5279 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5282 enum machine_mode tmode;
5284 tmode = GET_MODE (temp);
5285 if (tmode == VOIDmode)
5289 return expand_and (tmode, temp,
5290 gen_int_mode (width_mask, tmode),
5293 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5294 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5295 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5298 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5299 NULL_RTX, value_mode, VOIDmode,
5300 int_size_in_bytes (type));
5306 rtx addr = XEXP (target, 0);
5307 rtx to_rtx = target;
5309 /* If a value is wanted, it must be the lhs;
5310 so make the address stable for multiple use. */
5312 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5313 && ! CONSTANT_ADDRESS_P (addr)
5314 /* A frame-pointer reference is already stable. */
5315 && ! (GET_CODE (addr) == PLUS
5316 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5317 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5318 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5319 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5321 /* Now build a reference to just the desired component. */
5323 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5325 if (to_rtx == target)
5326 to_rtx = copy_rtx (to_rtx);
5328 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5329 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5330 set_mem_alias_set (to_rtx, alias_set);
5332 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5336 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5337 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5338 codes and find the ultimate containing object, which we return.
5340 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5341 bit position, and *PUNSIGNEDP to the signedness of the field.
5342 If the position of the field is variable, we store a tree
5343 giving the variable offset (in units) in *POFFSET.
5344 This offset is in addition to the bit position.
5345 If the position is not variable, we store 0 in *POFFSET.
5347 If any of the extraction expressions is volatile,
5348 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5350 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5351 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5354 If the field describes a variable-sized object, *PMODE is set to
5355 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5356 this case, but the address of the object can be found. */
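/* Illustrative sketch (hypothetical source, assuming a 32-bit int):
   for a reference to `x.a[3]' in

     struct s { int a[4]; short b; } x;

   this function returns `x' with *PBITSIZE == 32, *PBITPOS == 96,
   *POFFSET == 0, and *PMODE the mode of `int'.  */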
5359 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5360 punsignedp, pvolatilep)
5362 HOST_WIDE_INT *pbitsize;
5363 HOST_WIDE_INT *pbitpos;
5365 enum machine_mode *pmode;
5370 enum machine_mode mode = VOIDmode;
5371 tree offset = size_zero_node;
5372 tree bit_offset = bitsize_zero_node;
5373 tree placeholder_ptr = 0;
5376 /* First get the mode, signedness, and size. We do this from just the
5377 outermost expression. */
5378 if (TREE_CODE (exp) == COMPONENT_REF)
5380 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5381 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5382 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5384 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5386 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5388 size_tree = TREE_OPERAND (exp, 1);
5389 *punsignedp = TREE_UNSIGNED (exp);
5393 mode = TYPE_MODE (TREE_TYPE (exp));
5394 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5396 if (mode == BLKmode)
5397 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5399 *pbitsize = GET_MODE_BITSIZE (mode);
5404 if (! host_integerp (size_tree, 1))
5405 mode = BLKmode, *pbitsize = -1;
5407 *pbitsize = tree_low_cst (size_tree, 1);
5410 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5411 and find the ultimate containing object. */
5414 if (TREE_CODE (exp) == BIT_FIELD_REF)
5415 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5416 else if (TREE_CODE (exp) == COMPONENT_REF)
5418 tree field = TREE_OPERAND (exp, 1);
5419 tree this_offset = DECL_FIELD_OFFSET (field);
5421 /* If this field hasn't been filled in yet, don't go
5422 past it. This should only happen when folding expressions
5423 made during type construction. */
5424 if (this_offset == 0)
5426 else if (! TREE_CONSTANT (this_offset)
5427 && contains_placeholder_p (this_offset))
5428 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5430 offset = size_binop (PLUS_EXPR, offset, this_offset);
5431 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5432 DECL_FIELD_BIT_OFFSET (field));
5434 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5437 else if (TREE_CODE (exp) == ARRAY_REF
5438 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5440 tree index = TREE_OPERAND (exp, 1);
5441 tree array = TREE_OPERAND (exp, 0);
5442 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5443 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5444 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5446 /* We assume all arrays have sizes that are a multiple of a byte.
5447 First subtract the lower bound, if any, in the type of the
5448 index, then convert to sizetype and multiply by the size of the element. */
5450 if (low_bound != 0 && ! integer_zerop (low_bound))
5451 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5454 /* If the index has a self-referential type, pass it to a
5455 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5456 component to one. */
5457 if (! TREE_CONSTANT (index)
5458 && contains_placeholder_p (index))
5459 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5460 if (! TREE_CONSTANT (unit_size)
5461 && contains_placeholder_p (unit_size))
5462 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5464 offset = size_binop (PLUS_EXPR, offset,
5465 size_binop (MULT_EXPR,
5466 convert (sizetype, index),
5470 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5472 tree new = find_placeholder (exp, &placeholder_ptr);
5474 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5475 We might have been called from tree optimization where we
5476 haven't set up an object yet. */
5484 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5485 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5486 && ! ((TREE_CODE (exp) == NOP_EXPR
5487 || TREE_CODE (exp) == CONVERT_EXPR)
5488 && (TYPE_MODE (TREE_TYPE (exp))
5489 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5492 /* If any reference in the chain is volatile, the effect is volatile. */
5493 if (TREE_THIS_VOLATILE (exp))
5496 exp = TREE_OPERAND (exp, 0);
5499 /* If OFFSET is constant, see if we can return the whole thing as a
5500 constant bit position. Otherwise, split it up. */
5501 if (host_integerp (offset, 0)
5502 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5504 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5505 && host_integerp (tem, 0))
5506 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5508 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5514 /* Return 1 if T is an expression that get_inner_reference handles. */
5517 handled_component_p (t)
5520 switch (TREE_CODE (t))
5525 case ARRAY_RANGE_REF:
5526 case NON_LVALUE_EXPR:
5527 case VIEW_CONVERT_EXPR:
5532 return (TYPE_MODE (TREE_TYPE (t))
5533 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5540 /* Given an rtx VALUE that may contain additions and multiplications, return
5541 an equivalent value that just refers to a register, memory, or constant.
5542 This is done by generating instructions to perform the arithmetic and
5543 returning a pseudo-register containing the value.
5545 The returned value may be a REG, SUBREG, MEM or constant. */
5548 force_operand (value, target)
5552 /* Use subtarget as the target for operand 0 of a binary operation. */
5553 rtx subtarget = get_subtarget (target);
5554 enum rtx_code code = GET_CODE (value);
5556 /* Check for a PIC address load. */
5557 if ((code == PLUS || code == MINUS)
5558 && XEXP (value, 0) == pic_offset_table_rtx
5559 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5560 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5561 || GET_CODE (XEXP (value, 1)) == CONST))
5564 subtarget = gen_reg_rtx (GET_MODE (value));
5565 emit_move_insn (subtarget, value);
5569 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5572 target = gen_reg_rtx (GET_MODE (value));
5573 convert_move (target, force_operand (XEXP (value, 0), NULL),
5574 code == ZERO_EXTEND);
5578 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5580 op2 = XEXP (value, 1);
5581 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5583 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5586 op2 = negate_rtx (GET_MODE (value), op2);
5589 /* Check for an addition with OP2 a constant integer and our first
5590 operand a PLUS of a virtual register and something else. In that
5591 case, we want to emit the sum of the virtual register and the
5592 constant first and then add the other value. This allows virtual
5593 register instantiation to simply modify the constant rather than
5594 creating another one around this addition. */
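/* Hypothetical example: for
     (plus (plus (reg virtual-stack-vars) (reg 70)) (const_int 4))
   we emit virtual-stack-vars + 4 first, so that instantiation can
   fold its own displacement into that constant, and only then add
   reg 70.  */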
5595 if (code == PLUS && GET_CODE (op2) == CONST_INT
5596 && GET_CODE (XEXP (value, 0)) == PLUS
5597 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5598 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5599 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5601 rtx temp = expand_simple_binop (GET_MODE (value), code,
5602 XEXP (XEXP (value, 0), 0), op2,
5603 subtarget, 0, OPTAB_LIB_WIDEN);
5604 return expand_simple_binop (GET_MODE (value), code, temp,
5605 force_operand (XEXP (XEXP (value,
5607 target, 0, OPTAB_LIB_WIDEN);
5610 op1 = force_operand (XEXP (value, 0), subtarget);
5611 op2 = force_operand (op2, NULL_RTX);
5615 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5617 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5618 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5619 target, 1, OPTAB_LIB_WIDEN);
5621 return expand_divmod (0,
5622 FLOAT_MODE_P (GET_MODE (value))
5623 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5624 GET_MODE (value), op1, op2, target, 0);
5627 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5631 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5635 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5639 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5640 target, 0, OPTAB_LIB_WIDEN);
5643 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5644 target, 1, OPTAB_LIB_WIDEN);
5647 if (GET_RTX_CLASS (code) == '1')
5649 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5650 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5653 #ifdef INSN_SCHEDULING
5654 /* On machines that have insn scheduling, we want all memory references to be
5655 explicit, so we need to deal with such paradoxical SUBREGs. */
5656 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5657 && (GET_MODE_SIZE (GET_MODE (value))
5658 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5660 = simplify_gen_subreg (GET_MODE (value),
5661 force_reg (GET_MODE (SUBREG_REG (value)),
5662 force_operand (SUBREG_REG (value),
5664 GET_MODE (SUBREG_REG (value)),
5665 SUBREG_BYTE (value));
5671 /* Subroutine of expand_expr: return nonzero iff there is no way that
5672 EXP can reference X, which is being modified. TOP_P is nonzero if this
5673 call is going to be used to determine whether we need a temporary
5674 for EXP, as opposed to a recursive call to this function.
5676 It is always safe for this routine to return zero since it merely
5677 searches for optimization opportunities. */
5680 safe_from_p (x, exp, top_p)
5687 static tree save_expr_list;
5690 /* If EXP has varying size, we MUST use a target since we currently
5691 have no way of allocating temporaries of variable size
5692 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5693 So we assume here that something at a higher level has prevented a
5694 clash. This is somewhat bogus, but the best we can do. Only
5695 do this when X is BLKmode and when we are at the top level. */
5696 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5697 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5698 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5699 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5700 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5702 && GET_MODE (x) == BLKmode)
5703 /* If X is in the outgoing argument area, it is always safe. */
5704 || (GET_CODE (x) == MEM
5705 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5706 || (GET_CODE (XEXP (x, 0)) == PLUS
5707 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5710 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5711 find the underlying pseudo. */
5712 if (GET_CODE (x) == SUBREG)
5715 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5719 /* A SAVE_EXPR might appear many times in the expression passed to the
5720 top-level safe_from_p call, and if it has a complex subexpression,
5721 examining it multiple times could result in a combinatorial explosion.
5722 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5723 with optimization took about 28 minutes to compile -- even though it was
5724 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5725 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5726 we have processed. Note that the only test of top_p was above. */
5735 rtn = safe_from_p (x, exp, 0);
5737 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5738 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5743 /* Now look at our tree code and possibly recurse. */
5744 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5747 exp_rtl = DECL_RTL_IF_SET (exp);
5754 if (TREE_CODE (exp) == TREE_LIST)
5755 return ((TREE_VALUE (exp) == 0
5756 || safe_from_p (x, TREE_VALUE (exp), 0))
5757 && (TREE_CHAIN (exp) == 0
5758 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5759 else if (TREE_CODE (exp) == ERROR_MARK)
5760 return 1; /* An already-visited SAVE_EXPR? */
5765 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5769 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5770 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5774 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5775 the expression. If it is set, we conflict iff we are that rtx or
5776 both are in memory. Otherwise, we check all operands of the
5777 expression recursively. */
5779 switch (TREE_CODE (exp))
5782 /* If the operand is static or we are static, we can't conflict.
5783 Likewise if we don't conflict with the operand at all. */
5784 if (staticp (TREE_OPERAND (exp, 0))
5785 || TREE_STATIC (exp)
5786 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5789 /* Otherwise, the only way this can conflict is if we are taking
5790 the address of a DECL whose address is part of X, which is very rare. */
5792 exp = TREE_OPERAND (exp, 0);
5795 if (!DECL_RTL_SET_P (exp)
5796 || GET_CODE (DECL_RTL (exp)) != MEM)
5799 exp_rtl = XEXP (DECL_RTL (exp), 0);
5804 if (GET_CODE (x) == MEM
5805 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5806 get_alias_set (exp)))
5811 /* Assume that the call will clobber all hard registers and all of memory. */
5813 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5814 || GET_CODE (x) == MEM)
5819 /* If a sequence exists, we would have to scan every instruction
5820 in the sequence to see if it was safe. This is probably not worthwhile. */
5822 if (RTL_EXPR_SEQUENCE (exp))
5825 exp_rtl = RTL_EXPR_RTL (exp);
5828 case WITH_CLEANUP_EXPR:
5829 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5832 case CLEANUP_POINT_EXPR:
5833 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5836 exp_rtl = SAVE_EXPR_RTL (exp);
5840 /* If we've already scanned this, don't do it again. Otherwise,
5841 show we've scanned it and record for clearing the flag if we're
5843 if (TREE_PRIVATE (exp))
5846 TREE_PRIVATE (exp) = 1;
5847 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5849 TREE_PRIVATE (exp) = 0;
5853 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5857 /* The only operand we look at is operand 1. The rest aren't
5858 part of the expression. */
5859 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5861 case METHOD_CALL_EXPR:
5862 /* This takes an rtx argument, but shouldn't appear here. */
5869 /* If we have an rtx, we do not need to scan our operands. */
5873 nops = first_rtl_op (TREE_CODE (exp));
5874 for (i = 0; i < nops; i++)
5875 if (TREE_OPERAND (exp, i) != 0
5876 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5879 /* If this is a language-specific tree code, it may require
5880 special handling. */
5881 if ((unsigned int) TREE_CODE (exp)
5882 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5883 && !(*lang_hooks.safe_from_p) (x, exp))
5887 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5891 if (GET_CODE (exp_rtl) == SUBREG)
5893 exp_rtl = SUBREG_REG (exp_rtl);
5894 if (GET_CODE (exp_rtl) == REG
5895 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5899 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5900 are memory and they conflict. */
5901 return ! (rtx_equal_p (x, exp_rtl)
5902 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5903 && true_dependence (exp_rtl, VOIDmode, x,
5904 rtx_addr_varies_p)));
5907 /* If we reach here, it is safe. */
5911 /* Subroutine of expand_expr: return rtx if EXP is a
5912 variable or parameter; else return 0. */
5919 switch (TREE_CODE (exp))
5923 return DECL_RTL (exp);
5929 #ifdef MAX_INTEGER_COMPUTATION_MODE
5932 check_max_integer_computation_mode (exp)
5935 enum tree_code code;
5936 enum machine_mode mode;
5938 /* Strip any NOPs that don't change the mode. */
5940 code = TREE_CODE (exp);
5942 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5943 if (code == NOP_EXPR
5944 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5947 /* First check the type of the overall operation. We need only look at
5948 unary, binary and relational operations. */
5949 if (TREE_CODE_CLASS (code) == '1'
5950 || TREE_CODE_CLASS (code) == '2'
5951 || TREE_CODE_CLASS (code) == '<')
5953 mode = TYPE_MODE (TREE_TYPE (exp));
5954 if (GET_MODE_CLASS (mode) == MODE_INT
5955 && mode > MAX_INTEGER_COMPUTATION_MODE)
5956 internal_error ("unsupported wide integer operation");
5959 /* Check operand of a unary op. */
5960 if (TREE_CODE_CLASS (code) == '1')
5962 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5963 if (GET_MODE_CLASS (mode) == MODE_INT
5964 && mode > MAX_INTEGER_COMPUTATION_MODE)
5965 internal_error ("unsupported wide integer operation");
5968 /* Check operands of a binary/comparison op. */
5969 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5971 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5972 if (GET_MODE_CLASS (mode) == MODE_INT
5973 && mode > MAX_INTEGER_COMPUTATION_MODE)
5974 internal_error ("unsupported wide integer operation");
5976 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5977 if (GET_MODE_CLASS (mode) == MODE_INT
5978 && mode > MAX_INTEGER_COMPUTATION_MODE)
5979 internal_error ("unsupported wide integer operation");
5984 /* Return the highest power of two that EXP is known to be a multiple of.
5985 This is used in updating alignment of MEMs in array references. */
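/* Worked examples, assuming nothing is known about the alignment of
   A and B: for the constant 24 the result is 8 (its lowest set bit);
   for A * 12 + B * 20 each term contributes a factor of 4, so the
   PLUS_EXPR case below returns MIN (4, 4) == 4.  */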
5987 static HOST_WIDE_INT
5988 highest_pow2_factor (exp)
5991 HOST_WIDE_INT c0, c1;
5993 switch (TREE_CODE (exp))
5996 /* We can find the lowest bit that's a one. If the low
5997 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5998 We need to handle this case since we can find it in a COND_EXPR,
5999 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6000 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
6002 if (TREE_CONSTANT_OVERFLOW (exp))
6003 return BIGGEST_ALIGNMENT;
6006 /* Note: tree_low_cst is intentionally not used here,
6007 we don't care about the upper bits. */
6008 c0 = TREE_INT_CST_LOW (exp);
6010 return c0 ? c0 : BIGGEST_ALIGNMENT;
6014 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6015 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6016 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6017 return MIN (c0, c1);
6020 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6021 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6024 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6026 if (integer_pow2p (TREE_OPERAND (exp, 1))
6027 && host_integerp (TREE_OPERAND (exp, 1), 1))
6029 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6030 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6031 return MAX (1, c0 / c1);
6035 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6036 case SAVE_EXPR: case WITH_RECORD_EXPR:
6037 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6040 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6043 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6044 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6045 return MIN (c0, c1);
6054 /* Similar, except that it is known that the expression must be a multiple
6055 of the alignment of TYPE. */
6057 static HOST_WIDE_INT
6058 highest_pow2_factor_for_type (type, exp)
6062 HOST_WIDE_INT type_align, factor;
6064 factor = highest_pow2_factor (exp);
6065 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6066 return MAX (factor, type_align);
6069 /* Return an object on the placeholder list that matches EXP, a
6070 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6071 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6072 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6073 is a location which initially points to a starting location in the
6074 placeholder list (zero means start of the list) and where a pointer into
6075 the placeholder list at which the object is found is placed. */
6078 find_placeholder (exp, plist)
6082 tree type = TREE_TYPE (exp);
6083 tree placeholder_expr;
6085 for (placeholder_expr
6086 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6087 placeholder_expr != 0;
6088 placeholder_expr = TREE_CHAIN (placeholder_expr))
6090 tree need_type = TYPE_MAIN_VARIANT (type);
6093 /* Find the outermost reference that is of the type we want. If none,
6094 see if any object has a type that is a pointer to the type we want. */
6096 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6097 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6098 || TREE_CODE (elt) == COND_EXPR)
6099 ? TREE_OPERAND (elt, 1)
6100 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6101 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6102 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6103 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6104 ? TREE_OPERAND (elt, 0) : 0))
6105 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6108 *plist = placeholder_expr;
6112 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6114 = ((TREE_CODE (elt) == COMPOUND_EXPR
6115 || TREE_CODE (elt) == COND_EXPR)
6116 ? TREE_OPERAND (elt, 1)
6117 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6118 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6119 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6120 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6121 ? TREE_OPERAND (elt, 0) : 0))
6122 if (POINTER_TYPE_P (TREE_TYPE (elt))
6123 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6127 *plist = placeholder_expr;
6128 return build1 (INDIRECT_REF, need_type, elt);
6135 /* expand_expr: generate code for computing expression EXP.
6136 An rtx for the computed value is returned. The value is never null.
6137 In the case of a void EXP, const0_rtx is returned.
6139 The value may be stored in TARGET if TARGET is nonzero.
6140 TARGET is just a suggestion; callers must assume that
6141 the rtx returned may not be the same as TARGET.
6143 If TARGET is CONST0_RTX, it means that the value will be ignored.
6145 If TMODE is not VOIDmode, it suggests generating the
6146 result in mode TMODE. But this is done only when convenient.
6147 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6148 TMODE is just a suggestion; callers must assume that
6149 the rtx returned may not have mode TMODE.
6151 Note that TARGET may have neither TMODE nor MODE. In that case, it
6152 probably will not be used.
6154 If MODIFIER is EXPAND_SUM then when EXP is an addition
6155 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6156 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6157 products as above, or REG or MEM, or constant.
6158 Ordinarily in such cases we would output mul or add instructions
6159 and then return a pseudo reg containing the sum.
6161 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6162 it also marks a label as absolutely required (it can't be dead).
6163 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6164 This is used for outputting expressions used in initializers.
6166 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6167 with a constant address even if that address is not normally legitimate.
6168 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
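/* Illustrative sketch (hypothetical): with MODIFIER == EXPAND_SUM,
   expanding `p + i * 4' may simply return
   (plus (reg P) (mult (reg I) (const_int 4))) instead of emitting the
   arithmetic, letting the caller fold the whole sum into an address.  */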
6171 expand_expr (exp, target, tmode, modifier)
6174 enum machine_mode tmode;
6175 enum expand_modifier modifier;
6178 tree type = TREE_TYPE (exp);
6179 int unsignedp = TREE_UNSIGNED (type);
6180 enum machine_mode mode;
6181 enum tree_code code = TREE_CODE (exp);
6183 rtx subtarget, original_target;
6187 /* Handle ERROR_MARK before anybody tries to access its type. */
6188 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6190 op0 = CONST0_RTX (tmode);
6196 mode = TYPE_MODE (type);
6197 /* Use subtarget as the target for operand 0 of a binary operation. */
6198 subtarget = get_subtarget (target);
6199 original_target = target;
6200 ignore = (target == const0_rtx
6201 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6202 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6203 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6204 && TREE_CODE (type) == VOID_TYPE));
6206 /* If we are going to ignore this result, we need only do something
6207 if there is a side-effect somewhere in the expression. If there
6208 is, short-circuit the most common cases here. Note that we must
6209 not call expand_expr with anything but const0_rtx in case this
6210 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6214 if (! TREE_SIDE_EFFECTS (exp))
6217 /* Ensure we reference a volatile object even if value is ignored, but
6218 don't do this if all we are doing is taking its address. */
6219 if (TREE_THIS_VOLATILE (exp)
6220 && TREE_CODE (exp) != FUNCTION_DECL
6221 && mode != VOIDmode && mode != BLKmode
6222 && modifier != EXPAND_CONST_ADDRESS)
6224 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6225 if (GET_CODE (temp) == MEM)
6226 temp = copy_to_reg (temp);
6230 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6231 || code == INDIRECT_REF || code == BUFFER_REF)
6232 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6235 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6236 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6238 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6239 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6242 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6243 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6244 /* If the second operand has no side effects, just evaluate the first. */
6246 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6248 else if (code == BIT_FIELD_REF)
6250 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6251 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6252 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6259 #ifdef MAX_INTEGER_COMPUTATION_MODE
6260 /* Only check stuff here if the mode we want is different from the mode
6261 of the expression; if it's the same, check_max_integer_computation_mode
6262 will handle it. Do we really need to check this stuff at all? */
6265 && GET_MODE (target) != mode
6266 && TREE_CODE (exp) != INTEGER_CST
6267 && TREE_CODE (exp) != PARM_DECL
6268 && TREE_CODE (exp) != ARRAY_REF
6269 && TREE_CODE (exp) != ARRAY_RANGE_REF
6270 && TREE_CODE (exp) != COMPONENT_REF
6271 && TREE_CODE (exp) != BIT_FIELD_REF
6272 && TREE_CODE (exp) != INDIRECT_REF
6273 && TREE_CODE (exp) != CALL_EXPR
6274 && TREE_CODE (exp) != VAR_DECL
6275 && TREE_CODE (exp) != RTL_EXPR)
6277 enum machine_mode mode = GET_MODE (target);
6279 if (GET_MODE_CLASS (mode) == MODE_INT
6280 && mode > MAX_INTEGER_COMPUTATION_MODE)
6281 internal_error ("unsupported wide integer operation");
6284 if (tmode != mode
6285 && TREE_CODE (exp) != INTEGER_CST
6286 && TREE_CODE (exp) != PARM_DECL
6287 && TREE_CODE (exp) != ARRAY_REF
6288 && TREE_CODE (exp) != ARRAY_RANGE_REF
6289 && TREE_CODE (exp) != COMPONENT_REF
6290 && TREE_CODE (exp) != BIT_FIELD_REF
6291 && TREE_CODE (exp) != INDIRECT_REF
6292 && TREE_CODE (exp) != VAR_DECL
6293 && TREE_CODE (exp) != CALL_EXPR
6294 && TREE_CODE (exp) != RTL_EXPR
6295 && GET_MODE_CLASS (tmode) == MODE_INT
6296 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6297 internal_error ("unsupported wide integer operation");
6299 check_max_integer_computation_mode (exp);
6302 /* If will do cse, generate all results into pseudo registers
6303 since 1) that allows cse to find more things
6304 and 2) otherwise cse could produce an insn the machine
6305 cannot support. An exception is a CONSTRUCTOR into a multi-word
6306 MEM: that's much more likely to be most efficient into the MEM. */
6308 if (! cse_not_expected && mode != BLKmode && target
6309 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6310 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6311 target = subtarget;
6312 
6313 switch (code)
6314 {
6315 case LABEL_DECL:
6316 {
6317 tree function = decl_function_context (exp);
6318 /* Handle using a label in a containing function. */
6319 if (function != current_function_decl
6320 && function != inline_function_decl && function != 0)
6322 struct function *p = find_function_data (function);
6323 p->expr->x_forced_labels
6324 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6325 p->expr->x_forced_labels);
6329 if (modifier == EXPAND_INITIALIZER)
6330 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6331 label_rtx (exp),
6332 forced_labels);
6333 
6335 temp = gen_rtx_MEM (FUNCTION_MODE,
6336 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6337 if (function != current_function_decl
6338 && function != inline_function_decl && function != 0)
6339 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
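/* Illustrative example (editor's sketch): with GNU computed gotos, a
   nested function that evaluates `&&lab' where `lab' belongs to the
   enclosing function lands here; the label is recorded in the enclosing
   function's forced_labels and the LABEL_REF is marked non-local so
   later passes treat the reference conservatively.  */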
6342 case PARM_DECL:
6344 if (DECL_RTL (exp) == 0)
6345 {
6346 error_with_decl (exp, "prior parameter's size depends on `%s'");
6347 return CONST0_RTX (mode);
6348 }
6350 /* ... fall through ... */
6352 case VAR_DECL:
6353 /* If a static var's type was incomplete when the decl was written,
6354 but the type is complete now, lay out the decl now. */
6355 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6356 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6358 rtx value = DECL_RTL_IF_SET (exp);
6360 layout_decl (exp, 0);
6362 /* If the RTL was already set, update its mode and memory
6363 attributes. */
6364 if (value != 0)
6365 {
6366 PUT_MODE (value, DECL_MODE (exp));
6367 SET_DECL_RTL (exp, 0);
6368 set_mem_attributes (value, exp, 1);
6369 SET_DECL_RTL (exp, value);
6370 }
6371 }
6372 
6373 /* ... fall through ... */
6375 case FUNCTION_DECL:
6376 case RESULT_DECL:
6377 if (DECL_RTL (exp) == 0)
6378 abort ();
6379 
6380 /* Ensure variable marked as used even if it doesn't go through
6381 a parser. If it hasn't been used yet, write out an external
6382 definition. */
6383 if (! TREE_USED (exp))
6384 {
6385 assemble_external (exp);
6386 TREE_USED (exp) = 1;
6387 }
6388 
6389 /* Show we haven't gotten RTL for this yet. */
6390 temp = 0;
6391 
6392 /* Handle variables inherited from containing functions. */
6393 context = decl_function_context (exp);
6395 /* We treat inline_function_decl as an alias for the current function
6396 because that is the inline function whose vars, types, etc.
6397 are being merged into the current function.
6398 See expand_inline_function. */
6400 if (context != 0 && context != current_function_decl
6401 && context != inline_function_decl
6402 /* If var is static, we don't need a static chain to access it. */
6403 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6404 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6408 /* Mark as non-local and addressable. */
6409 DECL_NONLOCAL (exp) = 1;
6410 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6411 abort ();
6412 (*lang_hooks.mark_addressable) (exp);
6413 if (GET_CODE (DECL_RTL (exp)) != MEM)
6414 abort ();
6415 addr = XEXP (DECL_RTL (exp), 0);
6416 if (GET_CODE (addr) == MEM)
6417 addr
6418 = replace_equiv_address (addr,
6419 fix_lexical_addr (XEXP (addr, 0), exp));
6420 else
6421 addr = fix_lexical_addr (addr, exp);
6422 
6423 temp = replace_equiv_address (DECL_RTL (exp), addr);
6426 /* This is the case of an array whose size is to be determined
6427 from its initializer, while the initializer is still being parsed.
6430 else if (GET_CODE (DECL_RTL (exp)) == MEM
6431 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6432 temp = validize_mem (DECL_RTL (exp));
6434 /* If DECL_RTL is memory, we are in the normal case and either
6435 the address is not valid or it is not a register and -fforce-addr
6436 is specified, get the address into a register. */
6438 else if (GET_CODE (DECL_RTL (exp)) == MEM
6439 && modifier != EXPAND_CONST_ADDRESS
6440 && modifier != EXPAND_SUM
6441 && modifier != EXPAND_INITIALIZER
6442 && (! memory_address_p (DECL_MODE (exp),
6443 XEXP (DECL_RTL (exp), 0))
6445 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6446 temp = replace_equiv_address (DECL_RTL (exp),
6447 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6449 /* If we got something, return it. But first, set the alignment
6450 if the address is a register. */
6453 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6454 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6459 /* If the mode of DECL_RTL does not match that of the decl, it
6460 must be a promoted value. We return a SUBREG of the wanted mode,
6461 but mark it so that we know that it was already extended. */
6463 if (GET_CODE (DECL_RTL (exp)) == REG
6464 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6466 /* Get the signedness used for this variable. Ensure we get the
6467 same mode we got when the variable was declared. */
6468 if (GET_MODE (DECL_RTL (exp))
6469 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6470 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6473 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6474 SUBREG_PROMOTED_VAR_P (temp) = 1;
6475 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6476 return temp;
6477 }
6478 
6479 return DECL_RTL (exp);
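/* Illustrative example of the promoted-SUBREG path above (editor's
   sketch, assuming a PROMOTE_MODE target that widens narrow values to
   SImode): a `short' variable then lives in an SImode pseudo, and when
   MODE is HImode we return (subreg:HI (reg:SI N) 0) with
   SUBREG_PROMOTED_VAR_P set, so callers know the upper bits already
   hold the extended value.  */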
6481 case INTEGER_CST:
6482 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6483 TREE_INT_CST_HIGH (exp), mode);
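/* Worked example (illustrative): for the C constant 5 of type `int',
   TREE_INT_CST_LOW is 5 and TREE_INT_CST_HIGH is 0, so
   immed_double_const yields (const_int 5); only constants too wide for
   a HOST_WIDE_INT come back as a CONST_DOUBLE.  */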
6485 /* ??? If overflow is set, fold will have done an incomplete job,
6486 which can result in (plus xx (const_int 0)), which can get
6487 simplified by validate_replace_rtx during virtual register
6488 instantiation, which can result in unrecognizable insns.
6489 Avoid this by forcing all overflows into registers. */
6490 if (TREE_CONSTANT_OVERFLOW (exp)
6491 && modifier != EXPAND_INITIALIZER)
6492 temp = force_reg (mode, temp);
6493 return temp;
6494 
6496 case CONST_DECL:
6497 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6499 case REAL_CST:
6500 /* If optimized, generate immediate CONST_DOUBLE
6501 which will be turned into memory by reload if necessary.
6503 We used to force a register so that loop.c could see it. But
6504 this does not allow gen_* patterns to perform optimizations with
6505 the constants. It also produces two insns in cases like "x = 1.0;".
6506 On most machines, floating-point constants are not permitted in
6507 many insns, so we'd end up copying it to a register in any case.
6509 Now, we do the copying in expand_binop, if appropriate. */
6510 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6511 TYPE_MODE (TREE_TYPE (exp)));
6513 case COMPLEX_CST:
6514 case STRING_CST:
6515 if (! TREE_CST_RTL (exp))
6516 output_constant_def (exp, 1);
6518 /* TREE_CST_RTL probably contains a constant address.
6519 On RISC machines where a constant address isn't valid,
6520 make some insns to get that address into a register. */
6521 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6522 && modifier != EXPAND_CONST_ADDRESS
6523 && modifier != EXPAND_INITIALIZER
6524 && modifier != EXPAND_SUM
6525 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6527 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6528 return replace_equiv_address (TREE_CST_RTL (exp),
6529 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6530 return TREE_CST_RTL (exp);
6532 case EXPR_WITH_FILE_LOCATION:
6533 {
6534 rtx to_return;
6535 const char *saved_input_filename = input_filename;
6536 int saved_lineno = lineno;
6537 input_filename = EXPR_WFL_FILENAME (exp);
6538 lineno = EXPR_WFL_LINENO (exp);
6539 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6540 emit_line_note (input_filename, lineno);
6541 /* Possibly avoid switching back and forth here. */
6542 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6543 input_filename = saved_input_filename;
6544 lineno = saved_lineno;
6545 return to_return;
6546 }
6547 
6548 case SAVE_EXPR:
6549 context = decl_function_context (exp);
6551 /* If this SAVE_EXPR was at global context, assume we are an
6552 initialization function and move it into our context. */
6553 if (context == 0)
6554 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6556 /* We treat inline_function_decl as an alias for the current function
6557 because that is the inline function whose vars, types, etc.
6558 are being merged into the current function.
6559 See expand_inline_function. */
6560 if (context == current_function_decl || context == inline_function_decl)
6561 context = 0;
6562 
6563 /* If this is non-local, handle it. */
6564 if (context)
6565 {
6566 /* The following call just exists to abort if the context is
6567 not of a containing function. */
6568 find_function_data (context);
6570 temp = SAVE_EXPR_RTL (exp);
6571 if (temp && GET_CODE (temp) == REG)
6573 put_var_into_stack (exp);
6574 temp = SAVE_EXPR_RTL (exp);
6576 if (temp == 0 || GET_CODE (temp) != MEM)
6577 abort ();
6578 return
6579 replace_equiv_address (temp,
6580 fix_lexical_addr (XEXP (temp, 0), exp));
6581 }
6582 if (SAVE_EXPR_RTL (exp) == 0)
6584 if (mode == VOIDmode)
6585 temp = const0_rtx;
6586 else
6587 temp = assign_temp (build_qualified_type (type,
6588 (TYPE_QUALS (type)
6589 | TYPE_QUAL_CONST)),
6590 3, 0, 0);
6591 
6592 SAVE_EXPR_RTL (exp) = temp;
6593 if (!optimize && GET_CODE (temp) == REG)
6594 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6595 save_expr_regs);
6596 
6597 /* If the mode of TEMP does not match that of the expression, it
6598 must be a promoted value. We pass store_expr a SUBREG of the
6599 wanted mode but mark it so that we know that it was already
6600 extended. Note that `unsignedp' was modified above in
6601 this case. */
6602 
6603 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6605 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6606 SUBREG_PROMOTED_VAR_P (temp) = 1;
6607 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6610 if (temp == const0_rtx)
6611 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6612 else
6613 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6614 
6615 TREE_USED (exp) = 1;
6618 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6619 must be a promoted value. We return a SUBREG of the wanted mode,
6620 but mark it so that we know that it was already extended. */
6622 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6623 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6625 /* Compute the signedness and make the proper SUBREG. */
6626 promote_mode (type, mode, &unsignedp, 0);
6627 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6628 SUBREG_PROMOTED_VAR_P (temp) = 1;
6629 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6633 return SAVE_EXPR_RTL (exp);
6635 case UNSAVE_EXPR:
6636 {
6638 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6639 TREE_OPERAND (exp, 0)
6640 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6641 return temp;
6642 }
6643 
6644 case PLACEHOLDER_EXPR:
6646 tree old_list = placeholder_list;
6647 tree placeholder_expr = 0;
6649 exp = find_placeholder (exp, &placeholder_expr);
6651 if (exp != 0)
6652 {
6653 placeholder_list = TREE_CHAIN (placeholder_expr);
6654 temp = expand_expr (exp, original_target, tmode, modifier);
6655 placeholder_list = old_list;
6656 return temp;
6657 }
6658 }
6659 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6660 abort ();
6661 
6662 case WITH_RECORD_EXPR:
6663 /* Put the object on the placeholder list, expand our first operand,
6664 and pop the list. */
6665 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6666 placeholder_list);
6667 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6668 modifier);
6669 placeholder_list = TREE_CHAIN (placeholder_list);
6670 return target;
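/* Illustrative example (editor's sketch): in Ada, the size of a record
   with a discriminant-dependent array field is a tree containing a
   PLACEHOLDER_EXPR.  WITH_RECORD_EXPR supplies the record object: it is
   pushed on placeholder_list above, so when the PLACEHOLDER_EXPR case
   later calls find_placeholder, the size expression is evaluated with
   the placeholder replaced by this particular object.  */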
6672 case GOTO_EXPR:
6673 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6674 expand_goto (TREE_OPERAND (exp, 0));
6675 else
6676 expand_computed_goto (TREE_OPERAND (exp, 0));
6677 return const0_rtx;
6678 
6679 case EXIT_EXPR:
6680 expand_exit_loop_if_false (NULL,
6681 invert_truthvalue (TREE_OPERAND (exp, 0)));
6682 return const0_rtx;
6683 
6684 case LABELED_BLOCK_EXPR:
6685 if (LABELED_BLOCK_BODY (exp))
6686 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6687 /* Should perhaps use expand_label, but this is simpler and safer. */
6688 do_pending_stack_adjust ();
6689 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6690 return const0_rtx;
6691 
6692 case EXIT_BLOCK_EXPR:
6693 if (EXIT_BLOCK_RETURN (exp))
6694 sorry ("returned value in block_exit_expr");
6695 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6696 return const0_rtx;
6697 
6698 case LOOP_EXPR:
6699 push_temp_slots ();
6700 expand_start_loop (1);
6701 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6702 expand_end_loop ();
6703 pop_temp_slots ();
6704 
6705 return const0_rtx;
6706 
6707 case BIND_EXPR:
6708 {
6709 tree vars = TREE_OPERAND (exp, 0);
6710 int vars_need_expansion = 0;
6712 /* Need to open a binding contour here because
6713 if there are any cleanups they must be contained here. */
6714 expand_start_bindings (2);
6716 /* Mark the corresponding BLOCK for output in its proper place. */
6717 if (TREE_OPERAND (exp, 2) != 0
6718 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6719 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6721 /* If VARS have not yet been expanded, expand them now. */
6722 while (vars)
6723 {
6724 if (!DECL_RTL_SET_P (vars))
6725 {
6726 vars_need_expansion = 1;
6727 expand_decl (vars);
6728 }
6729 expand_decl_init (vars);
6730 vars = TREE_CHAIN (vars);
6731 }
6733 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6735 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6736 
6737 return temp;
6738 }
6739 
6740 case RTL_EXPR:
6741 if (RTL_EXPR_SEQUENCE (exp))
6743 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6744 abort ();
6745 emit_insn (RTL_EXPR_SEQUENCE (exp));
6746 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6747 }
6748 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6749 free_temps_for_rtl_expr (exp);
6750 return RTL_EXPR_RTL (exp);
6752 case CONSTRUCTOR:
6753 /* If we don't need the result, just ensure we evaluate any
6754 subexpressions. */
6755 if (ignore)
6756 {
6757 tree elt;
6758 
6759 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6760 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6761 
6762 return const0_rtx;
6763 }
6764 
6765 /* All elts simple constants => refer to a constant in memory. But
6766 if this is a non-BLKmode mode, let it store a field at a time
6767 since that should make a CONST_INT or CONST_DOUBLE when we
6768 fold. Likewise, if we have a target we can use, it is best to
6769 store directly into the target unless the type is large enough
6770 that memcpy will be used. If we are making an initializer and
6771 all operands are constant, put it in memory as well.
6773 FIXME: Avoid trying to fill vector constructors piece-meal.
6774 Output them with output_constant_def below unless we're sure
6775 they're zeros. This should go away when vector initializers
6776 are treated like VECTOR_CST instead of arrays. */
6778 else if ((TREE_STATIC (exp)
6779 && ((mode == BLKmode
6780 && ! (target != 0 && safe_from_p (target, exp, 1)))
6781 || TREE_ADDRESSABLE (exp)
6782 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6783 && (! MOVE_BY_PIECES_P
6784 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6786 && ((TREE_CODE (type) == VECTOR_TYPE
6787 && !is_zeros_p (exp))
6788 || ! mostly_zeros_p (exp)))))
6789 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6791 rtx constructor = output_constant_def (exp, 1);
6793 if (modifier != EXPAND_CONST_ADDRESS
6794 && modifier != EXPAND_INITIALIZER
6795 && modifier != EXPAND_SUM)
6796 constructor = validize_mem (constructor);
6797 
6798 return constructor;
6799 }
6800 else
6801 {
6802 /* Handle calls that pass values in multiple non-contiguous
6803 locations. The Irix 6 ABI has examples of this. */
6804 if (target == 0 || ! safe_from_p (target, exp, 1)
6805 || GET_CODE (target) == PARALLEL)
6807 = assign_temp (build_qualified_type (type,
6809 | (TREE_READONLY (exp)
6810 * TYPE_QUAL_CONST))),
6811 0, TREE_ADDRESSABLE (exp), 1);
6813 store_constructor (exp, target, 0,
6814 int_size_in_bytes (TREE_TYPE (exp)));
6815 return target;
6816 }
6817 
6818 case INDIRECT_REF:
6819 {
6820 tree exp1 = TREE_OPERAND (exp, 0);
6822 tree string = string_constant (exp1, &index);
6824 /* Try to optimize reads from const strings. */
6825 if (string
6826 && TREE_CODE (string) == STRING_CST
6827 && TREE_CODE (index) == INTEGER_CST
6828 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6829 && GET_MODE_CLASS (mode) == MODE_INT
6830 && GET_MODE_SIZE (mode) == 1
6831 && modifier != EXPAND_WRITE)
6832 return gen_int_mode (TREE_STRING_POINTER (string)
6833 [TREE_INT_CST_LOW (index)], mode);
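/* Worked example (illustrative): for the C expression *("abc" + 1),
   STRING is the STRING_CST "abc" and INDEX is 1, so we return
   (const_int 98), the code of 'b', and no memory reference is
   emitted.  */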
6835 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6836 op0 = memory_address (mode, op0);
6837 temp = gen_rtx_MEM (mode, op0);
6838 set_mem_attributes (temp, exp, 0);
6840 /* If we are writing to this object and its type is a record with
6841 readonly fields, we must mark it as readonly so it will
6842 conflict with readonly references to those fields. */
6843 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6844 RTX_UNCHANGING_P (temp) = 1;
6846 return temp;
6847 }
6848 
6849 case ARRAY_REF:
6850 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6851 abort ();
6852 
6853 {
6854 tree array = TREE_OPERAND (exp, 0);
6855 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6856 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6857 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6860 /* Optimize the special-case of a zero lower bound.
6862 We convert the low_bound to sizetype to avoid some problems
6863 with constant folding. (E.g. suppose the lower bound is 1,
6864 and its mode is QI. Without the conversion, (ARRAY
6865 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6866 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6868 if (! integer_zerop (low_bound))
6869 index = size_diffop (index, convert (sizetype, low_bound));
6871 /* Fold an expression like: "foo"[2].
6872 This is not done in fold so it won't happen inside &.
6873 Don't fold if this is for wide characters since it's too
6874 difficult to do correctly and this is a very rare case. */
6876 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6877 && TREE_CODE (array) == STRING_CST
6878 && TREE_CODE (index) == INTEGER_CST
6879 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6880 && GET_MODE_CLASS (mode) == MODE_INT
6881 && GET_MODE_SIZE (mode) == 1)
6882 return gen_int_mode (TREE_STRING_POINTER (array)
6883 [TREE_INT_CST_LOW (index)], mode);
6885 /* If this is a constant index into a constant array,
6886 just get the value from the array. Handle both the cases when
6887 we have an explicit constructor and when our operand is a variable
6888 that was declared const. */
6890 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6891 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6892 && TREE_CODE (index) == INTEGER_CST
6893 && 0 > compare_tree_int (index,
6894 list_length (CONSTRUCTOR_ELTS
6895 (TREE_OPERAND (exp, 0)))))
6899 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6900 i = TREE_INT_CST_LOW (index);
6901 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6905 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6909 else if (optimize >= 1
6910 && modifier != EXPAND_CONST_ADDRESS
6911 && modifier != EXPAND_INITIALIZER
6912 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6913 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6914 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6916 if (TREE_CODE (index) == INTEGER_CST)
6918 tree init = DECL_INITIAL (array);
6920 if (TREE_CODE (init) == CONSTRUCTOR)
6924 for (elem = CONSTRUCTOR_ELTS (init);
6926 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6927 elem = TREE_CHAIN (elem))
6930 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6931 return expand_expr (fold (TREE_VALUE (elem)), target,
6934 else if (TREE_CODE (init) == STRING_CST
6935 && 0 > compare_tree_int (index,
6936 TREE_STRING_LENGTH (init)))
6938 tree type = TREE_TYPE (TREE_TYPE (init));
6939 enum machine_mode mode = TYPE_MODE (type);
6941 if (GET_MODE_CLASS (mode) == MODE_INT
6942 && GET_MODE_SIZE (mode) == 1)
6943 return gen_int_mode (TREE_STRING_POINTER (init)
6944 [TREE_INT_CST_LOW (index)], mode);
6953 case ARRAY_RANGE_REF:
6954 /* If the operand is a CONSTRUCTOR, we can just extract the
6955 appropriate field if it is present. Don't do this if we have
6956 already written the data since we want to refer to that copy
6957 and varasm.c assumes that's what we'll do. */
6958 if (code == COMPONENT_REF
6959 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6960 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6964 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6965 elt = TREE_CHAIN (elt))
6966 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6967 /* We can normally use the value of the field in the
6968 CONSTRUCTOR. However, if this is a bitfield in
6969 an integral mode that we can fit in a HOST_WIDE_INT,
6970 we must mask only the number of bits in the bitfield,
6971 since this is done implicitly by the constructor. If
6972 the bitfield does not meet either of those conditions,
6973 we can't do this optimization. */
6974 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6975 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6977 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6978 <= HOST_BITS_PER_WIDE_INT))))
6980 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6981 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6983 HOST_WIDE_INT bitsize
6984 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6985 enum machine_mode imode
6986 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6988 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6990 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6991 op0 = expand_and (imode, op0, op1, target);
6992 }
6993 else
6994 {
6995 tree count
6996 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6997 0);
6998 
6999 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7000 target, 0);
7001 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7002 target, 0);
7003 }
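/* Worked example (illustrative): extracting a signed 3-bit field whose
   CONSTRUCTOR value is 5 (binary 101) with imode == SImode uses
   count = 32 - 3 = 29, so (5 << 29) >> 29 (arithmetic shift) = -3 --
   the value correctly sign-extended from 3 bits, just as a store to
   and load from the bitfield would produce.  */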
7011 enum machine_mode mode1;
7012 HOST_WIDE_INT bitsize, bitpos;
7015 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7016 &mode1, &unsignedp, &volatilep);
7019 /* If we got back the original object, something is wrong. Perhaps
7020 we are evaluating an expression too early. In any event, don't
7021 infinitely recurse. */
7025 /* If TEM's type is a union of variable size, pass TARGET to the inner
7026 computation, since it will need a temporary and TARGET is known
7027 to suffice. This occurs in unchecked conversion in Ada. */
7031 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7032 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7034 ? target : NULL_RTX),
7036 (modifier == EXPAND_INITIALIZER
7037 || modifier == EXPAND_CONST_ADDRESS)
7038 ? modifier : EXPAND_NORMAL);
7040 /* If this is a constant, put it into a register if it is a
7041 legitimate constant and OFFSET is 0 and memory if it isn't. */
7042 if (CONSTANT_P (op0))
7044 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7045 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7047 op0 = force_reg (mode, op0);
7049 op0 = validize_mem (force_const_mem (mode, op0));
7054 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7056 /* If this object is in a register, put it into memory.
7057 This case can't occur in C, but can in Ada if we have
7058 unchecked conversion of an expression from a scalar type to
7059 an array or record type. */
7060 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7061 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7063 /* If the operand is a SAVE_EXPR, we can deal with this by
7064 forcing the SAVE_EXPR into memory. */
7065 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7067 put_var_into_stack (TREE_OPERAND (exp, 0));
7068 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7073 = build_qualified_type (TREE_TYPE (tem),
7074 (TYPE_QUALS (TREE_TYPE (tem))
7075 | TYPE_QUAL_CONST));
7076 rtx memloc = assign_temp (nt, 1, 1, 1);
7078 emit_move_insn (memloc, op0);
7083 if (GET_CODE (op0) != MEM)
7086 #ifdef POINTERS_EXTEND_UNSIGNED
7087 if (GET_MODE (offset_rtx) != Pmode)
7088 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7090 if (GET_MODE (offset_rtx) != ptr_mode)
7091 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7094 /* A constant address in OP0 can have VOIDmode, we must not try
7095 to call force_reg for that case. Avoid that case. */
7096 if (GET_CODE (op0) == MEM
7097 && GET_MODE (op0) == BLKmode
7098 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7100 && (bitpos % bitsize) == 0
7101 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7102 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7104 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7108 op0 = offset_address (op0, offset_rtx,
7109 highest_pow2_factor (offset));
7112 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7113 record its alignment as BIGGEST_ALIGNMENT. */
7114 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7115 && is_aligning_offset (offset, tem))
7116 set_mem_align (op0, BIGGEST_ALIGNMENT);
7118 /* Don't forget about volatility even if this is a bitfield. */
7119 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7121 if (op0 == orig_op0)
7122 op0 = copy_rtx (op0);
7124 MEM_VOLATILE_P (op0) = 1;
7127 /* The following code doesn't handle CONCAT.
7128 Assume only bitpos == 0 can be used for CONCAT, due to
7129 one-element arrays having the same mode as their element. */
7130 if (GET_CODE (op0) == CONCAT)
7132 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7137 /* In cases where an aligned union has an unaligned object
7138 as a field, we might be extracting a BLKmode value from
7139 an integer-mode (e.g., SImode) object. Handle this case
7140 by doing the extract into an object as wide as the field
7141 (which we know to be the width of a basic mode), then
7142 storing into memory, and changing the mode to BLKmode. */
7143 if (mode1 == VOIDmode
7144 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7145 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7146 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7147 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7148 && modifier != EXPAND_CONST_ADDRESS
7149 && modifier != EXPAND_INITIALIZER)
7150 /* If the field isn't aligned enough to fetch as a memref,
7151 fetch it as a bit field. */
7152 || (mode1 != BLKmode
7153 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7154 && ((TYPE_ALIGN (TREE_TYPE (tem))
7155 < GET_MODE_ALIGNMENT (mode))
7156 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7157 /* If the type and the field are a constant size and the
7158 size of the type isn't the same size as the bitfield,
7159 we must use bitfield operations. */
7161 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7163 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7166 enum machine_mode ext_mode = mode;
7168 if (ext_mode == BLKmode
7169 && ! (target != 0 && GET_CODE (op0) == MEM
7170 && GET_CODE (target) == MEM
7171 && bitpos % BITS_PER_UNIT == 0))
7172 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7174 if (ext_mode == BLKmode)
7176 /* In this case, BITPOS must start at a byte boundary and
7177 TARGET, if specified, must be a MEM. */
7178 if (GET_CODE (op0) != MEM
7179 || (target != 0 && GET_CODE (target) != MEM)
7180 || bitpos % BITS_PER_UNIT != 0)
7183 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7185 target = assign_temp (type, 0, 1, 1);
7187 emit_block_move (target, op0,
7188 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7194 op0 = validize_mem (op0);
7196 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7197 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7199 op0 = extract_bit_field (op0, bitsize, bitpos,
7200 unsignedp, target, ext_mode, ext_mode,
7201 int_size_in_bytes (TREE_TYPE (tem)));
7203 /* If the result is a record type and BITSIZE is narrower than
7204 the mode of OP0, an integral mode, and this is a big endian
7205 machine, we must put the field into the high-order bits. */
7206 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7207 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7208 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7209 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7210 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7214 if (mode == BLKmode)
7216 rtx new = assign_temp (build_qualified_type
7217 ((*lang_hooks.types.type_for_mode)
7219 TYPE_QUAL_CONST), 0, 1, 1);
7221 emit_move_insn (new, op0);
7222 op0 = copy_rtx (new);
7223 PUT_MODE (op0, BLKmode);
7224 set_mem_attributes (op0, exp, 1);
7230 /* If the result is BLKmode, use that to access the object
7232 if (mode == BLKmode)
7235 /* Get a reference to just this component. */
7236 if (modifier == EXPAND_CONST_ADDRESS
7237 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7238 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7240 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7242 if (op0 == orig_op0)
7243 op0 = copy_rtx (op0);
7245 set_mem_attributes (op0, exp, 0);
7246 if (GET_CODE (XEXP (op0, 0)) == REG)
7247 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7249 MEM_VOLATILE_P (op0) |= volatilep;
7250 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7251 || modifier == EXPAND_CONST_ADDRESS
7252 || modifier == EXPAND_INITIALIZER)
7254 else if (target == 0)
7255 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7257 convert_move (target, op0, unsignedp);
7263 rtx insn, before = get_last_insn (), vtbl_ref;
7265 /* Evaluate the interior expression. */
7266 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7269 /* Get or create an instruction off which to hang a note. */
7270 if (REG_P (subtarget))
7273 insn = get_last_insn ();
7276 if (! INSN_P (insn))
7277 insn = prev_nonnote_insn (insn);
7281 target = gen_reg_rtx (GET_MODE (subtarget));
7282 insn = emit_move_insn (target, subtarget);
7285 /* Collect the data for the note. */
7286 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7287 vtbl_ref = plus_constant (vtbl_ref,
7288 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7289 /* Discard the initial CONST that was added. */
7290 vtbl_ref = XEXP (vtbl_ref, 0);
7293 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7298 /* Intended for a reference to a buffer of a file-object in Pascal.
7299 But it's not certain that a special tree code will really be
7300 necessary for these. INDIRECT_REF might work for them. */
7301 abort ();
7302 
7303 case IN_EXPR:
7304 {
7306 /* Pascal set IN expression.
7307 
7308 Algorithm:
7309 rlo = set_low - (set_low%bits_per_word);
7310 the_word = set [ (index - rlo)/bits_per_word ];
7311 bit_index = index % bits_per_word;
7312 bitmask = 1 << bit_index;
7313 return !!(the_word & bitmask); */
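/* Worked instance of the algorithm above (illustrative, using 8-bit
   units to match the BITS_PER_UNIT arithmetic below): with set_low = 0
   and index = 13, the byte fetched is set[13/8] = set[1] and
   bit_index = 13 % 8 = 5, so the result is !!(set[1] & (1 << 5)).  */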
7315 tree set = TREE_OPERAND (exp, 0);
7316 tree index = TREE_OPERAND (exp, 1);
7317 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7318 tree set_type = TREE_TYPE (set);
7319 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7320 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7321 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7322 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7323 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7324 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7325 rtx setaddr = XEXP (setval, 0);
7326 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7328 rtx diff, quo, rem, addr, bit, result;
7330 /* If domain is empty, answer is no. Likewise if index is constant
7331 and out of bounds. */
7332 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7333 && TREE_CODE (set_low_bound) == INTEGER_CST
7334 && tree_int_cst_lt (set_high_bound, set_low_bound))
7335 || (TREE_CODE (index) == INTEGER_CST
7336 && TREE_CODE (set_low_bound) == INTEGER_CST
7337 && tree_int_cst_lt (index, set_low_bound))
7338 || (TREE_CODE (set_high_bound) == INTEGER_CST
7339 && TREE_CODE (index) == INTEGER_CST
7340 && tree_int_cst_lt (set_high_bound, index))))
7344 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7346 /* If we get here, we have to generate the code for both cases
7347 (in range and out of range). */
7349 op0 = gen_label_rtx ();
7350 op1 = gen_label_rtx ();
7352 if (! (GET_CODE (index_val) == CONST_INT
7353 && GET_CODE (lo_r) == CONST_INT))
7354 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7355 GET_MODE (index_val), iunsignedp, op1);
7357 if (! (GET_CODE (index_val) == CONST_INT
7358 && GET_CODE (hi_r) == CONST_INT))
7359 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7360 GET_MODE (index_val), iunsignedp, op1);
7362 /* Calculate the element number of bit zero in the first word
7363 of the set. */
7364 if (GET_CODE (lo_r) == CONST_INT)
7365 rlow = GEN_INT (INTVAL (lo_r)
7366 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7368 rlow = expand_binop (index_mode, and_optab, lo_r,
7369 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7370 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7372 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7373 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7375 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7376 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7377 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7378 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7380 addr = memory_address (byte_mode,
7381 expand_binop (index_mode, add_optab, diff,
7382 setaddr, NULL_RTX, iunsignedp,
7385 /* Extract the bit we want to examine. */
7386 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7387 gen_rtx_MEM (byte_mode, addr),
7388 make_tree (TREE_TYPE (index), rem),
7390 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7391 GET_MODE (target) == byte_mode ? target : 0,
7392 1, OPTAB_LIB_WIDEN);
7394 if (result != target)
7395 convert_move (target, result, 1);
7397 /* Output the code to handle the out-of-range case. */
7398 emit_jump (op0);
7399 emit_label (op1);
7400 emit_move_insn (target, const0_rtx);
7401 emit_label (op0);
7402 
7403 return target;
7404 }
7405 case WITH_CLEANUP_EXPR:
7406 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7408 WITH_CLEANUP_EXPR_RTL (exp)
7409 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7410 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7411 CLEANUP_EH_ONLY (exp));
7413 /* That's it for this cleanup. */
7414 TREE_OPERAND (exp, 1) = 0;
7416 return WITH_CLEANUP_EXPR_RTL (exp);
7418 case CLEANUP_POINT_EXPR:
7420 /* Start a new binding layer that will keep track of all cleanup
7421 actions to be performed. */
7422 expand_start_bindings (2);
7424 target_temp_slot_level = temp_slot_level;
7426 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7427 /* If we're going to use this value, load it up now. */
7428 if (! ignore)
7429 op0 = force_not_mem (op0);
7430 preserve_temp_slots (op0);
7431 expand_end_bindings (NULL_TREE, 0, 0);
7433 return op0;
7434 
7435 case CALL_EXPR:
7436 /* Check for a built-in function. */
7437 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7438 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7440 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7442 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7443 == BUILT_IN_FRONTEND)
7444 return (*lang_hooks.expand_expr)
7445 (exp, original_target, tmode, modifier);
7447 return expand_builtin (exp, target, subtarget, tmode, ignore);
7450 return expand_call (exp, target, ignore);
7452 case NON_LVALUE_EXPR:
7453 case NOP_EXPR:
7454 case CONVERT_EXPR:
7455 case REFERENCE_EXPR:
7456 if (TREE_OPERAND (exp, 0) == error_mark_node)
7457 return const0_rtx;
7458 
7459 if (TREE_CODE (type) == UNION_TYPE)
7461 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7463 /* If both input and output are BLKmode, this conversion isn't doing
7464 anything except possibly changing memory attributes. */
7465 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7467 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7470 result = copy_rtx (result);
7471 set_mem_attributes (result, exp, 0);
7476 target = assign_temp (type, 0, 1, 1);
7478 if (GET_CODE (target) == MEM)
7479 /* Store data into beginning of memory target. */
7480 store_expr (TREE_OPERAND (exp, 0),
7481 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7483 else if (GET_CODE (target) == REG)
7484 /* Store this field into a union of the proper type. */
7485 store_field (target,
7486 MIN ((int_size_in_bytes (TREE_TYPE
7487 (TREE_OPERAND (exp, 0)))
7489 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7490 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7491 VOIDmode, 0, type, 0);
7495 /* Return the entire union. */
7499 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7501 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7504 /* If the signedness of the conversion differs and OP0 is
7505 a promoted SUBREG, clear that indication since we now
7506 have to do the proper extension. */
7507 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7508 && GET_CODE (op0) == SUBREG)
7509 SUBREG_PROMOTED_VAR_P (op0) = 0;
7514 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7515 if (GET_MODE (op0) == mode)
7518 /* If OP0 is a constant, just convert it into the proper mode. */
7519 if (CONSTANT_P (op0))
7521 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7522 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7524 if (modifier == EXPAND_INITIALIZER)
7525 return simplify_gen_subreg (mode, op0, inner_mode,
7526 subreg_lowpart_offset (mode,
7529 return convert_modes (mode, inner_mode, op0,
7530 TREE_UNSIGNED (inner_type));
7533 if (modifier == EXPAND_INITIALIZER)
7534 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7538 convert_to_mode (mode, op0,
7539 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7541 convert_move (target, op0,
7542 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7545 case VIEW_CONVERT_EXPR:
7546 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7548 /* If the input and output modes are both the same, we are done.
7549 Otherwise, if neither mode is BLKmode and both are within a word, we
7550 can use gen_lowpart. If neither is true, make sure the operand is
7551 in memory and convert the MEM to the new mode. */
7552 if (TYPE_MODE (type) == GET_MODE (op0))
7554 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7555 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7556 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7557 op0 = gen_lowpart (TYPE_MODE (type), op0);
7558 else if (GET_CODE (op0) != MEM)
7560 /* If the operand is not a MEM, force it into memory. Since we
7561 are going to be changing the mode of the MEM, don't call
7562 force_const_mem for constants because we don't allow pool
7563 constants to change mode. */
7564 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7566 if (TREE_ADDRESSABLE (exp))
7569 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7571 = assign_stack_temp_for_type
7572 (TYPE_MODE (inner_type),
7573 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7575 emit_move_insn (target, op0);
7579 /* At this point, OP0 is in the correct mode. If the output type is such
7580 that the operand is known to be aligned, indicate that it is.
7581 Otherwise, we need only be concerned about alignment for non-BLKmode
7582 results. */
7583 if (GET_CODE (op0) == MEM)
7585 op0 = copy_rtx (op0);
7587 if (TYPE_ALIGN_OK (type))
7588 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7589 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7590 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7592 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7593 HOST_WIDE_INT temp_size
7594 = MAX (int_size_in_bytes (inner_type),
7595 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7596 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7597 temp_size, 0, type);
7598 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7600 if (TREE_ADDRESSABLE (exp))
7603 if (GET_MODE (op0) == BLKmode)
7604 emit_block_move (new_with_op0_mode, op0,
7605 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7607 emit_move_insn (new_with_op0_mode, op0);
7612 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7613 }
7614 
7615 return op0;
7616 
7617 case PLUS_EXPR:
7618 /* We come here from MINUS_EXPR when the second operand is a
7619 constant. */
7620 plus_expr:
7621 this_optab = ! unsignedp && flag_trapv
7622 && (GET_MODE_CLASS (mode) == MODE_INT)
7623 ? addv_optab : add_optab;
7625 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7626 something else, make sure we add the register to the constant and
7627 then to the other thing. This case can occur during strength
7628 reduction and doing it this way will produce better code if the
7629 frame pointer or argument pointer is eliminated.
7631 fold-const.c will ensure that the constant is always in the inner
7632 PLUS_EXPR, so the only case we need to do anything about is if
7633 sp, ap, or fp is our second argument, in which case we must swap
7634 the innermost first argument and our second argument. */
7636 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7637 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7638 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7639 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7640 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7641 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7643 tree t = TREE_OPERAND (exp, 1);
7645 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7646 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7649 /* If the result is to be ptr_mode and we are adding an integer to
7650 something, we might be forming a constant. So try to use
7651 plus_constant. If it produces a sum and we can't accept it,
7652 use force_operand. This allows P = &ARR[const] to generate
7653 efficient code on machines where a SYMBOL_REF is not a valid
7654 address.
7655 
7656 If this is an EXPAND_SUM call, always return the sum. */
7657 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7658 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7660 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7661 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7662 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7666 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7668 /* Use immed_double_const to ensure that the constant is
7669 truncated according to the mode of OP1, then sign extended
7670 to a HOST_WIDE_INT. Using the constant directly can result
7671 in non-canonical RTL in a 64x32 cross compile. */
7673 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7675 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7676 op1 = plus_constant (op1, INTVAL (constant_part));
7677 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7678 op1 = force_operand (op1, target);
7682 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7683 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7684 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7688 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7689 (modifier == EXPAND_INITIALIZER
7690 ? EXPAND_INITIALIZER : EXPAND_SUM));
7691 if (! CONSTANT_P (op0))
7693 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7694 VOIDmode, modifier);
7695 /* Don't go to both_summands if modifier
7696 says it's not right to return a PLUS. */
7697 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7701 /* Use immed_double_const to ensure that the constant is
7702 truncated according to the mode of OP1, then sign extended
7703 to a HOST_WIDE_INT. Using the constant directly can result
7704 in non-canonical RTL in a 64x32 cross compile. */
7706 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7708 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7709 op0 = plus_constant (op0, INTVAL (constant_part));
7710 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7711 op0 = force_operand (op0, target);
7716 /* No sense saving up arithmetic to be done
7717 if it's all in the wrong mode to form part of an address.
7718 And force_operand won't know whether to sign-extend or
7719 zero-extend. */
7720 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7721 || mode != ptr_mode)
7724 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7725 subtarget = 0;
7727 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7728 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7731 /* Make sure any term that's a sum with a constant comes last. */
7732 if (GET_CODE (op0) == PLUS
7733 && CONSTANT_P (XEXP (op0, 1)))
7739 /* If adding to a sum including a constant,
7740 associate it to put the constant outside. */
7741 if (GET_CODE (op1) == PLUS
7742 && CONSTANT_P (XEXP (op1, 1)))
7744 rtx constant_term = const0_rtx;
7746 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7749 /* Ensure that MULT comes first if there is one. */
7750 else if (GET_CODE (op0) == MULT)
7751 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7753 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7755 /* Let's also eliminate constants from op0 if possible. */
7756 op0 = eliminate_constant_term (op0, &constant_term);
7758 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7759 their sum should be a constant. Form it into OP1, since the
7760 result we want will then be OP0 + OP1. */
7762 temp = simplify_binary_operation (PLUS, mode, constant_term,
7767 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7770 /* Put a constant term last and put a multiplication first. */
7771 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7772 temp = op1, op1 = op0, op0 = temp;
7774 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7775 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
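/* Illustrative example (editor's sketch): expanding `&arr[10]' for
   4-byte elements with EXPAND_SUM yields operands (symbol_ref arr) and
   (const_int 40); the code above returns
   (plus (symbol_ref arr) (const_int 40)) so the caller can use the
   bare sum directly as (part of) an address.  */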
7778 /* For initializers, we are allowed to return a MINUS of two
7779 symbolic constants. Here we handle all cases when both operands
7781 /* Handle difference of two symbolic constants,
7782 for the sake of an initializer. */
7783 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7784 && really_constant_p (TREE_OPERAND (exp, 0))
7785 && really_constant_p (TREE_OPERAND (exp, 1)))
7787 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7789 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7792 /* If the last operand is a CONST_INT, use plus_constant of
7793 the negated constant. Else make the MINUS. */
7794 if (GET_CODE (op1) == CONST_INT)
7795 return plus_constant (op0, - INTVAL (op1));
7797 return gen_rtx_MINUS (mode, op0, op1);
7799 /* Convert A - const to A + (-const). */
7800 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7802 tree negated = fold (build1 (NEGATE_EXPR, type,
7803 TREE_OPERAND (exp, 1)));
7805 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7806 /* If we can't negate the constant in TYPE, leave it alone and
7807 expand_binop will negate it for us. We used to try to do it
7808 here in the signed version of TYPE, but that doesn't work
7809 on POINTER_TYPEs. */;
7810 else
7811 {
7812 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7813 goto plus_expr;
7814 }
7815 }
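/* Example (illustrative): `x - 7' is rebuilt here as `x + (-7)' and
   control jumps back to the PLUS_EXPR code, which can then fold the
   constant into addressing arithmetic via plus_constant.  */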
7816 this_optab = ! unsignedp && flag_trapv
7817 && (GET_MODE_CLASS(mode) == MODE_INT)
7818 ? subv_optab : sub_optab;
7822 /* If first operand is constant, swap them.
7823 Thus the following special case checks need only
7824 check the second operand. */
7825 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7827 tree t1 = TREE_OPERAND (exp, 0);
7828 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7829 TREE_OPERAND (exp, 1) = t1;
7832 /* Attempt to return something suitable for generating an
7833 indexed address, for machines that support that. */
7835 if (modifier == EXPAND_SUM && mode == ptr_mode
7836 && host_integerp (TREE_OPERAND (exp, 1), 0))
7838 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7841 /* If we knew for certain that this is arithmetic for an array
7842 reference, and we knew the bounds of the array, then we could
7843 apply the distributive law across (PLUS X C) for constant C.
7844 Without such knowledge, we risk overflowing the computation
7845 when both X and C are large, but X+C isn't. */
7846 /* ??? Could perhaps special-case EXP being unsigned and C being
7847 positive. In that case we are certain that X+C is no smaller
7848 than X and so the transformed expression will overflow iff the
7849 original would have. */
7851 if (GET_CODE (op0) != REG)
7852 op0 = force_operand (op0, NULL_RTX);
7853 if (GET_CODE (op0) != REG)
7854 op0 = copy_to_mode_reg (mode, op0);
7856 return
7857 gen_rtx_MULT (mode, op0,
7858 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
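/* Illustrative example (editor's sketch): for an index expression
   `i * 4' expanded with EXPAND_SUM, we return
   (mult (reg i) (const_int 4)) unreduced, so a caller building an
   address can fold it into an indexed addressing mode where the
   machine has one.  */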
7861 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7862 subtarget = 0;
7864 /* Check for multiplying things that have been extended
7865 from a narrower type. If this machine supports multiplying
7866 in that narrower type with a result in the desired type,
7867 do it that way, and avoid the explicit type-conversion. */
7868 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7869 && TREE_CODE (type) == INTEGER_TYPE
7870 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7871 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7872 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7873 && int_fits_type_p (TREE_OPERAND (exp, 1),
7874 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7875 /* Don't use a widening multiply if a shift will do. */
7876 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7877 > HOST_BITS_PER_WIDE_INT)
7878 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7880 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7881 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7883 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7884 /* If both operands are extended, they must either both
7885 be zero-extended or both be sign-extended. */
7886 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7888 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7890 enum machine_mode innermode
7891 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7892 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7893 ? smul_widen_optab : umul_widen_optab);
7894 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7895 ? umul_widen_optab : smul_widen_optab);
7896 if (mode == GET_MODE_WIDER_MODE (innermode))
7898 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7900 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7901 NULL_RTX, VOIDmode, 0);
7902 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7903 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7906 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7907 NULL_RTX, VOIDmode, 0);
7910 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7911 && innermode == word_mode)
7914 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7915 NULL_RTX, VOIDmode, 0);
7916 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7917 op1 = convert_modes (innermode, mode,
7918 expand_expr (TREE_OPERAND (exp, 1),
7919 NULL_RTX, VOIDmode, 0),
7922 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7923 NULL_RTX, VOIDmode, 0);
7924 temp = expand_binop (mode, other_optab, op0, op1, target,
7925 unsignedp, OPTAB_LIB_WIDEN);
7926 htem = expand_mult_highpart_adjust (innermode,
7927 gen_highpart (innermode, temp),
7929 gen_highpart (innermode, temp),
7931 emit_move_insn (gen_highpart (innermode, temp), htem);
7936 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7937 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7938 return expand_mult (mode, op0, op1, target, unsignedp);
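/* Illustrative example of the widening-multiply check above (editor's
   sketch): for `(int) a * (int) b' where `a' and `b' are `short', a
   target providing a mulhisi3-style pattern multiplies the HImode
   operands directly into an SImode result, avoiding the two explicit
   sign extensions.  */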
7940 case TRUNC_DIV_EXPR:
7941 case FLOOR_DIV_EXPR:
7943 case ROUND_DIV_EXPR:
7944 case EXACT_DIV_EXPR:
7945 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7946 subtarget = 0;
7947 /* Possible optimization: compute the dividend with EXPAND_SUM
7948 then, if the divisor is constant, we can optimize the case
7949 where some terms of the dividend have coeffs divisible by it. */
7950 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7951 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7952 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7955 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7956 saving an expensive divide. If not, combine will rebuild the
7957 original computation. */
7958 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7959 && TREE_CODE (type) == REAL_TYPE
7960 && !real_onep (TREE_OPERAND (exp, 0)))
7961 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7962 build (RDIV_EXPR, type,
7963 build_real (type, dconst1),
7964 TREE_OPERAND (exp, 1))),
7965 target, tmode, unsignedp);
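/* Example (illustrative): with -funsafe-math-optimizations, `x / y'
   becomes `x * (1.0 / y)'; if the reciprocal is loop-invariant, later
   CSE can compute it once, replacing a divide in the loop body with a
   multiply.  */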
7966 this_optab = sdiv_optab;
7969 case TRUNC_MOD_EXPR:
7970 case FLOOR_MOD_EXPR:
7972 case ROUND_MOD_EXPR:
7973 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7974 subtarget = 0;
7975 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7976 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7977 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7979 case FIX_ROUND_EXPR:
7980 case FIX_FLOOR_EXPR:
7982 abort (); /* Not used for C. */
7984 case FIX_TRUNC_EXPR:
7985 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7986 if (target == 0)
7987 target = gen_reg_rtx (mode);
7988 expand_fix (target, op0, unsignedp);
7989 return target;
7990 
7991 case FLOAT_EXPR:
7992 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7993 if (target == 0)
7994 target = gen_reg_rtx (mode);
7995 /* expand_float can't figure out what to do if FROM has VOIDmode.
7996 So give it the correct mode. With -O, cse will optimize this. */
7997 if (GET_MODE (op0) == VOIDmode)
7998 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8000 expand_float (target, op0,
8001 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8002 return target;
8003 
8004 case NEGATE_EXPR:
8005 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8006 temp = expand_unop (mode,
8007 ! unsignedp && flag_trapv
8008 && (GET_MODE_CLASS(mode) == MODE_INT)
8009 ? negv_optab : neg_optab, op0, target, 0);
8010 if (temp == 0)
8011 abort ();
8012 return temp;
8013 
8014 case ABS_EXPR:
8015 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8017 /* Handle complex values specially. */
8018 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8019 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8020 return expand_complex_abs (mode, op0, target, unsignedp);
8022 /* Unsigned abs is simply the operand. Testing here means we don't
8023 risk generating incorrect code below. */
8024 if (TREE_UNSIGNED (type))
8027 return expand_abs (mode, op0, target, unsignedp,
8028 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8032 target = original_target;
8033 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8034 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8035 || GET_MODE (target) != mode
8036 || (GET_CODE (target) == REG
8037 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8038 target = gen_reg_rtx (mode);
8039 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8040 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8042 /* First try to do it with a special MIN or MAX instruction.
8043 If that does not win, use a conditional jump to select the proper
8045 this_optab = (TREE_UNSIGNED (type)
8046 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8047 : (code == MIN_EXPR ? smin_optab : smax_optab));
8049 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8050 OPTAB_WIDEN);
8051 if (temp != 0)
8052 return temp;
8053 
8054 /* At this point, a MEM target is no longer useful; we will get better
8057 if (GET_CODE (target) == MEM)
8058 target = gen_reg_rtx (mode);
8060 if (target != op0)
8061 emit_move_insn (target, op0);
8062 
8063 op0 = gen_label_rtx ();
8065 /* If this mode is an integer too wide to compare properly,
8066 compare word by word. Rely on cse to optimize constant cases. */
8067 if (GET_MODE_CLASS (mode) == MODE_INT
8068 && ! can_compare_p (GE, mode, ccp_jump))
8070 if (code == MAX_EXPR)
8071 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8072 target, op1, NULL_RTX, op0);
8073 else
8074 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8075 op1, target, NULL_RTX, op0);
8079 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8080 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8081 unsignedp, mode, NULL_RTX, NULL_RTX,
8082 op0);
8083 }
8084 emit_move_insn (target, op1);
8085 emit_label (op0);
8086 return target;
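/* Sketch of the fallback sequence emitted above (illustrative): for
   MAX_EXPR it is roughly
       target = op0;
       if (target >= op1) goto op0_label;
       target = op1;
     op0_label:
   i.e. a compare-and-branch used when no min/max pattern exists.  */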
8089 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8090 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8091 if (temp == 0)
8092 abort ();
8093 return temp;
8096 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8097 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8098 if (temp == 0)
8099 abort ();
8100 return temp;
8102 /* ??? Can optimize bitwise operations with one arg constant.
8103 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8104 and (a bitwise1 b) bitwise2 b (etc)
8105 but that is probably not worthwhile. */
8107 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8108 boolean values when we want in all cases to compute both of them. In
8109 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8110 as actual zero-or-1 values and then bitwise anding. In cases where
8111 there cannot be any side effects, better code would be made by
8112 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8113 how to recognize those cases. */
8115 case TRUTH_AND_EXPR:
8116 case BIT_AND_EXPR:
8117 this_optab = and_optab;
8118 goto binop;
8119 
8120 case TRUTH_OR_EXPR:
8121 case BIT_IOR_EXPR:
8122 this_optab = ior_optab;
8123 goto binop;
8125 case TRUTH_XOR_EXPR:
8126 case BIT_XOR_EXPR:
8127 this_optab = xor_optab;
8128 goto binop;
8129 
8130 case LSHIFT_EXPR:
8131 case RSHIFT_EXPR:
8134 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8135 subtarget = 0;
8136 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8137 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8140 /* Could determine the answer when only additive constants differ. Also,
8141 the addition of one can be handled by changing the condition. */
8148 case UNORDERED_EXPR:
8155 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8156 if (temp != 0)
8157 return temp;
8158 
8159 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8160 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8162 && GET_CODE (original_target) == REG
8163 && (GET_MODE (original_target)
8164 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8166 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8169 /* If temp is constant, we can just compute the result. */
8170 if (GET_CODE (temp) == CONST_INT)
8172 if (INTVAL (temp) != 0)
8173 emit_move_insn (target, const1_rtx);
8175 emit_move_insn (target, const0_rtx);
8180 if (temp != original_target)
8182 enum machine_mode mode1 = GET_MODE (temp);
8183 if (mode1 == VOIDmode)
8184 mode1 = tmode != VOIDmode ? tmode : mode;
8186 temp = copy_to_mode_reg (mode1, temp);
8189 op1 = gen_label_rtx ();
8190 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8191 GET_MODE (temp), unsignedp, op1);
8192 emit_move_insn (temp, const1_rtx);
8197 /* If no set-flag instruction, must generate a conditional
8198 store into a temporary variable. Drop through
8199 and handle this like && and ||. */
8201 case TRUTH_ANDIF_EXPR:
8202 case TRUTH_ORIF_EXPR:
8204 && (target == 0 || ! safe_from_p (target, exp, 1)
8205 /* Make sure we don't have a hard reg (such as function's return
8206 value) live across basic blocks, if not optimizing. */
8207 || (!optimize && GET_CODE (target) == REG
8208 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8209 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8212 emit_clr_insn (target);
8214 op1 = gen_label_rtx ();
8215 jumpifnot (exp, op1);
8218 emit_0_to_1_insn (target);
8221 return ignore ? const0_rtx : target;
8223 case TRUTH_NOT_EXPR:
8224 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8225 /* The parser is careful to generate TRUTH_NOT_EXPR
8226 only with operands that are always zero or one. */
8227 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8228 target, 1, OPTAB_LIB_WIDEN);
8234 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8236 return expand_expr (TREE_OPERAND (exp, 1),
8237 (ignore ? const0_rtx : target),
8241 /* If we would have a "singleton" (see below) were it not for a
8242 conversion in each arm, bring that conversion back out. */
8243 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8244 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8245 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8246 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8248 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8249 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8251 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8252 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8253 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8254 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8255 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8256 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8257 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8258 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8259 return expand_expr (build1 (NOP_EXPR, type,
8260 build (COND_EXPR, TREE_TYPE (iftrue),
8261 TREE_OPERAND (exp, 0),
8263 target, tmode, modifier);
8267 /* Note that COND_EXPRs whose type is a structure or union
8268 are required to be constructed to contain assignments of
8269 a temporary variable, so that we can evaluate them here
8270 for side effect only. If type is void, we must do likewise. */
8272 /* If an arm of the branch requires a cleanup,
8273 only that cleanup is performed. */
8276 tree binary_op = 0, unary_op = 0;
8278 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8279 convert it to our mode, if necessary. */
8280 if (integer_onep (TREE_OPERAND (exp, 1))
8281 && integer_zerop (TREE_OPERAND (exp, 2))
8282 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8286 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8291 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8292 if (GET_MODE (op0) == mode)
8296 target = gen_reg_rtx (mode);
8297 convert_move (target, op0, unsignedp);
8301 /* Check for X ? A + B : A. If we have this, we can copy A to the
8302 output and conditionally add B. Similarly for unary operations.
8303 Don't do this if X has side-effects because those side effects
8304 might affect A or B and the "?" operation is a sequence point in
8305 ANSI. (operand_equal_p tests for side effects.) */
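/* For instance, `cond ? a + b : a' can come out as (pseudo-code sketch):

target = a;
if (! cond) goto skip;
target = target + b;
skip:

so A is evaluated only once and B only when COND is true.  */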
8307 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8308 && operand_equal_p (TREE_OPERAND (exp, 2),
8309 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8310 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8311 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8312 && operand_equal_p (TREE_OPERAND (exp, 1),
8313 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8314 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8315 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8316 && operand_equal_p (TREE_OPERAND (exp, 2),
8317 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8318 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8319 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8320 && operand_equal_p (TREE_OPERAND (exp, 1),
8321 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8322 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8324 /* If we are not to produce a result, we have no target. Otherwise,
8325 if a target was specified use it; it will not be used as an
8326 intermediate target unless it is safe. If no target, use a
8331 else if (original_target
8332 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8333 || (singleton && GET_CODE (original_target) == REG
8334 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8335 && original_target == var_rtx (singleton)))
8336 && GET_MODE (original_target) == mode
8337 #ifdef HAVE_conditional_move
8338 && (! can_conditionally_move_p (mode)
8339 || GET_CODE (original_target) == REG
8340 || TREE_ADDRESSABLE (type))
8342 && (GET_CODE (original_target) != MEM
8343 || TREE_ADDRESSABLE (type)))
8344 temp = original_target;
8345 else if (TREE_ADDRESSABLE (type))
8348 temp = assign_temp (type, 0, 0, 1);
8350 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8351 do the test of X as a store-flag operation, do this as
8352 A + ((X != 0) << log C). Similarly for other simple binary
8353 operators. Only do for C == 1 if BRANCH_COST is low. */
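/* For example, `x ? a + 4 : a' with a usable store-flag insn becomes
(pseudo-code sketch):

t = (x != 0);        do_store_flag
t = t << 2;          log2 (4) == 2
result = a + t;

avoiding the conditional branch entirely.  */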
8354 if (temp && singleton && binary_op
8355 && (TREE_CODE (binary_op) == PLUS_EXPR
8356 || TREE_CODE (binary_op) == MINUS_EXPR
8357 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8358 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8359 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8360 : integer_onep (TREE_OPERAND (binary_op, 1)))
8361 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8364 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8365 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8366 ? addv_optab : add_optab)
8367 : TREE_CODE (binary_op) == MINUS_EXPR
8368 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8369 ? subv_optab : sub_optab)
8370 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8373 /* If we had X ? A : A + 1, do this as A + (X == 0).
8375 We have to invert the truth value here and then put it
8376 back later if do_store_flag fails. We cannot simply copy
8377 TREE_OPERAND (exp, 0) to another variable and modify that
8378 because invert_truthvalue can modify the tree pointed to by its argument. */
8380 if (singleton == TREE_OPERAND (exp, 1))
8381 TREE_OPERAND (exp, 0)
8382 = invert_truthvalue (TREE_OPERAND (exp, 0));
8384 result = do_store_flag (TREE_OPERAND (exp, 0),
8385 (safe_from_p (temp, singleton, 1)
8387 mode, BRANCH_COST <= 1);
8389 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8390 result = expand_shift (LSHIFT_EXPR, mode, result,
8391 build_int_2 (tree_log2
8395 (safe_from_p (temp, singleton, 1)
8396 ? temp : NULL_RTX), 0);
8400 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8401 return expand_binop (mode, boptab, op1, result, temp,
8402 unsignedp, OPTAB_LIB_WIDEN);
8404 else if (singleton == TREE_OPERAND (exp, 1))
8405 TREE_OPERAND (exp, 0)
8406 = invert_truthvalue (TREE_OPERAND (exp, 0));
8409 do_pending_stack_adjust ();
8411 op0 = gen_label_rtx ();
8413 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8417 /* If the target conflicts with the other operand of the
8418 binary op, we can't use it. Also, we can't use the target
8419 if it is a hard register, because evaluating the condition
8420 might clobber it. */
8422 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8423 || (GET_CODE (temp) == REG
8424 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8425 temp = gen_reg_rtx (mode);
8426 store_expr (singleton, temp, 0);
8429 expand_expr (singleton,
8430 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8431 if (singleton == TREE_OPERAND (exp, 1))
8432 jumpif (TREE_OPERAND (exp, 0), op0);
8434 jumpifnot (TREE_OPERAND (exp, 0), op0);
8436 start_cleanup_deferral ();
8437 if (binary_op && temp == 0)
8438 /* Just touch the other operand. */
8439 expand_expr (TREE_OPERAND (binary_op, 1),
8440 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8442 store_expr (build (TREE_CODE (binary_op), type,
8443 make_tree (type, temp),
8444 TREE_OPERAND (binary_op, 1)),
8447 store_expr (build1 (TREE_CODE (unary_op), type,
8448 make_tree (type, temp)),
8452 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8453 comparison operator. If we have one of these cases, set the
8454 output to A, branch on A (cse will merge these two references),
8455 then set the output to FOO. */
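/* E.g. `x != 0 ? x : 42' comes out as (pseudo-code sketch):

target = x;
if (x != 0) goto done;
target = 42;
done:

and cse merges the two references to X.  */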
8457 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8458 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8459 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8460 TREE_OPERAND (exp, 1), 0)
8461 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8462 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8463 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8465 if (GET_CODE (temp) == REG
8466 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8467 temp = gen_reg_rtx (mode);
8468 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8469 jumpif (TREE_OPERAND (exp, 0), op0);
8471 start_cleanup_deferral ();
8472 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8476 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8477 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8478 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8479 TREE_OPERAND (exp, 2), 0)
8480 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8481 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8482 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8484 if (GET_CODE (temp) == REG
8485 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8486 temp = gen_reg_rtx (mode);
8487 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8488 jumpifnot (TREE_OPERAND (exp, 0), op0);
8490 start_cleanup_deferral ();
8491 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8496 op1 = gen_label_rtx ();
8497 jumpifnot (TREE_OPERAND (exp, 0), op0);
8499 start_cleanup_deferral ();
8501 /* One branch of the cond can be void, if it never returns. For
8502 example A ? throw : E */
8504 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8505 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8507 expand_expr (TREE_OPERAND (exp, 1),
8508 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8509 end_cleanup_deferral ();
8511 emit_jump_insn (gen_jump (op1));
8514 start_cleanup_deferral ();
8516 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8517 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8519 expand_expr (TREE_OPERAND (exp, 2),
8520 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8523 end_cleanup_deferral ();
8534 /* Something needs to be initialized, but we didn't know
8535 where that thing was when building the tree. For example,
8536 it could be the return value of a function, or a parameter
8537 to a function that is passed on the stack, or a temporary
8538 variable which must be passed by reference.
8540 We guarantee that the expression will either be constructed
8541 or copied into our original target. */
8543 tree slot = TREE_OPERAND (exp, 0);
8544 tree cleanups = NULL_TREE;
8547 if (TREE_CODE (slot) != VAR_DECL)
8551 target = original_target;
8553 /* Set this here so that if we get a target that refers to a
8554 register variable that's already been used, put_reg_into_stack
8555 knows that it should fix up those uses. */
8556 TREE_USED (slot) = 1;
8560 if (DECL_RTL_SET_P (slot))
8562 target = DECL_RTL (slot);
8563 /* If we have already expanded the slot, don't do it again. */
8565 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8570 target = assign_temp (type, 2, 0, 1);
8571 /* All temp slots at this level must not conflict. */
8572 preserve_temp_slots (target);
8573 SET_DECL_RTL (slot, target);
8574 if (TREE_ADDRESSABLE (slot))
8575 put_var_into_stack (slot);
8577 /* Since SLOT is not known to the called function
8578 to belong to its stack frame, we must build an explicit
8579 cleanup. This case occurs when we must build up a reference
8580 to pass the reference as an argument. In this case,
8581 it is very likely that such a reference need not be built here. */
8584 if (TREE_OPERAND (exp, 2) == 0)
8585 TREE_OPERAND (exp, 2)
8586 = (*lang_hooks.maybe_build_cleanup) (slot);
8587 cleanups = TREE_OPERAND (exp, 2);
8592 /* This case does occur when expanding a parameter that
8593 needs to be constructed on the stack. The target
8594 is the actual stack address that we want to initialize.
8595 The function we call will perform the cleanup in this case. */
8597 /* If we have already assigned it space, use that space,
8598 not the target we were passed in, as our target
8599 parameter is only a hint. */
8600 if (DECL_RTL_SET_P (slot))
8602 target = DECL_RTL (slot);
8603 /* If we have already expanded the slot, don't do it again. */
8605 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8610 SET_DECL_RTL (slot, target);
8611 /* If we must have an addressable slot, then make sure that
8612 the RTL that we just stored in slot is OK. */
8613 if (TREE_ADDRESSABLE (slot))
8614 put_var_into_stack (slot);
8618 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8619 /* Mark it as expanded. */
8620 TREE_OPERAND (exp, 1) = NULL_TREE;
8622 store_expr (exp1, target, 0);
8624 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8631 tree lhs = TREE_OPERAND (exp, 0);
8632 tree rhs = TREE_OPERAND (exp, 1);
8634 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8640 /* If lhs is complex, expand calls in rhs before computing it.
8641 That's so we don't compute a pointer and save it over a
8642 call. If lhs is simple, compute it first so we can give it
8643 as a target if the rhs is just a call. This avoids an
8644 extra temp and copy, and prevents a partial subsumption
8645 which makes bad code. Actually we could treat
8646 component_ref's of vars like vars. */
8648 tree lhs = TREE_OPERAND (exp, 0);
8649 tree rhs = TREE_OPERAND (exp, 1);
8653 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8654 of size 1. In this case, (unless we need the result of the
8655 assignment) we can do this more efficiently with a
8656 test followed by an assignment, if necessary.
8658 ??? At present we can't get a BIT_FIELD_REF here. But if
8659 things change so that we do, this code should be enhanced to support it. */
8662 && TREE_CODE (lhs) == COMPONENT_REF
8663 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8664 || TREE_CODE (rhs) == BIT_AND_EXPR)
8665 && TREE_OPERAND (rhs, 0) == lhs
8666 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8667 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8668 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8670 rtx label = gen_label_rtx ();
8672 do_jump (TREE_OPERAND (rhs, 1),
8673 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8674 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8675 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8676 (TREE_CODE (rhs) == BIT_IOR_EXPR
8678 : integer_zero_node)),
8680 do_pending_stack_adjust ();
8685 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8691 if (!TREE_OPERAND (exp, 0))
8692 expand_null_return ();
8694 expand_return (TREE_OPERAND (exp, 0));
8697 case PREINCREMENT_EXPR:
8698 case PREDECREMENT_EXPR:
8699 return expand_increment (exp, 0, ignore);
8701 case POSTINCREMENT_EXPR:
8702 case POSTDECREMENT_EXPR:
8703 /* Faster to treat as pre-increment if result is not used. */
8704 return expand_increment (exp, ! ignore, ignore);
8707 /* Are we taking the address of a nested function? */
8708 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8709 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8710 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8711 && ! TREE_STATIC (exp))
8713 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8714 op0 = force_operand (op0, target);
8716 /* If we are taking the address of something erroneous, just use zero. */
8718 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8720 /* If we are taking the address of a constant and are at the
8721 top level, we have to use output_constant_def since we can't
8722 call force_const_mem at top level. */
8724 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8725 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8727 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8730 /* We make sure to pass const0_rtx down if we came in with
8731 ignore set, to avoid doing the cleanups twice for something. */
8732 op0 = expand_expr (TREE_OPERAND (exp, 0),
8733 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8734 (modifier == EXPAND_INITIALIZER
8735 ? modifier : EXPAND_CONST_ADDRESS));
8737 /* If we are going to ignore the result, OP0 will have been set
8738 to const0_rtx, so just return it. Don't get confused and
8739 think we are taking the address of the constant. */
8743 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8744 clever and return a REG when given a MEM. */
8745 op0 = protect_from_queue (op0, 1);
8747 /* We would like the object in memory. If it is a constant, we can
8748 have it be statically allocated into memory. For a non-constant,
8749 we need to allocate some memory and store the value into it. */
8751 if (CONSTANT_P (op0))
8752 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8754 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8755 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8756 || GET_CODE (op0) == PARALLEL)
8758 /* If the operand is a SAVE_EXPR, we can deal with this by
8759 forcing the SAVE_EXPR into memory. */
8760 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8762 put_var_into_stack (TREE_OPERAND (exp, 0));
8763 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8767 /* If this object is in a register, it can't be BLKmode. */
8768 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8769 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8771 if (GET_CODE (op0) == PARALLEL)
8772 /* Handle calls that pass values in multiple
8773 non-contiguous locations. The Irix 6 ABI has examples of this. */
8775 emit_group_store (memloc, op0,
8776 int_size_in_bytes (inner_type));
8778 emit_move_insn (memloc, op0);
8784 if (GET_CODE (op0) != MEM)
8787 mark_temp_addr_taken (op0);
8788 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8790 op0 = XEXP (op0, 0);
8791 #ifdef POINTERS_EXTEND_UNSIGNED
8792 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8793 && mode == ptr_mode)
8794 op0 = convert_memory_address (ptr_mode, op0);
8799 /* If OP0 is not aligned at least as much as the type requires, we
8800 need to make a temporary, copy OP0 to it, and take the address of
8801 the temporary. We want to use the alignment of the type, not of
8802 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8803 the test for BLKmode means that can't happen. The test for
8804 BLKmode is because we never make mis-aligned MEMs with non-BLKmode modes.
8807 We don't need to do this at all if the machine doesn't have
8808 strict alignment. */
8809 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8810 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8812 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8814 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8816 = assign_stack_temp_for_type
8817 (TYPE_MODE (inner_type),
8818 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8819 : int_size_in_bytes (inner_type),
8820 1, build_qualified_type (inner_type,
8821 (TYPE_QUALS (inner_type)
8822 | TYPE_QUAL_CONST)));
8824 if (TYPE_ALIGN_OK (inner_type))
8827 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8831 op0 = force_operand (XEXP (op0, 0), target);
8835 && GET_CODE (op0) != REG
8836 && modifier != EXPAND_CONST_ADDRESS
8837 && modifier != EXPAND_INITIALIZER
8838 && modifier != EXPAND_SUM)
8839 op0 = force_reg (Pmode, op0);
8841 if (GET_CODE (op0) == REG
8842 && ! REG_USERVAR_P (op0))
8843 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8845 #ifdef POINTERS_EXTEND_UNSIGNED
8846 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8847 && mode == ptr_mode)
8848 op0 = convert_memory_address (ptr_mode, op0);
8853 case ENTRY_VALUE_EXPR:
8856 /* COMPLEX type for Extended Pascal & Fortran */
8859 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8862 /* Get the rtx code of the operands. */
8863 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8864 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8867 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8871 /* Move the real (op0) and imaginary (op1) parts to their location. */
8872 emit_move_insn (gen_realpart (mode, target), op0);
8873 emit_move_insn (gen_imagpart (mode, target), op1);
8875 insns = get_insns ();
8878 /* Complex construction should appear as a single unit. */
8879 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8880 each with a separate pseudo as destination.
8881 It's not correct for flow to treat them as a unit. */
8882 if (GET_CODE (target) != CONCAT)
8883 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8891 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8892 return gen_realpart (mode, op0);
8895 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8896 return gen_imagpart (mode, op0);
8900 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8904 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8907 target = gen_reg_rtx (mode);
8911 /* Store the realpart and the negated imagpart to target. */
8912 emit_move_insn (gen_realpart (partmode, target),
8913 gen_realpart (partmode, op0));
8915 imag_t = gen_imagpart (partmode, target);
8916 temp = expand_unop (partmode,
8917 ! unsignedp && flag_trapv
8918 && (GET_MODE_CLASS(partmode) == MODE_INT)
8919 ? negv_optab : neg_optab,
8920 gen_imagpart (partmode, op0), imag_t, 0);
8922 emit_move_insn (imag_t, temp);
8924 insns = get_insns ();
8927 /* Conjugate should appear as a single unit.
8928 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8929 each with a separate pseudo as destination.
8930 It's not correct for flow to treat them as a unit. */
8931 if (GET_CODE (target) != CONCAT)
8932 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8939 case TRY_CATCH_EXPR:
8941 tree handler = TREE_OPERAND (exp, 1);
8943 expand_eh_region_start ();
8945 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8947 expand_eh_region_end_cleanup (handler);
8952 case TRY_FINALLY_EXPR:
8954 tree try_block = TREE_OPERAND (exp, 0);
8955 tree finally_block = TREE_OPERAND (exp, 1);
8957 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8959 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8960 is not sufficient, so we cannot expand the block twice.
8961 So we play games with GOTO_SUBROUTINE_EXPR to let us
8962 expand the thing only once. */
8963 /* When not optimizing, we go ahead with this form since
8964 (1) user breakpoints operate more predictably without
8965 code duplication, and
8966 (2) we're not running any of the global optimizers
8967 that would explode in time/space with the highly
8968 connected CFG created by the indirect branching. */
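/* In GNU C terms, the control flow emitted below is roughly
(illustrative sketch only):

<try block>
return_link = &&resume; goto finally;
resume: goto done;
finally: <finally block>; goto *return_link;
done:  */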
8970 rtx finally_label = gen_label_rtx ();
8971 rtx done_label = gen_label_rtx ();
8972 rtx return_link = gen_reg_rtx (Pmode);
8973 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8974 (tree) finally_label, (tree) return_link);
8975 TREE_SIDE_EFFECTS (cleanup) = 1;
8977 /* Start a new binding layer that will keep track of all cleanup
8978 actions to be performed. */
8979 expand_start_bindings (2);
8980 target_temp_slot_level = temp_slot_level;
8982 expand_decl_cleanup (NULL_TREE, cleanup);
8983 op0 = expand_expr (try_block, target, tmode, modifier);
8985 preserve_temp_slots (op0);
8986 expand_end_bindings (NULL_TREE, 0, 0);
8987 emit_jump (done_label);
8988 emit_label (finally_label);
8989 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8990 emit_indirect_jump (return_link);
8991 emit_label (done_label);
8995 expand_start_bindings (2);
8996 target_temp_slot_level = temp_slot_level;
8998 expand_decl_cleanup (NULL_TREE, finally_block);
8999 op0 = expand_expr (try_block, target, tmode, modifier);
9001 preserve_temp_slots (op0);
9002 expand_end_bindings (NULL_TREE, 0, 0);
9008 case GOTO_SUBROUTINE_EXPR:
9010 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9011 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9012 rtx return_address = gen_label_rtx ();
9013 emit_move_insn (return_link,
9014 gen_rtx_LABEL_REF (Pmode, return_address));
9016 emit_label (return_address);
9021 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9024 return get_exception_pointer (cfun);
9027 /* Function descriptors are not valid except as
9028 initialization constants, and should not be expanded. */
9032 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9035 /* Here to do an ordinary binary operator, generating an instruction
9036 from the optab already placed in `this_optab'. */
9038 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9040 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9041 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9043 temp = expand_binop (mode, this_optab, op0, op1, target,
9044 unsignedp, OPTAB_LIB_WIDEN);
9050 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9051 when applied to the address of EXP produces an address known to be
9052 aligned more than BIGGEST_ALIGNMENT. */
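/* The pattern recognized below is, in effect,

offset = (- (long) &EXP) & (C - 1)

with C a power of 2 whose C - 1 exceeds BIGGEST_ALIGNMENT; adding such
an offset to the address of EXP rounds it up to a multiple of C.  */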
9055 is_aligning_offset (offset, exp)
9059 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9060 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9061 || TREE_CODE (offset) == NOP_EXPR
9062 || TREE_CODE (offset) == CONVERT_EXPR
9063 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9064 offset = TREE_OPERAND (offset, 0);
9066 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9067 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9068 if (TREE_CODE (offset) != BIT_AND_EXPR
9069 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9070 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9071 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9074 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9075 It must be NEGATE_EXPR. Then strip any more conversions. */
9076 offset = TREE_OPERAND (offset, 0);
9077 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9078 || TREE_CODE (offset) == NOP_EXPR
9079 || TREE_CODE (offset) == CONVERT_EXPR)
9080 offset = TREE_OPERAND (offset, 0);
9082 if (TREE_CODE (offset) != NEGATE_EXPR)
9085 offset = TREE_OPERAND (offset, 0);
9086 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9087 || TREE_CODE (offset) == NOP_EXPR
9088 || TREE_CODE (offset) == CONVERT_EXPR)
9089 offset = TREE_OPERAND (offset, 0);
9091 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9092 whose type is the same as EXP. */
9093 return (TREE_CODE (offset) == ADDR_EXPR
9094 && (TREE_OPERAND (offset, 0) == exp
9095 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9096 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9097 == TREE_TYPE (exp)))));
9100 /* Return the tree node if ARG corresponds to a string constant, or zero
9101 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9102 in bytes within the string that ARG is accessing. The type of the
9103 offset will be `sizetype'. */
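/* For example, for ARG of the form
(PLUS_EXPR (ADDR_EXPR (STRING_CST "hello")) (INTEGER_CST 2)),
as might be produced for `"hello" + 2', we return the STRING_CST and
set *PTR_OFFSET to the sizetype constant 2.  */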
9106 string_constant (arg, ptr_offset)
9112 if (TREE_CODE (arg) == ADDR_EXPR
9113 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9115 *ptr_offset = size_zero_node;
9116 return TREE_OPERAND (arg, 0);
9118 else if (TREE_CODE (arg) == PLUS_EXPR)
9120 tree arg0 = TREE_OPERAND (arg, 0);
9121 tree arg1 = TREE_OPERAND (arg, 1);
9126 if (TREE_CODE (arg0) == ADDR_EXPR
9127 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9129 *ptr_offset = convert (sizetype, arg1);
9130 return TREE_OPERAND (arg0, 0);
9132 else if (TREE_CODE (arg1) == ADDR_EXPR
9133 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9135 *ptr_offset = convert (sizetype, arg0);
9136 return TREE_OPERAND (arg1, 0);
9143 /* Expand code for a post- or pre- increment or decrement
9144 and return the RTX for the result.
9145 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
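/* For instance, `i++' whose value is used yields a copy of the old
value of I while the addition is queued, whereas `++i' (POST == 0)
yields the incremented value itself.  */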
9148 expand_increment (exp, post, ignore)
9154 tree incremented = TREE_OPERAND (exp, 0);
9155 optab this_optab = add_optab;
9157 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9158 int op0_is_copy = 0;
9159 int single_insn = 0;
9160 /* 1 means we can't store into OP0 directly,
9161 because it is a subreg narrower than a word,
9162 and we don't dare clobber the rest of the word. */
9165 /* Stabilize any component ref that might need to be
9166 evaluated more than once below. */
9168 || TREE_CODE (incremented) == BIT_FIELD_REF
9169 || (TREE_CODE (incremented) == COMPONENT_REF
9170 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9171 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9172 incremented = stabilize_reference (incremented);
9173 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9174 ones into save exprs so that they don't accidentally get evaluated
9175 more than once by the code below. */
9176 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9177 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9178 incremented = save_expr (incremented);
9180 /* Compute the operands as RTX.
9181 Note whether OP0 is the actual lvalue or a copy of it:
9182 I believe it is a copy iff it is a register or subreg
9183 and insns were generated in computing it. */
9185 temp = get_last_insn ();
9186 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9188 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9189 in place but instead must do sign- or zero-extension during assignment,
9190 so we copy it into a new register and let the code below use it as a copy.
9193 Note that we can safely modify this SUBREG since it is known not to be
9194 shared (it was made by the expand_expr call above). */
9196 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9199 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9203 else if (GET_CODE (op0) == SUBREG
9204 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9206 /* We cannot increment this SUBREG in place. If we are
9207 post-incrementing, get a copy of the old value. Otherwise,
9208 just mark that we cannot increment in place. */
9210 op0 = copy_to_reg (op0);
9215 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9216 && temp != get_last_insn ());
9217 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9219 /* Decide whether incrementing or decrementing. */
9220 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9221 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9222 this_optab = sub_optab;
9224 /* Convert decrement by a constant into a negative increment. */
9225 if (this_optab == sub_optab
9226 && GET_CODE (op1) == CONST_INT)
9228 op1 = GEN_INT (-INTVAL (op1));
9229 this_optab = add_optab;
9232 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9233 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9235 /* For a preincrement, see if we can do this with a single instruction. */
9238 icode = (int) this_optab->handlers[(int) mode].insn_code;
9239 if (icode != (int) CODE_FOR_nothing
9240 /* Make sure that OP0 is valid for operands 0 and 1
9241 of the insn we want to queue. */
9242 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9243 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9244 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9248 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9249 then we cannot just increment OP0. We must therefore contrive to
9250 increment the original value. Then, for postincrement, we can return
9251 OP0 since it is a copy of the old value. For preincrement, expand here
9252 unless we can do it with a single insn.
9254 Likewise if storing directly into OP0 would clobber high bits
9255 we need to preserve (bad_subreg). */
9256 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9258 /* This is the easiest way to increment the value wherever it is.
9259 Problems with multiple evaluation of INCREMENTED are prevented
9260 because either (1) it is a component_ref or preincrement,
9261 in which case it was stabilized above, or (2) it is an array_ref
9262 with constant index in an array in a register, which is
9263 safe to reevaluate. */
9264 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9265 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9266 ? MINUS_EXPR : PLUS_EXPR),
9269 TREE_OPERAND (exp, 1));
9271 while (TREE_CODE (incremented) == NOP_EXPR
9272 || TREE_CODE (incremented) == CONVERT_EXPR)
9274 newexp = convert (TREE_TYPE (incremented), newexp);
9275 incremented = TREE_OPERAND (incremented, 0);
9278 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9279 return post ? op0 : temp;
9284 /* We have a true reference to the value in OP0.
9285 If there is an insn to add or subtract in this mode, queue it.
9286 Queueing the increment insn avoids the register shuffling
9287 that often results if we must increment now and first save
9288 the old value for subsequent use. */
9290 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9291 op0 = stabilize (op0);
9294 icode = (int) this_optab->handlers[(int) mode].insn_code;
9295 if (icode != (int) CODE_FOR_nothing
9296 /* Make sure that OP0 is valid for operands 0 and 1
9297 of the insn we want to queue. */
9298 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9299 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9301 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9302 op1 = force_reg (mode, op1);
9304 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9306 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9308 rtx addr = (general_operand (XEXP (op0, 0), mode)
9309 ? force_reg (Pmode, XEXP (op0, 0))
9310 : copy_to_reg (XEXP (op0, 0)));
9313 op0 = replace_equiv_address (op0, addr);
9314 temp = force_reg (GET_MODE (op0), op0);
9315 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9316 op1 = force_reg (mode, op1);
9318 /* The increment queue is LIFO, thus we have to `queue'
9319 the instructions in reverse order. */
9320 enqueue_insn (op0, gen_move_insn (op0, temp));
9321 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9326 /* Preincrement, or we can't increment with one simple insn. */
9328 /* Save a copy of the value before inc or dec, to return it later. */
9329 temp = value = copy_to_reg (op0);
9331 /* Arrange to return the incremented value. */
9332 /* Copy the rtx because expand_binop will protect from the queue,
9333 and the results of that would be invalid for us to return
9334 if our caller does emit_queue before using our result. */
9335 temp = copy_rtx (value = op0);
9337 /* Increment however we can. */
9338 op1 = expand_binop (mode, this_optab, value, op1, op0,
9339 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9341 /* Make sure the value is stored into OP0. */
9343 emit_move_insn (op0, op1);
9348 /* At the start of a function, record that we have no previously-pushed
9349 arguments waiting to be popped. */
9352 init_pending_stack_adjust ()
9354 pending_stack_adjust = 0;
9357 /* When exiting from a function, if safe, clear out any pending stack adjust
9358 so the adjustment won't get done.
9360 Note, if the current function calls alloca, then it must have a
9361 frame pointer regardless of the value of flag_omit_frame_pointer. */
9364 clear_pending_stack_adjust ()
9366 #ifdef EXIT_IGNORE_STACK
9368 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9369 && EXIT_IGNORE_STACK
9370 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9371 && ! flag_inline_functions)
9373 stack_pointer_delta -= pending_stack_adjust,
9374 pending_stack_adjust = 0;
9379 /* Pop any previously-pushed arguments that have not been popped yet. */
9382 do_pending_stack_adjust ()
9384 if (inhibit_defer_pop == 0)
9386 if (pending_stack_adjust != 0)
9387 adjust_stack (GEN_INT (pending_stack_adjust));
9388 pending_stack_adjust = 0;
9392 /* Expand conditional expressions. */
9394 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9395 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9399 jumpifnot (exp, label)
9403 do_jump (exp, label, NULL_RTX);
9406 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9413 do_jump (exp, NULL_RTX, label);
9416 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9417 the result is zero, or IF_TRUE_LABEL if the result is one.
9418 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9419 meaning fall through in that case.
9421 do_jump always does any pending stack adjust except when it does not
9422 actually perform a jump. An example where there is no jump
9423 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9425 This function is responsible for optimizing cases such as
9426 &&, || and comparison operators in EXP. */
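/* For example, for `a && b' (TRUTH_ANDIF_EXPR) we emit the jump for A
straight to IF_FALSE_LABEL and then emit the jump for B, so B is never
evaluated when A is zero.  */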
9429 do_jump (exp, if_false_label, if_true_label)
9431 rtx if_false_label, if_true_label;
9433 enum tree_code code = TREE_CODE (exp);
9434 /* Some cases need to create a label to jump to
9435 in order to properly fall through.
9436 These cases set DROP_THROUGH_LABEL nonzero. */
9437 rtx drop_through_label = 0;
9441 enum machine_mode mode;
9443 #ifdef MAX_INTEGER_COMPUTATION_MODE
9444 check_max_integer_computation_mode (exp);
9455 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9461 /* This is not true with #pragma weak */
9463 /* The address of something can never be zero. */
9465 emit_jump (if_true_label);
9470 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9471 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9472 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9473 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9476 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9478 if ((TYPE_PRECISION (TREE_TYPE (exp))
9479 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9481 case NON_LVALUE_EXPR:
9482 case REFERENCE_EXPR:
9487 /* These cannot change zero->non-zero or vice versa. */
9488 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9491 case WITH_RECORD_EXPR:
9492 /* Put the object on the placeholder list, recurse through our first
9493 operand, and pop the list. */
9494 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9496 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9497 placeholder_list = TREE_CHAIN (placeholder_list);
9501 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9502 a test and can be longer if the test is eliminated. */
9504 /* Reduce to minus. */
9505 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9506 TREE_OPERAND (exp, 0),
9507 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9508 TREE_OPERAND (exp, 1))));
9509 /* Process as MINUS. */
9513 /* Non-zero iff operands of minus differ. */
9514 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9515 TREE_OPERAND (exp, 0),
9516 TREE_OPERAND (exp, 1)),
9517 NE, NE, if_false_label, if_true_label);
9521 /* If we are AND'ing with a small constant, do this comparison in the
9522 smallest type that fits. If the machine doesn't have comparisons
9523 that small, it will be converted back to the wider comparison.
9524 This helps if we are testing the sign bit of a narrower object.
9525 combine can't do this for us because it can't know whether a
9526 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
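/* E.g. for `(x & 0x80) != 0' with X an int: tree_floor_log2 gives 7,
mode_for_size (8, MODE_INT, 0) gives QImode, and the test is done as a
one-byte comparison, i.e. a sign-bit test of the low byte.  */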
9528 if (! SLOW_BYTE_ACCESS
9529 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9530 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9531 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9532 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9533 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9534 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9535 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9536 != CODE_FOR_nothing))
9538 do_jump (convert (type, exp), if_false_label, if_true_label);
9543 case TRUTH_NOT_EXPR:
9544 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9547 case TRUTH_ANDIF_EXPR:
9548 if (if_false_label == 0)
9549 if_false_label = drop_through_label = gen_label_rtx ();
9550 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9551 start_cleanup_deferral ();
9552 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9553 end_cleanup_deferral ();
9556 case TRUTH_ORIF_EXPR:
9557 if (if_true_label == 0)
9558 if_true_label = drop_through_label = gen_label_rtx ();
9559 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9560 start_cleanup_deferral ();
9561 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9562 end_cleanup_deferral ();
9567 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9568 preserve_temp_slots (NULL_RTX);
9572 do_pending_stack_adjust ();
9573 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9579 case ARRAY_RANGE_REF:
9581 HOST_WIDE_INT bitsize, bitpos;
9583 enum machine_mode mode;
9588 /* Get description of this reference. We don't actually care
9589 about the underlying object here. */
9590 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9591 &unsignedp, &volatilep);
9593 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9594 if (! SLOW_BYTE_ACCESS
9595 && type != 0 && bitsize >= 0
9596 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9597 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9598 != CODE_FOR_nothing))
9600 do_jump (convert (type, exp), if_false_label, if_true_label);
9607 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9608 if (integer_onep (TREE_OPERAND (exp, 1))
9609 && integer_zerop (TREE_OPERAND (exp, 2)))
9610 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9612 else if (integer_zerop (TREE_OPERAND (exp, 1))
9613 && integer_onep (TREE_OPERAND (exp, 2)))
9614 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9618 rtx label1 = gen_label_rtx ();
9619 drop_through_label = gen_label_rtx ();
9621 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9623 start_cleanup_deferral ();
9624 /* Now the THEN-expression. */
9625 do_jump (TREE_OPERAND (exp, 1),
9626 if_false_label ? if_false_label : drop_through_label,
9627 if_true_label ? if_true_label : drop_through_label);
9628 /* In case the do_jump just above never jumps. */
9629 do_pending_stack_adjust ();
9630 emit_label (label1);
9632 /* Now the ELSE-expression. */
9633 do_jump (TREE_OPERAND (exp, 2),
9634 if_false_label ? if_false_label : drop_through_label,
9635 if_true_label ? if_true_label : drop_through_label);
9636 end_cleanup_deferral ();
9642 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9644 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9645 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9647 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9648 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9651 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9652 fold (build (EQ_EXPR, TREE_TYPE (exp),
9653 fold (build1 (REALPART_EXPR,
9654 TREE_TYPE (inner_type),
9656 fold (build1 (REALPART_EXPR,
9657 TREE_TYPE (inner_type),
9659 fold (build (EQ_EXPR, TREE_TYPE (exp),
9660 fold (build1 (IMAGPART_EXPR,
9661 TREE_TYPE (inner_type),
9663 fold (build1 (IMAGPART_EXPR,
9664 TREE_TYPE (inner_type),
9666 if_false_label, if_true_label);
9669 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9670 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9672 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9673 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9674 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9676 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9682 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9684 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9685 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9687 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9688 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9691 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9692 fold (build (NE_EXPR, TREE_TYPE (exp),
9693 fold (build1 (REALPART_EXPR,
9694 TREE_TYPE (inner_type),
9696 fold (build1 (REALPART_EXPR,
9697 TREE_TYPE (inner_type),
9699 fold (build (NE_EXPR, TREE_TYPE (exp),
9700 fold (build1 (IMAGPART_EXPR,
9701 TREE_TYPE (inner_type),
9703 fold (build1 (IMAGPART_EXPR,
9704 TREE_TYPE (inner_type),
9706 if_false_label, if_true_label);
9709 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9710 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9712 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9713 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9714 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9716 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9721 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9722 if (GET_MODE_CLASS (mode) == MODE_INT
9723 && ! can_compare_p (LT, mode, ccp_jump))
9724 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9726 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9730 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9731 if (GET_MODE_CLASS (mode) == MODE_INT
9732 && ! can_compare_p (LE, mode, ccp_jump))
9733 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9735 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9739 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9740 if (GET_MODE_CLASS (mode) == MODE_INT
9741 && ! can_compare_p (GT, mode, ccp_jump))
9742 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9744 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9748 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9749 if (GET_MODE_CLASS (mode) == MODE_INT
9750 && ! can_compare_p (GE, mode, ccp_jump))
9751 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9753 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9756 case UNORDERED_EXPR:
9759 enum rtx_code cmp, rcmp;
9762 if (code == UNORDERED_EXPR)
9763 cmp = UNORDERED, rcmp = ORDERED;
9765 cmp = ORDERED, rcmp = UNORDERED;
9766 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9769 if (! can_compare_p (cmp, mode, ccp_jump)
9770 && (can_compare_p (rcmp, mode, ccp_jump)
9771 /* If the target doesn't provide either UNORDERED or ORDERED
9772 comparisons, canonicalize on UNORDERED for the library. */
9773 || rcmp == UNORDERED))
9777 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9779 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9784 enum rtx_code rcode1;
9785 enum tree_code tcode2;
9809 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9810 if (can_compare_p (rcode1, mode, ccp_jump))
9811 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9815 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9816 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9819 /* If the target doesn't support combined unordered
9820 compares, decompose into UNORDERED + comparison. */
9821 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9822 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9823 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9824 do_jump (exp, if_false_label, if_true_label);
9830 __builtin_expect (<test>, 0) and
9831 __builtin_expect (<test>, 1)
9833 We need to do this here, so that <test> is not converted to a SCC
9834 operation on machines that use condition code registers and COMPARE
9835 like the PowerPC, and then the jump is done based on whether the SCC
9836 operation produced a 1 or 0. */
9838 /* Check for a built-in function. */
9839 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9841 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9842 tree arglist = TREE_OPERAND (exp, 1);
9844 if (TREE_CODE (fndecl) == FUNCTION_DECL
9845 && DECL_BUILT_IN (fndecl)
9846 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9847 && arglist != NULL_TREE
9848 && TREE_CHAIN (arglist) != NULL_TREE)
9850 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9853 if (seq != NULL_RTX)
9860 /* fall through and generate the normal code. */
9864 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9866 /* This is not needed any more and causes poor code since it causes
9867 comparisons and tests from non-SI objects to have different code paths. */
9869 /* Copy to register to avoid generating bad insns by cse
9870 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9871 if (!cse_not_expected && GET_CODE (temp) == MEM)
9872 temp = copy_to_reg (temp);
9874 do_pending_stack_adjust ();
9875 /* Do any postincrements in the expression that was tested. */
9878 if (GET_CODE (temp) == CONST_INT
9879 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9880 || GET_CODE (temp) == LABEL_REF)
9882 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9886 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9887 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9888 /* Note swapping the labels gives us not-equal. */
9889 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9890 else if (GET_MODE (temp) != VOIDmode)
9891 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9892 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9893 GET_MODE (temp), NULL_RTX,
9894 if_false_label, if_true_label);
9899 if (drop_through_label)
9901 /* If do_jump produces code that might be jumped around,
9902 do any stack adjusts from that code, before the place
9903 where control merges in. */
9904 do_pending_stack_adjust ();
9905 emit_label (drop_through_label);
9909 /* Given a comparison expression EXP for values too wide to be compared
9910 with one insn, test the comparison and jump to the appropriate label.
9911 The code of EXP is ignored; we always test GT if SWAP is 0,
9912 and LT if SWAP is 1. */
9915 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9918 rtx if_false_label, if_true_label;
9920 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9921 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9922 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9923 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9925 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9928 /* Compare OP0 with OP1, word at a time, in mode MODE.
9929 UNSIGNEDP says to do unsigned comparison.
9930 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
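/* For a two-word mode the loop below amounts to (pseudo-code sketch):

if (hi0 > hi1) goto if_true;      signedness as per UNSIGNEDP
if (hi0 != hi1) goto if_false;
if (lo0 > lo1) goto if_true;      low words always compared unsigned
goto if_false;  */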
9933 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9934 enum machine_mode mode;
9937 rtx if_false_label, if_true_label;
9939 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9940 rtx drop_through_label = 0;
9943 if (! if_true_label || ! if_false_label)
9944 drop_through_label = gen_label_rtx ();
9945 if (! if_true_label)
9946 if_true_label = drop_through_label;
9947 if (! if_false_label)
9948 if_false_label = drop_through_label;
9950 /* Compare a word at a time, high order first. */
9951 for (i = 0; i < nwords; i++)
9953 rtx op0_word, op1_word;
9955 if (WORDS_BIG_ENDIAN)
9957 op0_word = operand_subword_force (op0, i, mode);
9958 op1_word = operand_subword_force (op1, i, mode);
9962 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9963 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9966 /* All but high-order word must be compared as unsigned. */
9967 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9968 (unsignedp || i > 0), word_mode, NULL_RTX,
9969 NULL_RTX, if_true_label);
9971 /* Consider lower words only if these are equal. */
9972 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9973 NULL_RTX, NULL_RTX, if_false_label);
9977 emit_jump (if_false_label);
9978 if (drop_through_label)
9979 emit_label (drop_through_label);
9982 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9983 with one insn, test the comparison and jump to the appropriate label. */
9986 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9988 rtx if_false_label, if_true_label;
9990 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9991 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9992 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9993 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9995 rtx drop_through_label = 0;
9997 if (! if_false_label)
9998 drop_through_label = if_false_label = gen_label_rtx ();
10000 for (i = 0; i < nwords; i++)
10001 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10002 operand_subword_force (op1, i, mode),
10003 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10004 word_mode, NULL_RTX, if_false_label, NULL_RTX);
10007 emit_jump (if_true_label);
10008 if (drop_through_label)
10009 emit_label (drop_through_label);
10012 /* Jump according to whether OP0 is 0.
10013 We assume that OP0 has an integer mode that is too wide
10014 for the available compare insns. */
10017 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10019 rtx if_false_label, if_true_label;
10021 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10024 rtx drop_through_label = 0;
10026 /* The fastest way of doing this comparison on almost any machine is to
10027 "or" all the words and compare the result. If all have to be loaded
10028 from memory and this is a very wide item, it's possible this may
10029 be slower, but that's highly unlikely. */
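/* For a two-word value this amounts to (pseudo-code sketch):

part = word0 | word1;
if (part == 0) goto if_true_label;
goto if_false_label;  */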
10031 part = gen_reg_rtx (word_mode);
10032 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10033 for (i = 1; i < nwords && part != 0; i++)
10034 part = expand_binop (word_mode, ior_optab, part,
10035 operand_subword_force (op0, i, GET_MODE (op0)),
10036 part, 1, OPTAB_WIDEN);
10040 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10041 NULL_RTX, if_false_label, if_true_label);
10046 /* If we couldn't do the "or" simply, do this with a series of compares. */
10047 if (! if_false_label)
10048 drop_through_label = if_false_label = gen_label_rtx ();
10050 for (i = 0; i < nwords; i++)
10051 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10052 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10053 if_false_label, NULL_RTX);
10056 emit_jump (if_true_label);
10058 if (drop_through_label)
10059 emit_label (drop_through_label);
10062 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10063 (including code to compute the values to be compared) and set (CC0)
10064 according to the result.
10065 The decision as to signed or unsigned comparison must be made by the caller.
10067 We force a stack adjustment unless there are currently
10068 things pushed on the stack that aren't yet used.
10070 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared. */
10074 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10076 enum rtx_code code;
10078 enum machine_mode mode;
10081 enum rtx_code ucode;
10084 /* If one operand is constant, make it the second one. Only do this
10085 if the other operand is not constant as well. */
10087 if (swap_commutative_operands_p (op0, op1))
10092 code = swap_condition (code);
10095 if (flag_force_mem)
10097 op0 = force_not_mem (op0);
10098 op1 = force_not_mem (op1);
10101 do_pending_stack_adjust ();
10103 ucode = unsignedp ? unsigned_condition (code) : code;
10104 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10108 /* There's no need to do this now that combine.c can eliminate lots of
10109 sign extensions. This can be less efficient in certain cases on other
10112 /* If this is a signed equality comparison, we can do it as an
10113 unsigned comparison since zero-extension is cheaper than sign
10114 extension and comparisons with zero are done as unsigned. This is
10115 the case even on machines that can do fast sign extension, since
10116 zero-extension is easier to combine with other operations than
10117 sign-extension is. If we are comparing against a constant, we must
10118 convert it to what it would look like unsigned. */
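  /* For instance, a signed QImode test C == -1 would be rewritten as an
     unsigned test against (-1 & 0xff), i.e. 0xff, since viewed unsigned
     the register holds 0xff, not -1.  */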
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#ifdef HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }
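  /* E.g. "jump to L when A >= B is false" becomes "jump to L when
     A < B is true".  The transformation is skipped for floating modes,
     where reversing the condition is not safe in the presence of NaNs:
     !(A >= B) is not the same as A < B when the operands are unordered.  */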
  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */
  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
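/* The set/jump/set fallback emitted at the end of this function is,
   as a sketch:

       target = 1;
       if (op0 <cond> op1) goto label;
       target = 0;
     label:

   with the two constants exchanged when a TRUTH_NOT_EXPR asked for the
   inverted result.  */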
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return 0;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
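  /* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", and "(x & 8) == 0"
     becomes "((x >> 3) & 1) ^ 1".  */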
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
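      /* E.g. for "((x >> 2) & 4) != 0", BITNUM is 2 and INNER is x >> 2;
         the pair is rewritten as BITNUM 4 with INNER x, so a single
         shift of x suffices.  */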
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
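/* For example, with a casesi pattern in the machine description a switch
   needs at least four distinct case values before a dispatch table is
   preferred over a tree of conditional branches; without casesi the
   threshold is five, reflecting the cost of the explicit bounds check.  */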
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
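  /* In C terms, the single test

         if ((unsigned) (i - lo) > (unsigned) (hi - lo)) goto default_label;

     also catches i < lo, because the subtraction wraps such values
     around to very large unsigned numbers.  */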
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
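  /* I.e. the entry address is computed as (a sketch)

         table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

     and the table entry loaded from that address becomes the jump
     target below.  */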
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}

#include "gt-expr.h"