/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
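
/* Illustrative example (the numbers are assumed, not taken from any
   target): without a movstr pattern and not optimizing for size,
   MOVE_RATIO defaults to 15; a word-aligned 16-byte copy on a 32-bit
   target costs 4 SImode moves, and 4 < 15, so move_by_pieces is used.
   With -Os the cutoff drops to 3 and the copy becomes a libcall; if a
   movstr pattern exists, the cutoff is 2.  */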
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
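
/* A sketch of how the queue machinery fits together (illustrative only,
   not code from this file).  Expanding something like "x = y++" might do:

       rtx q = enqueue_insn (y_rtx, gen_add2_insn (y_rtx, const1_rtx));
       rtx val = protect_from_queue (q, 0);
       emit_move_insn (x_rtx, val);
       emit_queue ();

   Here y_rtx and x_rtx are hypothetical operands.  protect_from_queue
   yields the pre-increment value of Y, and the final emit_queue flushes
   the queued increment insn.  A QUEUED rtx must always pass through
   protect_from_queue before it may appear in an insn.  */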
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
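
  /* Illustrative walk-through: truncating DImode to HImode on a 32-bit
     target first takes the SImode low word of FROM with gen_lowpart and
     then recurses, so the final SImode-to-HImode step is handled by the
     single-word truncation code further below.  */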
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;
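
	  /* Worked example (illustrative values): with width == 8 and
	     val == 0x90, the mask leaves 0x90; bit 7 is set, so for a
	     signed conversion the OR above yields ...ffffff90, i.e.
	     -112, which is 0x90 correctly sign-extended.  */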
	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
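
/* For instance (illustrative host parameters): with MOVE_MAX_PIECES of 8
   and a 64-bit HOST_WIDE_INT, STORE_MAX_PIECES is MIN (8, 16), i.e. 8,
   so constant stores are emitted at most eight bytes at a time.  */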
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode.)  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
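
/* Illustrative example: with MOVE_MAX of 8, full alignment and all the
   integer move patterns available, a 14-byte block decomposes greedily
   into one DImode, one SImode and one HImode move, so the function
   above returns 3.  */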
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

static GTY(()) tree block_move_fn;
rtx
emit_block_move (x, y, size)
     rtx x, y;
     rtx size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;
      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif
#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (block_move_fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  block_move_fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  block_move_fn = build_decl (FUNCTION_DECL, block_move_fn, fntype);
	  DECL_EXTERNAL (block_move_fn) = 1;
	  TREE_PUBLIC (block_move_fn) = 1;
	  DECL_ARTIFICIAL (block_move_fn) = 1;
	  TREE_NOTHROW (block_move_fn) = 1;
	  make_decl_rtl (block_move_fn, NULL);
	  assemble_external (block_move_fn);
	}
      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR,
			  build_pointer_type (TREE_TYPE (block_move_fn)),
			  block_move_fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_move_fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
      /* If we are initializing a readonly value, show the above call
	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
	 from a loop.  */
      if (RTX_UNCHANGING_P (x))
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if ((bytepos == 0
	       && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	      || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
	    {
	      tmps[i] = XEXP (src, bytepos != 0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     0, 1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
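
  /* Worked example (illustrative): for a 3-byte structure on a 32-bit
     big-endian target, bytes % UNITS_PER_WORD is 3, so the correction
     is 32 - 3 * 8 == 8 bits, skipping the unused high-order byte.  */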
2270 /* Copy the structure BITSIZE bites at a time.
2272 We could probably emit more efficient code for machines which do not use
2273 strict alignment, but it doesn't seem worth the effort at the current
2275 for (bitpos = 0, xbitpos = big_endian_correction;
2276 bitpos < bytes * BITS_PER_UNIT;
2277 bitpos += bitsize, xbitpos += bitsize)
2279 /* We need a new source operand each time xbitpos is on a
2280 word boundary and when xbitpos == big_endian_correction
2281 (the first time through). */
2282 if (xbitpos % BITS_PER_WORD == 0
2283 || xbitpos == big_endian_correction)
2284 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2287 /* We need a new destination operand each time bitpos is on a word boundary. */
2289 if (bitpos % BITS_PER_WORD == 0)
2290 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2292 /* Use xbitpos for the source extraction (right justified) and
2293 bitpos for the destination store (left justified). */
2294 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2295 extract_bit_field (src, bitsize,
2296 xbitpos % BITS_PER_WORD, 1,
2297 NULL_RTX, word_mode, word_mode,
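/* Illustration: the bitpos/xbitpos arithmetic above is easiest to follow
   with concrete numbers.  A minimal standalone sketch, assuming a
   hypothetical 32-bit big-endian target, a 6-byte structure, and an
   8-bit copy granularity; it is not compiler code and is kept out of
   the build with #if 0.  */
#if 0
#include <stdio.h>

#define BITS_PER_WORD 32
#define BITS_PER_UNIT 8
#define UNITS_PER_WORD (BITS_PER_WORD / BITS_PER_UNIT)

int
main (void)
{
  unsigned bytes = 6;		/* hypothetical structure size */
  unsigned bitsize = 8;		/* hypothetical copy granularity */
  unsigned correction = 0;
  unsigned bitpos, xbitpos;

  /* Same formula as big_endian_correction above: skip the empty
     high-order bits of the final, partially filled word.  */
  if (bytes % UNITS_PER_WORD)
    correction = BITS_PER_WORD - (bytes % UNITS_PER_WORD) * BITS_PER_UNIT;

  for (bitpos = 0, xbitpos = correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    printf ("dst word %u bit %2u  <-  src word %u bit %2u\n",
            bitpos / BITS_PER_WORD, bitpos % BITS_PER_WORD,
            xbitpos / BITS_PER_WORD, xbitpos % BITS_PER_WORD);
  return 0;
}
#endif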
2305 /* Add a USE expression for REG to the (possibly empty) list pointed
2306 to by CALL_FUSAGE. REG must denote a hard register. */
2309 use_reg (call_fusage, reg)
2310 rtx *call_fusage, reg;
2312 if (GET_CODE (reg) != REG
2313 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2317 = gen_rtx_EXPR_LIST (VOIDmode,
2318 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2321 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2322 starting at REGNO. All of these registers must be hard registers. */
2325 use_regs (call_fusage, regno, nregs)
2332 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2335 for (i = 0; i < nregs; i++)
2336 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2339 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2340 PARALLEL REGS. This is for calls that pass values in multiple
2341 non-contiguous locations. The Irix 6 ABI has examples of this. */
2344 use_group_regs (call_fusage, regs)
2350 for (i = 0; i < XVECLEN (regs, 0); i++)
2352 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2354 /* A NULL entry means the parameter goes both on the stack and in
2355 registers. This can also be a MEM for targets that pass values
2356 partially on the stack and partially in registers. */
2357 if (reg != 0 && GET_CODE (reg) == REG)
2358 use_reg (call_fusage, reg);
2363 /* Determine whether the LEN bytes generated by CONSTFUN can be
2364 stored to memory using several move instructions. CONSTFUNDATA is
2365 a pointer which will be passed as argument in every CONSTFUN call.
2366 ALIGN is maximum alignment we can assume. Return nonzero if a
2367 call to store_by_pieces should succeed. */
2370 can_store_by_pieces (len, constfun, constfundata, align)
2371 unsigned HOST_WIDE_INT len;
2372 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2376 unsigned HOST_WIDE_INT max_size, l;
2377 HOST_WIDE_INT offset = 0;
2378 enum machine_mode mode, tmode;
2379 enum insn_code icode;
2383 if (! MOVE_BY_PIECES_P (len, align))
2386 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2387 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2388 align = MOVE_MAX * BITS_PER_UNIT;
2390 /* We would first store what we can in the largest integer mode, then go to
2391 successively smaller modes. */
2394 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2399 max_size = STORE_MAX_PIECES + 1;
2400 while (max_size > 1)
2402 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2403 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2404 if (GET_MODE_SIZE (tmode) < max_size)
2407 if (mode == VOIDmode)
2410 icode = mov_optab->handlers[(int) mode].insn_code;
2411 if (icode != CODE_FOR_nothing
2412 && align >= GET_MODE_ALIGNMENT (mode))
2414 unsigned int size = GET_MODE_SIZE (mode);
2421 cst = (*constfun) (constfundata, offset, mode);
2422 if (!LEGITIMATE_CONSTANT_P (cst))
2432 max_size = GET_MODE_SIZE (mode);
2435 /* The code above should have handled everything. */
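/* Illustration: the mode walk above amounts to a greedy widest-first
   decomposition of LEN into pieces no larger than STORE_MAX_PIECES.
   A minimal standalone sketch, assuming a hypothetical 8-byte maximum
   and power-of-two "modes" with sufficient alignment; kept out of the
   build with #if 0.  */
#if 0
#include <stdio.h>

#define MAX_PIECE 8		/* hypothetical stand-in for STORE_MAX_PIECES */

int
main (void)
{
  unsigned long len = 11;	/* bytes to store */
  unsigned size = MAX_PIECE;

  while (len > 0)
    {
      while (size > len)	/* step down to a narrower "mode" */
        size /= 2;
      len -= size;
      printf ("store %u byte(s), %lu left\n", size, len);
    }
  return 0;
}
#endif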
2443 /* Generate several move instructions to store LEN bytes generated by
2444 CONSTFUN to block TO (a MEM rtx with BLKmode). CONSTFUNDATA is a
2445 pointer which will be passed as argument in every CONSTFUN call.
2446 ALIGN is maximum alignment we can assume. */
2449 store_by_pieces (to, len, constfun, constfundata, align)
2451 unsigned HOST_WIDE_INT len;
2452 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2456 struct store_by_pieces data;
2458 if (! MOVE_BY_PIECES_P (len, align))
2460 to = protect_from_queue (to, 1);
2461 data.constfun = constfun;
2462 data.constfundata = constfundata;
2465 store_by_pieces_1 (&data, align);
2468 /* Generate several move instructions to clear LEN bytes of block TO
2469 (a MEM rtx with BLKmode). The caller must pass TO through protect_from_queue
2470 before calling. ALIGN is maximum alignment we can assume. */
2473 clear_by_pieces (to, len, align)
2475 unsigned HOST_WIDE_INT len;
2478 struct store_by_pieces data;
2480 data.constfun = clear_by_pieces_1;
2481 data.constfundata = NULL;
2484 store_by_pieces_1 (&data, align);
2487 /* Callback routine for clear_by_pieces.
2488 Return const0_rtx unconditionally. */
2491 clear_by_pieces_1 (data, offset, mode)
2492 PTR data ATTRIBUTE_UNUSED;
2493 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2494 enum machine_mode mode ATTRIBUTE_UNUSED;
2499 /* Subroutine of clear_by_pieces and store_by_pieces.
2500 Generate several move instructions to store LEN bytes of block TO
2501 (a MEM rtx with BLKmode). The caller must pass TO through protect_from_queue
2502 before calling. ALIGN is maximum alignment we can assume. */
2505 store_by_pieces_1 (data, align)
2506 struct store_by_pieces *data;
2509 rtx to_addr = XEXP (data->to, 0);
2510 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2511 enum machine_mode mode = VOIDmode, tmode;
2512 enum insn_code icode;
2515 data->to_addr = to_addr;
2517 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2518 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2520 data->explicit_inc_to = 0;
2522 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2524 data->offset = data->len;
2526 /* If storing requires more than two move insns,
2527 copy addresses to registers (to make displacements shorter)
2528 and use post-increment if available. */
2529 if (!data->autinc_to
2530 && move_by_pieces_ninsns (data->len, align) > 2)
2532 /* Determine the main mode we'll be using. */
2533 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2534 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2535 if (GET_MODE_SIZE (tmode) < max_size)
2538 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2540 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2541 data->autinc_to = 1;
2542 data->explicit_inc_to = -1;
2545 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2546 && ! data->autinc_to)
2548 data->to_addr = copy_addr_to_reg (to_addr);
2549 data->autinc_to = 1;
2550 data->explicit_inc_to = 1;
2553 if (! data->autinc_to && CONSTANT_P (to_addr))
2554 data->to_addr = copy_addr_to_reg (to_addr);
2557 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2558 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2559 align = MOVE_MAX * BITS_PER_UNIT;
2561 /* First store what we can in the largest integer mode, then go to
2562 successively smaller modes. */
2564 while (max_size > 1)
2566 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2567 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2568 if (GET_MODE_SIZE (tmode) < max_size)
2571 if (mode == VOIDmode)
2574 icode = mov_optab->handlers[(int) mode].insn_code;
2575 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2576 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2578 max_size = GET_MODE_SIZE (mode);
2581 /* The code above should have handled everything. */
2586 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2587 with move instructions for mode MODE. GENFUN is the gen_... function
2588 to make a move insn for that mode. DATA has all the other info. */
2591 store_by_pieces_2 (genfun, mode, data)
2592 rtx (*genfun) PARAMS ((rtx, ...));
2593 enum machine_mode mode;
2594 struct store_by_pieces *data;
2596 unsigned int size = GET_MODE_SIZE (mode);
2599 while (data->len >= size)
2602 data->offset -= size;
2604 if (data->autinc_to)
2605 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2608 to1 = adjust_address (data->to, mode, data->offset);
2610 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2611 emit_insn (gen_add2_insn (data->to_addr,
2612 GEN_INT (-(HOST_WIDE_INT) size)));
2614 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2615 emit_insn ((*genfun) (to1, cst));
2617 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2618 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2620 if (! data->reverse)
2621 data->offset += size;
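/* Illustration: the offset bookkeeping above runs the block backward
   when decrementing address modes are in use and forward otherwise.
   A minimal standalone sketch with hypothetical lengths and piece
   sizes; kept out of the build with #if 0.  */
#if 0
#include <stdio.h>

static void
walk (unsigned len, unsigned size, int reverse)
{
  unsigned offset = reverse ? len : 0;

  while (len >= size)
    {
      if (reverse)
        offset -= size;
      printf ("store %u bytes at offset %u\n", size, offset);
      if (! reverse)
        offset += size;
      len -= size;
    }
}

int
main (void)
{
  walk (8, 2, 0);		/* offsets 0, 2, 4, 6 */
  walk (8, 2, 1);		/* offsets 6, 4, 2, 0 */
  return 0;
}
#endif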
2627 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2628 its length in bytes. */
2630 static GTY(()) tree block_clear_fn;
2632 clear_storage (object, size)
2636 #ifdef TARGET_MEM_FUNCTIONS
2637 tree call_expr, arg_list;
2640 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2641 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2643 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2644 just move a zero. Otherwise, do this a piece at a time. */
2645 if (GET_MODE (object) != BLKmode
2646 && GET_CODE (size) == CONST_INT
2647 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2648 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2651 object = protect_from_queue (object, 1);
2652 size = protect_from_queue (size, 0);
2654 if (GET_CODE (size) == CONST_INT
2655 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2656 clear_by_pieces (object, INTVAL (size), align);
2659 /* Try the most limited insn first, because there's no point
2660 including more than one in the machine description unless
2661 the more limited one has some advantage. */
2663 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2664 enum machine_mode mode;
2666 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2667 mode = GET_MODE_WIDER_MODE (mode))
2669 enum insn_code code = clrstr_optab[(int) mode];
2670 insn_operand_predicate_fn pred;
2672 if (code != CODE_FOR_nothing
2673 /* We don't need MODE to be narrower than
2674 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2675 the mode mask, as it is returned by the macro, it will
2676 definitely be less than the actual mode mask. */
2677 && ((GET_CODE (size) == CONST_INT
2678 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2679 <= (GET_MODE_MASK (mode) >> 1)))
2680 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2681 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2682 || (*pred) (object, BLKmode))
2683 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2684 || (*pred) (opalign, VOIDmode)))
2687 rtx last = get_last_insn ();
2690 op1 = convert_to_mode (mode, size, 1);
2691 pred = insn_data[(int) code].operand[1].predicate;
2692 if (pred != 0 && ! (*pred) (op1, mode))
2693 op1 = copy_to_mode_reg (mode, op1);
2695 pat = GEN_FCN ((int) code) (object, op1, opalign);
2702 delete_insns_since (last);
2706 /* OBJECT or SIZE may have been passed through protect_from_queue.
2708 It is unsafe to save the value generated by protect_from_queue
2709 and reuse it later. Consider what happens if emit_queue is
2710 called before the return value from protect_from_queue is used.
2712 Expansion of the CALL_EXPR below will call emit_queue before
2713 we are finished emitting RTL for argument setup. So if we are
2714 not careful we could get the wrong value for an argument.
2716 To avoid this problem we go ahead and emit code to copy OBJECT
2717 and SIZE into new pseudos. We can then place those new pseudos
2718 into an RTL_EXPR and use them later, even after a call to
2721 Note this is not strictly needed for library calls since they
2722 do not call emit_queue before loading their arguments. However,
2723 we may need to have library calls call emit_queue in the future
2724 since failing to do so could cause problems for targets which
2725 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2726 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2728 #ifdef TARGET_MEM_FUNCTIONS
2729 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2731 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2732 TREE_UNSIGNED (integer_type_node));
2733 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2736 #ifdef TARGET_MEM_FUNCTIONS
2737 /* It is incorrect to use the libcall calling conventions to call
2738 memset in this context.
2740 This could be a user call to memset and the user may wish to
2741 examine the return value from memset.
2743 For targets where libcalls and normal calls have different
2744 conventions for returning pointers, we could end up generating
2747 So instead of using a libcall sequence we build up a suitable
2748 CALL_EXPR and expand the call in the normal fashion. */
2749 if (block_clear_fn == NULL_TREE)
2753 /* This was copied from except.c; I don't know whether all of this is
2754 necessary in this context or not. */
2755 block_clear_fn = get_identifier ("memset");
2756 fntype = build_pointer_type (void_type_node);
2757 fntype = build_function_type (fntype, NULL_TREE);
2758 block_clear_fn = build_decl (FUNCTION_DECL, block_clear_fn,
2760 DECL_EXTERNAL (block_clear_fn) = 1;
2761 TREE_PUBLIC (block_clear_fn) = 1;
2762 DECL_ARTIFICIAL (block_clear_fn) = 1;
2763 TREE_NOTHROW (block_clear_fn) = 1;
2764 make_decl_rtl (block_clear_fn, NULL);
2765 assemble_external (block_clear_fn);
2768 /* We need to make an argument list for the function call.
2770 memset has three arguments: the first is a void * address, the
2771 second an integer with the initialization value, and the last a
2772 size_t byte count for the copy. */
2774 = build_tree_list (NULL_TREE,
2775 make_tree (build_pointer_type (void_type_node),
2777 TREE_CHAIN (arg_list)
2778 = build_tree_list (NULL_TREE,
2779 make_tree (integer_type_node, const0_rtx));
2780 TREE_CHAIN (TREE_CHAIN (arg_list))
2781 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2782 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2784 /* Now we have to build up the CALL_EXPR itself. */
2785 call_expr = build1 (ADDR_EXPR,
2786 build_pointer_type (TREE_TYPE (block_clear_fn)),
2788 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_clear_fn)),
2789 call_expr, arg_list, NULL_TREE);
2790 TREE_SIDE_EFFECTS (call_expr) = 1;
2792 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2794 emit_library_call (bzero_libfunc, LCT_NORMAL,
2795 VOIDmode, 2, object, Pmode, size,
2796 TYPE_MODE (integer_type_node));
2799 /* If we are initializing a readonly value, show the above call
2800 clobbered it. Otherwise, a load from it may erroneously be
2801 hoisted from a loop. */
2802 if (RTX_UNCHANGING_P (object))
2803 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
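/* Illustration: stripped of the rtl machinery, clear_storage picks one
   of three strategies: a single zero store, an inline piece-at-a-time
   loop, or a call.  A minimal standalone sketch; the 16-byte cutoff is
   a hypothetical stand-in for the CLEAR_BY_PIECES_P decision, and the
   code is kept out of the build with #if 0.  */
#if 0
#include <stdio.h>
#include <string.h>

#define PIECES_CUTOFF 16	/* hypothetical CLEAR_BY_PIECES_P limit */

static void
clear (void *p, size_t n)
{
  if (n == sizeof (long))	/* "just move a zero" */
    {
      long zero = 0;
      memcpy (p, &zero, n);
      puts ("single store");
    }
  else if (n <= PIECES_CUTOFF)	/* clear_by_pieces */
    {
      unsigned char *q = p;
      while (n--)
        *q++ = 0;
      puts ("by pieces");
    }
  else				/* normal call to memset */
    {
      memset (p, 0, n);
      puts ("memset call");
    }
}

int
main (void)
{
  char buf[64];
  clear (buf, sizeof (long));
  clear (buf, 12);
  clear (buf, sizeof buf);
  return 0;
}
#endif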
2810 /* Generate code to copy Y into X.
2811 Both Y and X must have the same mode, except that
2812 Y can be a constant with VOIDmode.
2813 This mode cannot be BLKmode; use emit_block_move for that.
2815 Return the last instruction emitted. */
2818 emit_move_insn (x, y)
2821 enum machine_mode mode = GET_MODE (x);
2822 rtx y_cst = NULL_RTX;
2825 x = protect_from_queue (x, 1);
2826 y = protect_from_queue (y, 0);
2828 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2831 /* Never force constant_p_rtx to memory. */
2832 if (GET_CODE (y) == CONSTANT_P_RTX)
2834 else if (CONSTANT_P (y))
2837 && FLOAT_MODE_P (GET_MODE (x))
2838 && (last_insn = compress_float_constant (x, y)))
2841 if (!LEGITIMATE_CONSTANT_P (y))
2844 y = force_const_mem (mode, y);
2848 /* If X or Y are memory references, verify that their addresses are valid
2850 if (GET_CODE (x) == MEM
2851 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2852 && ! push_operand (x, GET_MODE (x)))
2854 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2855 x = validize_mem (x);
2857 if (GET_CODE (y) == MEM
2858 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2860 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2861 y = validize_mem (y);
2863 if (mode == BLKmode)
2866 last_insn = emit_move_insn_1 (x, y);
2868 if (y_cst && GET_CODE (x) == REG)
2869 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2874 /* Low level part of emit_move_insn.
2875 Called just like emit_move_insn, but assumes X and Y
2876 are basically valid. */
2879 emit_move_insn_1 (x, y)
2882 enum machine_mode mode = GET_MODE (x);
2883 enum machine_mode submode;
2884 enum mode_class class = GET_MODE_CLASS (mode);
2886 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2889 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2891 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2893 /* Expand complex moves by moving real part and imag part, if possible. */
2894 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2895 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2897 (class == MODE_COMPLEX_INT
2898 ? MODE_INT : MODE_FLOAT),
2900 && (mov_optab->handlers[(int) submode].insn_code
2901 != CODE_FOR_nothing))
2903 /* Don't split destination if it is a stack push. */
2904 int stack = push_operand (x, GET_MODE (x));
2906 #ifdef PUSH_ROUNDING
2907 /* In case we output to the stack, but the size is smaller than the machine
2908 can push exactly, we need to use move instructions. */
2910 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2911 != GET_MODE_SIZE (submode)))
2914 HOST_WIDE_INT offset1, offset2;
2916 /* Do not use anti_adjust_stack, since we don't want to update
2917 stack_pointer_delta. */
2918 temp = expand_binop (Pmode,
2919 #ifdef STACK_GROWS_DOWNWARD
2927 (GET_MODE_SIZE (GET_MODE (x)))),
2928 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2930 if (temp != stack_pointer_rtx)
2931 emit_move_insn (stack_pointer_rtx, temp);
2933 #ifdef STACK_GROWS_DOWNWARD
2935 offset2 = GET_MODE_SIZE (submode);
2937 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2938 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2939 + GET_MODE_SIZE (submode));
2942 emit_move_insn (change_address (x, submode,
2943 gen_rtx_PLUS (Pmode,
2945 GEN_INT (offset1))),
2946 gen_realpart (submode, y));
2947 emit_move_insn (change_address (x, submode,
2948 gen_rtx_PLUS (Pmode,
2950 GEN_INT (offset2))),
2951 gen_imagpart (submode, y));
2955 /* If this is a stack push, push the highpart first, so it
2956 will be in the argument order.
2958 In that case, change_address is used only to convert
2959 the mode, not to change the address. */
2962 /* Note that the real part always precedes the imag part in memory
2963 regardless of machine's endianness. */
2964 #ifdef STACK_GROWS_DOWNWARD
2965 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2966 (gen_rtx_MEM (submode, XEXP (x, 0)),
2967 gen_imagpart (submode, y)));
2968 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2969 (gen_rtx_MEM (submode, XEXP (x, 0)),
2970 gen_realpart (submode, y)));
2972 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2973 (gen_rtx_MEM (submode, XEXP (x, 0)),
2974 gen_realpart (submode, y)));
2975 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2976 (gen_rtx_MEM (submode, XEXP (x, 0)),
2977 gen_imagpart (submode, y)));
2982 rtx realpart_x, realpart_y;
2983 rtx imagpart_x, imagpart_y;
2985 /* If this is a complex value with each part being smaller than a
2986 word, the usual calling sequence will likely pack the pieces into
2987 a single register. Unfortunately, SUBREG of hard registers only
2988 deals in terms of words, so we have a problem converting input
2989 arguments to the CONCAT of two registers that is used elsewhere
2990 for complex values. If this is before reload, we can copy it into
2991 memory and reload. FIXME, we should see about using extract and
2992 insert on integer registers, but complex short and complex char
2993 variables should be rarely used. */
2994 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2995 && (reload_in_progress | reload_completed) == 0)
2998 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3000 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3002 if (packed_dest_p || packed_src_p)
3004 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3005 ? MODE_FLOAT : MODE_INT);
3007 enum machine_mode reg_mode
3008 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3010 if (reg_mode != BLKmode)
3012 rtx mem = assign_stack_temp (reg_mode,
3013 GET_MODE_SIZE (mode), 0);
3014 rtx cmem = adjust_address (mem, mode, 0);
3017 = N_("function using short complex types cannot be inline");
3021 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3023 emit_move_insn_1 (cmem, y);
3024 return emit_move_insn_1 (sreg, mem);
3028 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3030 emit_move_insn_1 (mem, sreg);
3031 return emit_move_insn_1 (x, cmem);
3037 realpart_x = gen_realpart (submode, x);
3038 realpart_y = gen_realpart (submode, y);
3039 imagpart_x = gen_imagpart (submode, x);
3040 imagpart_y = gen_imagpart (submode, y);
3042 /* Show the output dies here. This is necessary for SUBREGs
3043 of pseudos since we cannot track their lifetimes correctly;
3044 hard regs shouldn't appear here except as return values.
3045 We never want to emit such a clobber after reload. */
3047 && ! (reload_in_progress || reload_completed)
3048 && (GET_CODE (realpart_x) == SUBREG
3049 || GET_CODE (imagpart_x) == SUBREG))
3050 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3052 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3053 (realpart_x, realpart_y));
3054 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3055 (imagpart_x, imagpart_y));
3058 return get_last_insn ();
3061 /* This will handle any multi-word or full-word mode that lacks a move_insn
3062 pattern. However, you will get better code if you define such patterns,
3063 even if they must turn into multiple assembler instructions. */
3064 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3071 #ifdef PUSH_ROUNDING
3073 /* If X is a push on the stack, do the push now and replace
3074 X with a reference to the stack pointer. */
3075 if (push_operand (x, GET_MODE (x)))
3080 /* Do not use anti_adjust_stack, since we don't want to update
3081 stack_pointer_delta. */
3082 temp = expand_binop (Pmode,
3083 #ifdef STACK_GROWS_DOWNWARD
3091 (GET_MODE_SIZE (GET_MODE (x)))),
3092 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3094 if (temp != stack_pointer_rtx)
3095 emit_move_insn (stack_pointer_rtx, temp);
3097 code = GET_CODE (XEXP (x, 0));
3099 /* Just hope that small offsets off SP are OK. */
3100 if (code == POST_INC)
3101 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3102 GEN_INT (-((HOST_WIDE_INT)
3103 GET_MODE_SIZE (GET_MODE (x)))));
3104 else if (code == POST_DEC)
3105 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3106 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3108 temp = stack_pointer_rtx;
3110 x = change_address (x, VOIDmode, temp);
3114 /* If we are in reload, see if either operand is a MEM whose address
3115 is scheduled for replacement. */
3116 if (reload_in_progress && GET_CODE (x) == MEM
3117 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3118 x = replace_equiv_address_nv (x, inner);
3119 if (reload_in_progress && GET_CODE (y) == MEM
3120 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3121 y = replace_equiv_address_nv (y, inner);
3127 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3130 rtx xpart = operand_subword (x, i, 1, mode);
3131 rtx ypart = operand_subword (y, i, 1, mode);
3133 /* If we can't get a part of Y, put Y into memory if it is a
3134 constant. Otherwise, force it into a register. If we still
3135 can't get a part of Y, abort. */
3136 if (ypart == 0 && CONSTANT_P (y))
3138 y = force_const_mem (mode, y);
3139 ypart = operand_subword (y, i, 1, mode);
3141 else if (ypart == 0)
3142 ypart = operand_subword_force (y, i, mode);
3144 if (xpart == 0 || ypart == 0)
3147 need_clobber |= (GET_CODE (xpart) == SUBREG);
3149 last_insn = emit_move_insn (xpart, ypart);
3155 /* Show the output dies here. This is necessary for SUBREGs
3156 of pseudos since we cannot track their lifetimes correctly;
3157 hard regs shouldn't appear here except as return values.
3158 We never want to emit such a clobber after reload. */
3160 && ! (reload_in_progress || reload_completed)
3161 && need_clobber != 0)
3162 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
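/* Illustration: the fallback above is an ordinary word-at-a-time copy,
   including the final partial word.  A minimal standalone sketch with
   a hypothetical 4-byte word; kept out of the build with #if 0.  */
#if 0
#include <stdio.h>
#include <string.h>

#define MY_UNITS_PER_WORD 4	/* hypothetical word size */

int
main (void)
{
  unsigned char src[10] = "abcdefghi";	/* multi-word object */
  unsigned char dst[10];
  unsigned size = sizeof src;
  unsigned nwords = (size + MY_UNITS_PER_WORD - 1) / MY_UNITS_PER_WORD;
  unsigned i, off, n;

  for (i = 0; i < nwords; i++)
    {
      off = i * MY_UNITS_PER_WORD;
      n = size - off < MY_UNITS_PER_WORD ? size - off : MY_UNITS_PER_WORD;
      memcpy (dst + off, src + off, n);	/* one "subword" move */
    }
  printf ("%s\n", dst);
  return 0;
}
#endif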
3172 /* If Y is representable exactly in a narrower mode, and the target can
3173 perform the extension directly from constant or memory, then emit the
3174 move as an extension. */
3177 compress_float_constant (x, y)
3180 enum machine_mode dstmode = GET_MODE (x);
3181 enum machine_mode orig_srcmode = GET_MODE (y);
3182 enum machine_mode srcmode;
3185 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3187 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3188 srcmode != orig_srcmode;
3189 srcmode = GET_MODE_WIDER_MODE (srcmode))
3192 rtx trunc_y, last_insn;
3194 /* Skip if the target can't extend this way. */
3195 ic = can_extend_p (dstmode, srcmode, 0);
3196 if (ic == CODE_FOR_nothing)
3199 /* Skip if the narrowed value isn't exact. */
3200 if (! exact_real_truncate (srcmode, &r))
3203 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3205 if (LEGITIMATE_CONSTANT_P (trunc_y))
3207 /* Skip if the target needs extra instructions to perform the conversion. */
3209 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3212 else if (float_extend_from_mem[dstmode][srcmode])
3213 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3217 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3218 last_insn = get_last_insn ();
3220 if (GET_CODE (x) == REG)
3221 REG_NOTES (last_insn)
3222 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
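/* Illustration: the exactness test above asks whether the constant
   survives a round trip through the narrower mode.  The same idea for
   DFmode vs. SFmode, as a minimal standalone cast-and-compare sketch
   (not the compiler's REAL_VALUE machinery); kept out of the build
   with #if 0.  */
#if 0
#include <stdio.h>

static int
exact_in_float (double d)
{
  /* Analogous to exact_real_truncate: no bits may be lost.  */
  return (double) (float) d == d;
}

int
main (void)
{
  printf ("1.5: %d\n", exact_in_float (1.5));	/* 1: load SF, extend */
  printf ("0.1: %d\n", exact_in_float (0.1));	/* 0: keep DF constant */
  return 0;
}
#endif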
3230 /* Pushing data onto the stack. */
3232 /* Push a block of length SIZE (perhaps variable)
3233 and return an rtx to address the beginning of the block.
3234 Note that it is not possible for the value returned to be a QUEUED.
3235 The value may be virtual_outgoing_args_rtx.
3237 EXTRA is the number of bytes of padding to push in addition to SIZE.
3238 BELOW nonzero means this padding comes at low addresses;
3239 otherwise, the padding comes at high addresses. */
3242 push_block (size, extra, below)
3248 size = convert_modes (Pmode, ptr_mode, size, 1);
3249 if (CONSTANT_P (size))
3250 anti_adjust_stack (plus_constant (size, extra));
3251 else if (GET_CODE (size) == REG && extra == 0)
3252 anti_adjust_stack (size);
3255 temp = copy_to_mode_reg (Pmode, size);
3257 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3258 temp, 0, OPTAB_LIB_WIDEN);
3259 anti_adjust_stack (temp);
3262 #ifndef STACK_GROWS_DOWNWARD
3268 temp = virtual_outgoing_args_rtx;
3269 if (extra != 0 && below)
3270 temp = plus_constant (temp, extra);
3274 if (GET_CODE (size) == CONST_INT)
3275 temp = plus_constant (virtual_outgoing_args_rtx,
3276 -INTVAL (size) - (below ? 0 : extra));
3277 else if (extra != 0 && !below)
3278 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3279 negate_rtx (Pmode, plus_constant (size, extra)));
3281 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3282 negate_rtx (Pmode, size));
3285 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3288 #ifdef PUSH_ROUNDING
3290 /* Emit single push insn. */
3293 emit_single_push_insn (mode, x, type)
3295 enum machine_mode mode;
3299 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3301 enum insn_code icode;
3302 insn_operand_predicate_fn pred;
3304 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3305 /* If there is a push pattern, use it. Otherwise try the old way of
3306 throwing a MEM representing the push operation to the move expander. */
3307 icode = push_optab->handlers[(int) mode].insn_code;
3308 if (icode != CODE_FOR_nothing)
3310 if (((pred = insn_data[(int) icode].operand[0].predicate)
3311 && !((*pred) (x, mode))))
3312 x = force_reg (mode, x);
3313 emit_insn (GEN_FCN (icode) (x));
3316 if (GET_MODE_SIZE (mode) == rounded_size)
3317 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3320 #ifdef STACK_GROWS_DOWNWARD
3321 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3322 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3324 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3325 GEN_INT (rounded_size));
3327 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3330 dest = gen_rtx_MEM (mode, dest_addr);
3334 set_mem_attributes (dest, type, 1);
3336 if (flag_optimize_sibling_calls)
3337 /* Function incoming arguments may overlap with sibling call
3338 outgoing arguments and we cannot allow reordering of reads
3339 from function arguments with stores to outgoing arguments
3340 of sibling calls. */
3341 set_mem_alias_set (dest, 0);
3343 emit_move_insn (dest, x);
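/* Illustration: PUSH_ROUNDING rounds each push up to the target's stack
   granularity; only when the rounded size differs from the mode size
   does the code above fall back to the explicit PRE_MODIFY address.
   A minimal standalone sketch with a hypothetical 4-byte granularity;
   kept out of the build with #if 0.  */
#if 0
#include <stdio.h>

#define MY_PUSH_ROUNDING(BYTES) (((BYTES) + 3) & ~3)	/* hypothetical */

int
main (void)
{
  unsigned sizes[] = { 1, 2, 4, 6, 8 };
  unsigned i, r;

  for (i = 0; i < sizeof sizes / sizeof sizes[0]; i++)
    {
      r = MY_PUSH_ROUNDING (sizes[i]);
      printf ("push %u byte(s): sp adjusted by %u%s\n", sizes[i], r,
              r == sizes[i] ? "" : " (PRE_MODIFY form needed)");
    }
  return 0;
}
#endif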
3347 /* Generate code to push X onto the stack, assuming it has mode MODE and
3349 type TYPE. MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3351 SIZE is an rtx for the size of data to be copied (in bytes),
3352 needed only if X is BLKmode.
3354 ALIGN (in bits) is maximum alignment we can assume.
3356 If PARTIAL and REG are both nonzero, then copy that many of the first
3357 words of X into registers starting with REG, and push the rest of X.
3358 The amount of space pushed is decreased by PARTIAL words,
3359 rounded *down* to a multiple of PARM_BOUNDARY.
3360 REG must be a hard register in this case.
3361 If REG is zero but PARTIAL is not, take all other actions for an
3362 argument partially in registers, but do not actually load any registers.
3365 EXTRA is the amount in bytes of extra space to leave next to this arg.
3366 This is ignored if an argument block has already been allocated.
3368 On a machine that lacks real push insns, ARGS_ADDR is the address of
3369 the bottom of the argument block for this call. We use indexing off there
3370 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3371 argument block has not been preallocated.
3373 ARGS_SO_FAR is the size of args previously pushed for this call.
3375 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3376 for arguments passed in registers. If nonzero, it will be the number
3377 of bytes required. */
3380 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3381 args_addr, args_so_far, reg_parm_stack_space,
3384 enum machine_mode mode;
3393 int reg_parm_stack_space;
3397 enum direction stack_direction
3398 #ifdef STACK_GROWS_DOWNWARD
3404 /* Decide where to pad the argument: `downward' for below,
3405 `upward' for above, or `none' for don't pad it.
3406 Default is below for small data on big-endian machines; else above. */
3407 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3409 /* Invert direction if stack is post-decrement.
3411 if (STACK_PUSH_CODE == POST_DEC)
3412 if (where_pad != none)
3413 where_pad = (where_pad == downward ? upward : downward);
3415 xinner = x = protect_from_queue (x, 0);
3417 if (mode == BLKmode)
3419 /* Copy a block into the stack, entirely or partially. */
3422 int used = partial * UNITS_PER_WORD;
3423 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3431 /* USED is now the # of bytes we need not copy to the stack
3432 because registers will take care of them. */
3435 xinner = adjust_address (xinner, BLKmode, used);
3437 /* If the partial register-part of the arg counts in its stack size,
3438 skip the part of stack space corresponding to the registers.
3439 Otherwise, start copying to the beginning of the stack space,
3440 by setting SKIP to 0. */
3441 skip = (reg_parm_stack_space == 0) ? 0 : used;
3443 #ifdef PUSH_ROUNDING
3444 /* Do it with several push insns if that doesn't take lots of insns
3445 and if there is no difficulty with push insns that skip bytes
3446 on the stack for alignment purposes. */
3449 && GET_CODE (size) == CONST_INT
3451 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3452 /* Here we avoid the case of a structure whose weak alignment
3453 forces many pushes of a small amount of data,
3454 and such small pushes do rounding that causes trouble. */
3455 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3456 || align >= BIGGEST_ALIGNMENT
3457 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3458 == (align / BITS_PER_UNIT)))
3459 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3461 /* Push padding now if padding above and stack grows down,
3462 or if padding below and stack grows up.
3463 But if space already allocated, this has already been done. */
3464 if (extra && args_addr == 0
3465 && where_pad != none && where_pad != stack_direction)
3466 anti_adjust_stack (GEN_INT (extra));
3468 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3471 #endif /* PUSH_ROUNDING */
3475 /* Otherwise make space on the stack and copy the data
3476 to the address of that space. */
3478 /* Deduct words put into registers from the size we must copy. */
3481 if (GET_CODE (size) == CONST_INT)
3482 size = GEN_INT (INTVAL (size) - used);
3484 size = expand_binop (GET_MODE (size), sub_optab, size,
3485 GEN_INT (used), NULL_RTX, 0,
3489 /* Get the address of the stack space.
3490 In this case, we do not deal with EXTRA separately.
3491 A single stack adjust will do. */
3494 temp = push_block (size, extra, where_pad == downward);
3497 else if (GET_CODE (args_so_far) == CONST_INT)
3498 temp = memory_address (BLKmode,
3499 plus_constant (args_addr,
3500 skip + INTVAL (args_so_far)));
3502 temp = memory_address (BLKmode,
3503 plus_constant (gen_rtx_PLUS (Pmode,
3507 target = gen_rtx_MEM (BLKmode, temp);
3511 set_mem_attributes (target, type, 1);
3512 /* Function incoming arguments may overlap with sibling call
3513 outgoing arguments and we cannot allow reordering of reads
3514 from function arguments with stores to outgoing arguments
3515 of sibling calls. */
3516 set_mem_alias_set (target, 0);
3519 set_mem_align (target, align);
3521 /* TEMP is the address of the block. Copy the data there. */
3522 if (GET_CODE (size) == CONST_INT
3523 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3525 move_by_pieces (target, xinner, INTVAL (size), align);
3530 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3531 enum machine_mode mode;
3533 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3535 mode = GET_MODE_WIDER_MODE (mode))
3537 enum insn_code code = movstr_optab[(int) mode];
3538 insn_operand_predicate_fn pred;
3540 if (code != CODE_FOR_nothing
3541 && ((GET_CODE (size) == CONST_INT
3542 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3543 <= (GET_MODE_MASK (mode) >> 1)))
3544 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3545 && (!(pred = insn_data[(int) code].operand[0].predicate)
3546 || ((*pred) (target, BLKmode)))
3547 && (!(pred = insn_data[(int) code].operand[1].predicate)
3548 || ((*pred) (xinner, BLKmode)))
3549 && (!(pred = insn_data[(int) code].operand[3].predicate)
3550 || ((*pred) (opalign, VOIDmode))))
3552 rtx op2 = convert_to_mode (mode, size, 1);
3553 rtx last = get_last_insn ();
3556 pred = insn_data[(int) code].operand[2].predicate;
3557 if (pred != 0 && ! (*pred) (op2, mode))
3558 op2 = copy_to_mode_reg (mode, op2);
3560 pat = GEN_FCN ((int) code) (target, xinner,
3568 delete_insns_since (last);
3573 if (!ACCUMULATE_OUTGOING_ARGS)
3575 /* If the source is referenced relative to the stack pointer,
3576 copy it to another register to stabilize it. We do not need
3577 to do this if we know that we won't be changing sp. */
3579 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3580 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3581 temp = copy_to_reg (temp);
3584 /* Make inhibit_defer_pop nonzero around the library call
3585 to force it to pop the bcopy-arguments right away. */
3587 #ifdef TARGET_MEM_FUNCTIONS
3588 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3589 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3590 convert_to_mode (TYPE_MODE (sizetype),
3591 size, TREE_UNSIGNED (sizetype)),
3592 TYPE_MODE (sizetype));
3594 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3595 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3596 convert_to_mode (TYPE_MODE (integer_type_node),
3598 TREE_UNSIGNED (integer_type_node)),
3599 TYPE_MODE (integer_type_node));
3604 else if (partial > 0)
3606 /* Scalar partly in registers. */
3608 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3611 /* # words of start of argument
3612 that we must make space for but need not store. */
3613 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3614 int args_offset = INTVAL (args_so_far);
3617 /* Push padding now if padding above and stack grows down,
3618 or if padding below and stack grows up.
3619 But if space already allocated, this has already been done. */
3620 if (extra && args_addr == 0
3621 && where_pad != none && where_pad != stack_direction)
3622 anti_adjust_stack (GEN_INT (extra));
3624 /* If we make space by pushing it, we might as well push
3625 the real data. Otherwise, we can leave OFFSET nonzero
3626 and leave the space uninitialized. */
3630 /* Now NOT_STACK gets the number of words that we don't need to
3631 allocate on the stack. */
3632 not_stack = partial - offset;
3634 /* If the partial register-part of the arg counts in its stack size,
3635 skip the part of stack space corresponding to the registers.
3636 Otherwise, start copying to the beginning of the stack space,
3637 by setting SKIP to 0. */
3638 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3640 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3641 x = validize_mem (force_const_mem (mode, x));
3643 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3644 SUBREGs of such registers are not allowed. */
3645 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3646 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3647 x = copy_to_reg (x);
3649 /* Loop over all the words allocated on the stack for this arg. */
3650 /* We can do it by words, because any scalar bigger than a word
3651 has a size that is a multiple of a word. */
3652 #ifndef PUSH_ARGS_REVERSED
3653 for (i = not_stack; i < size; i++)
3655 for (i = size - 1; i >= not_stack; i--)
3657 if (i >= not_stack + offset)
3658 emit_push_insn (operand_subword_force (x, i, mode),
3659 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3661 GEN_INT (args_offset + ((i - not_stack + skip)
3663 reg_parm_stack_space, alignment_pad);
3668 rtx target = NULL_RTX;
3671 /* Push padding now if padding above and stack grows down,
3672 or if padding below and stack grows up.
3673 But if space already allocated, this has already been done. */
3674 if (extra && args_addr == 0
3675 && where_pad != none && where_pad != stack_direction)
3676 anti_adjust_stack (GEN_INT (extra));
3678 #ifdef PUSH_ROUNDING
3679 if (args_addr == 0 && PUSH_ARGS)
3680 emit_single_push_insn (mode, x, type);
3684 if (GET_CODE (args_so_far) == CONST_INT)
3686 = memory_address (mode,
3687 plus_constant (args_addr,
3688 INTVAL (args_so_far)));
3690 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3693 dest = gen_rtx_MEM (mode, addr);
3696 set_mem_attributes (dest, type, 1);
3697 /* Function incoming arguments may overlap with sibling call
3698 outgoing arguments and we cannot allow reordering of reads
3699 from function arguments with stores to outgoing arguments
3700 of sibling calls. */
3701 set_mem_alias_set (dest, 0);
3704 emit_move_insn (dest, x);
3710 /* If part should go in registers, copy that part
3711 into the appropriate registers. Do this now, at the end,
3712 since mem-to-mem copies above may do function calls. */
3713 if (partial > 0 && reg != 0)
3715 /* Handle calls that pass values in multiple non-contiguous locations.
3716 The Irix 6 ABI has examples of this. */
3717 if (GET_CODE (reg) == PARALLEL)
3718 emit_group_load (reg, x, -1); /* ??? size? */
3720 move_block_to_reg (REGNO (reg), x, partial, mode);
3723 if (extra && args_addr == 0 && where_pad == stack_direction)
3724 anti_adjust_stack (GEN_INT (extra));
3726 if (alignment_pad && args_addr == 0)
3727 anti_adjust_stack (alignment_pad);
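/* Illustration: in the scalar-partly-in-registers case above, PARTIAL
   words travel in registers, OFFSET of them still own stack space, and
   only words at or above NOT_STACK + OFFSET are actually pushed.  A
   minimal standalone sketch of the index arithmetic with hypothetical
   counts; kept out of the build with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int size = 4;			/* argument size in words */
  int partial = 2;		/* words passed in registers */
  int offset = 1;		/* register words that keep stack space */
  int not_stack = partial - offset;
  int i;

  for (i = size - 1; i >= not_stack; i--)	/* PUSH_ARGS_REVERSED order */
    {
      if (i >= not_stack + offset)
        printf ("push word %d\n", i);
      else
        printf ("word %d: space reserved, not stored\n", i);
    }
  return 0;
}
#endif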
3730 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3738 /* Only registers can be subtargets. */
3739 || GET_CODE (x) != REG
3740 /* If the register is readonly, it can't be set more than once. */
3741 || RTX_UNCHANGING_P (x)
3742 /* Don't use hard regs to avoid extending their life. */
3743 || REGNO (x) < FIRST_PSEUDO_REGISTER
3744 /* Avoid subtargets inside loops,
3745 since they hide some invariant expressions. */
3746 || preserve_subexpressions_p ())
3750 /* Expand an assignment that stores the value of FROM into TO.
3751 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3752 (This may contain a QUEUED rtx;
3753 if the value is constant, this rtx is a constant.)
3754 Otherwise, the returned value is NULL_RTX.
3756 SUGGEST_REG is no longer actually used.
3757 It used to mean: copy the value through a register
3758 and return that register, if that is possible.
3759 We now use WANT_VALUE to decide whether to do this. */
3762 expand_assignment (to, from, want_value, suggest_reg)
3765 int suggest_reg ATTRIBUTE_UNUSED;
3770 /* Don't crash if the lhs of the assignment was erroneous. */
3772 if (TREE_CODE (to) == ERROR_MARK)
3774 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3775 return want_value ? result : NULL_RTX;
3778 /* Assignment of a structure component needs special treatment
3779 if the structure component's rtx is not simply a MEM.
3780 Assignment of an array element at a constant index, and assignment of
3781 an array element in an unaligned packed structure field, have the same problem. */
3784 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3785 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3787 enum machine_mode mode1;
3788 HOST_WIDE_INT bitsize, bitpos;
3796 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3797 &unsignedp, &volatilep);
3799 /* If we are going to use store_bit_field and extract_bit_field,
3800 make sure to_rtx will be safe for multiple use. */
3802 if (mode1 == VOIDmode && want_value)
3803 tem = stabilize_reference (tem);
3805 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3809 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3811 if (GET_CODE (to_rtx) != MEM)
3814 #ifdef POINTERS_EXTEND_UNSIGNED
3815 if (GET_MODE (offset_rtx) != Pmode)
3816 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3818 if (GET_MODE (offset_rtx) != ptr_mode)
3819 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3822 /* A constant address in TO_RTX can have VOIDmode; we must not try
3823 to call force_reg for that case, so avoid it. */
3824 if (GET_CODE (to_rtx) == MEM
3825 && GET_MODE (to_rtx) == BLKmode
3826 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3828 && (bitpos % bitsize) == 0
3829 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3830 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3832 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3836 to_rtx = offset_address (to_rtx, offset_rtx,
3837 highest_pow2_factor_for_type (TREE_TYPE (to),
3841 if (GET_CODE (to_rtx) == MEM)
3843 /* If the field is at offset zero, we could have been given the
3844 DECL_RTX of the parent struct. Don't munge it. */
3845 to_rtx = shallow_copy_rtx (to_rtx);
3847 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3850 /* Deal with volatile and readonly fields. The former is only done
3851 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3852 if (volatilep && GET_CODE (to_rtx) == MEM)
3854 if (to_rtx == orig_to_rtx)
3855 to_rtx = copy_rtx (to_rtx);
3856 MEM_VOLATILE_P (to_rtx) = 1;
3859 if (TREE_CODE (to) == COMPONENT_REF
3860 && TREE_READONLY (TREE_OPERAND (to, 1)))
3862 if (to_rtx == orig_to_rtx)
3863 to_rtx = copy_rtx (to_rtx);
3864 RTX_UNCHANGING_P (to_rtx) = 1;
3867 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3869 if (to_rtx == orig_to_rtx)
3870 to_rtx = copy_rtx (to_rtx);
3871 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3874 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3876 /* Spurious cast for HPUX compiler. */
3877 ? ((enum machine_mode)
3878 TYPE_MODE (TREE_TYPE (to)))
3880 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3882 preserve_temp_slots (result);
3886 /* If the value is meaningful, convert RESULT to the proper mode.
3887 Otherwise, return nothing. */
3888 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3889 TYPE_MODE (TREE_TYPE (from)),
3891 TREE_UNSIGNED (TREE_TYPE (to)))
3895 /* If the rhs is a function call and its value is not an aggregate,
3896 call the function before we start to compute the lhs.
3897 This is needed for correct code for cases such as
3898 val = setjmp (buf) on machines where reference to val
3899 requires loading up part of an address in a separate insn.
3901 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3902 since it might be a promoted variable where the zero- or sign- extension
3903 needs to be done. Handling this in the normal way is safe because no
3904 computation is done before the call. */
3905 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3906 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3907 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3908 && GET_CODE (DECL_RTL (to)) == REG))
3913 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3915 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3917 /* Handle calls that return values in multiple non-contiguous locations.
3918 The Irix 6 ABI has examples of this. */
3919 if (GET_CODE (to_rtx) == PARALLEL)
3920 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3921 else if (GET_MODE (to_rtx) == BLKmode)
3922 emit_block_move (to_rtx, value, expr_size (from));
3925 #ifdef POINTERS_EXTEND_UNSIGNED
3926 if (POINTER_TYPE_P (TREE_TYPE (to))
3927 && GET_MODE (to_rtx) != GET_MODE (value))
3928 value = convert_memory_address (GET_MODE (to_rtx), value);
3930 emit_move_insn (to_rtx, value);
3932 preserve_temp_slots (to_rtx);
3935 return want_value ? to_rtx : NULL_RTX;
3938 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3939 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3942 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3944 /* Don't move directly into a return register. */
3945 if (TREE_CODE (to) == RESULT_DECL
3946 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3951 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3953 if (GET_CODE (to_rtx) == PARALLEL)
3954 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3956 emit_move_insn (to_rtx, temp);
3958 preserve_temp_slots (to_rtx);
3961 return want_value ? to_rtx : NULL_RTX;
3964 /* In case we are returning the contents of an object which overlaps
3965 the place the value is being stored, use a safe function when copying
3966 a value through a pointer into a structure value return block. */
3967 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3968 && current_function_returns_struct
3969 && !current_function_returns_pcc_struct)
3974 size = expr_size (from);
3975 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3977 #ifdef TARGET_MEM_FUNCTIONS
3978 emit_library_call (memmove_libfunc, LCT_NORMAL,
3979 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3980 XEXP (from_rtx, 0), Pmode,
3981 convert_to_mode (TYPE_MODE (sizetype),
3982 size, TREE_UNSIGNED (sizetype)),
3983 TYPE_MODE (sizetype));
3985 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3986 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3987 XEXP (to_rtx, 0), Pmode,
3988 convert_to_mode (TYPE_MODE (integer_type_node),
3989 size, TREE_UNSIGNED (integer_type_node)),
3990 TYPE_MODE (integer_type_node));
3993 preserve_temp_slots (to_rtx);
3996 return want_value ? to_rtx : NULL_RTX;
3999 /* Compute FROM and store the value in the rtx we got. */
4002 result = store_expr (from, to_rtx, want_value);
4003 preserve_temp_slots (result);
4006 return want_value ? result : NULL_RTX;
4009 /* Generate code for computing expression EXP,
4010 and storing the value into TARGET.
4011 TARGET may contain a QUEUED rtx.
4013 If WANT_VALUE is nonzero, return a copy of the value
4014 not in TARGET, so that we can be sure to use the proper
4015 value in a containing expression even if TARGET has something
4016 else stored in it. If possible, we copy the value through a pseudo
4017 and return that pseudo. Or, if the value is constant, we try to
4018 return the constant. In some cases, we return a pseudo
4019 copied *from* TARGET.
4021 If the mode is BLKmode then we may return TARGET itself.
4022 It turns out that in BLKmode this doesn't cause a problem,
4023 because C has no operators that could combine two different
4024 assignments into the same BLKmode object with different values
4025 with no sequence point. Will other languages need this to be more careful?
4028 If WANT_VALUE is 0, we return NULL, to make sure
4029 to catch quickly any cases where the caller uses the value
4030 and fails to set WANT_VALUE. */
4033 store_expr (exp, target, want_value)
4039 int dont_return_target = 0;
4040 int dont_store_target = 0;
4042 if (TREE_CODE (exp) == COMPOUND_EXPR)
4044 /* Perform the first part of the compound expression, then assign from the second part. */
4046 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4048 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4050 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4052 /* For conditional expression, get safe form of the target. Then
4053 test the condition, doing the appropriate assignment on either
4054 side. This avoids the creation of unnecessary temporaries.
4055 For non-BLKmode, it is more efficient not to do this. */
4057 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4060 target = protect_from_queue (target, 1);
4062 do_pending_stack_adjust ();
4064 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4065 start_cleanup_deferral ();
4066 store_expr (TREE_OPERAND (exp, 1), target, 0);
4067 end_cleanup_deferral ();
4069 emit_jump_insn (gen_jump (lab2));
4072 start_cleanup_deferral ();
4073 store_expr (TREE_OPERAND (exp, 2), target, 0);
4074 end_cleanup_deferral ();
4079 return want_value ? target : NULL_RTX;
4081 else if (queued_subexp_p (target))
4082 /* If target contains a postincrement, let's not risk
4083 using it as the place to generate the rhs. */
4085 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4087 /* Expand EXP into a new pseudo. */
4088 temp = gen_reg_rtx (GET_MODE (target));
4089 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4092 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4094 /* If target is volatile, ANSI requires accessing the value
4095 *from* the target, if it is accessed. So make that happen.
4096 In no case return the target itself. */
4097 if (! MEM_VOLATILE_P (target) && want_value)
4098 dont_return_target = 1;
4100 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4101 && GET_MODE (target) != BLKmode)
4102 /* If target is in memory and caller wants value in a register instead,
4103 arrange that. Pass TARGET as target for expand_expr so that,
4104 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4105 We know expand_expr will not use the target in that case.
4106 Don't do this if TARGET is volatile because we are supposed
4107 to write it and then read it. */
4109 temp = expand_expr (exp, target, GET_MODE (target), 0);
4110 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4112 /* If TEMP is already in the desired TARGET, only copy it from
4113 memory and don't store it there again. */
4115 || (rtx_equal_p (temp, target)
4116 && ! side_effects_p (temp) && ! side_effects_p (target)))
4117 dont_store_target = 1;
4118 temp = copy_to_reg (temp);
4120 dont_return_target = 1;
4122 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4123 /* If this is a scalar in a register that is stored in a wider mode
4124 than the declared mode, compute the result into its declared mode
4125 and then convert to the wider mode. Our value is the computed expression. */
4128 rtx inner_target = 0;
4130 /* If we don't want a value, we can do the conversion inside EXP,
4131 which will often result in some optimizations. Do the conversion
4132 in two steps: first change the signedness, if needed, then
4133 the extend. But don't do this if the type of EXP is a subtype
4134 of something else since then the conversion might involve
4135 more than just converting modes. */
4136 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4137 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4139 if (TREE_UNSIGNED (TREE_TYPE (exp))
4140 != SUBREG_PROMOTED_UNSIGNED_P (target))
4142 ((*lang_hooks.types.signed_or_unsigned_type)
4143 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4145 exp = convert ((*lang_hooks.types.type_for_mode)
4146 (GET_MODE (SUBREG_REG (target)),
4147 SUBREG_PROMOTED_UNSIGNED_P (target)),
4150 inner_target = SUBREG_REG (target);
4153 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4155 /* If TEMP is a volatile MEM and we want a result value, make
4156 the access now so it gets done only once. Likewise if
4157 it contains TARGET. */
4158 if (GET_CODE (temp) == MEM && want_value
4159 && (MEM_VOLATILE_P (temp)
4160 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4161 temp = copy_to_reg (temp);
4163 /* If TEMP is a VOIDmode constant, use convert_modes to make
4164 sure that we properly convert it. */
4165 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4167 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4168 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4169 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4170 GET_MODE (target), temp,
4171 SUBREG_PROMOTED_UNSIGNED_P (target));
4174 convert_move (SUBREG_REG (target), temp,
4175 SUBREG_PROMOTED_UNSIGNED_P (target));
4177 /* If we promoted a constant, change the mode back down to match
4178 target. Otherwise, the caller might get confused by a result whose
4179 mode is larger than expected. */
4181 if (want_value && GET_MODE (temp) != GET_MODE (target))
4183 if (GET_MODE (temp) != VOIDmode)
4185 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4186 SUBREG_PROMOTED_VAR_P (temp) = 1;
4187 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4188 SUBREG_PROMOTED_UNSIGNED_P (target));
4191 temp = convert_modes (GET_MODE (target),
4192 GET_MODE (SUBREG_REG (target)),
4193 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4196 return want_value ? temp : NULL_RTX;
4200 temp = expand_expr (exp, target, GET_MODE (target), 0);
4201 /* Return TARGET if it's a specified hardware register.
4202 If TARGET is a volatile mem ref, either return TARGET
4203 or return a reg copied *from* TARGET; ANSI requires this.
4205 Otherwise, if TEMP is not TARGET, return TEMP
4206 if it is constant (for efficiency),
4207 or if we really want the correct value. */
4208 if (!(target && GET_CODE (target) == REG
4209 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4210 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4211 && ! rtx_equal_p (temp, target)
4212 && (CONSTANT_P (temp) || want_value))
4213 dont_return_target = 1;
4216 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4217 the same as that of TARGET, adjust the constant. This is needed, for
4218 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4220 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4221 && TREE_CODE (exp) != ERROR_MARK
4222 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4223 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4224 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4226 /* If value was not generated in the target, store it there.
4227 Convert the value to TARGET's type first if necessary.
4228 If TEMP and TARGET compare equal according to rtx_equal_p, but
4229 one or both of them are volatile memory refs, we have to distinguish
4231 - expand_expr has used TARGET. In this case, we must not generate
4232 another copy. This can be detected by TARGET being equal according to ==.
4234 - expand_expr has not used TARGET - that means that the source just
4235 happens to have the same RTX form. Since temp will have been created
4236 by expand_expr, it will compare unequal according to == .
4237 We must generate a copy in this case, to reach the correct number
4238 of volatile memory references. */
4240 if ((! rtx_equal_p (temp, target)
4241 || (temp != target && (side_effects_p (temp)
4242 || side_effects_p (target))))
4243 && TREE_CODE (exp) != ERROR_MARK
4244 /* If there's nothing to copy, don't bother. */
4245 && expr_size (exp) != const0_rtx
4246 && ! dont_store_target
4247 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4248 but TARGET is not a valid memory reference, TEMP will differ
4249 from TARGET although it is really the same location. */
4250 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4251 || target != DECL_RTL_IF_SET (exp)))
4253 target = protect_from_queue (target, 1);
4254 if (GET_MODE (temp) != GET_MODE (target)
4255 && GET_MODE (temp) != VOIDmode)
4257 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4258 if (dont_return_target)
4260 /* In this case, we will return TEMP,
4261 so make sure it has the proper mode.
4262 But don't forget to store the value into TARGET. */
4263 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4264 emit_move_insn (target, temp);
4267 convert_move (target, temp, unsignedp);
4270 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4272 /* Handle copying a string constant into an array. The string
4273 constant may be shorter than the array. So copy just the string's
4274 actual length, and clear the rest. First get the size of the data
4275 type of the string, which is actually the size of the target. */
4276 rtx size = expr_size (exp);
4278 if (GET_CODE (size) == CONST_INT
4279 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4280 emit_block_move (target, temp, size);
4283 /* Compute the size of the data to copy from the string. */
4284 tree copy_size
4285 = size_binop (MIN_EXPR,
4286 make_tree (sizetype, size),
4287 size_int (TREE_STRING_LENGTH (exp)));
4288 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4292 /* Copy that much. */
4293 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4294 emit_block_move (target, temp, copy_size_rtx);
4296 /* Figure out how much is left in TARGET that we have to clear.
4297 Do all calculations in ptr_mode. */
4298 if (GET_CODE (copy_size_rtx) == CONST_INT)
4300 size = plus_constant (size, -INTVAL (copy_size_rtx));
4301 target = adjust_address (target, BLKmode,
4302 INTVAL (copy_size_rtx));
4306 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4307 copy_size_rtx, NULL_RTX, 0,
4310 #ifdef POINTERS_EXTEND_UNSIGNED
4311 if (GET_MODE (copy_size_rtx) != Pmode)
4312 copy_size_rtx = convert_memory_address (Pmode,
4316 target = offset_address (target, copy_size_rtx,
4317 highest_pow2_factor (copy_size));
4318 label = gen_label_rtx ();
4319 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4320 GET_MODE (size), 0, label);
4323 if (size != const0_rtx)
4324 clear_storage (target, size);
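/* Illustrative example (not from the original sources):

     char buf[8] = "ab";

   TREE_STRING_LENGTH is 3 (the two characters plus the terminating
   NUL), so the block move above copies 3 bytes into BUF and
   clear_storage zeros the remaining 5. */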
4330 /* Handle calls that return values in multiple non-contiguous locations.
4331 The Irix 6 ABI has examples of this. */
4332 else if (GET_CODE (target) == PARALLEL)
4333 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4334 else if (GET_MODE (temp) == BLKmode)
4335 emit_block_move (target, temp, expr_size (exp));
4337 emit_move_insn (target, temp);
4340 /* If we don't want a value, return NULL_RTX. */
4344 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4345 ??? The latter test doesn't seem to make sense. */
4346 else if (dont_return_target && GET_CODE (temp) != MEM)
4349 /* Return TARGET itself if it is a hard register. */
4350 else if (want_value && GET_MODE (target) != BLKmode
4351 && ! (GET_CODE (target) == REG
4352 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4353 return copy_to_reg (target);
4359 /* Return 1 if EXP just contains zeros. */
4367 switch (TREE_CODE (exp))
4371 case NON_LVALUE_EXPR:
4372 case VIEW_CONVERT_EXPR:
4373 return is_zeros_p (TREE_OPERAND (exp, 0));
4376 return integer_zerop (exp);
4380 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4383 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4386 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4387 elt = TREE_CHAIN (elt))
4388 if (!is_zeros_p (TREE_VALUE (elt)))
4394 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4395 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4396 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4397 if (! is_zeros_p (TREE_VALUE (elt)))
4407 /* Return 1 if EXP contains mostly (3/4) zeros. */
4410 mostly_zeros_p (exp)
4413 if (TREE_CODE (exp) == CONSTRUCTOR)
4415 int elts = 0, zeros = 0;
4416 tree elt = CONSTRUCTOR_ELTS (exp);
4417 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4419 /* If there are no ranges of true bits, it is all zero. */
4420 return elt == NULL_TREE;
4422 for (; elt; elt = TREE_CHAIN (elt))
4424 /* We do not handle the case where the index is a RANGE_EXPR,
4425 so the statistic will be somewhat inaccurate.
4426 We do make a more accurate count in store_constructor itself,
4427 and since this function is only used for nested array elements,
4428 this should be close enough. */
4429 if (mostly_zeros_p (TREE_VALUE (elt)))
4434 return 4 * zeros >= 3 * elts;
4437 return is_zeros_p (exp);
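/* Illustrative example (not from the original sources): for

     int a[4] = { 0, 0, 0, 5 };

   the CONSTRUCTOR has three zero elements out of four, so the test
   4 * zeros >= 3 * elts holds and mostly_zeros_p returns 1; callers
   then clear the whole object and store only the nonzero element. */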
4440 /* Helper function for store_constructor.
4441 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4442 TYPE is the type of the CONSTRUCTOR, not the element type.
4443 CLEARED is as for store_constructor.
4444 ALIAS_SET is the alias set to use for any stores.
4446 This provides a recursive shortcut back to store_constructor when it isn't
4447 necessary to go through store_field. This is so that we can pass through
4448 the cleared field to let store_constructor know that we may not have to
4449 clear a substructure if the outer structure has already been cleared. */
4452 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4455 unsigned HOST_WIDE_INT bitsize;
4456 HOST_WIDE_INT bitpos;
4457 enum machine_mode mode;
4462 if (TREE_CODE (exp) == CONSTRUCTOR
4463 && bitpos % BITS_PER_UNIT == 0
4464 /* If we have a non-zero bitpos for a register target, then we just
4465 let store_field do the bitfield handling. This is unlikely to
4466 generate unnecessary clear instructions anyway. */
4467 && (bitpos == 0 || GET_CODE (target) == MEM))
4469 if (GET_CODE (target) == MEM)
4471 = adjust_address (target,
4472 GET_MODE (target) == BLKmode
4474 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4475 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4478 /* Update the alias set, if required. */
4479 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4480 && MEM_ALIAS_SET (target) != 0)
4482 target = copy_rtx (target);
4483 set_mem_alias_set (target, alias_set);
4486 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4489 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4493 /* Store the value of constructor EXP into the rtx TARGET.
4494 TARGET is either a REG or a MEM; we know it cannot conflict, since
4495 safe_from_p has been called.
4496 CLEARED is true if TARGET is known to have been zero'd.
4497 SIZE is the number of bytes of TARGET we are allowed to modify: this
4498 may not be the same as the size of EXP if we are assigning to a field
4499 which has been packed to exclude padding bits. */
4502 store_constructor (exp, target, cleared, size)
4508 tree type = TREE_TYPE (exp);
4509 #ifdef WORD_REGISTER_OPERATIONS
4510 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4513 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4514 || TREE_CODE (type) == QUAL_UNION_TYPE)
4518 /* We either clear the aggregate or indicate the value is dead. */
4519 if ((TREE_CODE (type) == UNION_TYPE
4520 || TREE_CODE (type) == QUAL_UNION_TYPE)
4522 && ! CONSTRUCTOR_ELTS (exp))
4523 /* If the constructor is empty, clear the union. */
4525 clear_storage (target, expr_size (exp));
4529 /* If we are building a static constructor into a register,
4530 set the initial value as zero so we can fold the value into
4531 a constant. But if more than one register is involved,
4532 this probably loses. */
4533 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4534 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4536 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4540 /* If the constructor has fewer fields than the structure
4541 or if we are initializing the structure to mostly zeros,
4542 clear the whole structure first. Don't do this if TARGET is a
4543 register whose mode size isn't equal to SIZE since clear_storage
4544 can't handle this case. */
4545 else if (! cleared && size > 0
4546 && ((list_length (CONSTRUCTOR_ELTS (exp))
4547 != fields_length (type))
4548 || mostly_zeros_p (exp))
4549 && (GET_CODE (target) != REG
4550 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4553 clear_storage (target, GEN_INT (size));
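/* Illustrative example (not from the original sources): for

     struct s { int a, b, c, d; } x = { 1 };

   the constructor supplies one of four fields, so the list_length
   test above fires, X is cleared wholesale, and only field A is
   stored explicitly below. */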
4558 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4560 /* Store each element of the constructor into
4561 the corresponding field of TARGET. */
4563 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4565 tree field = TREE_PURPOSE (elt);
4566 tree value = TREE_VALUE (elt);
4567 enum machine_mode mode;
4568 HOST_WIDE_INT bitsize;
4569 HOST_WIDE_INT bitpos = 0;
4572 rtx to_rtx = target;
4574 /* Just ignore missing fields.
4575 We cleared the whole structure, above,
4576 if any fields are missing. */
4580 if (cleared && is_zeros_p (value))
4583 if (host_integerp (DECL_SIZE (field), 1))
4584 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4588 unsignedp = TREE_UNSIGNED (field);
4589 mode = DECL_MODE (field);
4590 if (DECL_BIT_FIELD (field))
4593 offset = DECL_FIELD_OFFSET (field);
4594 if (host_integerp (offset, 0)
4595 && host_integerp (bit_position (field), 0))
4597 bitpos = int_bit_position (field);
4601 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4607 if (contains_placeholder_p (offset))
4608 offset = build (WITH_RECORD_EXPR, sizetype,
4609 offset, make_tree (TREE_TYPE (exp), target));
4611 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4612 if (GET_CODE (to_rtx) != MEM)
4615 #ifdef POINTERS_EXTEND_UNSIGNED
4616 if (GET_MODE (offset_rtx) != Pmode)
4617 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4619 if (GET_MODE (offset_rtx) != ptr_mode)
4620 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4623 to_rtx = offset_address (to_rtx, offset_rtx,
4624 highest_pow2_factor (offset));
4627 if (TREE_READONLY (field))
4629 if (GET_CODE (to_rtx) == MEM)
4630 to_rtx = copy_rtx (to_rtx);
4632 RTX_UNCHANGING_P (to_rtx) = 1;
4635 #ifdef WORD_REGISTER_OPERATIONS
4636 /* If this initializes a field that is smaller than a word, at the
4637 start of a word, try to widen it to a full word.
4638 This special case allows us to output C++ member function
4639 initializations in a form that the optimizers can understand. */
4640 if (GET_CODE (target) == REG
4641 && bitsize < BITS_PER_WORD
4642 && bitpos % BITS_PER_WORD == 0
4643 && GET_MODE_CLASS (mode) == MODE_INT
4644 && TREE_CODE (value) == INTEGER_CST
4646 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4648 tree type = TREE_TYPE (value);
4650 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4652 type = (*lang_hooks.types.type_for_size)
4653 (BITS_PER_WORD, TREE_UNSIGNED (type));
4654 value = convert (type, value);
4657 if (BYTES_BIG_ENDIAN)
4659 = fold (build (LSHIFT_EXPR, type, value,
4660 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4661 bitsize = BITS_PER_WORD;
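/* Illustrative note (not from the original sources): on a 32-bit
   big-endian word-register target, initializing a 16-bit field at
   bitpos 0 with the constant 7 converts the value to a 32-bit type,
   shifts it left by 32 - 16 = 16 bits, and lets a single full-word
   move store 0x00070000. */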
4666 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4667 && DECL_NONADDRESSABLE_P (field))
4669 to_rtx = copy_rtx (to_rtx);
4670 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4673 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4674 value, type, cleared,
4675 get_alias_set (TREE_TYPE (field)));
4678 else if (TREE_CODE (type) == ARRAY_TYPE
4679 || TREE_CODE (type) == VECTOR_TYPE)
4684 tree domain = TYPE_DOMAIN (type);
4685 tree elttype = TREE_TYPE (type);
4687 HOST_WIDE_INT minelt = 0;
4688 HOST_WIDE_INT maxelt = 0;
4690 /* Vectors are like arrays, but the domain is stored via an array
4691 type indirectly. */
4692 if (TREE_CODE (type) == VECTOR_TYPE)
4694 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4695 the same field as TYPE_DOMAIN, we are not guaranteed that
4696 it always will. */
4697 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4698 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4701 const_bounds_p = (TYPE_MIN_VALUE (domain)
4702 && TYPE_MAX_VALUE (domain)
4703 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4704 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4706 /* If we have constant bounds for the range of the type, get them. */
4709 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4710 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4713 /* If the constructor has fewer elements than the array,
4714 clear the whole array first. Similarly if this is
4715 a static constructor of a non-BLKmode object. */
4716 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4720 HOST_WIDE_INT count = 0, zero_count = 0;
4721 need_to_clear = ! const_bounds_p;
4723 /* This loop is a more accurate version of the loop in
4724 mostly_zeros_p (it handles RANGE_EXPR in an index).
4725 It is also needed to check for missing elements. */
4726 for (elt = CONSTRUCTOR_ELTS (exp);
4727 elt != NULL_TREE && ! need_to_clear;
4728 elt = TREE_CHAIN (elt))
4730 tree index = TREE_PURPOSE (elt);
4731 HOST_WIDE_INT this_node_count;
4733 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4735 tree lo_index = TREE_OPERAND (index, 0);
4736 tree hi_index = TREE_OPERAND (index, 1);
4738 if (! host_integerp (lo_index, 1)
4739 || ! host_integerp (hi_index, 1))
4745 this_node_count = (tree_low_cst (hi_index, 1)
4746 - tree_low_cst (lo_index, 1) + 1);
4749 this_node_count = 1;
4751 count += this_node_count;
4752 if (mostly_zeros_p (TREE_VALUE (elt)))
4753 zero_count += this_node_count;
4756 /* Clear the entire array first if there are any missing elements,
4757 or if the incidence of zero elements is >= 75%. */
4759 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4763 if (need_to_clear && size > 0)
4768 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4770 clear_storage (target, GEN_INT (size));
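/* Illustrative example (not from the original sources): for the
   GNU C initializer

     int a[16] = { [0 ... 12] = 1 };

   COUNT is 13 < 16, so elements are missing, NEED_TO_CLEAR is set,
   and the whole array is cleared before the stores. */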
4774 else if (REG_P (target))
4775 /* Inform later passes that the old value is dead. */
4776 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4778 /* Store each element of the constructor into
4779 the corresponding element of TARGET, determined
4780 by counting the elements. */
4781 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4783 elt = TREE_CHAIN (elt), i++)
4785 enum machine_mode mode;
4786 HOST_WIDE_INT bitsize;
4787 HOST_WIDE_INT bitpos;
4789 tree value = TREE_VALUE (elt);
4790 tree index = TREE_PURPOSE (elt);
4791 rtx xtarget = target;
4793 if (cleared && is_zeros_p (value))
4796 unsignedp = TREE_UNSIGNED (elttype);
4797 mode = TYPE_MODE (elttype);
4798 if (mode == BLKmode)
4799 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4800 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4803 bitsize = GET_MODE_BITSIZE (mode);
4805 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4807 tree lo_index = TREE_OPERAND (index, 0);
4808 tree hi_index = TREE_OPERAND (index, 1);
4809 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4810 struct nesting *loop;
4811 HOST_WIDE_INT lo, hi, count;
4814 /* If the range is constant and "small", unroll the loop. */
4816 && host_integerp (lo_index, 0)
4817 && host_integerp (hi_index, 0)
4818 && (lo = tree_low_cst (lo_index, 0),
4819 hi = tree_low_cst (hi_index, 0),
4820 count = hi - lo + 1,
4821 (GET_CODE (target) != MEM
4823 || (host_integerp (TYPE_SIZE (elttype), 1)
4824 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4827 lo -= minelt; hi -= minelt;
4828 for (; lo <= hi; lo++)
4830 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4832 if (GET_CODE (target) == MEM
4833 && !MEM_KEEP_ALIAS_SET_P (target)
4834 && TREE_CODE (type) == ARRAY_TYPE
4835 && TYPE_NONALIASED_COMPONENT (type))
4837 target = copy_rtx (target);
4838 MEM_KEEP_ALIAS_SET_P (target) = 1;
4841 store_constructor_field
4842 (target, bitsize, bitpos, mode, value, type, cleared,
4843 get_alias_set (elttype));
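/* Illustrative example (not from the original sources): the GNU C
   range initializer

     int v[8] = { [2 ... 5] = 9 };

   reaches here with lo_index 2 and hi_index 5; since the range is
   small and constant, the four element stores are emitted directly
   instead of a runtime loop. */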
4848 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4849 loop_top = gen_label_rtx ();
4850 loop_end = gen_label_rtx ();
4852 unsignedp = TREE_UNSIGNED (domain);
4854 index = build_decl (VAR_DECL, NULL_TREE, domain);
4857 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4859 SET_DECL_RTL (index, index_r);
4860 if (TREE_CODE (value) == SAVE_EXPR
4861 && SAVE_EXPR_RTL (value) == 0)
4863 /* Make sure value gets expanded once before the
4865 expand_expr (value, const0_rtx, VOIDmode, 0);
4868 store_expr (lo_index, index_r, 0);
4869 loop = expand_start_loop (0);
4871 /* Assign value to element index. */
4873 = convert (ssizetype,
4874 fold (build (MINUS_EXPR, TREE_TYPE (index),
4875 index, TYPE_MIN_VALUE (domain))));
4876 position = size_binop (MULT_EXPR, position,
4878 TYPE_SIZE_UNIT (elttype)));
4880 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4881 xtarget = offset_address (target, pos_rtx,
4882 highest_pow2_factor (position));
4883 xtarget = adjust_address (xtarget, mode, 0);
4884 if (TREE_CODE (value) == CONSTRUCTOR)
4885 store_constructor (value, xtarget, cleared,
4886 bitsize / BITS_PER_UNIT);
4888 store_expr (value, xtarget, 0);
4890 expand_exit_loop_if_false (loop,
4891 build (LT_EXPR, integer_type_node,
4894 expand_increment (build (PREINCREMENT_EXPR,
4896 index, integer_one_node), 0, 0);
4898 emit_label (loop_end);
4901 else if ((index != 0 && ! host_integerp (index, 0))
4902 || ! host_integerp (TYPE_SIZE (elttype), 1))
4907 index = ssize_int (1);
4910 index = convert (ssizetype,
4911 fold (build (MINUS_EXPR, index,
4912 TYPE_MIN_VALUE (domain))));
4914 position = size_binop (MULT_EXPR, index,
4916 TYPE_SIZE_UNIT (elttype)));
4917 xtarget = offset_address (target,
4918 expand_expr (position, 0, VOIDmode, 0),
4919 highest_pow2_factor (position));
4920 xtarget = adjust_address (xtarget, mode, 0);
4921 store_expr (value, xtarget, 0);
4926 bitpos = ((tree_low_cst (index, 0) - minelt)
4927 * tree_low_cst (TYPE_SIZE (elttype), 1));
4929 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4931 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4932 && TREE_CODE (type) == ARRAY_TYPE
4933 && TYPE_NONALIASED_COMPONENT (type))
4935 target = copy_rtx (target);
4936 MEM_KEEP_ALIAS_SET_P (target) = 1;
4939 store_constructor_field (target, bitsize, bitpos, mode, value,
4940 type, cleared, get_alias_set (elttype));
4946 /* Set constructor assignments. */
4947 else if (TREE_CODE (type) == SET_TYPE)
4949 tree elt = CONSTRUCTOR_ELTS (exp);
4950 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4951 tree domain = TYPE_DOMAIN (type);
4952 tree domain_min, domain_max, bitlength;
4954 /* The default implementation strategy is to extract the constant
4955 parts of the constructor, use that to initialize the target,
4956 and then "or" in whatever non-constant ranges we need in addition.
4958 If a large set is all zero or all ones, it is
4959 probably better to set it using memset (if available) or bzero.
4960 Also, if a large set has just a single range, it may also be
4961 better to first clear the set (using
4962 bzero/memset), and set the bits we want. */
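/* Illustrative note (not from the original sources): a Pascal-style
   set constructor such as [2 .. 5] over a domain of 0 .. 31 fits in
   a single word and, with little-endian bit numbering, is
   initialized from the constant 0x3c (bits 2 through 5 set). */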
4964 /* Check for all zeros. */
4965 if (elt == NULL_TREE && size > 0)
4968 clear_storage (target, GEN_INT (size));
4972 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4973 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4974 bitlength = size_binop (PLUS_EXPR,
4975 size_diffop (domain_max, domain_min),
4978 nbits = tree_low_cst (bitlength, 1);
4980 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4981 are "complicated" (more than one range), initialize (the
4982 constant parts) by copying from a constant. */
4983 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4984 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4986 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4987 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4988 char *bit_buffer = (char *) alloca (nbits);
4989 HOST_WIDE_INT word = 0;
4990 unsigned int bit_pos = 0;
4991 unsigned int ibit = 0;
4992 unsigned int offset = 0; /* In bytes from beginning of set. */
4994 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4997 if (bit_buffer[ibit])
4999 if (BYTES_BIG_ENDIAN)
5000 word |= (1 << (set_word_size - 1 - bit_pos));
5002 word |= 1 << bit_pos;
5006 if (bit_pos >= set_word_size || ibit == nbits)
5008 if (word != 0 || ! cleared)
5010 rtx datum = GEN_INT (word);
5013 /* The assumption here is that it is safe to use
5014 XEXP if the set is multi-word, but not if
5015 it's single-word. */
5016 if (GET_CODE (target) == MEM)
5017 to_rtx = adjust_address (target, mode, offset);
5018 else if (offset == 0)
5022 emit_move_insn (to_rtx, datum);
5029 offset += set_word_size / BITS_PER_UNIT;
5034 /* Don't bother clearing storage if the set is all ones. */
5035 if (TREE_CHAIN (elt) != NULL_TREE
5036 || (TREE_PURPOSE (elt) == NULL_TREE
5037 ? nbits != 1
5038 : ( ! host_integerp (TREE_VALUE (elt), 0)
5039 || ! host_integerp (TREE_PURPOSE (elt), 0)
5040 || (tree_low_cst (TREE_VALUE (elt), 0)
5041 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5042 != (HOST_WIDE_INT) nbits))))
5043 clear_storage (target, expr_size (exp));
5045 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5047 /* Start of range of element or NULL. */
5048 tree startbit = TREE_PURPOSE (elt);
5049 /* End of range of element, or element value. */
5050 tree endbit = TREE_VALUE (elt);
5051 #ifdef TARGET_MEM_FUNCTIONS
5052 HOST_WIDE_INT startb, endb;
5054 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5056 bitlength_rtx = expand_expr (bitlength,
5057 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5059 /* Handle non-range tuple element like [ expr ]. */
5060 if (startbit == NULL_TREE)
5062 startbit = save_expr (endbit);
5066 startbit = convert (sizetype, startbit);
5067 endbit = convert (sizetype, endbit);
5068 if (! integer_zerop (domain_min))
5070 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5071 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5073 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5074 EXPAND_CONST_ADDRESS);
5075 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5076 EXPAND_CONST_ADDRESS);
5082 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5083 (GET_MODE (target), 0),
5086 emit_move_insn (targetx, target);
5089 else if (GET_CODE (target) == MEM)
5094 #ifdef TARGET_MEM_FUNCTIONS
5095 /* Optimization: If startbit and endbit are
5096 constants divisible by BITS_PER_UNIT,
5097 call memset instead. */
5098 if (TREE_CODE (startbit) == INTEGER_CST
5099 && TREE_CODE (endbit) == INTEGER_CST
5100 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5101 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5103 emit_library_call (memset_libfunc, LCT_NORMAL,
5105 plus_constant (XEXP (targetx, 0),
5106 startb / BITS_PER_UNIT),
5108 constm1_rtx, TYPE_MODE (integer_type_node),
5109 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5110 TYPE_MODE (sizetype));
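/* Illustrative note (not from the original sources): with
   startbit 8 and endbit 23, both byte boundaries, the call above
   amounts to memset (targetx + 1, -1, 2) -- two whole bytes set at
   once instead of going through __setbits. */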
5114 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5115 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5116 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5117 startbit_rtx, TYPE_MODE (sizetype),
5118 endbit_rtx, TYPE_MODE (sizetype));
5121 emit_move_insn (target, targetx);
5129 /* Store the value of EXP (an expression tree)
5130 into a subfield of TARGET which has mode MODE and occupies
5131 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5132 If MODE is VOIDmode, it means that we are storing into a bit-field.
5134 If VALUE_MODE is VOIDmode, return nothing in particular.
5135 UNSIGNEDP is not used in this case.
5137 Otherwise, return an rtx for the value stored. This rtx
5138 has mode VALUE_MODE if that is convenient to do.
5139 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5141 TYPE is the type of the underlying object,
5143 ALIAS_SET is the alias set for the destination. This value will
5144 (in general) be different from that for TARGET, since TARGET is a
5145 reference to the containing structure. */
5148 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5151 HOST_WIDE_INT bitsize;
5152 HOST_WIDE_INT bitpos;
5153 enum machine_mode mode;
5155 enum machine_mode value_mode;
5160 HOST_WIDE_INT width_mask = 0;
5162 if (TREE_CODE (exp) == ERROR_MARK)
5165 /* If we have nothing to store, do nothing unless the expression has
5168 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5169 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5170 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5172 /* If we are storing into an unaligned field of an aligned union that is
5173 in a register, we may have the mode of TARGET being an integer mode but
5174 MODE == BLKmode. In that case, get an aligned object whose size and
5175 alignment are the same as TARGET and store TARGET into it (we can avoid
5176 the store if the field being stored is the entire width of TARGET). Then
5177 call ourselves recursively to store the field into a BLKmode version of
5178 that object. Finally, load from the object into TARGET. This is not
5179 very efficient in general, but should only be slightly more expensive
5180 than the otherwise-required unaligned accesses. Perhaps this can be
5181 cleaned up later. */
5184 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5188 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5190 rtx blk_object = adjust_address (object, BLKmode, 0);
5192 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5193 emit_move_insn (object, target);
5195 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5198 emit_move_insn (target, object);
5200 /* We want to return the BLKmode version of the data. */
5204 if (GET_CODE (target) == CONCAT)
5206 /* We're storing into a struct containing a single __complex. */
5210 return store_expr (exp, target, 0);
5213 /* If the structure is in a register or if the component
5214 is a bit field, we cannot use addressing to access it.
5215 Use bit-field techniques or SUBREG to store in it. */
5217 if (mode == VOIDmode
5218 || (mode != BLKmode && ! direct_store[(int) mode]
5219 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5220 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5221 || GET_CODE (target) == REG
5222 || GET_CODE (target) == SUBREG
5223 /* If the field isn't aligned enough to store as an ordinary memref,
5224 store it as a bit field. */
5225 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5226 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5227 || bitpos % GET_MODE_ALIGNMENT (mode)))
5228 /* If the RHS and field are a constant size and the size of the
5229 RHS isn't the same size as the bitfield, we must use bitfield
5232 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5233 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5235 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5237 /* If BITSIZE is narrower than the size of the type of EXP
5238 we will be narrowing TEMP. Normally, what's wanted are the
5239 low-order bits. However, if EXP's type is a record and this is a
5240 big-endian machine, we want the upper BITSIZE bits. */
5241 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5242 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5243 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5244 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5245 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5249 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5251 if (mode != VOIDmode && mode != BLKmode
5252 && mode != TYPE_MODE (TREE_TYPE (exp)))
5253 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5255 /* If the modes of TARGET and TEMP are both BLKmode, both
5256 must be in memory and BITPOS must be aligned on a byte
5257 boundary. If so, we simply do a block copy. */
5258 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5260 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5261 || bitpos % BITS_PER_UNIT != 0)
5264 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5265 emit_block_move (target, temp,
5266 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5269 return value_mode == VOIDmode ? const0_rtx : target;
5272 /* Store the value in the bitfield. */
5273 store_bit_field (target, bitsize, bitpos, mode, temp,
5274 int_size_in_bytes (type));
5276 if (value_mode != VOIDmode)
5278 /* The caller wants an rtx for the value.
5279 If possible, avoid refetching from the bitfield itself. */
5281 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5284 enum machine_mode tmode;
5286 tmode = GET_MODE (temp);
5287 if (tmode == VOIDmode)
5291 return expand_and (tmode, temp,
5292 gen_int_mode (width_mask, tmode),
5295 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5296 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5297 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
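/* Illustrative note (not from the original sources): for a signed
   5-bit field with TMODE SImode, COUNT is 32 - 5 = 27; shifting
   left and then arithmetically right by 27 sign-extends the low 5
   bits, matching what a refetch from the bitfield would return. */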
5300 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5301 NULL_RTX, value_mode, VOIDmode,
5302 int_size_in_bytes (type));
5308 rtx addr = XEXP (target, 0);
5309 rtx to_rtx = target;
5311 /* If a value is wanted, it must be the lhs;
5312 so make the address stable for multiple use. */
5314 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5315 && ! CONSTANT_ADDRESS_P (addr)
5316 /* A frame-pointer reference is already stable. */
5317 && ! (GET_CODE (addr) == PLUS
5318 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5319 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5320 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5321 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5323 /* Now build a reference to just the desired component. */
5325 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5327 if (to_rtx == target)
5328 to_rtx = copy_rtx (to_rtx);
5330 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5331 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5332 set_mem_alias_set (to_rtx, alias_set);
5334 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5338 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5339 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5340 codes and find the ultimate containing object, which we return.
5342 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5343 bit position, and *PUNSIGNEDP to the signedness of the field.
5344 If the position of the field is variable, we store a tree
5345 giving the variable offset (in units) in *POFFSET.
5346 This offset is in addition to the bit position.
5347 If the position is not variable, we store 0 in *POFFSET.
5349 If any of the extraction expressions is volatile,
5350 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5352 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5353 is a mode that can be used to access the field. In that case, *PBITSIZE
5356 If the field describes a variable-sized object, *PMODE is set to
5357 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5358 this case, but the address of the object can be found. */
5361 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5362 punsignedp, pvolatilep)
5364 HOST_WIDE_INT *pbitsize;
5365 HOST_WIDE_INT *pbitpos;
5367 enum machine_mode *pmode;
5372 enum machine_mode mode = VOIDmode;
5373 tree offset = size_zero_node;
5374 tree bit_offset = bitsize_zero_node;
5375 tree placeholder_ptr = 0;
5378 /* First get the mode, signedness, and size. We do this from just the
5379 outermost expression. */
5380 if (TREE_CODE (exp) == COMPONENT_REF)
5382 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5383 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5384 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5386 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5388 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5390 size_tree = TREE_OPERAND (exp, 1);
5391 *punsignedp = TREE_UNSIGNED (exp);
5395 mode = TYPE_MODE (TREE_TYPE (exp));
5396 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5398 if (mode == BLKmode)
5399 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5401 *pbitsize = GET_MODE_BITSIZE (mode);
5406 if (! host_integerp (size_tree, 1))
5407 mode = BLKmode, *pbitsize = -1;
5409 *pbitsize = tree_low_cst (size_tree, 1);
5412 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5413 and find the ultimate containing object. */
5416 if (TREE_CODE (exp) == BIT_FIELD_REF)
5417 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5418 else if (TREE_CODE (exp) == COMPONENT_REF)
5420 tree field = TREE_OPERAND (exp, 1);
5421 tree this_offset = DECL_FIELD_OFFSET (field);
5423 /* If this field hasn't been filled in yet, don't go
5424 past it. This should only happen when folding expressions
5425 made during type construction. */
5426 if (this_offset == 0)
5428 else if (! TREE_CONSTANT (this_offset)
5429 && contains_placeholder_p (this_offset))
5430 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5432 offset = size_binop (PLUS_EXPR, offset, this_offset);
5433 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5434 DECL_FIELD_BIT_OFFSET (field));
5436 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5439 else if (TREE_CODE (exp) == ARRAY_REF
5440 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5442 tree index = TREE_OPERAND (exp, 1);
5443 tree array = TREE_OPERAND (exp, 0);
5444 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5445 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5446 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5448 /* We assume all arrays have sizes that are a multiple of a byte.
5449 First subtract the lower bound, if any, in the type of the
5450 index, then convert to sizetype and multiply by the size of the
5452 if (low_bound != 0 && ! integer_zerop (low_bound))
5453 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5456 /* If the index has a self-referential type, pass it to a
5457 WITH_RECORD_EXPR; if the component size does, pass our
5458 component to one. */
5459 if (! TREE_CONSTANT (index)
5460 && contains_placeholder_p (index))
5461 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5462 if (! TREE_CONSTANT (unit_size)
5463 && contains_placeholder_p (unit_size))
5464 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5466 offset = size_binop (PLUS_EXPR, offset,
5467 size_binop (MULT_EXPR,
5468 convert (sizetype, index),
5472 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5474 tree new = find_placeholder (exp, &placeholder_ptr);
5476 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5477 We might have been called from tree optimization where we
5478 haven't set up an object yet. */
5486 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5487 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5488 && ! ((TREE_CODE (exp) == NOP_EXPR
5489 || TREE_CODE (exp) == CONVERT_EXPR)
5490 && (TYPE_MODE (TREE_TYPE (exp))
5491 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5494 /* If any reference in the chain is volatile, the effect is volatile. */
5495 if (TREE_THIS_VOLATILE (exp))
5498 exp = TREE_OPERAND (exp, 0);
5501 /* If OFFSET is constant, see if we can return the whole thing as a
5502 constant bit position. Otherwise, split it up. */
5503 if (host_integerp (offset, 0)
5504 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5506 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5507 && host_integerp (tem, 0))
5508 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5510 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
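/* Illustrative note (not from the original sources): for a field
   with DECL_FIELD_OFFSET 4 and DECL_FIELD_BIT_OFFSET 3, the
   constant case above yields *PBITPOS = 4 * BITS_PER_UNIT + 3 = 35
   (with 8-bit units) and *POFFSET = 0. */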
5516 /* Return 1 if T is an expression that get_inner_reference handles. */
5519 handled_component_p (t)
5522 switch (TREE_CODE (t))
5527 case ARRAY_RANGE_REF:
5528 case NON_LVALUE_EXPR:
5529 case VIEW_CONVERT_EXPR:
5534 return (TYPE_MODE (TREE_TYPE (t))
5535 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5542 /* Given an rtx VALUE that may contain additions and multiplications, return
5543 an equivalent value that just refers to a register, memory, or constant.
5544 This is done by generating instructions to perform the arithmetic and
5545 returning a pseudo-register containing the value.
5547 The returned value may be a REG, SUBREG, MEM or constant. */
5550 force_operand (value, target)
5554 /* Use subtarget as the target for operand 0 of a binary operation. */
5555 rtx subtarget = get_subtarget (target);
5556 enum rtx_code code = GET_CODE (value);
5558 /* Check for a PIC address load. */
5559 if ((code == PLUS || code == MINUS)
5560 && XEXP (value, 0) == pic_offset_table_rtx
5561 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5562 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5563 || GET_CODE (XEXP (value, 1)) == CONST))
5566 subtarget = gen_reg_rtx (GET_MODE (value));
5567 emit_move_insn (subtarget, value);
5571 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5574 target = gen_reg_rtx (GET_MODE (value));
5575 convert_move (target, force_operand (XEXP (value, 0), NULL),
5576 code == ZERO_EXTEND);
5580 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5582 op2 = XEXP (value, 1);
5583 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5585 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5588 op2 = negate_rtx (GET_MODE (value), op2);
5591 /* Check for an addition with OP2 a constant integer and our first
5592 operand a PLUS of a virtual register and something else. In that
5593 case, we want to emit the sum of the virtual register and the
5594 constant first and then add the other value. This allows virtual
5595 register instantiation to simply modify the constant rather than
5596 creating another one around this addition. */
5597 if (code == PLUS && GET_CODE (op2) == CONST_INT
5598 && GET_CODE (XEXP (value, 0)) == PLUS
5599 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5600 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5601 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5603 rtx temp = expand_simple_binop (GET_MODE (value), code,
5604 XEXP (XEXP (value, 0), 0), op2,
5605 subtarget, 0, OPTAB_LIB_WIDEN);
5606 return expand_simple_binop (GET_MODE (value), code, temp,
5607 force_operand (XEXP (XEXP (value,
5609 target, 0, OPTAB_LIB_WIDEN);
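/* Illustrative note (not from the original sources): given

     (plus (plus (reg virtual-stack-vars) (reg 100)) (const_int 8))

   the code above first emits virtual-stack-vars + 8, which
   instantiation can later fold into a single frame offset, and only
   then adds (reg 100). */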
5612 op1 = force_operand (XEXP (value, 0), subtarget);
5613 op2 = force_operand (op2, NULL_RTX);
5617 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5619 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5620 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5621 target, 1, OPTAB_LIB_WIDEN);
5623 return expand_divmod (0,
5624 FLOAT_MODE_P (GET_MODE (value))
5625 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5626 GET_MODE (value), op1, op2, target, 0);
5629 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5633 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5637 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5641 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5642 target, 0, OPTAB_LIB_WIDEN);
5645 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5646 target, 1, OPTAB_LIB_WIDEN);
5649 if (GET_RTX_CLASS (code) == '1')
5651 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5652 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5655 #ifdef INSN_SCHEDULING
5656 /* On machines that have insn scheduling, we want all memory references to be
5657 explicit, so we need to deal with such paradoxical SUBREGs. */
5658 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5659 && (GET_MODE_SIZE (GET_MODE (value))
5660 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5662 = simplify_gen_subreg (GET_MODE (value),
5663 force_reg (GET_MODE (SUBREG_REG (value)),
5664 force_operand (SUBREG_REG (value),
5666 GET_MODE (SUBREG_REG (value)),
5667 SUBREG_BYTE (value));
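/* Illustrative note (not from the original sources): for
   (subreg:SI (mem:HI ...) 0) -- paradoxical because SImode is wider
   than HImode -- the MEM is first forced into an HImode pseudo so
   the memory reference stays explicit for the scheduler. */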
5673 /* Subroutine of expand_expr: return nonzero iff there is no way that
5674 EXP can reference X, which is being modified. TOP_P is nonzero if this
5675 call is going to be used to determine whether we need a temporary
5676 for EXP, as opposed to a recursive call to this function.
5678 It is always safe for this routine to return zero since it merely
5679 searches for optimization opportunities. */
5682 safe_from_p (x, exp, top_p)
5689 static tree save_expr_list;
5692 /* If EXP has varying size, we MUST use a target since we currently
5693 have no way of allocating temporaries of variable size
5694 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5695 So we assume here that something at a higher level has prevented a
5696 clash. This is somewhat bogus, but the best we can do. Only
5697 do this when X is BLKmode and when we are at the top level. */
5698 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5699 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5700 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5701 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5702 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5704 && GET_MODE (x) == BLKmode)
5705 /* If X is in the outgoing argument area, it is always safe. */
5706 || (GET_CODE (x) == MEM
5707 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5708 || (GET_CODE (XEXP (x, 0)) == PLUS
5709 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5712 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5713 find the underlying pseudo. */
5714 if (GET_CODE (x) == SUBREG)
5717 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5721 /* A SAVE_EXPR might appear many times in the expression passed to the
5722 top-level safe_from_p call, and if it has a complex subexpression,
5723 examining it multiple times could result in a combinatorial explosion.
5724 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5725 with optimization took about 28 minutes to compile -- even though it was
5726 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5727 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5728 we have processed. Note that the only test of top_p was above. */
5737 rtn = safe_from_p (x, exp, 0);
5739 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5740 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5745 /* Now look at our tree code and possibly recurse. */
5746 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5749 exp_rtl = DECL_RTL_IF_SET (exp);
5756 if (TREE_CODE (exp) == TREE_LIST)
5757 return ((TREE_VALUE (exp) == 0
5758 || safe_from_p (x, TREE_VALUE (exp), 0))
5759 && (TREE_CHAIN (exp) == 0
5760 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5761 else if (TREE_CODE (exp) == ERROR_MARK)
5762 return 1; /* An already-visited SAVE_EXPR? */
5767 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5771 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5772 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5776 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5777 the expression. If it is set, we conflict iff we are that rtx or
5778 both are in memory. Otherwise, we check all operands of the
5779 expression recursively. */
5781 switch (TREE_CODE (exp))
5784 /* If the operand is static or we are static, we can't conflict.
5785 Likewise if we don't conflict with the operand at all. */
5786 if (staticp (TREE_OPERAND (exp, 0))
5787 || TREE_STATIC (exp)
5788 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5791 /* Otherwise, the only way this can conflict is if we are taking
5792 the address of a DECL and that address is part of X, which is
5794 exp = TREE_OPERAND (exp, 0);
5797 if (!DECL_RTL_SET_P (exp)
5798 || GET_CODE (DECL_RTL (exp)) != MEM)
5801 exp_rtl = XEXP (DECL_RTL (exp), 0);
5806 if (GET_CODE (x) == MEM
5807 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5808 get_alias_set (exp)))
5813 /* Assume that the call will clobber all hard registers and
5815 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5816 || GET_CODE (x) == MEM)
5821 /* If a sequence exists, we would have to scan every instruction
5822 in the sequence to see if it was safe. This is probably not
5824 if (RTL_EXPR_SEQUENCE (exp))
5827 exp_rtl = RTL_EXPR_RTL (exp);
5830 case WITH_CLEANUP_EXPR:
5831 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5834 case CLEANUP_POINT_EXPR:
5835 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5838 exp_rtl = SAVE_EXPR_RTL (exp);
5842 /* If we've already scanned this, don't do it again. Otherwise,
5843 show we've scanned it and record for clearing the flag if we're
5845 if (TREE_PRIVATE (exp))
5848 TREE_PRIVATE (exp) = 1;
5849 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5851 TREE_PRIVATE (exp) = 0;
5855 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5859 /* The only operand we look at is operand 1. The rest aren't
5860 part of the expression. */
5861 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5863 case METHOD_CALL_EXPR:
5864 /* This takes an rtx argument, but shouldn't appear here. */
5871 /* If we have an rtx, we do not need to scan our operands. */
5875 nops = first_rtl_op (TREE_CODE (exp));
5876 for (i = 0; i < nops; i++)
5877 if (TREE_OPERAND (exp, i) != 0
5878 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5881 /* If this is a language-specific tree code, it may require
5882 special handling. */
5883 if ((unsigned int) TREE_CODE (exp)
5884 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5885 && !(*lang_hooks.safe_from_p) (x, exp))
5889 /* If we have an rtl, find any enclosed object. Then see if we conflict
5893 if (GET_CODE (exp_rtl) == SUBREG)
5895 exp_rtl = SUBREG_REG (exp_rtl);
5896 if (GET_CODE (exp_rtl) == REG
5897 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5901 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5902 are memory and they conflict. */
5903 return ! (rtx_equal_p (x, exp_rtl)
5904 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5905 && true_dependence (exp_rtl, VOIDmode, x,
5906 rtx_addr_varies_p)));
5909 /* If we reach here, it is safe. */
5913 /* Subroutine of expand_expr: return rtx if EXP is a
5914 variable or parameter; else return 0. */
5921 switch (TREE_CODE (exp))
5925 return DECL_RTL (exp);
5931 #ifdef MAX_INTEGER_COMPUTATION_MODE
5934 check_max_integer_computation_mode (exp)
5937 enum tree_code code;
5938 enum machine_mode mode;
5940 /* Strip any NOPs that don't change the mode. */
5942 code = TREE_CODE (exp);
5944 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5945 if (code == NOP_EXPR
5946 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5949 /* First check the type of the overall operation. We need only look at
5950 unary, binary and relational operations. */
5951 if (TREE_CODE_CLASS (code) == '1'
5952 || TREE_CODE_CLASS (code) == '2'
5953 || TREE_CODE_CLASS (code) == '<')
5955 mode = TYPE_MODE (TREE_TYPE (exp));
5956 if (GET_MODE_CLASS (mode) == MODE_INT
5957 && mode > MAX_INTEGER_COMPUTATION_MODE)
5958 internal_error ("unsupported wide integer operation");
5961 /* Check operand of a unary op. */
5962 if (TREE_CODE_CLASS (code) == '1')
5964 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5965 if (GET_MODE_CLASS (mode) == MODE_INT
5966 && mode > MAX_INTEGER_COMPUTATION_MODE)
5967 internal_error ("unsupported wide integer operation");
5970 /* Check operands of a binary/comparison op. */
5971 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5973 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5974 if (GET_MODE_CLASS (mode) == MODE_INT
5975 && mode > MAX_INTEGER_COMPUTATION_MODE)
5976 internal_error ("unsupported wide integer operation");
5978 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5979 if (GET_MODE_CLASS (mode) == MODE_INT
5980 && mode > MAX_INTEGER_COMPUTATION_MODE)
5981 internal_error ("unsupported wide integer operation");
5986 /* Return the highest power of two that EXP is known to be a multiple of.
5987 This is used in updating alignment of MEMs in array references. */
5989 static HOST_WIDE_INT
5990 highest_pow2_factor (exp)
5993 HOST_WIDE_INT c0, c1;
5995 switch (TREE_CODE (exp))
5998 /* We can find the lowest bit that's a one. If the low
5999 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6000 We need to handle this case since we can find it in a COND_EXPR,
6001 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6002 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6004 if (TREE_CONSTANT_OVERFLOW (exp))
6005 return BIGGEST_ALIGNMENT;
6008 /* Note: tree_low_cst is intentionally not used here,
6009 we don't care about the upper bits. */
6010 c0 = TREE_INT_CST_LOW (exp);
6011 c0 &= -c0;
6012 return c0 ? c0 : BIGGEST_ALIGNMENT;
6016 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6017 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6018 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6019 return MIN (c0, c1);
6022 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6023 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6024 return c0 * c1;
6026 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6028 if (integer_pow2p (TREE_OPERAND (exp, 1))
6029 && host_integerp (TREE_OPERAND (exp, 1), 1))
6031 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6032 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6033 return MAX (1, c0 / c1);
6037 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6038 case SAVE_EXPR: case WITH_RECORD_EXPR:
6039 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6042 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6045 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6046 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6047 return MIN (c0, c1);
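/* Illustrative note (not from the original sources): for the tree
   i * 8 + 4 the MULT_EXPR operand contributes a factor of 8 and the
   constant a factor of 4, so the PLUS_EXPR case returns
   MIN (8, 4) = 4, and a MEM addressed by the expression may be
   marked 4-byte aligned. */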
6056 /* Similar, except that it is known that the expression must be a multiple
6057 of the alignment of TYPE. */
6059 static HOST_WIDE_INT
6060 highest_pow2_factor_for_type (type, exp)
6064 HOST_WIDE_INT type_align, factor;
6066 factor = highest_pow2_factor (exp);
6067 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6068 return MAX (factor, type_align);
6071 /* Return an object on the placeholder list that matches EXP, a
6072 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6073 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6074 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6075 is a location which initially points to a starting location in the
6076 placeholder list (zero means start of the list) and where a pointer into
6077 the placeholder list at which the object is found is placed. */
6080 find_placeholder (exp, plist)
6084 tree type = TREE_TYPE (exp);
6085 tree placeholder_expr;
6087 for (placeholder_expr
6088 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6089 placeholder_expr != 0;
6090 placeholder_expr = TREE_CHAIN (placeholder_expr))
6092 tree need_type = TYPE_MAIN_VARIANT (type);
6095 /* Find the outermost reference that is of the type we want. If none,
6096 see if any object has a type that is a pointer to the type we
6098 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6099 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6100 || TREE_CODE (elt) == COND_EXPR)
6101 ? TREE_OPERAND (elt, 1)
6102 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6103 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6104 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6105 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6106 ? TREE_OPERAND (elt, 0) : 0))
6107 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6110 *plist = placeholder_expr;
6114 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6116 = ((TREE_CODE (elt) == COMPOUND_EXPR
6117 || TREE_CODE (elt) == COND_EXPR)
6118 ? TREE_OPERAND (elt, 1)
6119 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6120 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6121 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6122 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6123 ? TREE_OPERAND (elt, 0) : 0))
6124 if (POINTER_TYPE_P (TREE_TYPE (elt))
6125 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6129 *plist = placeholder_expr;
6130 return build1 (INDIRECT_REF, need_type, elt);
6137 /* expand_expr: generate code for computing expression EXP.
6138 An rtx for the computed value is returned. The value is never null.
6139 In the case of a void EXP, const0_rtx is returned.
6141 The value may be stored in TARGET if TARGET is nonzero.
6142 TARGET is just a suggestion; callers must assume that
6143 the rtx returned may not be the same as TARGET.
6145 If TARGET is CONST0_RTX, it means that the value will be ignored.
6147 If TMODE is not VOIDmode, it suggests generating the
6148 result in mode TMODE. But this is done only when convenient.
6149 Otherwise, TMODE is ignored and the value generated in its natural mode.
6150 TMODE is just a suggestion; callers must assume that
6151 the rtx returned may not have mode TMODE.
6153 Note that TARGET may have neither TMODE nor MODE. In that case, it
6154 probably will not be used.
6156 If MODIFIER is EXPAND_SUM then when EXP is an addition
6157 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6158 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6159 products as above, or REG or MEM, or constant.
6160 Ordinarily in such cases we would output mul or add instructions
6161 and then return a pseudo reg containing the sum.
6163 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6164 it also marks a label as absolutely required (it can't be dead).
6165 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6166 This is used for outputting expressions used in initializers.
6168 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6169 with a constant address even if that address is not normally legitimate.
6170 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
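/* Illustrative note (not from the original sources): under
   EXPAND_SUM, expanding &a[i] may yield something like

     (plus (mult (reg i) (const_int 4)) (symbol_ref a))

   directly, letting the caller fold the whole address into one
   addressing mode instead of forcing it into a pseudo first. */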
6173 expand_expr (exp, target, tmode, modifier)
6176 enum machine_mode tmode;
6177 enum expand_modifier modifier;
6180 tree type = TREE_TYPE (exp);
6181 int unsignedp = TREE_UNSIGNED (type);
6182 enum machine_mode mode;
6183 enum tree_code code = TREE_CODE (exp);
6185 rtx subtarget, original_target;
6189 /* Handle ERROR_MARK before anybody tries to access its type. */
6190 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6192 op0 = CONST0_RTX (tmode);
6198 mode = TYPE_MODE (type);
6199 /* Use subtarget as the target for operand 0 of a binary operation. */
6200 subtarget = get_subtarget (target);
6201 original_target = target;
6202 ignore = (target == const0_rtx
6203 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6204 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6205 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6206 && TREE_CODE (type) == VOID_TYPE));
6208 /* If we are going to ignore this result, we need only do something
6209 if there is a side-effect somewhere in the expression. If there
6210 is, short-circuit the most common cases here. Note that we must
6211 not call expand_expr with anything but const0_rtx in case this
6212 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6216 if (! TREE_SIDE_EFFECTS (exp))
6219 /* Ensure we reference a volatile object even if value is ignored, but
6220 don't do this if all we are doing is taking its address. */
6221 if (TREE_THIS_VOLATILE (exp)
6222 && TREE_CODE (exp) != FUNCTION_DECL
6223 && mode != VOIDmode && mode != BLKmode
6224 && modifier != EXPAND_CONST_ADDRESS)
6226 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6227 if (GET_CODE (temp) == MEM)
6228 temp = copy_to_reg (temp);
6232 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6233 || code == INDIRECT_REF || code == BUFFER_REF)
6234 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6237 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6238 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6240 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6241 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6244 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6245 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6246 /* If the second operand has no side effects, just evaluate
6248 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6250 else if (code == BIT_FIELD_REF)
6252 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6253 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6254 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6261 #ifdef MAX_INTEGER_COMPUTATION_MODE
6262 /* Only check stuff here if the mode we want is different from the mode
6263 of the expression; if it's the same, check_max_integer_computation_mode
6264 will handle it. Do we really need to check this stuff at all? */
6267 && GET_MODE (target) != mode
6268 && TREE_CODE (exp) != INTEGER_CST
6269 && TREE_CODE (exp) != PARM_DECL
6270 && TREE_CODE (exp) != ARRAY_REF
6271 && TREE_CODE (exp) != ARRAY_RANGE_REF
6272 && TREE_CODE (exp) != COMPONENT_REF
6273 && TREE_CODE (exp) != BIT_FIELD_REF
6274 && TREE_CODE (exp) != INDIRECT_REF
6275 && TREE_CODE (exp) != CALL_EXPR
6276 && TREE_CODE (exp) != VAR_DECL
6277 && TREE_CODE (exp) != RTL_EXPR)
6279 enum machine_mode mode = GET_MODE (target);
6281 if (GET_MODE_CLASS (mode) == MODE_INT
6282 && mode > MAX_INTEGER_COMPUTATION_MODE)
6283 internal_error ("unsupported wide integer operation");
6287 && TREE_CODE (exp) != INTEGER_CST
6288 && TREE_CODE (exp) != PARM_DECL
6289 && TREE_CODE (exp) != ARRAY_REF
6290 && TREE_CODE (exp) != ARRAY_RANGE_REF
6291 && TREE_CODE (exp) != COMPONENT_REF
6292 && TREE_CODE (exp) != BIT_FIELD_REF
6293 && TREE_CODE (exp) != INDIRECT_REF
6294 && TREE_CODE (exp) != VAR_DECL
6295 && TREE_CODE (exp) != CALL_EXPR
6296 && TREE_CODE (exp) != RTL_EXPR
6297 && GET_MODE_CLASS (tmode) == MODE_INT
6298 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6299 internal_error ("unsupported wide integer operation");
6301 check_max_integer_computation_mode (exp);
6304 /* If we will do cse, generate all results into pseudo registers
6305 since 1) that allows cse to find more things
6306 and 2) otherwise cse could produce an insn the machine
6307 cannot support. An exception is a CONSTRUCTOR into a multi-word
6308 MEM: storing directly into the MEM is much more likely to be efficient. */
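/* For example, if the same address computation occurs twice, having both
   results in pseudo registers lets cse combine them; a result living in
   a hard register or in memory would block that. */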
6310 if (! cse_not_expected && mode != BLKmode && target
6311 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6312 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6319 tree function = decl_function_context (exp);
6320 /* Handle using a label in a containing function. */
6321 if (function != current_function_decl
6322 && function != inline_function_decl && function != 0)
6324 struct function *p = find_function_data (function);
6325 p->expr->x_forced_labels
6326 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6327 p->expr->x_forced_labels);
6331 if (modifier == EXPAND_INITIALIZER)
6332 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6337 temp = gen_rtx_MEM (FUNCTION_MODE,
6338 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6339 if (function != current_function_decl
6340 && function != inline_function_decl && function != 0)
6341 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6346 if (DECL_RTL (exp) == 0)
6348 error_with_decl (exp, "prior parameter's size depends on `%s'");
6349 return CONST0_RTX (mode);
6352 /* ... fall through ... */
6355 /* If a static var's type was incomplete when the decl was written,
6356 but the type is complete now, lay out the decl now. */
6357 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6358 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6360 rtx value = DECL_RTL_IF_SET (exp);
6362 layout_decl (exp, 0);
6364 /* If the RTL was already set, update its mode and memory attributes. */
6368 PUT_MODE (value, DECL_MODE (exp));
6369 SET_DECL_RTL (exp, 0);
6370 set_mem_attributes (value, exp, 1);
6371 SET_DECL_RTL (exp, value);
6375 /* ... fall through ... */
6379 if (DECL_RTL (exp) == 0)
6382 /* Ensure the variable is marked as used even if it doesn't go through
6383 a parser. If it hasn't been used yet, write out an external definition. */
6385 if (! TREE_USED (exp))
6387 assemble_external (exp);
6388 TREE_USED (exp) = 1;
6391 /* Show we haven't gotten RTL for this yet. */
6394 /* Handle variables inherited from containing functions. */
6395 context = decl_function_context (exp);
6397 /* We treat inline_function_decl as an alias for the current function
6398 because that is the inline function whose vars, types, etc.
6399 are being merged into the current function.
6400 See expand_inline_function. */
6402 if (context != 0 && context != current_function_decl
6403 && context != inline_function_decl
6404 /* If var is static, we don't need a static chain to access it. */
6405 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6406 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6410 /* Mark as non-local and addressable. */
6411 DECL_NONLOCAL (exp) = 1;
6412 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6414 (*lang_hooks.mark_addressable) (exp);
6415 if (GET_CODE (DECL_RTL (exp)) != MEM)
6417 addr = XEXP (DECL_RTL (exp), 0);
6418 if (GET_CODE (addr) == MEM)
6420 = replace_equiv_address (addr,
6421 fix_lexical_addr (XEXP (addr, 0), exp));
6423 addr = fix_lexical_addr (addr, exp);
6425 temp = replace_equiv_address (DECL_RTL (exp), addr);
6428 /* This is the case of an array whose size is to be determined
6429 from its initializer, while the initializer is still being parsed. See expand_decl. */
6432 else if (GET_CODE (DECL_RTL (exp)) == MEM
6433 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6434 temp = validize_mem (DECL_RTL (exp));
6436 /* If DECL_RTL is memory, we are in the normal case: if the
6437 address is not valid, or it is not a register and -fforce-addr
6438 is specified, get the address into a register. */
6440 else if (GET_CODE (DECL_RTL (exp)) == MEM
6441 && modifier != EXPAND_CONST_ADDRESS
6442 && modifier != EXPAND_SUM
6443 && modifier != EXPAND_INITIALIZER
6444 && (! memory_address_p (DECL_MODE (exp),
6445 XEXP (DECL_RTL (exp), 0))
6447 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6448 temp = replace_equiv_address (DECL_RTL (exp),
6449 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6451 /* If we got something, return it. But first, set the alignment
6452 if the address is a register. */
6455 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6456 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6461 /* If the mode of DECL_RTL does not match that of the decl, it
6462 must be a promoted value. We return a SUBREG of the wanted mode,
6463 but mark it so that we know that it was already extended. */
6465 if (GET_CODE (DECL_RTL (exp)) == REG
6466 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6468 /* Get the signedness used for this variable. Ensure we get the
6469 same mode we got when the variable was declared. */
6470 if (GET_MODE (DECL_RTL (exp))
6471 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6472 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6475 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6476 SUBREG_PROMOTED_VAR_P (temp) = 1;
6477 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6481 return DECL_RTL (exp);
6484 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6485 TREE_INT_CST_HIGH (exp), mode);
6487 /* ??? If overflow is set, fold will have done an incomplete job,
6488 which can result in (plus xx (const_int 0)), which can get
6489 simplified by validate_replace_rtx during virtual register
6490 instantiation, which can result in unrecognizable insns.
6491 Avoid this by forcing all overflows into registers. */
6492 if (TREE_CONSTANT_OVERFLOW (exp)
6493 && modifier != EXPAND_INITIALIZER)
6494 temp = force_reg (mode, temp);
6499 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6502 /* If optimized, generate immediate CONST_DOUBLE
6503 which will be turned into memory by reload if necessary.
6505 We used to force a register so that loop.c could see it. But
6506 this does not allow gen_* patterns to perform optimizations with
6507 the constants. It also produces two insns in cases like "x = 1.0;".
6508 On most machines, floating-point constants are not permitted in
6509 many insns, so we'd end up copying it to a register in any case.
6511 Now, we do the copying in expand_binop, if appropriate. */
6512 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6513 TYPE_MODE (TREE_TYPE (exp)));
6517 if (! TREE_CST_RTL (exp))
6518 output_constant_def (exp, 1);
6520 /* TREE_CST_RTL probably contains a constant address.
6521 On RISC machines where a constant address isn't valid,
6522 make some insns to get that address into a register. */
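/* On many RISC targets, for instance, a symbolic address must first be
   built up in a register (e.g. with a HIGH/LO_SUM pair) before the
   constant can be referenced through memory. */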
6523 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6524 && modifier != EXPAND_CONST_ADDRESS
6525 && modifier != EXPAND_INITIALIZER
6526 && modifier != EXPAND_SUM
6527 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6529 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6530 return replace_equiv_address (TREE_CST_RTL (exp),
6531 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6532 return TREE_CST_RTL (exp);
6534 case EXPR_WITH_FILE_LOCATION:
6537 const char *saved_input_filename = input_filename;
6538 int saved_lineno = lineno;
6539 input_filename = EXPR_WFL_FILENAME (exp);
6540 lineno = EXPR_WFL_LINENO (exp);
6541 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6542 emit_line_note (input_filename, lineno);
6543 /* Possibly avoid switching back and forth here. */
6544 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6545 input_filename = saved_input_filename;
6546 lineno = saved_lineno;
6551 context = decl_function_context (exp);
6553 /* If this SAVE_EXPR was at global context, assume we are an
6554 initialization function and move it into our context. */
6556 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6558 /* We treat inline_function_decl as an alias for the current function
6559 because that is the inline function whose vars, types, etc.
6560 are being merged into the current function.
6561 See expand_inline_function. */
6562 if (context == current_function_decl || context == inline_function_decl)
6565 /* If this is non-local, handle it. */
6568 /* The following call just exists to abort if the context is
6569 not of a containing function. */
6570 find_function_data (context);
6572 temp = SAVE_EXPR_RTL (exp);
6573 if (temp && GET_CODE (temp) == REG)
6575 put_var_into_stack (exp);
6576 temp = SAVE_EXPR_RTL (exp);
6578 if (temp == 0 || GET_CODE (temp) != MEM)
6581 replace_equiv_address (temp,
6582 fix_lexical_addr (XEXP (temp, 0), exp));
6584 if (SAVE_EXPR_RTL (exp) == 0)
6586 if (mode == VOIDmode)
6589 temp = assign_temp (build_qualified_type (type,
6591 | TYPE_QUAL_CONST)),
6594 SAVE_EXPR_RTL (exp) = temp;
6595 if (!optimize && GET_CODE (temp) == REG)
6596 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6599 /* If the mode of TEMP does not match that of the expression, it
6600 must be a promoted value. We pass store_expr a SUBREG of the
6601 wanted mode but mark it so that we know that it was already
6602 extended. Note that `unsignedp' was modified above in this case. */
6605 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6607 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6608 SUBREG_PROMOTED_VAR_P (temp) = 1;
6609 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6612 if (temp == const0_rtx)
6613 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6615 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6617 TREE_USED (exp) = 1;
6620 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6621 must be a promoted value. We return a SUBREG of the wanted mode,
6622 but mark it so that we know that it was already extended. */
6624 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6625 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6627 /* Compute the signedness and make the proper SUBREG. */
6628 promote_mode (type, mode, &unsignedp, 0);
6629 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6630 SUBREG_PROMOTED_VAR_P (temp) = 1;
6631 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6635 return SAVE_EXPR_RTL (exp);
6640 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6641 TREE_OPERAND (exp, 0)
6642 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6646 case PLACEHOLDER_EXPR:
6648 tree old_list = placeholder_list;
6649 tree placeholder_expr = 0;
6651 exp = find_placeholder (exp, &placeholder_expr);
6655 placeholder_list = TREE_CHAIN (placeholder_expr);
6656 temp = expand_expr (exp, original_target, tmode, modifier);
6657 placeholder_list = old_list;
6661 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6664 case WITH_RECORD_EXPR:
6665 /* Put the object on the placeholder list, expand our first operand,
6666 and pop the list. */
6667 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6669 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6671 placeholder_list = TREE_CHAIN (placeholder_list);
6675 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6676 expand_goto (TREE_OPERAND (exp, 0));
6678 expand_computed_goto (TREE_OPERAND (exp, 0));
6682 expand_exit_loop_if_false (NULL,
6683 invert_truthvalue (TREE_OPERAND (exp, 0)));
6686 case LABELED_BLOCK_EXPR:
6687 if (LABELED_BLOCK_BODY (exp))
6688 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6689 /* Should perhaps use expand_label, but this is simpler and safer. */
6690 do_pending_stack_adjust ();
6691 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6694 case EXIT_BLOCK_EXPR:
6695 if (EXIT_BLOCK_RETURN (exp))
6696 sorry ("returned value in block_exit_expr");
6697 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6702 expand_start_loop (1);
6703 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6711 tree vars = TREE_OPERAND (exp, 0);
6712 int vars_need_expansion = 0;
6714 /* Need to open a binding contour here because
6715 if there are any cleanups they must be contained here. */
6716 expand_start_bindings (2);
6718 /* Mark the corresponding BLOCK for output in its proper place. */
6719 if (TREE_OPERAND (exp, 2) != 0
6720 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6721 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6723 /* If VARS have not yet been expanded, expand them now. */
6726 if (!DECL_RTL_SET_P (vars))
6728 vars_need_expansion = 1;
6731 expand_decl_init (vars);
6732 vars = TREE_CHAIN (vars);
6735 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6737 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6743 if (RTL_EXPR_SEQUENCE (exp))
6745 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6747 emit_insn (RTL_EXPR_SEQUENCE (exp));
6748 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6750 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6751 free_temps_for_rtl_expr (exp);
6752 return RTL_EXPR_RTL (exp);
6755 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6761 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6762 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6767 /* All elts simple constants => refer to a constant in memory. But
6768 if this is a non-BLKmode mode, let it store a field at a time
6769 since that should make a CONST_INT or CONST_DOUBLE when we
6770 fold. Likewise, if we have a target we can use, it is best to
6771 store directly into the target unless the type is large enough
6772 that memcpy will be used. If we are making an initializer and
6773 all operands are constant, put it in memory as well.
6775 FIXME: Avoid trying to fill vector constructors piecemeal.
6776 Output them with output_constant_def below unless we're sure
6777 they're zeros. This should go away when vector initializers
6778 are treated like VECTOR_CST instead of arrays.
6780 else if ((TREE_STATIC (exp)
6781 && ((mode == BLKmode
6782 && ! (target != 0 && safe_from_p (target, exp, 1)))
6783 || TREE_ADDRESSABLE (exp)
6784 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6785 && (! MOVE_BY_PIECES_P
6786 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6788 && ((TREE_CODE (type) == VECTOR_TYPE
6789 && !is_zeros_p (exp))
6790 || ! mostly_zeros_p (exp)))))
6791 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6793 rtx constructor = output_constant_def (exp, 1);
6795 if (modifier != EXPAND_CONST_ADDRESS
6796 && modifier != EXPAND_INITIALIZER
6797 && modifier != EXPAND_SUM)
6798 constructor = validize_mem (constructor);
6804 /* Handle calls that pass values in multiple non-contiguous
6805 locations. The Irix 6 ABI has examples of this. */
6806 if (target == 0 || ! safe_from_p (target, exp, 1)
6807 || GET_CODE (target) == PARALLEL)
6809 = assign_temp (build_qualified_type (type,
6811 | (TREE_READONLY (exp)
6812 * TYPE_QUAL_CONST))),
6813 0, TREE_ADDRESSABLE (exp), 1);
6815 store_constructor (exp, target, 0, int_expr_size (exp));
6821 tree exp1 = TREE_OPERAND (exp, 0);
6823 tree string = string_constant (exp1, &index);
6825 /* Try to optimize reads from const strings. */
6827 && TREE_CODE (string) == STRING_CST
6828 && TREE_CODE (index) == INTEGER_CST
6829 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6830 && GET_MODE_CLASS (mode) == MODE_INT
6831 && GET_MODE_SIZE (mode) == 1
6832 && modifier != EXPAND_WRITE)
6833 return gen_int_mode (TREE_STRING_POINTER (string)
6834 [TREE_INT_CST_LOW (index)], mode);
6836 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6837 op0 = memory_address (mode, op0);
6838 temp = gen_rtx_MEM (mode, op0);
6839 set_mem_attributes (temp, exp, 0);
6841 /* If we are writing to this object and its type is a record with
6842 readonly fields, we must mark it as readonly so it will
6843 conflict with readonly references to those fields. */
6844 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6845 RTX_UNCHANGING_P (temp) = 1;
6851 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6855 tree array = TREE_OPERAND (exp, 0);
6856 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6857 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6858 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6861 /* Optimize the special case of a zero lower bound.
6863 We convert the low_bound to sizetype to avoid some problems
6864 with constant folding. (E.g. suppose the lower bound is 1,
6865 and its mode is QI. Without the conversion, (ARRAY
6866 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6867 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6869 if (! integer_zerop (low_bound))
6870 index = size_diffop (index, convert (sizetype, low_bound));
6872 /* Fold an expression like: "foo"[2].
6873 This is not done in fold so it won't happen inside &.
6874 Don't fold if this is for wide characters since it's too
6875 difficult to do correctly and this is a very rare case. */
6877 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6878 && TREE_CODE (array) == STRING_CST
6879 && TREE_CODE (index) == INTEGER_CST
6880 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6881 && GET_MODE_CLASS (mode) == MODE_INT
6882 && GET_MODE_SIZE (mode) == 1)
6883 return gen_int_mode (TREE_STRING_POINTER (array)
6884 [TREE_INT_CST_LOW (index)], mode);
6886 /* If this is a constant index into a constant array,
6887 just get the value from the array. Handle both the cases when
6888 we have an explicit constructor and when our operand is a variable
6889 that was declared const. */
6891 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6892 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6893 && TREE_CODE (index) == INTEGER_CST
6894 && 0 > compare_tree_int (index,
6895 list_length (CONSTRUCTOR_ELTS
6896 (TREE_OPERAND (exp, 0)))))
6900 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6901 i = TREE_INT_CST_LOW (index);
6902 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6906 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6910 else if (optimize >= 1
6911 && modifier != EXPAND_CONST_ADDRESS
6912 && modifier != EXPAND_INITIALIZER
6913 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6914 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6915 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6917 if (TREE_CODE (index) == INTEGER_CST)
6919 tree init = DECL_INITIAL (array);
6921 if (TREE_CODE (init) == CONSTRUCTOR)
6925 for (elem = CONSTRUCTOR_ELTS (init);
6927 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6928 elem = TREE_CHAIN (elem))
6931 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6932 return expand_expr (fold (TREE_VALUE (elem)), target,
6935 else if (TREE_CODE (init) == STRING_CST
6936 && 0 > compare_tree_int (index,
6937 TREE_STRING_LENGTH (init)))
6939 tree type = TREE_TYPE (TREE_TYPE (init));
6940 enum machine_mode mode = TYPE_MODE (type);
6942 if (GET_MODE_CLASS (mode) == MODE_INT
6943 && GET_MODE_SIZE (mode) == 1)
6944 return gen_int_mode (TREE_STRING_POINTER (init)
6945 [TREE_INT_CST_LOW (index)], mode);
6954 case ARRAY_RANGE_REF:
6955 /* If the operand is a CONSTRUCTOR, we can just extract the
6956 appropriate field if it is present. Don't do this if we have
6957 already written the data since we want to refer to that copy
6958 and varasm.c assumes that's what we'll do. */
6959 if (code == COMPONENT_REF
6960 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6961 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6965 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6966 elt = TREE_CHAIN (elt))
6967 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6968 /* We can normally use the value of the field in the
6969 CONSTRUCTOR. However, if this is a bitfield in
6970 an integral mode that we can fit in a HOST_WIDE_INT,
6971 we must mask only the number of bits in the bitfield,
6972 since this is done implicitly by the constructor. If
6973 the bitfield does not meet either of those conditions,
6974 we can't do this optimization. */
6975 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6976 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6978 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6979 <= HOST_BITS_PER_WIDE_INT))))
6981 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6982 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6984 HOST_WIDE_INT bitsize
6985 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6986 enum machine_mode imode
6987 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6989 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6991 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6992 op0 = expand_and (imode, op0, op1, target);
6997 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7000 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7002 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7012 enum machine_mode mode1;
7013 HOST_WIDE_INT bitsize, bitpos;
7016 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7017 &mode1, &unsignedp, &volatilep);
7020 /* If we got back the original object, something is wrong. Perhaps
7021 we are evaluating an expression too early. In any event, don't
7022 infinitely recurse. */
7026 /* If TEM's type is a union of variable size, pass TARGET to the inner
7027 computation, since it will need a temporary and TARGET is known
7028 to suffice. This occurs in unchecked conversion in Ada. */
7032 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7033 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7035 ? target : NULL_RTX),
7037 (modifier == EXPAND_INITIALIZER
7038 || modifier == EXPAND_CONST_ADDRESS)
7039 ? modifier : EXPAND_NORMAL);
7041 /* If this is a constant, put it into a register if it is a
7042 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7043 if (CONSTANT_P (op0))
7045 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7046 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7048 op0 = force_reg (mode, op0);
7050 op0 = validize_mem (force_const_mem (mode, op0));
7055 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7057 /* If this object is in a register, put it into memory.
7058 This case can't occur in C, but can in Ada if we have
7059 unchecked conversion of an expression from a scalar type to
7060 an array or record type. */
7061 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7062 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7064 /* If the operand is a SAVE_EXPR, we can deal with this by
7065 forcing the SAVE_EXPR into memory. */
7066 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7068 put_var_into_stack (TREE_OPERAND (exp, 0));
7069 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7074 = build_qualified_type (TREE_TYPE (tem),
7075 (TYPE_QUALS (TREE_TYPE (tem))
7076 | TYPE_QUAL_CONST));
7077 rtx memloc = assign_temp (nt, 1, 1, 1);
7079 emit_move_insn (memloc, op0);
7084 if (GET_CODE (op0) != MEM)
7087 #ifdef POINTERS_EXTEND_UNSIGNED
7088 if (GET_MODE (offset_rtx) != Pmode)
7089 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7091 if (GET_MODE (offset_rtx) != ptr_mode)
7092 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7095 /* A constant address in OP0 can have VOIDmode; we must not
7096 call force_reg in that case, so avoid it here. */
7097 if (GET_CODE (op0) == MEM
7098 && GET_MODE (op0) == BLKmode
7099 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7101 && (bitpos % bitsize) == 0
7102 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7103 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7105 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7109 op0 = offset_address (op0, offset_rtx,
7110 highest_pow2_factor (offset));
7113 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7114 record its alignment as BIGGEST_ALIGNMENT. */
7115 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7116 && is_aligning_offset (offset, tem))
7117 set_mem_align (op0, BIGGEST_ALIGNMENT);
7119 /* Don't forget about volatility even if this is a bitfield. */
7120 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7122 if (op0 == orig_op0)
7123 op0 = copy_rtx (op0);
7125 MEM_VOLATILE_P (op0) = 1;
7128 /* The following code doesn't handle CONCAT.
7129 Assume only bitpos == 0 can be used for CONCAT, due to
7130 one-element arrays having the same mode as their element. */
7131 if (GET_CODE (op0) == CONCAT)
7133 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7138 /* In cases where an aligned union has an unaligned object
7139 as a field, we might be extracting a BLKmode value from
7140 an integer-mode (e.g., SImode) object. Handle this case
7141 by doing the extract into an object as wide as the field
7142 (which we know to be the width of a basic mode), then
7143 storing into memory, and changing the mode to BLKmode. */
7144 if (mode1 == VOIDmode
7145 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7146 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7147 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7148 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7149 && modifier != EXPAND_CONST_ADDRESS
7150 && modifier != EXPAND_INITIALIZER)
7151 /* If the field isn't aligned enough to fetch as a memref,
7152 fetch it as a bit field. */
7153 || (mode1 != BLKmode
7154 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7155 && ((TYPE_ALIGN (TREE_TYPE (tem))
7156 < GET_MODE_ALIGNMENT (mode))
7157 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7158 /* If the type and the field are a constant size and the
7159 size of the type isn't the same size as the bitfield,
7160 we must use bitfield operations. */
7162 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7164 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7167 enum machine_mode ext_mode = mode;
7169 if (ext_mode == BLKmode
7170 && ! (target != 0 && GET_CODE (op0) == MEM
7171 && GET_CODE (target) == MEM
7172 && bitpos % BITS_PER_UNIT == 0))
7173 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7175 if (ext_mode == BLKmode)
7177 /* In this case, BITPOS must start at a byte boundary and
7178 TARGET, if specified, must be a MEM. */
7179 if (GET_CODE (op0) != MEM
7180 || (target != 0 && GET_CODE (target) != MEM)
7181 || bitpos % BITS_PER_UNIT != 0)
7184 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7186 target = assign_temp (type, 0, 1, 1);
7188 emit_block_move (target, op0,
7189 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7195 op0 = validize_mem (op0);
7197 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7198 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7200 op0 = extract_bit_field (op0, bitsize, bitpos,
7201 unsignedp, target, ext_mode, ext_mode,
7202 int_size_in_bytes (TREE_TYPE (tem)));
7204 /* If the result is a record type and BITSIZE is narrower than
7205 the mode of OP0, an integral mode, and this is a big endian
7206 machine, we must put the field into the high-order bits. */
7207 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7208 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7209 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7210 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7211 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7215 if (mode == BLKmode)
7217 rtx new = assign_temp (build_qualified_type
7218 ((*lang_hooks.types.type_for_mode)
7220 TYPE_QUAL_CONST), 0, 1, 1);
7222 emit_move_insn (new, op0);
7223 op0 = copy_rtx (new);
7224 PUT_MODE (op0, BLKmode);
7225 set_mem_attributes (op0, exp, 1);
7231 /* If the result is BLKmode, use that to access the object now as well. */
7233 if (mode == BLKmode)
7236 /* Get a reference to just this component. */
7237 if (modifier == EXPAND_CONST_ADDRESS
7238 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7239 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7241 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7243 if (op0 == orig_op0)
7244 op0 = copy_rtx (op0);
7246 set_mem_attributes (op0, exp, 0);
7247 if (GET_CODE (XEXP (op0, 0)) == REG)
7248 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7250 MEM_VOLATILE_P (op0) |= volatilep;
7251 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7252 || modifier == EXPAND_CONST_ADDRESS
7253 || modifier == EXPAND_INITIALIZER)
7255 else if (target == 0)
7256 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7258 convert_move (target, op0, unsignedp);
7264 rtx insn, before = get_last_insn (), vtbl_ref;
7266 /* Evaluate the interior expression. */
7267 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7270 /* Get or create an instruction off which to hang a note. */
7271 if (REG_P (subtarget))
7274 insn = get_last_insn ();
7277 if (! INSN_P (insn))
7278 insn = prev_nonnote_insn (insn);
7282 target = gen_reg_rtx (GET_MODE (subtarget));
7283 insn = emit_move_insn (target, subtarget);
7286 /* Collect the data for the note. */
7287 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7288 vtbl_ref = plus_constant (vtbl_ref,
7289 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7290 /* Discard the initial CONST that was added. */
7291 vtbl_ref = XEXP (vtbl_ref, 0);
7294 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7299 /* Intended for a reference to a buffer of a file-object in Pascal.
7300 But it's not certain that a special tree code will really be
7301 necessary for these. INDIRECT_REF might work for them. */
7307 /* Pascal set IN expression.
7310 rlo = set_low - (set_low%bits_per_word);
7311 the_word = set [ (index - rlo)/bits_per_word ];
7312 bit_index = index % bits_per_word;
7313 bitmask = 1 << bit_index;
7314 return !!(the_word & bitmask); */
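/* A worked example (purely illustrative), with bits_per_word == 8,
   set_low == 3 and index == 11:
     rlo       = 3 - (3 % 8)       = 0
     the_word  = set[(11 - 0) / 8] = set[1]
     bit_index = 11 % 8            = 3
     bitmask   = 1 << 3            = 8
   so the test reads bit 3 of the second byte of the set. */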
7316 tree set = TREE_OPERAND (exp, 0);
7317 tree index = TREE_OPERAND (exp, 1);
7318 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7319 tree set_type = TREE_TYPE (set);
7320 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7321 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7322 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7323 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7324 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7325 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7326 rtx setaddr = XEXP (setval, 0);
7327 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7329 rtx diff, quo, rem, addr, bit, result;
7331 /* If domain is empty, answer is no. Likewise if index is constant
7332 and out of bounds. */
7333 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7334 && TREE_CODE (set_low_bound) == INTEGER_CST
7335 && tree_int_cst_lt (set_high_bound, set_low_bound))
7336 || (TREE_CODE (index) == INTEGER_CST
7337 && TREE_CODE (set_low_bound) == INTEGER_CST
7338 && tree_int_cst_lt (index, set_low_bound))
7339 || (TREE_CODE (set_high_bound) == INTEGER_CST
7340 && TREE_CODE (index) == INTEGER_CST
7341 && tree_int_cst_lt (set_high_bound, index))))
7345 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7347 /* If we get here, we have to generate the code for both cases
7348 (in range and out of range). */
7350 op0 = gen_label_rtx ();
7351 op1 = gen_label_rtx ();
7353 if (! (GET_CODE (index_val) == CONST_INT
7354 && GET_CODE (lo_r) == CONST_INT))
7355 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7356 GET_MODE (index_val), iunsignedp, op1);
7358 if (! (GET_CODE (index_val) == CONST_INT
7359 && GET_CODE (hi_r) == CONST_INT))
7360 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7361 GET_MODE (index_val), iunsignedp, op1);
7363 /* Calculate the element number of bit zero in the first word of the set. */
7365 if (GET_CODE (lo_r) == CONST_INT)
7366 rlow = GEN_INT (INTVAL (lo_r)
7367 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7369 rlow = expand_binop (index_mode, and_optab, lo_r,
7370 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7371 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7373 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7374 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7376 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7377 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7378 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7379 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7381 addr = memory_address (byte_mode,
7382 expand_binop (index_mode, add_optab, diff,
7383 setaddr, NULL_RTX, iunsignedp,
7386 /* Extract the bit we want to examine. */
7387 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7388 gen_rtx_MEM (byte_mode, addr),
7389 make_tree (TREE_TYPE (index), rem),
7391 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7392 GET_MODE (target) == byte_mode ? target : 0,
7393 1, OPTAB_LIB_WIDEN);
7395 if (result != target)
7396 convert_move (target, result, 1);
7398 /* Output the code to handle the out-of-range case. */
7401 emit_move_insn (target, const0_rtx);
7406 case WITH_CLEANUP_EXPR:
7407 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7409 WITH_CLEANUP_EXPR_RTL (exp)
7410 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7411 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7412 CLEANUP_EH_ONLY (exp));
7414 /* That's it for this cleanup. */
7415 TREE_OPERAND (exp, 1) = 0;
7417 return WITH_CLEANUP_EXPR_RTL (exp);
7419 case CLEANUP_POINT_EXPR:
7421 /* Start a new binding layer that will keep track of all cleanup
7422 actions to be performed. */
7423 expand_start_bindings (2);
7425 target_temp_slot_level = temp_slot_level;
7427 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7428 /* If we're going to use this value, load it up now. */
7430 op0 = force_not_mem (op0);
7431 preserve_temp_slots (op0);
7432 expand_end_bindings (NULL_TREE, 0, 0);
7437 /* Check for a built-in function. */
7438 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7439 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7441 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7443 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7444 == BUILT_IN_FRONTEND)
7445 return (*lang_hooks.expand_expr)
7446 (exp, original_target, tmode, modifier);
7448 return expand_builtin (exp, target, subtarget, tmode, ignore);
7451 return expand_call (exp, target, ignore);
7453 case NON_LVALUE_EXPR:
7456 case REFERENCE_EXPR:
7457 if (TREE_OPERAND (exp, 0) == error_mark_node)
7460 if (TREE_CODE (type) == UNION_TYPE)
7462 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7464 /* If both input and output are BLKmode, this conversion isn't doing
7465 anything except possibly changing its memory attributes. */
7466 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7468 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7471 result = copy_rtx (result);
7472 set_mem_attributes (result, exp, 0);
7477 target = assign_temp (type, 0, 1, 1);
7479 if (GET_CODE (target) == MEM)
7480 /* Store data into beginning of memory target. */
7481 store_expr (TREE_OPERAND (exp, 0),
7482 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7484 else if (GET_CODE (target) == REG)
7485 /* Store this field into a union of the proper type. */
7486 store_field (target,
7487 MIN ((int_size_in_bytes (TREE_TYPE
7488 (TREE_OPERAND (exp, 0)))
7490 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7491 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7492 VOIDmode, 0, type, 0);
7496 /* Return the entire union. */
7500 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7502 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7505 /* If the signedness of the conversion differs and OP0 is
7506 a promoted SUBREG, clear that indication since we now
7507 have to do the proper extension. */
7508 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7509 && GET_CODE (op0) == SUBREG)
7510 SUBREG_PROMOTED_VAR_P (op0) = 0;
7515 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7516 if (GET_MODE (op0) == mode)
7519 /* If OP0 is a constant, just convert it into the proper mode. */
7520 if (CONSTANT_P (op0))
7522 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7523 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7525 if (modifier == EXPAND_INITIALIZER)
7526 return simplify_gen_subreg (mode, op0, inner_mode,
7527 subreg_lowpart_offset (mode,
7530 return convert_modes (mode, inner_mode, op0,
7531 TREE_UNSIGNED (inner_type));
7534 if (modifier == EXPAND_INITIALIZER)
7535 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7539 convert_to_mode (mode, op0,
7540 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7542 convert_move (target, op0,
7543 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7546 case VIEW_CONVERT_EXPR:
7547 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7549 /* If the input and output modes are both the same, we are done.
7550 Otherwise, if neither mode is BLKmode and both are within a word, we
7551 can use gen_lowpart. If neither is true, make sure the operand is
7552 in memory and convert the MEM to the new mode. */
7553 if (TYPE_MODE (type) == GET_MODE (op0))
7555 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7556 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7557 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7558 op0 = gen_lowpart (TYPE_MODE (type), op0);
7559 else if (GET_CODE (op0) != MEM)
7561 /* If the operand is not a MEM, force it into memory. Since we
7562 are going to be changing the mode of the MEM, don't call
7563 force_const_mem for constants because we don't allow pool
7564 constants to change mode. */
7565 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7567 if (TREE_ADDRESSABLE (exp))
7570 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7572 = assign_stack_temp_for_type
7573 (TYPE_MODE (inner_type),
7574 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7576 emit_move_insn (target, op0);
7580 /* At this point, OP0 is in the correct mode. If the output type is such
7581 that the operand is known to be aligned, indicate that it is.
7582 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7584 if (GET_CODE (op0) == MEM)
7586 op0 = copy_rtx (op0);
7588 if (TYPE_ALIGN_OK (type))
7589 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7590 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7591 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7593 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7594 HOST_WIDE_INT temp_size
7595 = MAX (int_size_in_bytes (inner_type),
7596 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7597 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7598 temp_size, 0, type);
7599 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7601 if (TREE_ADDRESSABLE (exp))
7604 if (GET_MODE (op0) == BLKmode)
7605 emit_block_move (new_with_op0_mode, op0,
7606 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7608 emit_move_insn (new_with_op0_mode, op0);
7613 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7619 /* We come here from MINUS_EXPR when the second operand is a constant. */
7622 this_optab = ! unsignedp && flag_trapv
7623 && (GET_MODE_CLASS (mode) == MODE_INT)
7624 ? addv_optab : add_optab;
7626 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7627 something else, make sure we add the register to the constant and
7628 then to the other thing. This case can occur during strength
7629 reduction and doing it this way will produce better code if the
7630 frame pointer or argument pointer is eliminated.
7632 fold-const.c will ensure that the constant is always in the inner
7633 PLUS_EXPR, so the only case we need to do anything about is if
7634 sp, ap, or fp is our second argument, in which case we must swap
7635 the innermost first argument and our second argument. */
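/* Illustration: (E + C) + FP is rearranged here so that it is expanded
   as (FP + C) + E, letting plus_constant fold FP + C into a single
   address once the frame pointer is eliminated. */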
7637 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7638 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7639 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7640 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7641 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7642 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7644 tree t = TREE_OPERAND (exp, 1);
7646 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7647 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7650 /* If the result is to be ptr_mode and we are adding an integer to
7651 something, we might be forming a constant. So try to use
7652 plus_constant. If it produces a sum and we can't accept it,
7653 use force_operand. This allows P = &ARR[const] to generate
7654 efficient code on machines where a SYMBOL_REF is not a valid address.
7657 If this is an EXPAND_SUM call, always return the sum. */
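/* For example, "p = &arr[2]" with 4-byte elements (illustrative only)
   can then be expanded directly to (plus (symbol_ref "arr")
   (const_int 8)) instead of first loading the symbol into a register. */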
7658 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7659 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7661 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7662 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7663 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7667 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7669 /* Use immed_double_const to ensure that the constant is
7670 truncated according to the mode of OP1, then sign extended
7671 to a HOST_WIDE_INT. Using the constant directly can result
7672 in non-canonical RTL in a 64x32 cross compile. */
7674 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7676 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7677 op1 = plus_constant (op1, INTVAL (constant_part));
7678 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7679 op1 = force_operand (op1, target);
7683 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7684 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7685 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7689 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7690 (modifier == EXPAND_INITIALIZER
7691 ? EXPAND_INITIALIZER : EXPAND_SUM));
7692 if (! CONSTANT_P (op0))
7694 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7695 VOIDmode, modifier);
7696 /* Don't go to both_summands if modifier
7697 says it's not right to return a PLUS. */
7698 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7702 /* Use immed_double_const to ensure that the constant is
7703 truncated according to the mode of OP1, then sign extended
7704 to a HOST_WIDE_INT. Using the constant directly can result
7705 in non-canonical RTL in a 64x32 cross compile. */
7707 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7709 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7710 op0 = plus_constant (op0, INTVAL (constant_part));
7711 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7712 op0 = force_operand (op0, target);
7717 /* No sense saving up arithmetic to be done
7718 if it's all in the wrong mode to form part of an address.
7719 And force_operand won't know whether to sign-extend or zero-extend. */
7721 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7722 || mode != ptr_mode)
7725 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7728 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7729 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7732 /* Make sure any term that's a sum with a constant comes last. */
7733 if (GET_CODE (op0) == PLUS
7734 && CONSTANT_P (XEXP (op0, 1)))
7740 /* If adding to a sum including a constant,
7741 associate it to put the constant outside. */
7742 if (GET_CODE (op1) == PLUS
7743 && CONSTANT_P (XEXP (op1, 1)))
7745 rtx constant_term = const0_rtx;
7747 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7750 /* Ensure that MULT comes first if there is one. */
7751 else if (GET_CODE (op0) == MULT)
7752 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7754 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7756 /* Let's also eliminate constants from op0 if possible. */
7757 op0 = eliminate_constant_term (op0, &constant_term);
7759 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7760 their sum should be a constant. Form it into OP1, since the
7761 result we want will then be OP0 + OP1. */
7763 temp = simplify_binary_operation (PLUS, mode, constant_term,
7768 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7771 /* Put a constant term last and put a multiplication first. */
7772 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7773 temp = op1, op1 = op0, op0 = temp;
7775 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7776 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7779 /* For initializers, we are allowed to return a MINUS of two
7780 symbolic constants. Here we handle all cases when both operands are constant. */
7782 /* Handle difference of two symbolic constants,
7783 for the sake of an initializer. */
7784 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7785 && really_constant_p (TREE_OPERAND (exp, 0))
7786 && really_constant_p (TREE_OPERAND (exp, 1)))
7788 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7790 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7793 /* If the last operand is a CONST_INT, use plus_constant of
7794 the negated constant. Else make the MINUS. */
7795 if (GET_CODE (op1) == CONST_INT)
7796 return plus_constant (op0, - INTVAL (op1));
7798 return gen_rtx_MINUS (mode, op0, op1);
7800 /* Convert A - const to A + (-const). */
7801 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7803 tree negated = fold (build1 (NEGATE_EXPR, type,
7804 TREE_OPERAND (exp, 1)));
7806 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7807 /* If we can't negate the constant in TYPE, leave it alone and
7808 expand_binop will negate it for us. We used to try to do it
7809 here in the signed version of TYPE, but that doesn't work
7810 on POINTER_TYPEs. */;
7813 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7817 this_optab = ! unsignedp && flag_trapv
7818 && (GET_MODE_CLASS(mode) == MODE_INT)
7819 ? subv_optab : sub_optab;
7823 /* If first operand is constant, swap them.
7824 Thus the following special case checks need only
7825 check the second operand. */
7826 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7828 tree t1 = TREE_OPERAND (exp, 0);
7829 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7830 TREE_OPERAND (exp, 1) = t1;
7833 /* Attempt to return something suitable for generating an
7834 indexed address, for machines that support that. */
7836 if (modifier == EXPAND_SUM && mode == ptr_mode
7837 && host_integerp (TREE_OPERAND (exp, 1), 0))
7839 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7842 /* If we knew for certain that this is arithmetic for an array
7843 reference, and we knew the bounds of the array, then we could
7844 apply the distributive law across (PLUS X C) for constant C.
7845 Without such knowledge, we risk overflowing the computation
7846 when both X and C are large, but X+C isn't. */
7847 /* ??? Could perhaps special-case EXP being unsigned and C being
7848 positive. In that case we are certain that X+C is no smaller
7849 than X and so the transformed expression will overflow iff the
7850 original would have. */
7852 if (GET_CODE (op0) != REG)
7853 op0 = force_operand (op0, NULL_RTX);
7854 if (GET_CODE (op0) != REG)
7855 op0 = copy_to_mode_reg (mode, op0);
7858 gen_rtx_MULT (mode, op0,
7859 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7862 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7865 /* Check for multiplying things that have been extended
7866 from a narrower type. If this machine supports multiplying
7867 in that narrower type with a result in the desired type,
7868 do it that way, and avoid the explicit type-conversion. */
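/* For instance, a machine that provides a widening multiply pattern
   (such as mulhisi3, where it exists) can compute
   (int) (short) a * (int) (short) b in one instruction instead of two
   extensions followed by a full SImode multiply. */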
7869 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7870 && TREE_CODE (type) == INTEGER_TYPE
7871 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7872 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7873 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7874 && int_fits_type_p (TREE_OPERAND (exp, 1),
7875 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7876 /* Don't use a widening multiply if a shift will do. */
7877 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7878 > HOST_BITS_PER_WIDE_INT)
7879 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7881 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7882 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7884 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7885 /* If both operands are extended, they must either both
7886 be zero-extended or both be sign-extended. */
7887 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7889 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7891 enum machine_mode innermode
7892 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7893 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7894 ? smul_widen_optab : umul_widen_optab);
7895 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7896 ? umul_widen_optab : smul_widen_optab);
7897 if (mode == GET_MODE_WIDER_MODE (innermode))
7899 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7901 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7902 NULL_RTX, VOIDmode, 0);
7903 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7904 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7907 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7908 NULL_RTX, VOIDmode, 0);
7911 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7912 && innermode == word_mode)
7915 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7916 NULL_RTX, VOIDmode, 0);
7917 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7918 op1 = convert_modes (innermode, mode,
7919 expand_expr (TREE_OPERAND (exp, 1),
7920 NULL_RTX, VOIDmode, 0),
7923 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7924 NULL_RTX, VOIDmode, 0);
7925 temp = expand_binop (mode, other_optab, op0, op1, target,
7926 unsignedp, OPTAB_LIB_WIDEN);
7927 htem = expand_mult_highpart_adjust (innermode,
7928 gen_highpart (innermode, temp),
7930 gen_highpart (innermode, temp),
7932 emit_move_insn (gen_highpart (innermode, temp), htem);
7937 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7938 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7939 return expand_mult (mode, op0, op1, target, unsignedp);
7941 case TRUNC_DIV_EXPR:
7942 case FLOOR_DIV_EXPR:
7944 case ROUND_DIV_EXPR:
7945 case EXACT_DIV_EXPR:
7946 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7948 /* Possible optimization: compute the dividend with EXPAND_SUM;
7949 then, if the divisor is constant, we can optimize the case
7950 where some terms of the dividend have coefficients divisible by it. */
7951 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7952 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7953 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7956 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7957 saving an expensive divide. If not, combine will rebuild the
7958 original computation. */
7959 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7960 && TREE_CODE (type) == REAL_TYPE
7961 && !real_onep (TREE_OPERAND (exp, 0)))
7962 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7963 build (RDIV_EXPR, type,
7964 build_real (type, dconst1),
7965 TREE_OPERAND (exp, 1))),
7966 target, tmode, unsignedp);
7967 this_optab = sdiv_optab;
7970 case TRUNC_MOD_EXPR:
7971 case FLOOR_MOD_EXPR:
7973 case ROUND_MOD_EXPR:
7974 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7976 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7977 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7978 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7980 case FIX_ROUND_EXPR:
7981 case FIX_FLOOR_EXPR:
7983 abort (); /* Not used for C. */
7985 case FIX_TRUNC_EXPR:
7986 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7988 target = gen_reg_rtx (mode);
7989 expand_fix (target, op0, unsignedp);
7993 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7995 target = gen_reg_rtx (mode);
7996 /* expand_float can't figure out what to do if FROM has VOIDmode.
7997 So give it the correct mode. With -O, cse will optimize this. */
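/* E.g. the operand of "(float) 3" arrives as a VOIDmode (const_int 3);
   copying it into a register of the operand type's mode gives
   expand_float a definite source mode to convert from. */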
7998 if (GET_MODE (op0) == VOIDmode)
7999 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8001 expand_float (target, op0,
8002 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8006 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8007 temp = expand_unop (mode,
8008 ! unsignedp && flag_trapv
8009 && (GET_MODE_CLASS(mode) == MODE_INT)
8010 ? negv_optab : neg_optab, op0, target, 0);
8016 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8018 /* Handle complex values specially. */
8019 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8020 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8021 return expand_complex_abs (mode, op0, target, unsignedp);
8023 /* Unsigned abs is simply the operand. Testing here means we don't
8024 risk generating incorrect code below. */
8025 if (TREE_UNSIGNED (type))
8028 return expand_abs (mode, op0, target, unsignedp,
8029 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8033 target = original_target;
8034 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8035 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8036 || GET_MODE (target) != mode
8037 || (GET_CODE (target) == REG
8038 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8039 target = gen_reg_rtx (mode);
8040 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8041 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8043 /* First try to do it with a special MIN or MAX instruction.
8044 If that does not win, use a conditional jump to select the proper value. */
8046 this_optab = (TREE_UNSIGNED (type)
8047 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8048 : (code == MIN_EXPR ? smin_optab : smax_optab));
8050 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8055 /* At this point, a MEM target is no longer useful; we will get better code without it. */
8058 if (GET_CODE (target) == MEM)
8059 target = gen_reg_rtx (mode);
8062 emit_move_insn (target, op0);
8064 op0 = gen_label_rtx ();
8066 /* If this mode is an integer too wide to compare properly,
8067 compare word by word. Rely on cse to optimize constant cases. */
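/* For example, comparing values wider than a word (say DImode on a
   32-bit target with no DImode compare) is done one word at a time,
   starting with the most significant words. */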
8068 if (GET_MODE_CLASS (mode) == MODE_INT
8069 && ! can_compare_p (GE, mode, ccp_jump))
8071 if (code == MAX_EXPR)
8072 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8073 target, op1, NULL_RTX, op0);
8075 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8076 op1, target, NULL_RTX, op0);
8080 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8081 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8082 unsignedp, mode, NULL_RTX, NULL_RTX,
8085 emit_move_insn (target, op1);
8090 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8091 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8097 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8098 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8103 /* ??? Can optimize bitwise operations with one arg constant.
8104 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8105 and (a bitwise1 b) bitwise2 b (etc)
8106 but that is probably not worthwhile. */
8108 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8109 boolean values when we want in all cases to compute both of them. In
8110 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8111 as actual zero-or-1 values and then bitwise anding. In cases where
8112 there cannot be any side effects, better code would be made by
8113 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8114 how to recognize those cases. */
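/* For example, "a && b" (TRUTH_ANDIF_EXPR) must not evaluate b when a
   is false, whereas TRUTH_AND_EXPR may evaluate both operands
   unconditionally and simply AND the resulting 0/1 values. */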
8116 case TRUTH_AND_EXPR:
8118 this_optab = and_optab;
8123 this_optab = ior_optab;
8126 case TRUTH_XOR_EXPR:
8128 this_optab = xor_optab;
8135 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8137 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8138 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8141 /* Could determine the answer when only additive constants differ. Also,
8142 the addition of one can be handled by changing the condition. */
8149 case UNORDERED_EXPR:
8156 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8160 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8161 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8163 && GET_CODE (original_target) == REG
8164 && (GET_MODE (original_target)
8165 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8167 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8170 /* If temp is constant, we can just compute the result. */
8171 if (GET_CODE (temp) == CONST_INT)
8173 if (INTVAL (temp) != 0)
8174 emit_move_insn (target, const1_rtx);
8176 emit_move_insn (target, const0_rtx);
8181 if (temp != original_target)
8183 enum machine_mode mode1 = GET_MODE (temp);
8184 if (mode1 == VOIDmode)
8185 mode1 = tmode != VOIDmode ? tmode : mode;
8187 temp = copy_to_mode_reg (mode1, temp);
8190 op1 = gen_label_rtx ();
8191 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8192 GET_MODE (temp), unsignedp, op1);
8193 emit_move_insn (temp, const1_rtx);
8198 /* If no set-flag instruction, must generate a conditional
8199 store into a temporary variable. Drop through
8200 and handle this like && and ||. */
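/* The conditional store expands roughly as

       target = 0;
       if (! exp) goto lab;
       target = 1;
     lab:

   using the emit_clr_insn, jumpifnot and emit_0_to_1_insn calls below. */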
8202 case TRUTH_ANDIF_EXPR:
8203 case TRUTH_ORIF_EXPR:
8205 && (target == 0 || ! safe_from_p (target, exp, 1)
8206 /* Make sure we don't have a hard reg (such as function's return
8207 value) live across basic blocks, if not optimizing. */
8208 || (!optimize && GET_CODE (target) == REG
8209 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8210 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8213 emit_clr_insn (target);
8215 op1 = gen_label_rtx ();
8216 jumpifnot (exp, op1);
8219 emit_0_to_1_insn (target);
8222 return ignore ? const0_rtx : target;
8224 case TRUTH_NOT_EXPR:
8225 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8226 /* The parser is careful to generate TRUTH_NOT_EXPR
8227 only with operands that are always zero or one. */
8228 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8229 target, 1, OPTAB_LIB_WIDEN);
8235 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8237 return expand_expr (TREE_OPERAND (exp, 1),
8238 (ignore ? const0_rtx : target),
8242 /* If we would have a "singleton" (see below) were it not for a
8243 conversion in each arm, bring that conversion back out. */
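/* For example, C ? (long) (A + B) : (long) A is rebuilt as
   (long) (C ? A + B : A), exposing the "singleton" form handled below. */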
8244 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8245 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8246 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8247 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8249 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8250 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8252 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8253 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8254 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8255 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8256 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8257 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8258 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8259 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8260 return expand_expr (build1 (NOP_EXPR, type,
8261 build (COND_EXPR, TREE_TYPE (iftrue),
8262 TREE_OPERAND (exp, 0),
8264 target, tmode, modifier);
8268 /* Note that COND_EXPRs whose type is a structure or union
8269 are required to be constructed to contain assignments of
8270 a temporary variable, so that we can evaluate them here
8271 for side effect only. If type is void, we must do likewise. */
8273 /* If an arm of the branch requires a cleanup,
8274 only that cleanup is performed. */
8277 tree binary_op = 0, unary_op = 0;
8279 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8280 convert it to our mode, if necessary. */
8281 if (integer_onep (TREE_OPERAND (exp, 1))
8282 && integer_zerop (TREE_OPERAND (exp, 2))
8283 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8287 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8292 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8293 if (GET_MODE (op0) == mode)
8297 target = gen_reg_rtx (mode);
8298 convert_move (target, op0, unsignedp);
8302 /* Check for X ? A + B : A. If we have this, we can copy A to the
8303 output and conditionally add B. Similarly for unary operations.
8304 Don't do this if X has side-effects because those side effects
8305 might affect A or B and the "?" operation is a sequence point in
8306 ANSI. (operand_equal_p tests for side effects.) */
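/* For instance, for X ? A + B : A the code below copies A to the output
   and adds B under a branch on X, rather than computing both arms; this
   is skipped when X has side effects. */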
8308 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8309 && operand_equal_p (TREE_OPERAND (exp, 2),
8310 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8311 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8312 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8313 && operand_equal_p (TREE_OPERAND (exp, 1),
8314 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8315 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8316 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8317 && operand_equal_p (TREE_OPERAND (exp, 2),
8318 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8319 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8320 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8321 && operand_equal_p (TREE_OPERAND (exp, 1),
8322 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8323 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8325 /* If we are not to produce a result, we have no target. Otherwise,
8326 if a target was specified use it; it will not be used as an
8327 intermediate target unless it is safe. If no target, use a
temporary and see if it can be used as a target. */
8332 else if (original_target
8333 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8334 || (singleton && GET_CODE (original_target) == REG
8335 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8336 && original_target == var_rtx (singleton)))
8337 && GET_MODE (original_target) == mode
8338 #ifdef HAVE_conditional_move
8339 && (! can_conditionally_move_p (mode)
8340 || GET_CODE (original_target) == REG
8341 || TREE_ADDRESSABLE (type))
8343 && (GET_CODE (original_target) != MEM
8344 || TREE_ADDRESSABLE (type)))
8345 temp = original_target;
8346 else if (TREE_ADDRESSABLE (type))
8349 temp = assign_temp (type, 0, 0, 1);
8351 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8352 do the test of X as a store-flag operation, do this as
8353 A + ((X != 0) << log C). Similarly for other simple binary
8354 operators. Only do for C == 1 if BRANCH_COST is low. */
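/* For example, with BRANCH_COST >= 3 and X a comparison,
   X ? A + 4 : A becomes A + ((X != 0) << 2); with a lower BRANCH_COST
   only the C == 1 form A + (X != 0) is used. */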
8355 if (temp && singleton && binary_op
8356 && (TREE_CODE (binary_op) == PLUS_EXPR
8357 || TREE_CODE (binary_op) == MINUS_EXPR
8358 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8359 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8360 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8361 : integer_onep (TREE_OPERAND (binary_op, 1)))
8362 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8365 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8366 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8367 ? addv_optab : add_optab)
8368 : TREE_CODE (binary_op) == MINUS_EXPR
8369 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8370 ? subv_optab : sub_optab)
8371 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8374 /* If we had X ? A : A + 1, do this as A + (X == 0).
8376 We have to invert the truth value here and then put it
8377 back later if do_store_flag fails. We cannot simply copy
8378 TREE_OPERAND (exp, 0) to another variable and modify that
8379 because invert_truthvalue can modify the tree pointed to
by its argument. */
8381 if (singleton == TREE_OPERAND (exp, 1))
8382 TREE_OPERAND (exp, 0)
8383 = invert_truthvalue (TREE_OPERAND (exp, 0));
8385 result = do_store_flag (TREE_OPERAND (exp, 0),
8386 (safe_from_p (temp, singleton, 1)
8388 mode, BRANCH_COST <= 1);
8390 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8391 result = expand_shift (LSHIFT_EXPR, mode, result,
8392 build_int_2 (tree_log2
8396 (safe_from_p (temp, singleton, 1)
8397 ? temp : NULL_RTX), 0);
8401 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8402 return expand_binop (mode, boptab, op1, result, temp,
8403 unsignedp, OPTAB_LIB_WIDEN);
8405 else if (singleton == TREE_OPERAND (exp, 1))
8406 TREE_OPERAND (exp, 0)
8407 = invert_truthvalue (TREE_OPERAND (exp, 0));
8410 do_pending_stack_adjust ();
8412 op0 = gen_label_rtx ();
8414 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8418 /* If the target conflicts with the other operand of the
8419 binary op, we can't use it. Also, we can't use the target
8420 if it is a hard register, because evaluating the condition
8421 might clobber it. */
8423 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8424 || (GET_CODE (temp) == REG
8425 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8426 temp = gen_reg_rtx (mode);
8427 store_expr (singleton, temp, 0);
8430 expand_expr (singleton,
8431 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8432 if (singleton == TREE_OPERAND (exp, 1))
8433 jumpif (TREE_OPERAND (exp, 0), op0);
8435 jumpifnot (TREE_OPERAND (exp, 0), op0);
8437 start_cleanup_deferral ();
8438 if (binary_op && temp == 0)
8439 /* Just touch the other operand. */
8440 expand_expr (TREE_OPERAND (binary_op, 1),
8441 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8443 store_expr (build (TREE_CODE (binary_op), type,
8444 make_tree (type, temp),
8445 TREE_OPERAND (binary_op, 1)),
8448 store_expr (build1 (TREE_CODE (unary_op), type,
8449 make_tree (type, temp)),
8453 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8454 comparison operator. If we have one of these cases, set the
8455 output to A, branch on A (cse will merge these two references),
8456 then set the output to FOO. */
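/* For example, A != 0 ? A : B expands roughly as

       temp = A;
       if (A != 0) goto lab;
       temp = B;
     lab:

   with cse expected to merge the two uses of A. */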
8458 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8459 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8460 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8461 TREE_OPERAND (exp, 1), 0)
8462 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8463 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8464 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8466 if (GET_CODE (temp) == REG
8467 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8468 temp = gen_reg_rtx (mode);
8469 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8470 jumpif (TREE_OPERAND (exp, 0), op0);
8472 start_cleanup_deferral ();
8473 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8477 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8478 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8479 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8480 TREE_OPERAND (exp, 2), 0)
8481 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8482 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8483 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8485 if (GET_CODE (temp) == REG
8486 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8487 temp = gen_reg_rtx (mode);
8488 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8489 jumpifnot (TREE_OPERAND (exp, 0), op0);
8491 start_cleanup_deferral ();
8492 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8497 op1 = gen_label_rtx ();
8498 jumpifnot (TREE_OPERAND (exp, 0), op0);
8500 start_cleanup_deferral ();
8502 /* One branch of the cond can be void, if it never returns. For
8503 example A ? throw : E */
8505 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8506 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8508 expand_expr (TREE_OPERAND (exp, 1),
8509 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8510 end_cleanup_deferral ();
8512 emit_jump_insn (gen_jump (op1));
8515 start_cleanup_deferral ();
8517 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8518 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8520 expand_expr (TREE_OPERAND (exp, 2),
8521 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8524 end_cleanup_deferral ();
8535 /* Something needs to be initialized, but we didn't know
8536 where that thing was when building the tree. For example,
8537 it could be the return value of a function, or a parameter
8538 to a function which is laid out on the stack, or a temporary
8539 variable which must be passed by reference.
8541 We guarantee that the expression will either be constructed
8542 or copied into our original target. */
8544 tree slot = TREE_OPERAND (exp, 0);
8545 tree cleanups = NULL_TREE;
8548 if (TREE_CODE (slot) != VAR_DECL)
8552 target = original_target;
8554 /* Set this here so that if we get a target that refers to a
8555 register variable that's already been used, put_reg_into_stack
8556 knows that it should fix up those uses. */
8557 TREE_USED (slot) = 1;
8561 if (DECL_RTL_SET_P (slot))
8563 target = DECL_RTL (slot);
8564 /* We have already expanded the slot, so don't do it again. */
8566 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8571 target = assign_temp (type, 2, 0, 1);
8572 /* All temp slots at this level must not conflict. */
8573 preserve_temp_slots (target);
8574 SET_DECL_RTL (slot, target);
8575 if (TREE_ADDRESSABLE (slot))
8576 put_var_into_stack (slot);
8578 /* Since SLOT is not known to the called function
8579 to belong to its stack frame, we must build an explicit
8580 cleanup. This case occurs when we must build up a reference
8581 to pass the reference as an argument. In this case,
8582 it is very likely that such a reference need not be
8585 if (TREE_OPERAND (exp, 2) == 0)
8586 TREE_OPERAND (exp, 2)
8587 = (*lang_hooks.maybe_build_cleanup) (slot);
8588 cleanups = TREE_OPERAND (exp, 2);
8593 /* This case does occur when expanding a parameter which
8594 needs to be constructed on the stack. The target
8595 is the actual stack address that we want to initialize.
8596 The function we call will perform the cleanup in this case. */
8598 /* If we have already assigned it space, use that space,
8599 not the target that we were passed in, as our target
8600 parameter is only a hint. */
8601 if (DECL_RTL_SET_P (slot))
8603 target = DECL_RTL (slot);
8604 /* We have already expanded the slot, so don't do it again. */
8606 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8611 SET_DECL_RTL (slot, target);
8612 /* If we must have an addressable slot, then make sure that
8613 the RTL that we just stored in slot is OK. */
8614 if (TREE_ADDRESSABLE (slot))
8615 put_var_into_stack (slot);
8619 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8620 /* Mark it as expanded. */
8621 TREE_OPERAND (exp, 1) = NULL_TREE;
8623 store_expr (exp1, target, 0);
8625 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8632 tree lhs = TREE_OPERAND (exp, 0);
8633 tree rhs = TREE_OPERAND (exp, 1);
8635 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8641 /* If lhs is complex, expand calls in rhs before computing it.
8642 That's so we don't compute a pointer and save it over a
8643 call. If lhs is simple, compute it first so we can give it
8644 as a target if the rhs is just a call. This avoids an
8645 extra temp and copy and that prevents a partial-subsumption
8646 which makes bad code. Actually we could treat
8647 component_ref's of vars like vars. */
8649 tree lhs = TREE_OPERAND (exp, 0);
8650 tree rhs = TREE_OPERAND (exp, 1);
8654 /* Check for |= or &= of a bitfield of size one into another bitfield
8655 of size 1. In this case, (unless we need the result of the
8656 assignment) we can do this more efficiently with a
8657 test followed by an assignment, if necessary.
8659 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8660 things change so we do, this code should be enhanced to
handle it. */
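/* For example, with one-bit fields, S.X |= T.Y becomes
   "if (T.Y) S.X = 1;" and S.X &= T.Y becomes "if (! T.Y) S.X = 0;",
   via the do_jump / expand_assignment code below. */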
8663 && TREE_CODE (lhs) == COMPONENT_REF
8664 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8665 || TREE_CODE (rhs) == BIT_AND_EXPR)
8666 && TREE_OPERAND (rhs, 0) == lhs
8667 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8668 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8669 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8671 rtx label = gen_label_rtx ();
8673 do_jump (TREE_OPERAND (rhs, 1),
8674 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8675 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8676 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8677 (TREE_CODE (rhs) == BIT_IOR_EXPR
8679 : integer_zero_node)),
8681 do_pending_stack_adjust ();
8686 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8692 if (!TREE_OPERAND (exp, 0))
8693 expand_null_return ();
8695 expand_return (TREE_OPERAND (exp, 0));
8698 case PREINCREMENT_EXPR:
8699 case PREDECREMENT_EXPR:
8700 return expand_increment (exp, 0, ignore);
8702 case POSTINCREMENT_EXPR:
8703 case POSTDECREMENT_EXPR:
8704 /* Faster to treat as pre-increment if result is not used. */
8705 return expand_increment (exp, ! ignore, ignore);
8708 /* Are we taking the address of a nested function? */
8709 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8710 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8711 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8712 && ! TREE_STATIC (exp))
8714 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8715 op0 = force_operand (op0, target);
8717 /* If we are taking the address of something erroneous, just
use zero. */
8719 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8721 /* If we are taking the address of a constant and are at the
8722 top level, we have to use output_constant_def since we can't
8723 call force_const_mem at top level. */
8725 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8726 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8728 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8731 /* We make sure to pass const0_rtx down if we came in with
8732 ignore set, to avoid doing the cleanups twice for something. */
8733 op0 = expand_expr (TREE_OPERAND (exp, 0),
8734 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8735 (modifier == EXPAND_INITIALIZER
8736 ? modifier : EXPAND_CONST_ADDRESS));
8738 /* If we are going to ignore the result, OP0 will have been set
8739 to const0_rtx, so just return it. Don't get confused and
8740 think we are taking the address of the constant. */
8744 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8745 clever and returns a REG when given a MEM. */
8746 op0 = protect_from_queue (op0, 1);
8748 /* We would like the object in memory. If it is a constant, we can
8749 have it be statically allocated into memory. For a non-constant,
8750 we need to allocate some memory and store the value into it. */
8752 if (CONSTANT_P (op0))
8753 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8755 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8756 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8757 || GET_CODE (op0) == PARALLEL)
8759 /* If the operand is a SAVE_EXPR, we can deal with this by
8760 forcing the SAVE_EXPR into memory. */
8761 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8763 put_var_into_stack (TREE_OPERAND (exp, 0));
8764 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8768 /* If this object is in a register, it can't be BLKmode. */
8769 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8770 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8772 if (GET_CODE (op0) == PARALLEL)
8773 /* Handle calls that pass values in multiple
8774 non-contiguous locations. The Irix 6 ABI has examples
of this. */
8776 emit_group_store (memloc, op0,
8777 int_size_in_bytes (inner_type));
8779 emit_move_insn (memloc, op0);
8785 if (GET_CODE (op0) != MEM)
8788 mark_temp_addr_taken (op0);
8789 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8791 op0 = XEXP (op0, 0);
8792 #ifdef POINTERS_EXTEND_UNSIGNED
8793 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8794 && mode == ptr_mode)
8795 op0 = convert_memory_address (ptr_mode, op0);
8800 /* If OP0 is not aligned as least as much as the type requires, we
8801 need to make a temporary, copy OP0 to it, and take the address of
8802 the temporary. We want to use the alignment of the type, not of
8803 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8804 the test for BLKmode means that can't happen. The test for
8805 BLKmode is because we never make mis-aligned MEMs with
non-BLKmode.
8808 We don't need to do this at all if the machine doesn't have
8809 strict alignment. */
8810 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8811 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8813 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8815 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8817 = assign_stack_temp_for_type
8818 (TYPE_MODE (inner_type),
8819 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8820 : int_size_in_bytes (inner_type),
8821 1, build_qualified_type (inner_type,
8822 (TYPE_QUALS (inner_type)
8823 | TYPE_QUAL_CONST)));
8825 if (TYPE_ALIGN_OK (inner_type))
8828 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8832 op0 = force_operand (XEXP (op0, 0), target);
8836 && GET_CODE (op0) != REG
8837 && modifier != EXPAND_CONST_ADDRESS
8838 && modifier != EXPAND_INITIALIZER
8839 && modifier != EXPAND_SUM)
8840 op0 = force_reg (Pmode, op0);
8842 if (GET_CODE (op0) == REG
8843 && ! REG_USERVAR_P (op0))
8844 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8846 #ifdef POINTERS_EXTEND_UNSIGNED
8847 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8848 && mode == ptr_mode)
8849 op0 = convert_memory_address (ptr_mode, op0);
8854 case ENTRY_VALUE_EXPR:
8857 /* COMPLEX type for Extended Pascal & Fortran */
8860 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8863 /* Get the rtx code of the operands. */
8864 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8865 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8868 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8872 /* Move the real (op0) and imaginary (op1) parts to their location. */
8873 emit_move_insn (gen_realpart (mode, target), op0);
8874 emit_move_insn (gen_imagpart (mode, target), op1);
8876 insns = get_insns ();
8879 /* Complex construction should appear as a single unit. */
8880 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8881 each with a separate pseudo as destination.
8882 It's not correct for flow to treat them as a unit. */
8883 if (GET_CODE (target) != CONCAT)
8884 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8892 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8893 return gen_realpart (mode, op0);
8896 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8897 return gen_imagpart (mode, op0);
8901 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8905 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8908 target = gen_reg_rtx (mode);
8912 /* Store the realpart and the negated imagpart to target. */
8913 emit_move_insn (gen_realpart (partmode, target),
8914 gen_realpart (partmode, op0));
8916 imag_t = gen_imagpart (partmode, target);
8917 temp = expand_unop (partmode,
8918 ! unsignedp && flag_trapv
8919 && (GET_MODE_CLASS(partmode) == MODE_INT)
8920 ? negv_optab : neg_optab,
8921 gen_imagpart (partmode, op0), imag_t, 0);
8923 emit_move_insn (imag_t, temp);
8925 insns = get_insns ();
8928 /* Conjugate should appear as a single unit.
8929 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8930 each with a separate pseudo as destination.
8931 It's not correct for flow to treat them as a unit. */
8932 if (GET_CODE (target) != CONCAT)
8933 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8940 case TRY_CATCH_EXPR:
8942 tree handler = TREE_OPERAND (exp, 1);
8944 expand_eh_region_start ();
8946 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8948 expand_eh_region_end_cleanup (handler);
8953 case TRY_FINALLY_EXPR:
8955 tree try_block = TREE_OPERAND (exp, 0);
8956 tree finally_block = TREE_OPERAND (exp, 1);
8958 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8960 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8961 is not sufficient, so we cannot expand the block twice.
8962 So we play games with GOTO_SUBROUTINE_EXPR to let us
8963 expand the thing only once. */
8964 /* When not optimizing, we go ahead with this form since
8965 (1) user breakpoints operate more predictably without
8966 code duplication, and
8967 (2) we're not running any of the global optimizers
8968 that would explode in time/space with the highly
8969 connected CFG created by the indirect branching. */
8971 rtx finally_label = gen_label_rtx ();
8972 rtx done_label = gen_label_rtx ();
8973 rtx return_link = gen_reg_rtx (Pmode);
8974 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8975 (tree) finally_label, (tree) return_link);
8976 TREE_SIDE_EFFECTS (cleanup) = 1;
8978 /* Start a new binding layer that will keep track of all cleanup
8979 actions to be performed. */
8980 expand_start_bindings (2);
8981 target_temp_slot_level = temp_slot_level;
8983 expand_decl_cleanup (NULL_TREE, cleanup);
8984 op0 = expand_expr (try_block, target, tmode, modifier);
8986 preserve_temp_slots (op0);
8987 expand_end_bindings (NULL_TREE, 0, 0);
8988 emit_jump (done_label);
8989 emit_label (finally_label);
8990 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8991 emit_indirect_jump (return_link);
8992 emit_label (done_label);
8996 expand_start_bindings (2);
8997 target_temp_slot_level = temp_slot_level;
8999 expand_decl_cleanup (NULL_TREE, finally_block);
9000 op0 = expand_expr (try_block, target, tmode, modifier);
9002 preserve_temp_slots (op0);
9003 expand_end_bindings (NULL_TREE, 0, 0);
9009 case GOTO_SUBROUTINE_EXPR:
9011 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9012 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9013 rtx return_address = gen_label_rtx ();
9014 emit_move_insn (return_link,
9015 gen_rtx_LABEL_REF (Pmode, return_address));
9017 emit_label (return_address);
9022 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9025 return get_exception_pointer (cfun);
9028 /* Function descriptors are not valid except as
9029 initialization constants, and should not be expanded. */
9033 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9036 /* Here to do an ordinary binary operator, generating an instruction
9037 from the optab already placed in `this_optab'. */
9039 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9041 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9042 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9044 temp = expand_binop (mode, this_optab, op0, op1, target,
9045 unsignedp, OPTAB_LIB_WIDEN);
9051 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9052 when applied to the address of EXP produces an address known to be
9053 aligned more than BIGGEST_ALIGNMENT. */
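/* For example, an OFFSET of the form (- (long) &EXP) & (N - 1), where the
   mask N - 1 is larger than BIGGEST_ALIGNMENT; adding such an offset to
   the address of EXP rounds it up to an N-byte boundary. */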
9056 is_aligning_offset (offset, exp)
9060 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9061 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9062 || TREE_CODE (offset) == NOP_EXPR
9063 || TREE_CODE (offset) == CONVERT_EXPR
9064 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9065 offset = TREE_OPERAND (offset, 0);
9067 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9068 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9069 if (TREE_CODE (offset) != BIT_AND_EXPR
9070 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9071 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9072 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9075 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9076 It must be NEGATE_EXPR. Then strip any more conversions. */
9077 offset = TREE_OPERAND (offset, 0);
9078 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9079 || TREE_CODE (offset) == NOP_EXPR
9080 || TREE_CODE (offset) == CONVERT_EXPR)
9081 offset = TREE_OPERAND (offset, 0);
9083 if (TREE_CODE (offset) != NEGATE_EXPR)
9086 offset = TREE_OPERAND (offset, 0);
9087 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9088 || TREE_CODE (offset) == NOP_EXPR
9089 || TREE_CODE (offset) == CONVERT_EXPR)
9090 offset = TREE_OPERAND (offset, 0);
9092 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9093 whose type is the same as EXP. */
9094 return (TREE_CODE (offset) == ADDR_EXPR
9095 && (TREE_OPERAND (offset, 0) == exp
9096 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9097 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9098 == TREE_TYPE (exp)))));
9101 /* Return the tree node if an ARG corresponds to a string constant or zero
9102 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9103 in bytes within the string that ARG is accessing. The type of the
9104 offset will be `sizetype'. */
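/* For example, given ARG of the form "hello" + 2 (a PLUS_EXPR of the
   string's address and 2), we return the STRING_CST for "hello" and set
   *PTR_OFFSET to 2; for a plain string address the offset is zero. */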
9107 string_constant (arg, ptr_offset)
9113 if (TREE_CODE (arg) == ADDR_EXPR
9114 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9116 *ptr_offset = size_zero_node;
9117 return TREE_OPERAND (arg, 0);
9119 else if (TREE_CODE (arg) == PLUS_EXPR)
9121 tree arg0 = TREE_OPERAND (arg, 0);
9122 tree arg1 = TREE_OPERAND (arg, 1);
9127 if (TREE_CODE (arg0) == ADDR_EXPR
9128 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9130 *ptr_offset = convert (sizetype, arg1);
9131 return TREE_OPERAND (arg0, 0);
9133 else if (TREE_CODE (arg1) == ADDR_EXPR
9134 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9136 *ptr_offset = convert (sizetype, arg0);
9137 return TREE_OPERAND (arg1, 0);
9144 /* Expand code for a post- or pre- increment or decrement
9145 and return the RTX for the result.
9146 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
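/* For example, for Y = X++ (POST nonzero) the RTX returned holds the old
   value of X, while for Y = ++X it holds the incremented value. */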
9149 expand_increment (exp, post, ignore)
9155 tree incremented = TREE_OPERAND (exp, 0);
9156 optab this_optab = add_optab;
9158 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9159 int op0_is_copy = 0;
9160 int single_insn = 0;
9161 /* 1 means we can't store into OP0 directly,
9162 because it is a subreg narrower than a word,
9163 and we don't dare clobber the rest of the word. */
9166 /* Stabilize any component ref that might need to be
9167 evaluated more than once below. */
9169 || TREE_CODE (incremented) == BIT_FIELD_REF
9170 || (TREE_CODE (incremented) == COMPONENT_REF
9171 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9172 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9173 incremented = stabilize_reference (incremented);
9174 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9175 ones into save exprs so that they don't accidentally get evaluated
9176 more than once by the code below. */
9177 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9178 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9179 incremented = save_expr (incremented);
9181 /* Compute the operands as RTX.
9182 Note whether OP0 is the actual lvalue or a copy of it:
9183 I believe it is a copy iff it is a register or subreg
9184 and insns were generated in computing it. */
9186 temp = get_last_insn ();
9187 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9189 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9190 in place but instead must do sign- or zero-extension during assignment,
9191 so we copy it into a new register and let the code below use it as
a copy.
9194 Note that we can safely modify this SUBREG since it is known not to be
9195 shared (it was made by the expand_expr call above). */
9197 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9200 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9204 else if (GET_CODE (op0) == SUBREG
9205 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9207 /* We cannot increment this SUBREG in place. If we are
9208 post-incrementing, get a copy of the old value. Otherwise,
9209 just mark that we cannot increment in place. */
9211 op0 = copy_to_reg (op0);
9216 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9217 && temp != get_last_insn ());
9218 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9220 /* Decide whether incrementing or decrementing. */
9221 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9222 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9223 this_optab = sub_optab;
9225 /* Convert decrement by a constant into a negative increment. */
9226 if (this_optab == sub_optab
9227 && GET_CODE (op1) == CONST_INT)
9229 op1 = GEN_INT (-INTVAL (op1));
9230 this_optab = add_optab;
9233 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9234 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9236 /* For a preincrement, see if we can do this with a single instruction. */
9239 icode = (int) this_optab->handlers[(int) mode].insn_code;
9240 if (icode != (int) CODE_FOR_nothing
9241 /* Make sure that OP0 is valid for operands 0 and 1
9242 of the insn we want to queue. */
9243 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9244 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9245 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9249 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9250 then we cannot just increment OP0. We must therefore contrive to
9251 increment the original value. Then, for postincrement, we can return
9252 OP0 since it is a copy of the old value. For preincrement, expand here
9253 unless we can do it with a single insn.
9255 Likewise if storing directly into OP0 would clobber high bits
9256 we need to preserve (bad_subreg). */
9257 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9259 /* This is the easiest way to increment the value wherever it is.
9260 Problems with multiple evaluation of INCREMENTED are prevented
9261 because either (1) it is a component_ref or preincrement,
9262 in which case it was stabilized above, or (2) it is an array_ref
9263 with constant index in an array in a register, which is
9264 safe to reevaluate. */
9265 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9266 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9267 ? MINUS_EXPR : PLUS_EXPR),
9270 TREE_OPERAND (exp, 1));
9272 while (TREE_CODE (incremented) == NOP_EXPR
9273 || TREE_CODE (incremented) == CONVERT_EXPR)
9275 newexp = convert (TREE_TYPE (incremented), newexp);
9276 incremented = TREE_OPERAND (incremented, 0);
9279 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9280 return post ? op0 : temp;
9285 /* We have a true reference to the value in OP0.
9286 If there is an insn to add or subtract in this mode, queue it.
9287 Queueing the increment insn avoids the register shuffling
9288 that often results if we must increment now and first save
9289 the old value for subsequent use. */
9291 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9292 op0 = stabilize (op0);
9295 icode = (int) this_optab->handlers[(int) mode].insn_code;
9296 if (icode != (int) CODE_FOR_nothing
9297 /* Make sure that OP0 is valid for operands 0 and 1
9298 of the insn we want to queue. */
9299 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9300 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9302 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9303 op1 = force_reg (mode, op1);
9305 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9307 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9309 rtx addr = (general_operand (XEXP (op0, 0), mode)
9310 ? force_reg (Pmode, XEXP (op0, 0))
9311 : copy_to_reg (XEXP (op0, 0)));
9314 op0 = replace_equiv_address (op0, addr);
9315 temp = force_reg (GET_MODE (op0), op0);
9316 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9317 op1 = force_reg (mode, op1);
9319 /* The increment queue is LIFO, thus we have to `queue'
9320 the instructions in reverse order. */
9321 enqueue_insn (op0, gen_move_insn (op0, temp));
9322 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9327 /* Preincrement, or we can't increment with one simple insn. */
9329 /* Save a copy of the value before inc or dec, to return it later. */
9330 temp = value = copy_to_reg (op0);
9332 /* Arrange to return the incremented value. */
9333 /* Copy the rtx because expand_binop will protect from the queue,
9334 and the results of that would be invalid for us to return
9335 if our caller does emit_queue before using our result. */
9336 temp = copy_rtx (value = op0);
9338 /* Increment however we can. */
9339 op1 = expand_binop (mode, this_optab, value, op1, op0,
9340 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9342 /* Make sure the value is stored into OP0. */
9344 emit_move_insn (op0, op1);
9349 /* At the start of a function, record that we have no previously-pushed
9350 arguments waiting to be popped. */
9353 init_pending_stack_adjust ()
9355 pending_stack_adjust = 0;
9358 /* When exiting from function, if safe, clear out any pending stack adjust
9359 so the adjustment won't get done.
9361 Note, if the current function calls alloca, then it must have a
9362 frame pointer regardless of the value of flag_omit_frame_pointer. */
9365 clear_pending_stack_adjust ()
9367 #ifdef EXIT_IGNORE_STACK
9369 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9370 && EXIT_IGNORE_STACK
9371 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9372 && ! flag_inline_functions)
9374 stack_pointer_delta -= pending_stack_adjust,
9375 pending_stack_adjust = 0;
9380 /* Pop any previously-pushed arguments that have not been popped yet. */
9383 do_pending_stack_adjust ()
9385 if (inhibit_defer_pop == 0)
9387 if (pending_stack_adjust != 0)
9388 adjust_stack (GEN_INT (pending_stack_adjust));
9389 pending_stack_adjust = 0;
9393 /* Expand conditional expressions. */
9395 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9396 LABEL is an rtx of code CODE_LABEL, in this function and all the
functions here. */
9400 jumpifnot (exp, label)
9404 do_jump (exp, label, NULL_RTX);
9407 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9414 do_jump (exp, NULL_RTX, label);
9417 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9418 the result is zero, or IF_TRUE_LABEL if the result is one.
9419 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9420 meaning fall through in that case.
9422 do_jump always does any pending stack adjust except when it does not
9423 actually perform a jump. An example where there is no jump
9424 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9426 This function is responsible for optimizing cases such as
9427 &&, || and comparison operators in EXP. */
9430 do_jump (exp, if_false_label, if_true_label)
9432 rtx if_false_label, if_true_label;
9434 enum tree_code code = TREE_CODE (exp);
9435 /* Some cases need to create a label to jump to
9436 in order to properly fall through.
9437 These cases set DROP_THROUGH_LABEL nonzero. */
9438 rtx drop_through_label = 0;
9442 enum machine_mode mode;
9444 #ifdef MAX_INTEGER_COMPUTATION_MODE
9445 check_max_integer_computation_mode (exp);
9456 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9462 /* This is not true with #pragma weak */
9464 /* The address of something can never be zero. */
9466 emit_jump (if_true_label);
9471 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9472 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9473 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9474 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9477 /* If we are narrowing the operand, we have to do the compare in the
narrower mode. */
9479 if ((TYPE_PRECISION (TREE_TYPE (exp))
9480 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9482 case NON_LVALUE_EXPR:
9483 case REFERENCE_EXPR:
9488 /* These cannot change zero->non-zero or vice versa. */
9489 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9492 case WITH_RECORD_EXPR:
9493 /* Put the object on the placeholder list, recurse through our first
9494 operand, and pop the list. */
9495 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9497 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9498 placeholder_list = TREE_CHAIN (placeholder_list);
9502 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9503 a test and can be longer if the test is eliminated. */
9505 /* Reduce to minus. */
9506 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9507 TREE_OPERAND (exp, 0),
9508 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9509 TREE_OPERAND (exp, 1))));
9510 /* Process as MINUS. */
9514 /* Non-zero iff operands of minus differ. */
9515 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9516 TREE_OPERAND (exp, 0),
9517 TREE_OPERAND (exp, 1)),
9518 NE, NE, if_false_label, if_true_label);
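/* For example, jumping on A + 1 being nonzero is done as the MINUS test
   A - (-1), i.e. as a jump on A != -1. */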
9522 /* If we are AND'ing with a small constant, do this comparison in the
9523 smallest type that fits. If the machine doesn't have comparisons
9524 that small, it will be converted back to the wider comparison.
9525 This helps if we are testing the sign bit of a narrower object.
9526 combine can't do this for us because it can't know whether a
9527 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
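/* For example, testing X & 0x80 for int X: tree_floor_log2 gives 7, so
   mode_for_size asks for an 8-bit integer mode and the test can be done
   as a QImode comparison of the low byte, when such a compare exists. */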
9529 if (! SLOW_BYTE_ACCESS
9530 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9531 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9532 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9533 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9534 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9535 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9536 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9537 != CODE_FOR_nothing))
9539 do_jump (convert (type, exp), if_false_label, if_true_label);
9544 case TRUTH_NOT_EXPR:
9545 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9548 case TRUTH_ANDIF_EXPR:
9549 if (if_false_label == 0)
9550 if_false_label = drop_through_label = gen_label_rtx ();
9551 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9552 start_cleanup_deferral ();
9553 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9554 end_cleanup_deferral ();
9557 case TRUTH_ORIF_EXPR:
9558 if (if_true_label == 0)
9559 if_true_label = drop_through_label = gen_label_rtx ();
9560 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9561 start_cleanup_deferral ();
9562 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9563 end_cleanup_deferral ();
9568 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9569 preserve_temp_slots (NULL_RTX);
9573 do_pending_stack_adjust ();
9574 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9580 case ARRAY_RANGE_REF:
9582 HOST_WIDE_INT bitsize, bitpos;
9584 enum machine_mode mode;
9589 /* Get description of this reference. We don't actually care
9590 about the underlying object here. */
9591 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9592 &unsignedp, &volatilep);
9594 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9595 if (! SLOW_BYTE_ACCESS
9596 && type != 0 && bitsize >= 0
9597 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9598 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9599 != CODE_FOR_nothing))
9601 do_jump (convert (type, exp), if_false_label, if_true_label);
9608 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9609 if (integer_onep (TREE_OPERAND (exp, 1))
9610 && integer_zerop (TREE_OPERAND (exp, 2)))
9611 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9613 else if (integer_zerop (TREE_OPERAND (exp, 1))
9614 && integer_onep (TREE_OPERAND (exp, 2)))
9615 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9619 rtx label1 = gen_label_rtx ();
9620 drop_through_label = gen_label_rtx ();
9622 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9624 start_cleanup_deferral ();
9625 /* Now the THEN-expression. */
9626 do_jump (TREE_OPERAND (exp, 1),
9627 if_false_label ? if_false_label : drop_through_label,
9628 if_true_label ? if_true_label : drop_through_label);
9629 /* In case the do_jump just above never jumps. */
9630 do_pending_stack_adjust ();
9631 emit_label (label1);
9633 /* Now the ELSE-expression. */
9634 do_jump (TREE_OPERAND (exp, 2),
9635 if_false_label ? if_false_label : drop_through_label,
9636 if_true_label ? if_true_label : drop_through_label);
9637 end_cleanup_deferral ();
9643 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9645 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9646 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9648 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9649 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9652 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9653 fold (build (EQ_EXPR, TREE_TYPE (exp),
9654 fold (build1 (REALPART_EXPR,
9655 TREE_TYPE (inner_type),
9657 fold (build1 (REALPART_EXPR,
9658 TREE_TYPE (inner_type),
9660 fold (build (EQ_EXPR, TREE_TYPE (exp),
9661 fold (build1 (IMAGPART_EXPR,
9662 TREE_TYPE (inner_type),
9664 fold (build1 (IMAGPART_EXPR,
9665 TREE_TYPE (inner_type),
9667 if_false_label, if_true_label);
9670 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9671 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9673 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9674 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9675 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9677 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9683 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9685 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9686 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9688 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9689 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9692 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9693 fold (build (NE_EXPR, TREE_TYPE (exp),
9694 fold (build1 (REALPART_EXPR,
9695 TREE_TYPE (inner_type),
9697 fold (build1 (REALPART_EXPR,
9698 TREE_TYPE (inner_type),
9700 fold (build (NE_EXPR, TREE_TYPE (exp),
9701 fold (build1 (IMAGPART_EXPR,
9702 TREE_TYPE (inner_type),
9704 fold (build1 (IMAGPART_EXPR,
9705 TREE_TYPE (inner_type),
9707 if_false_label, if_true_label);
9710 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9711 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9713 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9714 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9715 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9717 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9722 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9723 if (GET_MODE_CLASS (mode) == MODE_INT
9724 && ! can_compare_p (LT, mode, ccp_jump))
9725 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9727 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9731 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9732 if (GET_MODE_CLASS (mode) == MODE_INT
9733 && ! can_compare_p (LE, mode, ccp_jump))
9734 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9736 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9740 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9741 if (GET_MODE_CLASS (mode) == MODE_INT
9742 && ! can_compare_p (GT, mode, ccp_jump))
9743 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9745 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9749 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9750 if (GET_MODE_CLASS (mode) == MODE_INT
9751 && ! can_compare_p (GE, mode, ccp_jump))
9752 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9754 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9757 case UNORDERED_EXPR:
9760 enum rtx_code cmp, rcmp;
9763 if (code == UNORDERED_EXPR)
9764 cmp = UNORDERED, rcmp = ORDERED;
9766 cmp = ORDERED, rcmp = UNORDERED;
9767 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9770 if (! can_compare_p (cmp, mode, ccp_jump)
9771 && (can_compare_p (rcmp, mode, ccp_jump)
9772 /* If the target doesn't provide either UNORDERED or ORDERED
9773 comparisons, canonicalize on UNORDERED for the library. */
9774 || rcmp == UNORDERED))
9778 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9780 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9785 enum rtx_code rcode1;
9786 enum tree_code tcode2;
9810 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9811 if (can_compare_p (rcode1, mode, ccp_jump))
9812 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9816 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9817 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9820 /* If the target doesn't support combined unordered
9821 compares, decompose into UNORDERED + comparison. */
9822 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9823 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9824 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9825 do_jump (exp, if_false_label, if_true_label);
9831 __builtin_expect (<test>, 0) and
9832 __builtin_expect (<test>, 1)
9834 We need to do this here, so that <test> is not converted to a SCC
9835 operation on machines that use condition code registers and COMPARE
9836 like the PowerPC, and then the jump is done based on whether the SCC
9837 operation produced a 1 or 0. */
9839 /* Check for a built-in function. */
9840 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9842 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9843 tree arglist = TREE_OPERAND (exp, 1);
9845 if (TREE_CODE (fndecl) == FUNCTION_DECL
9846 && DECL_BUILT_IN (fndecl)
9847 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9848 && arglist != NULL_TREE
9849 && TREE_CHAIN (arglist) != NULL_TREE)
9851 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9854 if (seq != NULL_RTX)
9861 /* fall through and generate the normal code. */
9865 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9867 /* This is not needed any more and causes poor code since it causes
9868 comparisons and tests from non-SI objects to have different code
sequences. */
9870 /* Copy to register to avoid generating bad insns by cse
9871 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9872 if (!cse_not_expected && GET_CODE (temp) == MEM)
9873 temp = copy_to_reg (temp);
9875 do_pending_stack_adjust ();
9876 /* Do any postincrements in the expression that was tested. */
9879 if (GET_CODE (temp) == CONST_INT
9880 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9881 || GET_CODE (temp) == LABEL_REF)
9883 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9887 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9888 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9889 /* Note swapping the labels gives us not-equal. */
9890 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9891 else if (GET_MODE (temp) != VOIDmode)
9892 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9893 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9894 GET_MODE (temp), NULL_RTX,
9895 if_false_label, if_true_label);
9900 if (drop_through_label)
9902 /* If do_jump produces code that might be jumped around,
9903 do any stack adjusts from that code, before the place
9904 where control merges in. */
9905 do_pending_stack_adjust ();
9906 emit_label (drop_through_label);
9910 /* Given a comparison expression EXP for values too wide to be compared
9911 with one insn, test the comparison and jump to the appropriate label.
9912 The code of EXP is ignored; we always test GT if SWAP is 0,
9913 and LT if SWAP is 1. */
9916 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9919 rtx if_false_label, if_true_label;
9921 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9922 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9923 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9924 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9926 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9929 /* Compare OP0 with OP1, word at a time, in mode MODE.
9930 UNSIGNEDP says to do unsigned comparison.
9931 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
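/* For example, for DImode operands on a 32-bit target (two words), the
   loop below jumps to IF_TRUE_LABEL if the high words compare GT, to
   IF_FALSE_LABEL if they compare NE, and only then moves on to the low
   words, which are always compared unsigned. */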
9934 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9935 enum machine_mode mode;
9938 rtx if_false_label, if_true_label;
9940 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9941 rtx drop_through_label = 0;
9944 if (! if_true_label || ! if_false_label)
9945 drop_through_label = gen_label_rtx ();
9946 if (! if_true_label)
9947 if_true_label = drop_through_label;
9948 if (! if_false_label)
9949 if_false_label = drop_through_label;
9951 /* Compare a word at a time, high order first. */
9952 for (i = 0; i < nwords; i++)
9954 rtx op0_word, op1_word;
9956 if (WORDS_BIG_ENDIAN)
9958 op0_word = operand_subword_force (op0, i, mode);
9959 op1_word = operand_subword_force (op1, i, mode);
9963 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9964 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9967 /* All but high-order word must be compared as unsigned. */
9968 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9969 (unsignedp || i > 0), word_mode, NULL_RTX,
9970 NULL_RTX, if_true_label);
9972 /* Consider lower words only if these are equal. */
9973 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9974 NULL_RTX, NULL_RTX, if_false_label);
9978 emit_jump (if_false_label);
9979 if (drop_through_label)
9980 emit_label (drop_through_label);
9983 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9984 with one insn, test the comparison and jump to the appropriate label. */
9987 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9989 rtx if_false_label, if_true_label;
9991 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9992 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9993 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9994 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9996 rtx drop_through_label = 0;
9998 if (! if_false_label)
9999 drop_through_label = if_false_label = gen_label_rtx ();
10001 for (i = 0; i < nwords; i++)
10002 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10003 operand_subword_force (op1, i, mode),
10004 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10005 word_mode, NULL_RTX, if_false_label, NULL_RTX);
10008 emit_jump (if_true_label);
10009 if (drop_through_label)
10010 emit_label (drop_through_label);
10013 /* Jump according to whether OP0 is 0.
10014 We assume that OP0 has an integer mode that is too wide
10015 for the available compare insns. */
10018 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10020 rtx if_false_label, if_true_label;
10022 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10025 rtx drop_through_label = 0;
10027 /* The fastest way of doing this comparison on almost any machine is to
10028 "or" all the words and compare the result. If all have to be loaded
10029 from memory and this is a very wide item, it's possible this may
10030 be slower, but that's highly unlikely. */
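/* Sketch: for a DImode OP0 on a 32-bit machine the code below computes
   part = word0 | word1 and then does a single compare of PART with 0. */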
10032 part = gen_reg_rtx (word_mode);
10033 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10034 for (i = 1; i < nwords && part != 0; i++)
10035 part = expand_binop (word_mode, ior_optab, part,
10036 operand_subword_force (op0, i, GET_MODE (op0)),
10037 part, 1, OPTAB_WIDEN);
10041 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10042 NULL_RTX, if_false_label, if_true_label);
10047 /* If we couldn't do the "or" simply, do this with a series of compares. */
10048 if (! if_false_label)
10049 drop_through_label = if_false_label = gen_label_rtx ();
10051 for (i = 0; i < nwords; i++)
10052 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10053 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10054 if_false_label, NULL_RTX);
10057 emit_jump (if_true_label);
10059 if (drop_through_label)
10060 emit_label (drop_through_label);
10063 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10064 (including code to compute the values to be compared)
10065 and set (CC0) according to the result.
10066 The decision as to signed or unsigned comparison must be made by the caller.
10068 We force a stack adjustment unless there are currently
10069 things pushed on the stack that aren't yet used.
10071 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
compared. */
10075 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10077 enum rtx_code code;
10079 enum machine_mode mode;
10082 enum rtx_code ucode;
10085 /* If one operand is constant, make it the second one. Only do this
10086 if the other operand is not constant as well. */
10088 if (swap_commutative_operands_p (op0, op1))
10093 code = swap_condition (code);
10096 if (flag_force_mem)
10098 op0 = force_not_mem (op0);
10099 op1 = force_not_mem (op1);
10102 do_pending_stack_adjust ();
10104 ucode = unsignedp ? unsigned_condition (code) : code;
10105 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10109 /* There's no need to do this now that combine.c can eliminate lots of
10110 sign extensions. This can be less efficient in certain cases on other
10113 /* If this is a signed equality comparison, we can do it as an
10114 unsigned comparison since zero-extension is cheaper than sign
10115 extension and comparisons with zero are done as unsigned. This is
10116 the case even on machines that can do fast sign extension, since
10117 zero-extension is easier to combine with other operations than
10118 sign-extension is. If we are comparing against a constant, we must
10119 convert it to what it would look like unsigned. */
10120 if ((code == EQ || code == NE) && ! unsignedp
10121 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10123 if (GET_CODE (op1) == CONST_INT
10124 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10125 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10130 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10133 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10135 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
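
/* Illustrative sketch (not compiled): with no true label and a
   non-float mode, the reversal above lets a single branch do the work
   of "fall through on true, jump on false".  Names are hypothetical.  */
#if 0
static void
branch_on_false_example (int a, int b, int *flag)
{
  /* Request: jump to false_lab unless (a < b).  Emitted form:  */
  if (a >= b)                   /* reverse_condition (LT) == GE */
    goto false_lab;
  *flag = 1;                    /* fall-through: the condition held */
 false_lab:;
}
#endif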
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:  code = EQ;  break;
    case NE_EXPR:  code = NE;  break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:  code = UNORDERED;  break;
    case ORDERED_EXPR:    code = ORDERED;    break;
    case UNLT_EXPR:  code = UNLT;  break;
    case UNLE_EXPR:  code = UNLE;  break;
    case UNGT_EXPR:  code = UNGT;  break;
    case UNGE_EXPR:  code = UNGE;  break;
    case UNEQ_EXPR:  code = UNEQ;  break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
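
  /* Illustrative sketch (not compiled): the transformation above turns
     "(x & 8) != 0" into "(x >> 3) & 1", and "(x & 8) == 0" into
     "((x >> 3) & 1) ^ 1", with no scc instruction needed.  The
     variables here are hypothetical.  */
#if 0
  {
    unsigned int x = 42;
    int flag_ne = (x >> 3) & 1;         /* (x & 8) != 0 */
    int flag_eq = ((x >> 3) & 1) ^ 1;   /* (x & 8) == 0 */
  }
#endif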
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
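
/* Illustrative sketch (not compiled): the set/compare/jump/set
   fallback emitted above is equivalent to this source-level pattern
   for "target = (a < b)".  Names are hypothetical.  */
#if 0
static int
store_flag_fallback_example (int a, int b)
{
  int target = 1;               /* set: assume the condition holds */
  if (a < b)
    goto done;                  /* jump: it did, so keep the 1 */
  target = 0;                   /* set: it did not hold */
 done:
  return target;
}
#endif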
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

/* Return the smallest number of case values for which a dispatch
   table should be used rather than a chain of compares.  */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
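  /* Worked example (hypothetical values): for a case range of 3..10,
     INDEX holds i - 3 and RANGE holds 7, so the single unsigned test
     (unsigned) (i - 3) > 7 fires exactly when i < 3 or i > 10, since
     values of i below 3 wrap around to very large unsigned numbers.  */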
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
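
/* Illustrative sketch (not compiled): the PLUS/MULT rtx built in
   do_tablejump addresses the dispatch table like this C expression,
   where entry_size stands for GET_MODE_SIZE (CASE_VECTOR_MODE).
   Names are hypothetical.  */
#if 0
static char *
table_entry_addr_example (char *table_label, unsigned long index,
                          int entry_size)
{
  return table_label + index * entry_size;
}
#endif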
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
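
/* Illustrative sketch (not compiled): what "emulate with a pair of
   DIs" means at the source level.  The type and function are
   hypothetical stand-ins.  */
#if 0
typedef struct { long long elt[2]; } v2di_example;

static void
move_v2di_example (v2di_example *dst, const v2di_example *src)
{
  dst->elt[0] = src->elt[0];    /* first DImode move */
  dst->elt[1] = src->elt[1];    /* second DImode move */
}
#endif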
#include "gt-expr.h"